Example #1
def test_find_config():
    files = write_files({
        'base.conf':
        dedent("""\
            [datacube]
            db_hostname: fakehost.test.lan
        """),
        'override.conf':
        dedent("""\
            [datacube]
            db_hostname: overridden.test.lan
            db_database: overridden_db
        """)
    })

    # One config file
    config = LocalConfig.find(paths=[str(files.joinpath('base.conf'))])
    assert config['db_hostname'] == 'fakehost.test.lan'
    # Not set: uses default
    assert config['db_database'] == 'datacube'

    # Now two config files, with the latter overriding earlier options.
    config = LocalConfig.find(paths=[
        str(files.joinpath('base.conf')),
        str(files.joinpath('override.conf'))
    ])
    assert config['db_hostname'] == 'overridden.test.lan'
    assert config['db_database'] == 'overridden_db'
Example #2
def test_cfg_from_env(monkeypatch):
    def set_env(**kw):
        _clear_cfg_env(monkeypatch)
        for e, v in kw.items():
            monkeypatch.setenv(e, v)

    set_env(DATACUBE_DB_URL='postgresql://uu:%20pass%40@host.tld:3344/db')
    cfg = LocalConfig.find()
    assert '3344' in str(cfg)
    assert '3344' in repr(cfg)
    assert cfg['db_username'] == 'uu'
    assert cfg['db_password'] == ' pass@'
    assert cfg['db_hostname'] == 'host.tld'
    assert cfg['db_database'] == 'db'
    assert cfg['db_port'] == '3344'

    set_env(DB_DATABASE='dc2',
            DB_HOSTNAME='remote.db',
            DB_PORT='4433',
            DB_USERNAME='dcu',
            DB_PASSWORD='gg')
    cfg = LocalConfig.find()
    assert cfg['db_username'] == 'dcu'
    assert cfg['db_password'] == 'gg'
    assert cfg['db_hostname'] == 'remote.db'
    assert cfg['db_database'] == 'dc2'
    assert cfg['db_port'] == '4433'
Example #3
def index_connect(local_config: LocalConfig = None,
                  application_name: str = None,
                  validate_connection: bool = True) -> Index:
    """
    Create a Data Cube Index that can connect to a PostgreSQL server

    It contains all the required connection parameters, but doesn't actually
    check that the server is available.

    :param application_name: A short, alphanumeric name to identify this application.
    :param local_config: Config object to use. (optional)
    :param validate_connection: Validate database connection and schema immediately
    :raises datacube.index.Exceptions.IndexSetupError:
    """
    if local_config is None:
        local_config = LocalConfig.find()

    driver_name = local_config.get('index_driver', 'default')
    index_driver = index_driver_by_name(driver_name)
    if not index_driver:
        raise RuntimeError(
            "No index driver found for %r. %s available: %s" %
            (driver_name, len(index_drivers()), ', '.join(index_drivers())))

    return index_driver.connect_to_index(
        local_config,
        application_name=application_name,
        validate_connection=validate_connection)
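
A minimal usage sketch for index_connect as defined above; the application name is hypothetical, and a reachable datacube database is assumed to be configured:

from datacube.index import index_connect

# Discover configuration from the usual config files / environment variables
# and open an index without immediately validating the connection.
index = index_connect(application_name='example-app', validate_connection=False)
print(index.url)  # the resolved database connection URL
index.close()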
Example #4
def test_auto_config(monkeypatch, tmpdir):
    from pathlib import Path

    cfg_file = Path(str(tmpdir / "dc.cfg"))
    assert cfg_file.exists() is False
    cfg_file_name = str(cfg_file)

    _clear_cfg_env(monkeypatch)
    monkeypatch.setenv('DATACUBE_CONFIG_PATH', cfg_file_name)

    assert auto_config() == cfg_file_name
    assert cfg_file.exists() is True

    monkeypatch.setenv('DB_HOSTNAME', 'should-not-be-used.local')
    # second run should skip overwriting
    assert auto_config() == cfg_file_name

    config = LocalConfig.find(paths=cfg_file_name)
    assert config['db_hostname'] == ''
    assert config['db_database'] == 'datacube'

    cfg_file.unlink()
    assert cfg_file.exists() is False
    _clear_cfg_env(monkeypatch)

    monkeypatch.setenv('DATACUBE_CONFIG_PATH', cfg_file_name)
    monkeypatch.setenv('DB_HOSTNAME', 'some.db')
    monkeypatch.setenv('DB_USERNAME', 'user')

    assert auto_config() == cfg_file_name
    config = LocalConfig.find(paths=cfg_file_name)
    assert config['db_hostname'] == 'some.db'
    assert config['db_database'] == 'datacube'
    assert config['db_username'] == 'user'
Example #5
def index_connect(local_config=None,
                  application_name=None,
                  validate_connection=True):
    # type: (LocalConfig, str, bool) -> Index
    """
    Create a Data Cube Index that can connect to a PostgreSQL server

    It contains all the required connection parameters, but doesn't actually
    check that the server is available.

    :param application_name: A short, alphanumeric name to identify this application.
    :param local_config: Config object to use.
    :type local_config: :py:class:`datacube.config.LocalConfig`, optional
    :param validate_connection: Validate database connection and schema immediately
    :raises datacube.index.postgres._api.EnvironmentError:
    :rtype: datacube.index.index.Index
    """
    if local_config is None:
        local_config = LocalConfig.find()

    index_driver = index_driver_by_name(
        local_config.get('index_driver', 'default'))

    return index_driver.connect_to_index(
        local_config,
        application_name=application_name,
        validate_connection=validate_connection)
Example #6
def check(local_config: LocalConfig):
    """
    Verify & view current configuration
    """
    def echo_field(name, value):
        echo('{:<15}'.format(name + ':') + style(str(value), bold=True))

    echo_field('Version', datacube.__version__)
    echo_field('Config files', ','.join(local_config.files_loaded))
    echo_field(
        'Host', '{}:{}'.format(local_config['db_hostname'] or 'localhost',
                               local_config.get('db_port', None) or '5432'))

    echo_field('Database', local_config['db_database'])
    echo_field('User', local_config['db_username'])
    echo_field('Environment', local_config['env'])
    echo_field('Index Driver', local_config['index_driver'])

    echo()
    echo('Valid connection:\t', nl=False)
    try:
        index = index_connect(local_config=local_config)
        echo(style('YES', bold=True))
        for role, user, description in index.users.list_users():
            if user == local_config['db_username']:
                echo('You have %s privileges.' %
                     style(role.upper(), bold=True))
    except OperationalError as e:
        handle_exception('Error Connecting to Database: %s', e)
    except IndexSetupError as e:
        handle_exception('Database not initialised: %s', e)
Example #7
def db_connect(cfg=None):
    """ Create database connection from datacube config.

        cfg:
          None -- use default datacube config
          str  -- use config with a given name

          LocalConfig -- use loaded config object
    """
    from datacube.config import LocalConfig
    import psycopg2

    if isinstance(cfg, str) or cfg is None:
        cfg = LocalConfig.find(env=cfg)

    cfg_remap = dict(dbname='db_database',
                     user='db_username',
                     password='db_password',
                     host='db_hostname',
                     port='db_port')

    pg_cfg = {k: cfg.get(cfg_name, None)
              for k, cfg_name in cfg_remap.items()}

    return psycopg2.connect(**pg_cfg)
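
A short sketch of calling db_connect with each of the cfg forms described in its docstring (the environment name 'dev' is a placeholder):

# None: use the default datacube configuration
conn = db_connect()

# str: use the config section for a named environment
conn = db_connect('dev')

# LocalConfig: pass an already loaded config object
from datacube.config import LocalConfig
conn = db_connect(LocalConfig.find())

with conn.cursor() as cur:
    cur.execute('SELECT 1')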
Example #8
def test_multiple_environment_config(tmpdir):
    config_path = tmpdir.join('second.conf')

    config_path.write("""
[DEFAULT]
db_username: test_user
index_driver: default

[default]
db_hostname: db.opendatacube.test

[test_alt]
db_hostname: alt-db.opendatacube.test
    """)

    config_path = str(config_path)

    config = LocalConfig.find([config_path])
    assert config['db_hostname'] == 'db.opendatacube.test'
    alt_config = LocalConfig.find([config_path], env='test_alt')
    assert alt_config['db_hostname'] == 'alt-db.opendatacube.test'

    # Make sure the correct config is passed through the API
    # Parsed config:
    db_url = 'postgresql://{user}@db.opendatacube.test:5432/datacube'.format(
        user=config['db_username'])
    alt_db_url = 'postgresql://{user}@alt-db.opendatacube.test:5432/datacube'.format(
        user=config['db_username'])

    with Datacube(config=config, validate_connection=False) as dc:
        assert str(dc.index.url) == db_url

    # When none specified, default environment is loaded
    with Datacube(config=str(config_path), validate_connection=False) as dc:
        assert str(dc.index.url) == db_url
    # When specific environment is loaded
    with Datacube(config=config_path,
                  env='test_alt',
                  validate_connection=False) as dc:
        assert str(dc.index.url) == alt_db_url

    # An environment that isn't in any config files
    with pytest.raises(ValueError):
        with Datacube(config=config_path,
                      env='undefined-env',
                      validate_connection=False) as dc:
            pass
Example #9
def null_config():
    """Provides a :class:`LocalConfig` configured with suitable config file paths.

    .. seealso::

        The :func:`integration_config_paths` fixture sets up the config files.
    """
    return LocalConfig.find(CONFIG_FILE_PATHS, env="null_driver")
Example #10
def local_config(datacube_env_name):
    """Provides a :class:`LocalConfig` configured with suitable config file paths.

    .. seealso::

        The :func:`integration_config_paths` fixture sets up the config files.
    """
    return LocalConfig.find(CONFIG_FILE_PATHS, env=datacube_env_name)
Example #11
def create_views(dc):
    try:
        from datacube.config import LocalConfig
        odc_cfg = LocalConfig.find()
        dbname = odc_cfg.get("db_database")
    except ImportError:
        dbname = os.environ.get("DB_DATABASE")
    run_sql(dc, "extent_views/create", database=dbname)
Example #12
def test_search_returning(index: Index, local_config: LocalConfig,
                          pseudo_ls8_type: DatasetType,
                          pseudo_ls8_dataset: Dataset,
                          ls5_dataset_w_children) -> None:

    assert index.datasets.count() == 4, "Expected four test datasets"

    # Expect one product with our one dataset.
    results = list(
        index.datasets.search_returning(
            ('id', 'sat_path', 'sat_row'),
            platform='LANDSAT_8',
            instrument='OLI_TIRS',
        ))
    assert len(results) == 1
    id_, path_range, sat_range = results[0]
    assert id_ == pseudo_ls8_dataset.id
    # TODO: output nicer types?
    assert path_range == NumericRange(Decimal('116'), Decimal('116'), '[]')
    assert sat_range == NumericRange(Decimal('74'), Decimal('84'), '[]')

    results = list(
        index.datasets.search_returning(
            (
                'id',
                'metadata_doc',
            ),
            platform='LANDSAT_8',
            instrument='OLI_TIRS',
        ))
    assert len(results) == 1
    id_, document = results[0]
    assert id_ == pseudo_ls8_dataset.id
    assert document == pseudo_ls8_dataset.metadata_doc

    my_username = local_config.get('db_username', DEFAULT_DB_USER)

    # Mixture of document and native fields
    results = list(
        index.datasets.search_returning(
            ('id', 'creation_time', 'format', 'label'),
            platform='LANDSAT_8',
            indexed_by=my_username,
        ))
    assert len(results) == 1

    id_, creation_time, format_, label = results[0]

    assert id_ == pseudo_ls8_dataset.id
    assert format_ == 'PSEUDOMD'

    # It's always UTC in the document
    expected_time = creation_time.astimezone(tz.tzutc()).replace(tzinfo=None)
    assert expected_time.isoformat(
    ) == pseudo_ls8_dataset.metadata_doc['creation_dt']
    assert label == pseudo_ls8_dataset.metadata_doc['ga_label']
Example #13
def test_multiple_environment_config(tmpdir):
    config_path = tmpdir.join('second.conf')

    config_path.write("""
[user]
default_environment: test_default

[test_default]
db_hostname: db.opendatacube.test

[test_alt]
db_hostname: alt-db.opendatacube.test
    """)

    config_path = str(config_path)

    config = LocalConfig.find([config_path])
    assert config.db_hostname == 'db.opendatacube.test'
    alt_config = LocalConfig.find([config_path], env='test_alt')
    assert alt_config.db_hostname == 'alt-db.opendatacube.test'

    # Lazily connect: they shouldn't try to connect during this test as we're not using the API
    args = dict(validate_connection=False)

    # Make sure the correct config is passed through the API
    # Parsed config:
    db_url = 'postgresql://{user}@db.opendatacube.test:5432/datacube'.format(user=config.db_username)
    alt_db_url = 'postgresql://{user}@alt-db.opendatacube.test:5432/datacube'.format(user=config.db_username)

    with Datacube(config=config, **args) as dc:
        assert str(dc.index.url) == db_url

    # When none specified, default environment is loaded
    with Datacube(config=str(config_path), **args) as dc:
        assert str(dc.index.url) == db_url
    # When specific environment is loaded
    with Datacube(config=config_path, env='test_alt', **args) as dc:
        assert str(dc.index.url) == alt_db_url

    # An environment that isn't in any config files
    with pytest.raises(ValueError):
        with Datacube(config=config_path, env='undefined-env', **args) as dc:
            pass
Example #14
def get_config(username):
    config = configparser.ConfigParser()
    config['datacube'] = {
        'db_password': settings.DATABASES['default']['PASSWORD'],
        'db_connection_timeout': '60',
        'db_username': settings.DATABASES['default']['USER'],
        'db_database': username,
        'db_hostname': settings.MASTER_NODE
    }

    return LocalConfig(config)
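
The same construction works outside Django; a minimal sketch that builds a LocalConfig from an in-memory ConfigParser (all values below are placeholders):

import configparser
from datacube.config import LocalConfig

config = configparser.ConfigParser()
config['datacube'] = {
    'db_hostname': 'db.example.internal',  # placeholder host
    'db_database': 'datacube',
    'db_username': 'analyst',              # placeholder user
}
local_config = LocalConfig(config)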
Example #15
def get_config(username):
    config = configparser.ConfigParser()
    config['datacube'] = {
        'db_password': settings.DATABASES['default']['PASSWORD'],
        'db_connection_timeout': '60',
        'db_username': settings.DATABASES['default']['USER'],
        'db_database': username,
        'db_hostname': settings.MASTER_NODE
    }

    return LocalConfig(config)
Example #16
    def from_config(cls, config=LocalConfig.find(), application_name=None, validate_db=True):
        app_name = cls._expand_app_name(application_name)

        return PostgresDb.connect(
            config.db_hostname,
            config.db_database,
            config.db_username,
            config.db_password,
            config.db_port,
            application_name=app_name,
            validate=validate_db
        )
Example #17
def test_find_config():
    files = util.write_files({
        'base.conf': """[datacube]
db_hostname: fakehost.test.lan
        """,
        'override.conf': """[datacube]
db_hostname: overridden.test.lan
db_database: overridden_db
        """
    })

    # One config file
    config = LocalConfig.find(paths=[str(files.joinpath('base.conf'))])
    assert config.db_hostname == 'fakehost.test.lan'
    # Not set: uses default
    assert config.db_database == 'datacube'

    # Now two config files, with the latter overriding earlier options.
    config = LocalConfig.find(paths=[str(files.joinpath('base.conf')),
                                     str(files.joinpath('override.conf'))])
    assert config.db_hostname == 'overridden.test.lan'
    assert config.db_database == 'overridden_db'
Example #18
def connect(local_config=LocalConfig.find(), application_name=None, validate_connection=True):
    """
    Connect to the index. Default Postgres implementation.

    :param application_name: A short, alphanumeric name to identify this application.
    :param local_config: Config object to use.
    :type local_config: :py:class:`datacube.config.LocalConfig`, optional
    :rtype: Index
    :raises: datacube.index.postgres._api.EnvironmentError
    """
    return Index(
        PostgresDb.from_config(local_config, application_name=application_name, validate_db=validate_connection),
        local_config
    )
Example #19
def test_get_locations():
    files = util.write_files({
        'base.conf': """[locations]
ls7_ortho: file:///tmp/test/ls7_ortho
t_archive: file:///tmp/test/t_archive
        """,
        'override.conf': """[locations]
t_archive: file:///tmp/override
        """
    })

    config = LocalConfig.find(paths=[str(files.joinpath('base.conf'))])
    assert config.location_mappings == {
        'ls7_ortho': 'file:///tmp/test/ls7_ortho',
        't_archive': 'file:///tmp/test/t_archive'
    }

    config = LocalConfig.find(paths=[str(files.joinpath('base.conf')),
                                     str(files.joinpath('override.conf'))])
    assert config.location_mappings == {
        'ls7_ortho': 'file:///tmp/test/ls7_ortho',
        't_archive': 'file:///tmp/override'
    }
Example #20
def config_from_settings():
    """Create or load a Datacube configuration from the django settings"""
    if hasattr(settings, 'DATACUBE_CONF_PATH'):
        return settings.DATACUBE_CONF_PATH
    config = configparser.ConfigParser()
    config['datacube'] = {
        'db_password': settings.DATABASES['default']['PASSWORD'],
        'db_connection_timeout': '60',
        'db_username': settings.DATABASES['default']['USER'],
        'db_database': settings.DATABASES['default']['NAME'],
        'db_hostname': settings.DATABASES['default']['HOST']
    }

    return LocalConfig(config)
Example #21
    def from_config(cls, config=None, application_name=None, validate_connection=True):
        config = LocalConfig.find() if config is None else config

        app_name = cls._expand_app_name(application_name)

        return PostgresDb.create(
            config.db_hostname,
            config.db_database,
            config.db_username,
            config.db_password,
            config.db_port,
            application_name=app_name,
            validate=validate_connection,
            pool_timeout=config.db_connection_timeout
        )
Example #22
def db_connect(cfg=None):
    from datacube.config import LocalConfig
    import psycopg2

    if isinstance(cfg, str) or cfg is None:
        cfg = LocalConfig.find(env=cfg)

    cfg_remap = dict(dbname='db_database',
                     user='db_username',
                     password='db_password',
                     host='db_hostname',
                     port='db_port')

    pg_cfg = {k: cfg.get(cfg_name, None) for k, cfg_name in cfg_remap.items()}

    return psycopg2.connect(**pg_cfg)
Example #23
def connect(local_config=LocalConfig.find(),
            application_name=None,
            validate_connection=True):
    """
    Connect to the index. Default Postgres implementation.

    :param application_name: A short, alphanumeric name to identify this application.
    :param local_config: Config object to use.
    :type local_config: :py:class:`datacube.config.LocalConfig`, optional
    :rtype: Index
    :raises: datacube.index.postgres._api.EnvironmentError
    """
    return Index(
        PostgresDb.from_config(local_config,
                               application_name=application_name,
                               validate_db=validate_connection), local_config)
Example #24
def connect(local_config=None, application_name=None, validate_connection=True):
    """
    Connect to the index. Default Postgres implementation.

    :param application_name: A short, alphanumeric name to identify this application.
    :param local_config: Config object to use.
    :type local_config: :py:class:`datacube.config.LocalConfig`, optional
    :param validate_connection: Validate database connection and schema immediately
    :rtype: Index
    :raises datacube.index.postgres._api.EnvironmentError:
    """
    if local_config is None:
        local_config = LocalConfig.find()

    return Index(
        PostgresDb.from_config(local_config, application_name=application_name, validate_connection=validate_connection)
    )
Example #25
def run_ingestion(ingestion_definition):
    """Kick off the standard system database ingestion process using a user defined configuration

    Args:
        ingestion_definition: dict representing a Data Cube ingestion def produced using the utils func.

    Returns:
        The primary key of the new dataset type.
    """
    conf_path = os.environ.get('DATACUBE_CONFIG_PATH')
    index = index_connect(local_config=LocalConfig.find([conf_path]))

    source_type, output_type = ingest.make_output_type(index, ingestion_definition)
    ingestion_work.delay(output_type, source_type, ingestion_definition)

    index.close()
    return output_type.id
Example #26
def run_ingestion(ingestion_definition):
    """Kick off the standard system database ingestion process using a user defined configuration

    Args:
        ingestion_definition: dict representing a Data Cube ingestion def produced using the utils func.

    Returns:
        The primary key of the new dataset type.
    """
    conf_path = '/home/' + settings.LOCAL_USER + '/Datacube/NE-GeoCloud/config/.datacube.conf'
    index = index_connect(local_config=LocalConfig.find([conf_path]))

    source_type, output_type = ingest.make_output_type(index, ingestion_definition)
    ingestion_work.delay(output_type, source_type, ingestion_definition)

    index.close()
    return output_type.id
Example #27
        def inner(*args, **kwargs):
            obj = click.get_current_context().obj

            paths = obj.get("config_files", None)
            # If the user is overriding the defaults
            specific_environment = obj.get("config_environment")
            parsed_config = None

            try:
                parsed_config = LocalConfig.find(paths=paths,
                                                 env=specific_environment)
            except ValueError as e:
                if specific_environment:
                    raise click.ClickException(
                        f"No datacube config found for '{specific_environment}'"
                    ) from e
                elif required:
                    raise click.ClickException(
                        "No datacube config found") from e

            return fn(parsed_config, *args, **kwargs)
Example #28
def ingestion_work(output_type, source_type, ingestion_definition):
    """Run the ingestion process for a user defined configuration

    Args:
        output_type, source_type: types produced by ingest.make_output_type
        ingestion_definition: dict representing a Data Cube ingestion def produced using the utils func.
    """
    conf_path = os.environ.get('DATACUBE_CONFIG_PATH')
    index = index_connect(local_config=LocalConfig.find([conf_path]))

    tasks = ingest.create_task_list(index, output_type, None, source_type, ingestion_definition)

    # this is a dry run
    # paths = [ingest.get_filename(ingestion_definition, task['tile_index'], task['tile'].sources) for task in tasks]
    # ingest.check_existing_files(paths)

    # this actually ingests stuff
    successful, failed = ingest.process_tasks(index, ingestion_definition, source_type, output_type, tasks, 3200,
                                              get_executor(None, None))

    index.close()
    return 0
Example #29
    def __init__(self, driver_manager, index=None, *args, **kargs):
        """Initialise the generic index.

        :param index: An index object behaving like
          :class:`datacube.index._api.Index` and used for testing
          purposes only. In the current implementation, only the
          `index._db` variable is used, and is passed to the index
          initialisation method, that should basically replace the
          existing DB connection with that variable.
        :param args: Optional positional arguments to be passed to the
          index on initialisation. Caution: In the current
          implementation all parameters get passed to all potential
          indexes.
        :param kargs: Optional keyword arguments to be passed to the
          index on initialisation. Caution: In the current
          implementation all parameters get passed to all potential
          indexes.

        """
        self.logger = logging.getLogger(self.__class__.__name__)
        if index is None:
            local_config = kargs[
                'local_config'] if 'local_config' in kargs else None
            application_name = kargs[
                'application_name'] if 'application_name' in kargs else None
            validate_connection = kargs[
                'validate_connection'] if 'validate_connection' in kargs else True
            if local_config is None:
                local_config = LocalConfig.find()
            db = PostgresDb.from_config(
                local_config,
                application_name=application_name,
                validate_connection=validate_connection)
        else:
            db = index._db  # pylint: disable=protected-access
        super(Index, self).__init__(driver_manager, db)
Example #30
def read_remote_config(self):
    remote_config = ConfigParser()
    remote_config.read_string(_DEFAULT_CONF)
    with self.sftp.open('.datacube.conf') as fin:
        remote_config.read_file(fin)
    self.remote_dc_config = LocalConfig(remote_config)
Example #31
def test_find_defaults():
    config = LocalConfig.find(paths=[])
    assert config.db_hostname == ''
    assert config.db_database == 'datacube'
Example #32
def local_config(integration_config_paths):
    return LocalConfig.find(integration_config_paths)
Example #33
def normalise_config(config):
    if config is None:
        return LocalConfig.find(env=env)
    if isinstance(config, string_types):
        return LocalConfig.find([config], env=env)
    return config