Example #1
def resolve_directories(version: str, cluster_path: str, data_root: str = None) -> Directories:
    """
    Determines directories to find the opensearch binary as well as where to store the instance data.

    :param version: the full OpenSearch/Elasticsearch version (to resolve the install dir)
    :param cluster_path: the path between data_root and the actual data directories
    :param data_root: the root of the data dir (will be resolved to TMP_PATH or DATA_DIR by default)
    :returns: a Directories data structure
    """
    # where to find cluster binary and the modules
    engine_type, install_version = versions.get_install_type_and_version(version)
    if engine_type == EngineType.OpenSearch:
        install_dir = install.get_opensearch_install_dir(install_version)
    else:
        # Elasticsearch version
        install_dir = install.get_elasticsearch_install_dir(install_version)

    modules_dir = os.path.join(install_dir, "modules")

    if data_root is None:
        if config.dirs.data:
            data_root = config.dirs.data
        else:
            data_root = config.dirs.tmp

    if engine_type == EngineType.OpenSearch:
        data_path = os.path.join(data_root, "opensearch", cluster_path)
    else:
        data_path = os.path.join(data_root, "elasticsearch", cluster_path)

    tmp_dir = os.path.join(data_path, "tmp")
    data_dir = os.path.join(data_path, "data")
    backup_dir = os.path.join(data_path, "backup")

    return Directories(install_dir, tmp_dir, modules_dir, data_dir, backup_dir)
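
A minimal usage sketch, assuming the `versions`, `install`, and `config` modules and the `Directories` structure referenced above are importable from the surrounding package; the version string and cluster path below are illustrative assumptions, not values taken from the example:

# hedged usage sketch: version string format and cluster path are illustrative
dirs = resolve_directories(version="OpenSearch_2.11", cluster_path="my-domain")
print(dirs)  # Directories(...) holding install, tmp, modules, data and backup paths

# the data root can also be pinned explicitly instead of falling back to config.dirs
dirs = resolve_directories("Elasticsearch_7.10", "my-domain", data_root="/tmp/es")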
Example #2
def resolve_directories(version: str,
                        cluster_path: str,
                        data_root: str = None) -> Directories:
    """
    Determines directories to find the elasticsearch binary as well as where to store the instance data.

    :param version: the elasticsearch version (to resolve the install dir)
    :param cluster_path: the path between data_root and the actual data directories
    :param data_root: the root of the data dir (will be resolved to TMP_PATH or DATA_DIR by default)
    :returns: a Directories data structure
    """
    # where to find elasticsearch binary and the modules
    install_dir = install.get_elasticsearch_install_dir(version)
    modules_dir = os.path.join(install_dir, "modules")

    if data_root is None:
        if config.DATA_DIR:
            data_root = config.DATA_DIR
        else:
            data_root = config.TMP_FOLDER

    data_path = os.path.join(data_root, "elasticsearch", cluster_path)

    tmp_dir = os.path.join(data_path, "tmp")
    data_dir = os.path.join(data_path, "data")
    backup_dir = os.path.join(data_path, "backup")

    return Directories(install_dir, tmp_dir, modules_dir, data_dir, backup_dir)
Example #3
def start_elasticsearch(port=None,
                        version=None,
                        delete_data=True,
                        asynchronous=False,
                        update_listener=None):
    if STATE.get('_thread_'):
        return STATE['_thread_']

    port = port or config.PORT_ELASTICSEARCH
    # delete Elasticsearch data that may be cached locally from a previous test run
    delete_all_elasticsearch_data(version)

    install.install_elasticsearch(version)
    backend_port = get_free_tcp_port()
    base_dir = install.get_elasticsearch_install_dir(version)
    es_data_dir = os.path.join(base_dir, 'data')
    es_tmp_dir = os.path.join(base_dir, 'tmp')
    es_mods_dir = os.path.join(base_dir, 'modules')
    if config.DATA_DIR:
        delete_data = False
        es_data_dir = '%s/elasticsearch' % config.DATA_DIR
    # Elasticsearch 5.x cannot be bound to 0.0.0.0 in some Docker environments,
    # hence we use the default bind address 127.0.0.1 and put a proxy in front of it
    backup_dir = os.path.join(config.TMP_FOLDER, 'es_backup')
    cmd = (
        ('%s/bin/elasticsearch ' +
         '-E http.port=%s -E http.publish_port=%s -E http.compression=false ' +
         '-E path.data=%s -E path.repo=%s') %
        (base_dir, backend_port, backend_port, es_data_dir, backup_dir))
    if os.path.exists(os.path.join(es_mods_dir, 'x-pack-ml')):
        cmd += ' -E xpack.ml.enabled=false'
    env_vars = {
        'ES_JAVA_OPTS': os.environ.get('ES_JAVA_OPTS', '-Xms200m -Xmx600m'),
        'ES_TMPDIR': es_tmp_dir
    }
    LOG.debug('Starting local Elasticsearch (%s port %s)' %
              (get_service_protocol(), port))
    if delete_data:
        rm_rf(es_data_dir)
    # fix permissions
    chmod_r(base_dir, 0o777)
    mkdir(es_data_dir)
    chmod_r(es_data_dir, 0o777)
    mkdir(es_tmp_dir)
    chmod_r(es_tmp_dir, 0o777)
    # start proxy and ES process
    proxy = start_proxy_for_service('elasticsearch',
                                    port,
                                    backend_port,
                                    update_listener,
                                    quiet=True,
                                    params={'protocol_version': 'HTTP/1.0'})
    STATE['_proxy_'] = proxy
    if is_root():
        cmd = "su localstack -c '%s'" % cmd
    thread = do_run(cmd, asynchronous, env_vars=env_vars)
    STATE['_thread_'] = thread
    return thread
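
A hedged sketch of how this starter might be invoked; the port and version values are illustrative assumptions, and the function depends on the surrounding module's `STATE`, `config`, and proxy helpers shown above:

# hedged usage sketch: port and version are illustrative assumptions
thread = start_elasticsearch(port=4571, version='7.10.0', asynchronous=True)
# the returned handle is cached in STATE['_thread_'], so a repeated call
# returns the same handle instead of spawning a second Elasticsearch process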
Example #4
    def _resolve_directories(self) -> Directories:
        # determine various directory paths
        base_dir = install.get_elasticsearch_install_dir(self.version)

        es_tmp_dir = os.path.join(base_dir, "tmp")
        es_mods_dir = os.path.join(base_dir, "modules")
        if config.DATA_DIR:
            es_data_dir = os.path.join(config.DATA_DIR, "elasticsearch")
        else:
            es_data_dir = os.path.join(base_dir, "data")
        backup_dir = os.path.join(config.TMP_FOLDER, "es_backup")

        return Directories(base_dir, es_tmp_dir, es_mods_dir, es_data_dir, backup_dir)
Example #5
def delete_all_elasticsearch_data(version):
    """ This function drops ALL data in the local Elasticsearch data folder. Use with caution! """
    base_dir = install.get_elasticsearch_install_dir(version)
    data_dir = os.path.join(base_dir, 'data', 'elasticsearch', 'nodes')
    rm_rf(data_dir)
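
A short hedged sketch of the intended call pattern, mirroring how Example #3 wipes cached data before starting a fresh cluster; the version value is an illustrative assumption:

# hedged sketch: drop any cached node data, then start a clean local cluster
delete_all_elasticsearch_data(version='7.10.0')
start_elasticsearch(version='7.10.0', asynchronous=True)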