Example #1
    @classmethod
    def create_env_vars_file_flag(
            cls, env_vars: Dict) -> Tuple[List[str], Optional[str]]:
        if not env_vars:
            return [], None
        result = []
        env_vars = dict(env_vars)
        env_file = None
        if len(str(env_vars)) > cls.MAX_ENV_ARGS_LENGTH:
            # default ARG_MAX=131072 in Docker - let's create an env var file if the string becomes too long...
            env_file = cls.mountable_tmp_file()
            env_content = ""
            # iterate over a snapshot, since entries are popped from env_vars below
            for name, value in dict(env_vars).items():
                if len(value) > cls.MAX_ENV_ARGS_LENGTH:
                    # each line in the env file has a max size as well (error "bufio.Scanner: token too long")
                    continue
                env_vars.pop(name)
                value = value.replace("\n", "\\")
                env_content += f"{cls.format_env_vars(name, value)}\n"
            save_file(env_file, env_content)
            result += ["--env-file", env_file]

        env_vars_res = [
            item for k, v in env_vars.items()
            for item in ["-e", cls.format_env_vars(k, v)]
        ]
        result += env_vars_res
        return result, env_file
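
A brief usage sketch; the class name DockerCmdUtil and the cleanup step are assumptions for illustration:

# hypothetical usage: build "docker run" arguments from a dict of env vars
env_flags, env_file = DockerCmdUtil.create_env_vars_file_flag({"STAGE": "dev", "DEBUG": "1"})
cmd = ["docker", "run", *env_flags, "alpine", "env"]
if env_file:
    rm_rf(env_file)  # remove the temporary --env-file once the container has started
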
Example #2
def download_and_extract(archive_url,
                         target_dir,
                         retries=0,
                         sleep=3,
                         tmp_archive=None):
    mkdir(target_dir)

    _, ext = os.path.splitext(tmp_archive or archive_url)

    tmp_archive = tmp_archive or new_tmp_file()
    if not os.path.exists(tmp_archive) or os.path.getsize(tmp_archive) <= 0:
        # create temporary placeholder file, to avoid duplicate parallel downloads
        save_file(tmp_archive, "")
        for i in range(retries + 1):
            try:
                download(archive_url, tmp_archive)
                break
            except Exception:
                if i >= retries:
                    # all attempts failed - re-raise instead of silently extracting an empty placeholder
                    raise
                time.sleep(sleep)

    if ext == ".zip":
        unzip(tmp_archive, target_dir)
    elif ext in [".bz2", ".gz", ".tgz"]:
        untar(tmp_archive, target_dir)
    else:
        raise Exception(f"Unsupported archive format: {ext}")
Example #3
def create_zip_file(file_path,
                    zip_file=None,
                    get_content=False,
                    content_root=None,
                    mode="w"):
    """
    Creates a zipfile to the designated file_path.

    By default, a new zip file is created but the mode parameter can be used to append to an existing zip file
    """
    base_dir = file_path
    if not os.path.isdir(file_path):
        base_dir = tempfile.mkdtemp(prefix=ARCHIVE_DIR_PREFIX)
        shutil.copy(file_path, base_dir)
        TMP_FILES.append(base_dir)
    tmp_dir = tempfile.mkdtemp(prefix=ARCHIVE_DIR_PREFIX)
    full_zip_file = zip_file
    if not full_zip_file:
        zip_file_name = "archive.zip"
        full_zip_file = os.path.join(tmp_dir, zip_file_name)
    # special case where target folder is empty -> create empty zip file
    if is_empty_dir(base_dir):
        # see https://stackoverflow.com/questions/25195495/how-to-create-an-empty-zip-file#25195628
        content = (
            b"PK\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
        )
        if get_content:
            return content
        save_file(full_zip_file, content)
        return full_zip_file

    # create zip file
    if is_debian():
        # todo: extend CLI with the new parameters
        create_zip_file_cli(source_path=file_path,
                            base_dir=base_dir,
                            zip_file=full_zip_file)
    else:
        create_zip_file_python(
            source_path=file_path,
            base_dir=base_dir,
            zip_file=full_zip_file,
            content_root=content_root,
            mode=mode,
        )
    if not get_content:
        TMP_FILES.append(tmp_dir)
        return full_zip_file
    with open(full_zip_file, "rb") as file_obj:
        zip_file_content = file_obj.read()
    rm_dir(tmp_dir)
    return zip_file_content
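
A quick usage sketch (paths are placeholders):

# zip a single file and get the archive bytes back directly
zip_bytes = create_zip_file("/tmp/handler.py", get_content=True)
# or zip a directory into a caller-provided target file
create_zip_file("/tmp/my-folder", zip_file="/tmp/my-folder.zip")
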
Example #4
def install_opensearch(version=None):
    # locally import to avoid having a dependency on ASF when starting the CLI
    from localstack.aws.api.opensearch import EngineType
    from localstack.services.opensearch import versions

    if not version:
        version = OPENSEARCH_DEFAULT_VERSION

    version = get_opensearch_install_version(version)
    install_dir = get_opensearch_install_dir(version)
    installed_executable = os.path.join(install_dir, "bin", "opensearch")
    if not os.path.exists(installed_executable):
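        # double-checked locking: re-test the condition after acquiring the per-version lock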
        with OS_INSTALL_LOCKS.setdefault(version, threading.Lock()):
            if not os.path.exists(installed_executable):
                log_install_msg(f"OpenSearch ({version})")
                opensearch_url = versions.get_download_url(
                    version, EngineType.OpenSearch)
                install_dir_parent = os.path.dirname(install_dir)
                mkdir(install_dir_parent)
                # download and extract archive
                tmp_archive = os.path.join(
                    config.dirs.tmp,
                    f"localstack.{os.path.basename(opensearch_url)}")
                download_and_extract_with_retry(opensearch_url, tmp_archive,
                                                install_dir_parent)
                opensearch_dir = glob.glob(
                    os.path.join(install_dir_parent, "opensearch*"))
                if not opensearch_dir:
                    raise Exception(
                        f"Unable to find OpenSearch folder in {install_dir_parent}"
                    )
                shutil.move(opensearch_dir[0], install_dir)

                for dir_name in ("data", "logs", "modules", "plugins",
                                 "config/scripts"):
                    dir_path = os.path.join(install_dir, dir_name)
                    mkdir(dir_path)
                    chmod_r(dir_path, 0o777)

    # patch JVM options file - replace hardcoded heap size settings
    jvm_options_file = os.path.join(install_dir, "config", "jvm.options")
    if os.path.exists(jvm_options_file):
        jvm_options = load_file(jvm_options_file)
        jvm_options_replaced = re.sub(r"(^-Xm[sx][a-zA-Z0-9.]+$)",
                                      r"# \1",
                                      jvm_options,
                                      flags=re.MULTILINE)
        if jvm_options != jvm_options_replaced:
            save_file(jvm_options_file, jvm_options_replaced)
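
For illustration, the re.sub call comments out hardcoded -Xms/-Xmx heap flags while leaving other JVM options untouched:

import re

opts = "-Xms1g\n-Xmx1g\n-XX:+UseG1GC"
patched = re.sub(r"(^-Xm[sx][a-zA-Z0-9.]+$)", r"# \1", opts, flags=re.MULTILINE)
print(patched)  # -> "# -Xms1g\n# -Xmx1g\n-XX:+UseG1GC"
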
Example #5
def _save_cert_keys(client_cert_key: Tuple[str, str]) -> Tuple[str, str]:
    """
    Save the given cert / key into files and returns their filename

    :param client_cert_key: tuple with (client_cert, client_key)
    :return: tuple of paths to files containing (client_cert, client_key)
    """
    cert_file = client_cert_key[0]
    if not os.path.exists(cert_file):
        cert_file = new_tmp_file()
        save_file(cert_file, client_cert_key[0])
    key_file = client_cert_key[1]
    if not os.path.exists(key_file):
        key_file = new_tmp_file()
        save_file(key_file, client_cert_key[1])
    return cert_file, key_file
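
Usage sketch: each tuple element may be either a path to an existing file (returned as-is) or the PEM content itself (written to a fresh temp file). The values are placeholders:

cert_path, key_path = _save_cert_keys((
    "-----BEGIN CERTIFICATE-----\n...",  # content -> saved to a new temp file
    "/etc/ssl/client.key",               # existing path -> returned unchanged
))
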
Example #6
def _do_start_ssl_proxy_with_client_auth(port: int, target: PortOrUrl,
                                         client_cert_key: Tuple[str, str]):
    # prepare cert files, reusing the _save_cert_keys helper from Example 5
    # (TODO: check whether/how we can pass cert strings to requests.request(..) directly)
    cert_params = _save_cert_keys(client_cert_key)

    # start proxy
    requests_kwargs = {"cert": cert_params}
    result = _do_start_ssl_proxy_with_listener(port,
                                               target,
                                               requests_kwargs=requests_kwargs)
    return result
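
For context, passing a (cert_file, key_file) tuple via the cert kwarg is standard requests behavior for TLS client authentication:

import requests

# requests presents the client certificate during the TLS handshake
resp = requests.get("https://example.com", cert=("/path/client.crt", "/path/client.key"))
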
Example #7
def install_dynamodb_local():
    if not os.path.exists(INSTALL_PATH_DDB_JAR):
        log_install_msg("DynamoDB")
        # download and extract archive
        tmp_archive = os.path.join(tempfile.gettempdir(), "localstack.ddb.zip")
        download_and_extract_with_retry(DYNAMODB_JAR_URL, tmp_archive,
                                        INSTALL_DIR_DDB)

    # fix logging configuration for DynamoDBLocal
    log4j2_config = """<Configuration status="WARN">
      <Appenders>
        <Console name="Console" target="SYSTEM_OUT">
          <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
        </Console>
      </Appenders>
      <Loggers>
        <Root level="WARN"><AppenderRef ref="Console"/></Root>
      </Loggers>
    </Configuration>"""
    log4j2_file = os.path.join(INSTALL_DIR_DDB, "log4j2.xml")
    save_file(log4j2_file, log4j2_config)
    run_safe(lambda: run(["zip", "-u", "DynamoDBLocal.jar", "log4j2.xml"],
                         cwd=INSTALL_DIR_DDB))

    # download agent JAR
    if not os.path.exists(DDB_AGENT_JAR_PATH):
        download(DDB_AGENT_JAR_URL, DDB_AGENT_JAR_PATH)
    if not os.path.exists(JAVASSIST_JAR_PATH):
        download(JAVASSIST_JAR_URL, JAVASSIST_JAR_PATH)
    # ensure that javassist.jar is in the manifest classpath
    run(["unzip", "-o", "DynamoDBLocal.jar", "META-INF/MANIFEST.MF"],
        cwd=INSTALL_DIR_DDB)
    manifest_file = os.path.join(INSTALL_DIR_DDB, "META-INF", "MANIFEST.MF")
    manifest = load_file(manifest_file)
    if "javassist.jar" not in manifest:
        manifest = manifest.replace("Class-Path:", "Class-Path: javassist.jar",
                                    1)
        save_file(manifest_file, manifest)
        run(["zip", "-u", "DynamoDBLocal.jar", "META-INF/MANIFEST.MF"],
            cwd=INSTALL_DIR_DDB)
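
A small verification sketch (the JAR path is a placeholder); note that "zip -u" replaces the named entries inside the existing archive in place:

import zipfile

with zipfile.ZipFile("/path/to/DynamoDBLocal.jar") as jar:  # hypothetical path
    assert "log4j2.xml" in jar.namelist()
    manifest = jar.read("META-INF/MANIFEST.MF").decode()
    assert "javassist.jar" in manifest
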
Example #8
def create_config_file(
    config_file,
    executableName,
    streamName,
    applicationName,
    credentialsProvider=None,
    region_name=None,
    **kwargs,
):
    if not credentialsProvider:
        credentialsProvider = "DefaultAWSCredentialsProviderChain"
    region_name = region_name or aws_stack.get_region()
    content = """
        executableName = %s
        streamName = %s
        applicationName = %s
        AWSCredentialsProvider = %s
        processingLanguage = python/2.7
        parentShardPollIntervalMillis = 2000
        regionName = %s
    """ % (
        executableName,
        streamName,
        applicationName,
        credentialsProvider,
        region_name,
    )
    # optional properties
    for key, value in kwargs.items():
        content += """
            %s = %s""" % (
            key,
            value,
        )
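    # strip the 4-space indentation used in the templates above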
    content = content.replace("    ", "")
    save_file(config_file, content)
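
A hypothetical invocation; extra keyword arguments become additional properties in the generated file:

create_config_file(
    "/tmp/kclipy.properties",       # hypothetical target path
    executableName="processor.py",
    streamName="my-stream",
    applicationName="my-app",
    maxRecords=500,                 # rendered as "maxRecords = 500"
)
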
Example #9
def create_lambda_archive(
    script: str,
    get_content: bool = False,
    libs: List[str] = None,
    runtime: str = None,
    file_name: str = None,
    exclude_func: Callable[[str], bool] = None,
):
    """Utility method to create a Lambda function archive"""
    if libs is None:
        libs = []
    runtime = runtime or LAMBDA_DEFAULT_RUNTIME

    with tempfile.TemporaryDirectory(prefix=ARCHIVE_DIR_PREFIX) as tmp_dir:
        file_name = file_name or get_handler_file_from_name(
            LAMBDA_DEFAULT_HANDLER, runtime=runtime)
        script_file = os.path.join(tmp_dir, file_name)
        # note: test file_name, not script_file - the joined path always contains a separator
        if os.path.sep in file_name:
            mkdir(os.path.dirname(script_file))
            # create __init__.py files along the path to allow Python imports
            path = file_name.split(os.path.sep)
            for i in range(1, len(path)):
                save_file(os.path.join(tmp_dir, *(path[:i] + ["__init__.py"])),
                          "")
        save_file(script_file, script)
        chmod_r(script_file, 0o777)
        # copy libs
        for lib in libs:
            paths = [lib, "%s.py" % lib]
            try:
                module = importlib.import_module(lib)
                paths.append(module.__file__)
            except Exception:
                pass
            target_dir = tmp_dir
            root_folder = os.path.join(LOCALSTACK_VENV_FOLDER,
                                       "lib/python*/site-packages")
            if lib == "localstack":
                paths = ["localstack/*.py", "localstack/utils"]
                root_folder = LOCALSTACK_ROOT_FOLDER
                target_dir = os.path.join(tmp_dir, lib)
                mkdir(target_dir)
            for path in paths:
                file_to_copy = path if path.startswith("/") else os.path.join(
                    root_folder, path)
                for file_path in glob.glob(file_to_copy):
                    name = os.path.join(target_dir,
                                        file_path.split(os.path.sep)[-1])
                    if os.path.isdir(file_path):
                        copy_dir(file_path, name)
                    else:
                        shutil.copyfile(file_path, name)

        if exclude_func:
            for dirpath, folders, files in os.walk(tmp_dir):
                for name in list(folders) + list(files):
                    full_name = os.path.join(dirpath, name)
                    relative = os.path.relpath(full_name, start=tmp_dir)
                    if exclude_func(relative):
                        rm_rf(full_name)

        # create zip file
        result = create_zip_file(tmp_dir, get_content=get_content)
        return result
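
Usage sketch: build an in-memory archive for a trivial handler:

handler_code = """
def handler(event, context):
    return {"status": "ok"}
"""
zip_bytes = create_lambda_archive(handler_code, get_content=True)
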
Example #10
def install_stepfunctions_local():
    if not os.path.exists(INSTALL_PATH_STEPFUNCTIONS_JAR):
        # pull the JAR file from the Docker image, which is more up-to-date than the downloadable JAR file
        if not DOCKER_CLIENT.has_docker():
            # TODO: works only when a docker socket is available -> add a fallback if running without Docker?
            LOG.warning(
                "Docker not available - skipping installation of StepFunctions dependency"
            )
            return
        log_install_msg("Step Functions")
        mkdir(INSTALL_DIR_STEPFUNCTIONS)
        DOCKER_CLIENT.pull_image(IMAGE_NAME_SFN_LOCAL)
        docker_name = "tmp-ls-sfn"
        DOCKER_CLIENT.run_container(
            IMAGE_NAME_SFN_LOCAL,
            remove=True,
            entrypoint="",
            name=docker_name,
            detach=True,
            command=["sleep", "15"],
        )
        time.sleep(5)  # give the container a moment to start before copying from it
        DOCKER_CLIENT.copy_from_container(
            docker_name,
            local_path=dirs.static_libs,
            container_path="/home/stepfunctionslocal/")

        path = Path(f"{dirs.static_libs}/stepfunctionslocal/")
        for file in path.glob("*.jar"):
            file.rename(Path(INSTALL_DIR_STEPFUNCTIONS) / file.name)
        rm_rf(str(path))

    classes = [
        SFN_PATCH_CLASS1,
        SFN_PATCH_CLASS2,
        SFN_PATCH_CLASS_REGION,
        SFN_PATCH_CLASS_STARTER,
        SFN_PATCH_CLASS_ASYNC2SERVICEAPI,
        SFN_PATCH_CLASS_DESCRIBEEXECUTIONPARSED,
        SFN_PATCH_FILE_METAINF,
    ]
    for patch_class in classes:
        patch_url = f"{SFN_PATCH_URL_PREFIX}/{patch_class}"
        add_file_to_jar(patch_class,
                        patch_url,
                        target_jar=INSTALL_PATH_STEPFUNCTIONS_JAR)

    # special case for Manifest file - extract first, replace content, then update in JAR file
    manifest_file = os.path.join(INSTALL_DIR_STEPFUNCTIONS, "META-INF",
                                 "MANIFEST.MF")
    if not os.path.exists(manifest_file):
        content = run([
            "unzip", "-p", INSTALL_PATH_STEPFUNCTIONS_JAR,
            "META-INF/MANIFEST.MF"
        ])
        content = re.sub("Main-Class: .+",
                         "Main-Class: cloud.localstack.StepFunctionsStarter",
                         content)
        classpath = " ".join([os.path.basename(jar) for jar in JAR_URLS])
        content = re.sub(r"Class-Path: \. ", f"Class-Path: {classpath} . ",
                         content)
        save_file(manifest_file, content)
        run(
            ["zip", INSTALL_PATH_STEPFUNCTIONS_JAR, "META-INF/MANIFEST.MF"],
            cwd=INSTALL_DIR_STEPFUNCTIONS,
        )

    # download additional jar libs
    for jar_url in JAR_URLS:
        target = os.path.join(INSTALL_DIR_STEPFUNCTIONS,
                              os.path.basename(jar_url))
        if not file_exists_not_empty(target):
            download(jar_url, target)

    # download aws-sdk lambda handler
    target = os.path.join(INSTALL_DIR_STEPFUNCTIONS,
                          "localstack-internal-awssdk", "awssdk.zip")
    if not file_exists_not_empty(target):
        download(SFN_AWS_SDK_LAMBDA_ZIP_FILE, target)
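
add_file_to_jar is not shown on this page; a plausible sketch (signature and behavior are assumptions), reusing the download/run/mkdir helpers seen above:

def add_file_to_jar(class_file, class_url, target_jar, base_dir=None):
    # hypothetical helper: fetch the patch file and add/replace it inside the JAR
    base_dir = base_dir or os.path.dirname(target_jar)
    patch_file = os.path.join(base_dir, class_file)
    if not os.path.exists(patch_file):
        mkdir(os.path.dirname(patch_file))
        download(class_url, patch_file)
    run(["zip", target_jar, class_file], cwd=base_dir)
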
Example #11
def install_elasticsearch(version=None):
    # locally import to avoid having a dependency on ASF when starting the CLI
    from localstack.aws.api.opensearch import EngineType
    from localstack.services.opensearch import versions

    if not version:
        version = ELASTICSEARCH_DEFAULT_VERSION

    version = get_elasticsearch_install_version(version)
    install_dir = get_elasticsearch_install_dir(version)
    installed_executable = os.path.join(install_dir, "bin", "elasticsearch")
    if not os.path.exists(installed_executable):
        log_install_msg(f"Elasticsearch ({version})")
        es_url = versions.get_download_url(version, EngineType.Elasticsearch)
        install_dir_parent = os.path.dirname(install_dir)
        mkdir(install_dir_parent)
        # download and extract archive
        tmp_archive = os.path.join(config.dirs.tmp,
                                   f"localstack.{os.path.basename(es_url)}")
        download_and_extract_with_retry(es_url, tmp_archive,
                                        install_dir_parent)
        elasticsearch_dir = glob.glob(
            os.path.join(install_dir_parent, "elasticsearch*"))
        if not elasticsearch_dir:
            raise Exception(
                f"Unable to find Elasticsearch folder in {install_dir_parent}")
        shutil.move(elasticsearch_dir[0], install_dir)

        for dir_name in ("data", "logs", "modules", "plugins",
                         "config/scripts"):
            dir_path = os.path.join(install_dir, dir_name)
            mkdir(dir_path)
            chmod_r(dir_path, 0o777)

        # install default plugins
        for plugin in ELASTICSEARCH_PLUGIN_LIST:
            plugin_binary = os.path.join(install_dir, "bin",
                                         "elasticsearch-plugin")
            plugin_dir = os.path.join(install_dir, "plugins", plugin)
            if not os.path.exists(plugin_dir):
                LOG.info("Installing Elasticsearch plugin %s", plugin)

                def try_install():
                    output = run([plugin_binary, "install", "-b", plugin])
                    LOG.debug("Plugin installation output: %s", output)

                # We're occasionally seeing javax.net.ssl.SSLHandshakeException -> add download retries
                download_attempts = 3
                try:
                    retry(try_install, retries=download_attempts - 1, sleep=2)
                except Exception:
                    LOG.warning(
                        "Unable to download Elasticsearch plugin '%s' after %s attempts",
                        plugin,
                        download_attempts,
                    )
                    if not os.environ.get("IGNORE_ES_DOWNLOAD_ERRORS"):
                        raise

    # delete some plugins to free up space
    for plugin in ELASTICSEARCH_DELETE_MODULES:
        module_dir = os.path.join(install_dir, "modules", plugin)
        rm_rf(module_dir)

    # disable x-pack-ml plugin (not working on Alpine)
    xpack_dir = os.path.join(install_dir, "modules", "x-pack-ml", "platform")
    rm_rf(xpack_dir)

    # patch JVM options file - replace hardcoded heap size settings
    jvm_options_file = os.path.join(install_dir, "config", "jvm.options")
    if os.path.exists(jvm_options_file):
        jvm_options = load_file(jvm_options_file)
        jvm_options_replaced = re.sub(r"(^-Xm[sx][a-zA-Z0-9.]+$)",
                                      r"# \1",
                                      jvm_options,
                                      flags=re.MULTILINE)
        if jvm_options != jvm_options_replaced:
            save_file(jvm_options_file, jvm_options_replaced)
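
The retry helper used above is not shown here; a minimal sketch, assuming retries counts additional attempts after the first:

import time

def retry(function, retries=3, sleep=1.0, **kwargs):
    raised = None
    for attempt in range(retries + 1):
        try:
            return function(**kwargs)
        except Exception as e:
            raised = e
            if attempt < retries:
                time.sleep(sleep)
    raise raised
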
Example #12
def generate_processor_script(events_file, log_file=None):
    script_file = os.path.join(tempfile.gettempdir(),
                               "kclipy.%s.processor.py" % short_uid())
    if log_file:
        log_file = "'%s'" % log_file
    else:
        log_file = "None"
    content = """#!/usr/bin/env python
import os, sys, glob, json, socket, time, logging, subprocess, tempfile
logging.basicConfig(level=logging.INFO)
for path in glob.glob('%s/lib/python*/site-packages'):
    sys.path.insert(0, path)
sys.path.insert(0, '%s')
from localstack.config import DEFAULT_ENCODING
from localstack.utils.kinesis import kinesis_connector
from localstack.utils.time import timestamp
events_file = '%s'
log_file = %s
error_log = os.path.join(tempfile.gettempdir(), 'kclipy.error.log')
if __name__ == '__main__':
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)

    num_tries = 3
    sleep_time = 2
    error = None
    for i in range(0, num_tries):
        try:
            sock.connect(events_file)
            error = None
            break
        except Exception as e:
            error = e
            if i < num_tries - 1:
                msg = '%%s: Unable to connect to UNIX socket. Retrying.' %% timestamp()
                subprocess.check_output('echo "%%s" >> %%s' %% (msg, error_log), shell=True)
                time.sleep(sleep_time)
    if error:
        print("WARN: Unable to connect to UNIX socket after retrying: %%s" %% error)
        raise error

    def receive_msg(records, checkpointer, shard_id):
        try:
            # records is a list of amazon_kclpy.messages.Record objects -> convert to JSON
            records_dicts = [j._json_dict for j in records]
            message_to_send = {'shard_id': shard_id, 'records': records_dicts}
            string_to_send = '%%s\\n' %% json.dumps(message_to_send)
            bytes_to_send = string_to_send.encode(DEFAULT_ENCODING)
            sock.send(bytes_to_send)
        except Exception as e:
            msg = "WARN: Unable to forward event: %%s" %% e
            print(msg)
            subprocess.check_output('echo "%%s" >> %%s' %% (msg, error_log), shell=True)
    kinesis_connector.KinesisProcessor.run_processor(log_file=log_file, processor_func=receive_msg)
    """ % (
        LOCALSTACK_VENV_FOLDER,
        LOCALSTACK_ROOT_FOLDER,
        events_file,
        log_file,
    )
    save_file(script_file, content)
    chmod_r(script_file, 0o755)
    TMP_FILES.append(script_file)
    return script_file
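
Usage sketch: generate the KCL processor script for a given UNIX socket path:

script = generate_processor_script("/tmp/kclipy.events.sock", log_file="/tmp/kclipy.log")
# the script is executable (0o755) and registered in TMP_FILES for cleanup
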
Example #13
    def log(self, s):
        s = "%s\n" % s
        if self.log_file:
            save_file(self.log_file, s, append=True)
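
Every example on this page ultimately calls save_file; a minimal self-contained sketch of such a helper (names and defaults are assumptions, not the library's exact implementation):

import os

def save_file(file, content, append=False):
    # pick text vs. binary mode based on the content type
    mode = ("a" if append else "w") + ("b" if isinstance(content, bytes) else "")
    os.makedirs(os.path.dirname(file) or ".", exist_ok=True)
    with open(file, mode) as f:
        f.write(content)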