Ejemplo n.º 1
0
 def run_app_sync(*args, loop=None, shutdown_event=None):
     """Run the (Quart) app synchronously on an asyncio event loop via Hypercorn's `serve`.

     NOTE(review): this function reads several free variables (`ssl_creds`,
     `bind_addresses`, `port`, `app`) — presumably it is a closure defined inside
     a surrounding server-start function; confirm against the enclosing scope.

     :param loop: event loop to run on; a fresh/ensured loop is used if not given
     :param shutdown_event: optional event whose `wait` is passed to Hypercorn as
            the shutdown trigger, allowing a graceful external shutdown
     :return: result of `serve(...)` once the server loop completes
     """
     # NOTE(review): `kwargs` is populated with certfile/keyfile below but never
     # passed anywhere — only `config` is used; likely leftover code.
     kwargs = {}
     config = Config()
     # unpack optional SSL credentials; both fall back to None when unset
     cert_file_name, key_file_name = ssl_creds or (None, None)
     if cert_file_name:
         kwargs["certfile"] = cert_file_name
         config.certfile = cert_file_name
     if key_file_name:
         kwargs["keyfile"] = key_file_name
         config.keyfile = key_file_name
     setup_quart_logging()
     # one bind entry per address, all on the same port; one worker per bind
     config.bind = [f"{bind_address}:{port}" for bind_address in bind_addresses]
     config.workers = len(bind_addresses)
     loop = loop or ensure_event_loop()
     run_kwargs = {}
     if shutdown_event:
         run_kwargs["shutdown_trigger"] = shutdown_event.wait
     try:
         try:
             return loop.run_until_complete(serve(app, config, **run_kwargs))
         except Exception as e:
             LOG.info(
                 "Error running server event loop on port %s: %s %s",
                 port,
                 e,
                 traceback.format_exc(),
             )
             # SSL setup failures are common (missing/empty cert files) — log
             # extra diagnostics about the cert/key files before re-raising
             if "SSL" in str(e):
                 c_exists = os.path.exists(cert_file_name)
                 k_exists = os.path.exists(key_file_name)
                 c_size = len(load_file(cert_file_name)) if c_exists else 0
                 k_size = len(load_file(key_file_name)) if k_exists else 0
                 LOG.warning(
                     "Unable to create SSL context. Cert files exist: %s %s (%sB), %s %s (%sB)",
                     cert_file_name,
                     c_exists,
                     c_size,
                     key_file_name,
                     k_exists,
                     k_size,
                 )
             raise
     finally:
         # always tear the loop down cleanly: cancel pending tasks, flush async
         # generators, then detach and close the loop (mirrors asyncio.run())
         try:
             _cancel_all_tasks(loop)
             loop.run_until_complete(loop.shutdown_asyncgens())
         finally:
             asyncio.set_event_loop(None)
             loop.close()
Ejemplo n.º 2
0
    def on_get(self, request):
        """Serve the CloudFormation deploy UI: load the HTML template, fill in
        placeholders (stack name, template body, regions), and return the page.

        If a `templateURL` query parameter is present, the referenced template is
        downloaded and embedded into the page; download errors are reported via
        the `errorMessage` placeholder instead of failing the request.
        """
        from localstack.utils.aws.aws_stack import get_valid_regions

        html_path = os.path.join(constants.MODULE_MAIN_PATH, "services",
                                 "cloudformation", "deploy.html")
        page = load_file(html_path)
        placeholders = {
            "stackName": "stack1",
            "templateBody": "{}",
            "errorMessage": "''",
            "regions": json.dumps(sorted(list(get_valid_regions()))),
        }

        template_url = request.values.get("templateURL")
        if template_url:
            try:
                LOG.debug(
                    "Attempting to download CloudFormation template URL: %s",
                    template_url)
                raw_body = requests.get(template_url).text
                parsed_body = parse_json_or_yaml(raw_body)
                placeholders["templateBody"] = json.dumps(parsed_body)
            except Exception as e:
                msg = f"Unable to download CloudFormation template URL: {e}"
                LOG.info(msg)
                placeholders["errorMessage"] = json.dumps(msg.replace("\n", " - "))

        # using simple string replacement here, for simplicity (could be replaced with, e.g., jinja)
        for placeholder, replacement in placeholders.items():
            page = page.replace(f"<{placeholder}>", replacement)

        return Response(page, mimetype="text/html")
Ejemplo n.º 3
0
    def test_cdk_bootstrap_redeploy(self, is_change_set_finished,
                                    cleanup_stacks, cleanup_changesets):
        """Test that simulates a sequence of commands executed by CDK when running 'cdk bootstrap' twice"""

        this_dir = os.path.dirname(os.path.realpath(__file__))
        requests_file = os.path.join(this_dir, "..", "files",
                                     "cdk-bootstrap-requests.json")
        recorded_requests = json.loads(load_file(requests_file))

        change_set_name = "cdk-deploy-change-set-a4b98b18"
        stack_name = "CDKToolkit-a4b98b18"
        try:
            request_headers = aws_stack.mock_aws_request_headers("cloudformation")
            edge_url = config.get_edge_url()
            # replay each recorded CDK request against the local edge endpoint
            for recorded in recorded_requests:
                payload = recorded["data"]
                requests.request(
                    method=recorded["method"],
                    url=f"{edge_url}{recorded['path']}",
                    headers=request_headers,
                    data=payload,
                )
                # block until change set execution completes before continuing
                if "Action=ExecuteChangeSet" in payload:
                    finished = wait_until(is_change_set_finished(change_set_name),
                                          _max_wait=20,
                                          strategy="linear")
                    assert finished
        finally:
            # clean up
            cleanup_changesets([change_set_name])
            cleanup_stacks([stack_name])
Ejemplo n.º 4
0
def get_service_action_names(service: str, version: str = None) -> Set[str]:
    """Return, for a given service name and version, the set of available service action names.

    Results are read from botocore's bundled `service-2.json` spec files and
    memoized in `SERVICE_ACTIONS_CACHE`.

    :param service: botocore service name, e.g. "s3"
    :param version: API version; defaults to `DEFAULT_SERVICE_VERSIONS[service]`
    :return: set of operation names defined by the service spec (empty if the
             spec file is missing or defines no operations)
    """
    version = version or DEFAULT_SERVICE_VERSIONS.get(service)
    key = f"{service}:{version}"
    result = SERVICE_ACTIONS_CACHE.get(key)
    # use `is None` (not truthiness): a legitimately empty operation set must be
    # cached too, otherwise the spec file is re-read on every call
    if result is None:
        file_path = os.path.join(os.path.dirname(botocore.__file__), "data",
                                 service, version, "service-2.json")
        # tolerate a missing/empty spec file by falling back to "{}"
        content = json.loads(to_str(load_file(file_path)) or "{}")
        result = set(content.get("operations", {}).keys())
        SERVICE_ACTIONS_CACHE[key] = result
    return result
Ejemplo n.º 5
0
def install_opensearch(version=None):
    """Download and install the given OpenSearch version into the local install dir.

    Idempotent: skips the download if the `bin/opensearch` executable already
    exists. Uses a per-version lock with a double-checked existence test so that
    concurrent callers do not download/extract the same version twice.

    :param version: OpenSearch version string; defaults to OPENSEARCH_DEFAULT_VERSION
    """
    # locally import to avoid having a dependency on ASF when starting the CLI
    from localstack.aws.api.opensearch import EngineType
    from localstack.services.opensearch import versions

    if not version:
        version = OPENSEARCH_DEFAULT_VERSION

    version = get_opensearch_install_version(version)
    install_dir = get_opensearch_install_dir(version)
    installed_executable = os.path.join(install_dir, "bin", "opensearch")
    if not os.path.exists(installed_executable):
        # per-version lock; re-check after acquiring in case another thread
        # finished the install while we waited (double-checked locking)
        with OS_INSTALL_LOCKS.setdefault(version, threading.Lock()):
            if not os.path.exists(installed_executable):
                log_install_msg(f"OpenSearch ({version})")
                opensearch_url = versions.get_download_url(
                    version, EngineType.OpenSearch)
                install_dir_parent = os.path.dirname(install_dir)
                mkdir(install_dir_parent)
                # download and extract archive
                tmp_archive = os.path.join(
                    config.dirs.tmp,
                    f"localstack.{os.path.basename(opensearch_url)}")
                download_and_extract_with_retry(opensearch_url, tmp_archive,
                                                install_dir_parent)
                # the archive extracts into a versioned "opensearch*" folder -
                # locate it and move it to the canonical install dir
                opensearch_dir = glob.glob(
                    os.path.join(install_dir_parent, "opensearch*"))
                if not opensearch_dir:
                    raise Exception(
                        f"Unable to find OpenSearch folder in {install_dir_parent}"
                    )
                shutil.move(opensearch_dir[0], install_dir)

                # pre-create runtime dirs with permissive mode so the server
                # process (possibly running as a different user) can write them
                for dir_name in ("data", "logs", "modules", "plugins",
                                 "config/scripts"):
                    dir_path = os.path.join(install_dir, dir_name)
                    mkdir(dir_path)
                    chmod_r(dir_path, 0o777)

    # patch JVM options file - replace hardcoded heap size settings
    jvm_options_file = os.path.join(install_dir, "config", "jvm.options")
    if os.path.exists(jvm_options_file):
        jvm_options = load_file(jvm_options_file)
        # comment out any -Xms/-Xmx lines so heap sizing can be controlled externally
        jvm_options_replaced = re.sub(r"(^-Xm[sx][a-zA-Z0-9.]+$)",
                                      r"# \1",
                                      jvm_options,
                                      flags=re.MULTILINE)
        # only rewrite the file when something actually changed
        if jvm_options != jvm_options_replaced:
            save_file(jvm_options_file, jvm_options_replaced)
Ejemplo n.º 6
0
def test_export_oas3_openapi(apigateway_client):
    """Import the Petstore Swagger spec and verify its OpenAPI 3 (oas3) export."""
    import_response = apigateway_client.import_rest_api(
        failOnWarnings=True, body=load_file(TEST_IMPORT_PETSTORE_SWAGGER))
    assert import_response.get("ResponseMetadata").get("HTTPStatusCode") == 201

    export_response = apigateway_client.get_export(
        restApiId=import_response["id"], stageName="local", exportType="oas3")
    spec = json.loads(export_response["body"].read())
    # required keys
    assert all(required in spec.keys() for required in ("openapi", "info"))
    assert spec["info"]["title"] == "PetStore"
    assert spec["info"]["version"] is not None
    # optional keys
    assert all(optional in spec.keys() for optional in ("paths",))
Ejemplo n.º 7
0
def install_dynamodb_local():
    """Download and install DynamoDBLocal, then patch its logging config and
    manifest classpath.

    Idempotent for the downloads (skipped when target files exist); the log4j2
    and manifest patches are re-applied/verified on every call.
    """
    if not os.path.exists(INSTALL_PATH_DDB_JAR):
        log_install_msg("DynamoDB")
        # download and extract archive
        tmp_archive = os.path.join(tempfile.gettempdir(), "localstack.ddb.zip")
        download_and_extract_with_retry(DYNAMODB_JAR_URL, tmp_archive,
                                        INSTALL_DIR_DDB)

    # fix logging configuration for DynamoDBLocal
    log4j2_config = """<Configuration status="WARN">
      <Appenders>
        <Console name="Console" target="SYSTEM_OUT">
          <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
        </Console>
      </Appenders>
      <Loggers>
        <Root level="WARN"><AppenderRef ref="Console"/></Root>
      </Loggers>
    </Configuration>"""
    log4j2_file = os.path.join(INSTALL_DIR_DDB, "log4j2.xml")
    save_file(log4j2_file, log4j2_config)
    # inject the config into the jar; run_safe => best-effort (e.g. `zip` missing)
    run_safe(lambda: run(["zip", "-u", "DynamoDBLocal.jar", "log4j2.xml"],
                         cwd=INSTALL_DIR_DDB))

    # download agent JAR
    if not os.path.exists(DDB_AGENT_JAR_PATH):
        download(DDB_AGENT_JAR_URL, DDB_AGENT_JAR_PATH)
    if not os.path.exists(JAVASSIST_JAR_PATH):
        download(JAVASSIST_JAR_URL, JAVASSIST_JAR_PATH)
    # ensure that javassist.jar is in the manifest classpath
    run(["unzip", "-o", "DynamoDBLocal.jar", "META-INF/MANIFEST.MF"],
        cwd=INSTALL_DIR_DDB)
    manifest_file = os.path.join(INSTALL_DIR_DDB, "META-INF", "MANIFEST.MF")
    # NOTE(review): assumes the manifest was extracted successfully and
    # load_file returns a string — a failed unzip would make this raise; confirm
    manifest = load_file(manifest_file)
    if "javassist.jar" not in manifest:
        # prepend javassist.jar to the Class-Path entry (first occurrence only)
        manifest = manifest.replace("Class-Path:", "Class-Path: javassist.jar",
                                    1)
        save_file(manifest_file, manifest)
        run(["zip", "-u", "DynamoDBLocal.jar", "META-INF/MANIFEST.MF"],
            cwd=INSTALL_DIR_DDB)
Ejemplo n.º 8
0
def create_lambda_function(
    func_name,
    zip_file=None,
    event_source_arn=None,
    handler_file=None,
    handler=None,
    starting_position=None,
    runtime=None,
    envvars=None,
    tags=None,
    libs=None,
    delete=False,
    layers=None,
    client=None,
    role=None,
    timeout=None,
    region_name=None,
    **kwargs,
):
    """Utility method to create a new function via the Lambda API.

    :param func_name: name of the Lambda function to create
    :param zip_file: raw bytes of the deployment package; takes precedence over handler_file
    :param event_source_arn: if given, additionally create an event source mapping to this ARN
    :param handler_file: path to a handler file (or inline handler code) used to build the package
    :param handler: handler entry point; defaults to LAMBDA_DEFAULT_HANDLER
    :param starting_position: starting position for the event source mapping
    :param runtime: Lambda runtime; defaults to LAMBDA_DEFAULT_RUNTIME
    :param envvars: environment variables for the function
    :param tags: tags for the function
    :param libs: additional libraries to bundle into the archive built from handler_file
    :param delete: if True, delete any pre-existing function with the same name first
    :param layers: optional list of layer ARNs
    :param client: pre-configured Lambda client; created on demand otherwise
    :param role: execution role ARN; defaults to LAMBDA_TEST_ROLE
    :param timeout: function timeout in seconds; defaults to LAMBDA_TIMEOUT_SEC
    :param region_name: region for the auto-created client
    :param kwargs: extra parameters passed through to `create_function` (override defaults)
    :return: dict with "CreateFunctionResponse" and "CreateEventSourceMappingResponse"
    """
    if envvars is None:
        envvars = {}
    if tags is None:
        tags = {}
    if libs is None:
        libs = []

    starting_position = starting_position or LAMBDA_DEFAULT_STARTING_POSITION
    runtime = runtime or LAMBDA_DEFAULT_RUNTIME
    client = client or aws_stack.connect_to_service("lambda",
                                                    region_name=region_name)

    # load zip file content if handler_file is specified
    if not zip_file and handler_file:
        # handler_file may be a path on disk, or the handler code itself
        file_content = load_file(handler_file) if os.path.exists(
            handler_file) else handler_file
        if libs or not handler:
            zip_file = create_lambda_archive(
                file_content,
                libs=libs,
                get_content=True,
                runtime=runtime,  # already defaulted above; no need to re-apply fallback
            )
        else:
            zip_file = create_zip_file(handler_file, get_content=True)

    handler = handler or LAMBDA_DEFAULT_HANDLER

    if delete:
        try:
            # Delete function if one already exists (best-effort; it may not exist)
            client.delete_function(FunctionName=func_name)
        except Exception:
            pass

    lambda_code = {"ZipFile": zip_file}
    # archives above the inline upload limit must be staged in S3 instead
    if len(zip_file) > MAX_LAMBDA_ARCHIVE_UPLOAD_SIZE:
        s3 = aws_stack.connect_to_service("s3")
        aws_stack.get_or_create_bucket(LAMBDA_ASSETS_BUCKET_NAME)
        asset_key = f"{short_uid()}.zip"
        s3.upload_fileobj(Fileobj=io.BytesIO(zip_file),
                          Bucket=LAMBDA_ASSETS_BUCKET_NAME,
                          Key=asset_key)
        lambda_code = {
            "S3Bucket": LAMBDA_ASSETS_BUCKET_NAME,
            "S3Key": asset_key
        }

    # create function; caller-supplied kwargs take precedence over the defaults
    additional_kwargs = kwargs
    kwargs = {
        "FunctionName": func_name,
        "Runtime": runtime,
        "Handler": handler,
        "Role": role or LAMBDA_TEST_ROLE,
        "Code": lambda_code,
        "Timeout": timeout or LAMBDA_TIMEOUT_SEC,
        "Environment": dict(Variables=envvars),
        "Tags": tags,
    }
    kwargs.update(additional_kwargs)
    if layers:
        kwargs["Layers"] = layers
    create_func_resp = client.create_function(**kwargs)

    resp = {
        "CreateFunctionResponse": create_func_resp,
        "CreateEventSourceMappingResponse": None,
    }

    # create event source mapping
    if event_source_arn:
        resp[
            "CreateEventSourceMappingResponse"] = client.create_event_source_mapping(
                FunctionName=func_name,
                EventSourceArn=event_source_arn,
                StartingPosition=starting_position,
            )

    return resp
Ejemplo n.º 9
0
def install_elasticsearch(version=None):
    """Download and install the given Elasticsearch version, including default
    plugins, into the local install dir.

    Idempotent for the download step (skipped when `bin/elasticsearch` exists);
    module cleanup and JVM-options patching are re-applied on every call.

    :param version: Elasticsearch version string; defaults to ELASTICSEARCH_DEFAULT_VERSION
    """
    # locally import to avoid having a dependency on ASF when starting the CLI
    from localstack.aws.api.opensearch import EngineType
    from localstack.services.opensearch import versions

    if not version:
        version = ELASTICSEARCH_DEFAULT_VERSION

    version = get_elasticsearch_install_version(version)
    install_dir = get_elasticsearch_install_dir(version)
    installed_executable = os.path.join(install_dir, "bin", "elasticsearch")
    if not os.path.exists(installed_executable):
        log_install_msg(f"Elasticsearch ({version})")
        es_url = versions.get_download_url(version, EngineType.Elasticsearch)
        install_dir_parent = os.path.dirname(install_dir)
        mkdir(install_dir_parent)
        # download and extract archive
        tmp_archive = os.path.join(config.dirs.tmp,
                                   f"localstack.{os.path.basename(es_url)}")
        download_and_extract_with_retry(es_url, tmp_archive,
                                        install_dir_parent)
        # the archive extracts into a versioned "elasticsearch*" folder -
        # locate it and move it to the canonical install dir
        elasticsearch_dir = glob.glob(
            os.path.join(install_dir_parent, "elasticsearch*"))
        if not elasticsearch_dir:
            raise Exception(
                f"Unable to find Elasticsearch folder in {install_dir_parent}")
        shutil.move(elasticsearch_dir[0], install_dir)

        # pre-create runtime dirs with permissive mode so the server process
        # (possibly running as a different user) can write them
        for dir_name in ("data", "logs", "modules", "plugins",
                         "config/scripts"):
            dir_path = os.path.join(install_dir, dir_name)
            mkdir(dir_path)
            chmod_r(dir_path, 0o777)

        # install default plugins
        for plugin in ELASTICSEARCH_PLUGIN_LIST:
            plugin_binary = os.path.join(install_dir, "bin",
                                         "elasticsearch-plugin")
            plugin_dir = os.path.join(install_dir, "plugins", plugin)
            if not os.path.exists(plugin_dir):
                LOG.info("Installing Elasticsearch plugin %s", plugin)

                # closure over the current `plugin`/`plugin_binary`; invoked
                # within the same loop iteration via retry(), so the late
                # binding of the loop variable is not an issue here
                def try_install():
                    output = run([plugin_binary, "install", "-b", plugin])
                    LOG.debug("Plugin installation output: %s", output)

                # We're occasionally seeing javax.net.ssl.SSLHandshakeException -> add download retries
                download_attempts = 3
                try:
                    retry(try_install, retries=download_attempts - 1, sleep=2)
                except Exception:
                    LOG.warning(
                        "Unable to download Elasticsearch plugin '%s' after %s attempts",
                        plugin,
                        download_attempts,
                    )
                    # plugin download failures are fatal unless explicitly ignored via env var
                    if not os.environ.get("IGNORE_ES_DOWNLOAD_ERRORS"):
                        raise

    # delete some plugins to free up space
    for plugin in ELASTICSEARCH_DELETE_MODULES:
        module_dir = os.path.join(install_dir, "modules", plugin)
        rm_rf(module_dir)

    # disable x-pack-ml plugin (not working on Alpine)
    xpack_dir = os.path.join(install_dir, "modules", "x-pack-ml", "platform")
    rm_rf(xpack_dir)

    # patch JVM options file - replace hardcoded heap size settings
    jvm_options_file = os.path.join(install_dir, "config", "jvm.options")
    if os.path.exists(jvm_options_file):
        jvm_options = load_file(jvm_options_file)
        # comment out any -Xms/-Xmx lines so heap sizing can be controlled externally
        jvm_options_replaced = re.sub(r"(^-Xm[sx][a-zA-Z0-9.]+$)",
                                      r"# \1",
                                      jvm_options,
                                      flags=re.MULTILINE)
        # only rewrite the file when something actually changed
        if jvm_options != jvm_options_replaced:
            save_file(jvm_options_file, jvm_options_replaced)
Ejemplo n.º 10
0
def get_host_kernel_version() -> str:
    """Return the host kernel version string read from /proc/version
    (whitespace-stripped); yields "failed" if the file cannot be read."""
    version_info = load_file("/proc/version", "failed")
    return version_info.strip()
Ejemplo n.º 11
0
def is_redhat() -> bool:
    """Return True if /etc/os-release identifies the OS as RHEL-based
    (contains the substring "rhel"); False otherwise or if the file is unreadable."""
    from localstack.utils.files import load_file

    os_release = load_file("/etc/os-release", "")
    return "rhel" in os_release
Ejemplo n.º 12
0
def is_debian() -> bool:
    """Return True if /etc/issue identifies the OS as Debian (contains the
    substring "Debian"); False otherwise or if the file is unreadable."""
    from localstack.utils.files import load_file

    issue_contents = load_file("/etc/issue", "")
    return "Debian" in issue_contents