def install_kinesis_mock(bin_file_path: str = None):
    response = requests.get(KINESIS_MOCK_RELEASE_URL)
    if not response.ok:
        raise ValueError(
            f"Could not get list of releases from {KINESIS_MOCK_RELEASE_URL}: {response.text}"
        )

    bin_file_path = bin_file_path or kinesis_mock_install_path()
    github_release = response.json()
    download_url = None
    bin_file_name = os.path.basename(bin_file_path)
    for asset in github_release.get("assets", []):
        # find the correct binary in the release
        if asset["name"] == bin_file_name:
            download_url = asset["browser_download_url"]
            break
    if download_url is None:
        raise ValueError(
            f"Could not find required binary {bin_file_name} in release {KINESIS_MOCK_RELEASE_URL}"
        )

    mkdir(INSTALL_DIR_KINESIS_MOCK)
    LOG.info("downloading kinesis-mock binary from %s", download_url)
    download(download_url, bin_file_path)
    chmod_r(bin_file_path, 0o777)

def install_local_kms():
    local_arch = f"{platform.system().lower()}-{get_arch()}"
    binary_path = INSTALL_PATH_KMS_BINARY_PATTERN.replace("<arch>", local_arch)
    if not os.path.exists(binary_path):
        log_install_msg("KMS")
        mkdir(INSTALL_DIR_KMS)
        kms_url = KMS_URL_PATTERN.replace("<arch>", local_arch)
        download(kms_url, binary_path)
        chmod_r(binary_path, 0o777)

def install_opensearch(version=None):
    # locally import to avoid having a dependency on ASF when starting the CLI
    from localstack.aws.api.opensearch import EngineType
    from localstack.services.opensearch import versions

    if not version:
        version = OPENSEARCH_DEFAULT_VERSION

    version = get_opensearch_install_version(version)
    install_dir = get_opensearch_install_dir(version)
    installed_executable = os.path.join(install_dir, "bin", "opensearch")
    if not os.path.exists(installed_executable):
        with OS_INSTALL_LOCKS.setdefault(version, threading.Lock()):
            if not os.path.exists(installed_executable):
                log_install_msg(f"OpenSearch ({version})")
                opensearch_url = versions.get_download_url(version, EngineType.OpenSearch)
                install_dir_parent = os.path.dirname(install_dir)
                mkdir(install_dir_parent)

                # download and extract archive
                tmp_archive = os.path.join(
                    config.dirs.tmp, f"localstack.{os.path.basename(opensearch_url)}"
                )
                download_and_extract_with_retry(opensearch_url, tmp_archive, install_dir_parent)
                opensearch_dir = glob.glob(os.path.join(install_dir_parent, "opensearch*"))
                if not opensearch_dir:
                    raise Exception(f"Unable to find OpenSearch folder in {install_dir_parent}")
                shutil.move(opensearch_dir[0], install_dir)

                for dir_name in ("data", "logs", "modules", "plugins", "config/scripts"):
                    dir_path = os.path.join(install_dir, dir_name)
                    mkdir(dir_path)
                    chmod_r(dir_path, 0o777)

                # patch JVM options file - replace hardcoded heap size settings
                jvm_options_file = os.path.join(install_dir, "config", "jvm.options")
                if os.path.exists(jvm_options_file):
                    jvm_options = load_file(jvm_options_file)
                    jvm_options_replaced = re.sub(
                        r"(^-Xm[sx][a-zA-Z0-9.]+$)", r"# \1", jvm_options, flags=re.MULTILINE
                    )
                    if jvm_options != jvm_options_replaced:
                        save_file(jvm_options_file, jvm_options_replaced)

def install_terraform() -> str:
    if os.path.isfile(TERRAFORM_BIN):
        return TERRAFORM_BIN

    log_install_msg(f"Installing terraform {TERRAFORM_VERSION}")

    system = platform.system().lower()
    arch = get_arch()

    url = TERRAFORM_URL_TEMPLATE.format(version=TERRAFORM_VERSION, os=system, arch=arch)

    download_and_extract(url, os.path.dirname(TERRAFORM_BIN))
    chmod_r(TERRAFORM_BIN, 0o777)

    return TERRAFORM_BIN

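# Usage sketch (illustrative only, not part of the original module): install_terraform()
# is idempotent and returns the path to the binary (TERRAFORM_BIN), so callers can feed
# it straight into a subprocess call. The `run` helper is the one already used elsewhere
# in this module; the "version" argument is just an example invocation.
def _example_install_terraform_usage():
    terraform_bin = install_terraform()
    print(run([terraform_bin, "version"]))
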
def save_startup_info():
    from localstack_ext import __version__ as localstack_ext_version

    file_path = os.path.join(config.dirs.data, STARTUP_INFO_FILE)

    info = StartupInfo(
        timestamp=datetime.datetime.now().isoformat(),
        localstack_version=constants.VERSION,
        localstack_ext_version=localstack_ext_version,
        pro_activated=is_env_true(constants.ENV_PRO_ACTIVATED),
    )
    LOG.debug("saving startup info %s", info)
    try:
        _append_startup_info(file_path, info)
    except IOError as e:
        LOG.error("could not save startup info: %s", e)

    chmod_r(file_path, 0o777)
    return info

def prepare_docker_start():
    # prepare environment for docker start
    container_name = config.MAIN_CONTAINER_NAME

    if DOCKER_CLIENT.is_container_running(container_name):
        raise ContainerExists(
            'LocalStack container named "%s" is already running' % container_name
        )
    if config.dirs.tmp != config.dirs.functions and not config.LAMBDA_REMOTE_DOCKER:
        # Logger is not initialized at this point, so the warning is displayed via print
        print(
            f"WARNING: The detected temp folder for localstack ({config.dirs.tmp}) is not equal to the "
            f"HOST_TMP_FOLDER environment variable set ({config.dirs.functions})."
        )

    os.environ[ENV_SCRIPT_STARTING_DOCKER] = "1"

    # make sure temp folder exists
    mkdir(config.dirs.tmp)
    try:
        chmod_r(config.dirs.tmp, 0o777)
    except Exception:
        pass

def create_lambda_archive(
    script: str,
    get_content: bool = False,
    libs: List[str] = None,
    runtime: str = None,
    file_name: str = None,
    exclude_func: Callable[[str], bool] = None,
):
    """Utility method to create a Lambda function archive"""
    if libs is None:
        libs = []
    runtime = runtime or LAMBDA_DEFAULT_RUNTIME

    with tempfile.TemporaryDirectory(prefix=ARCHIVE_DIR_PREFIX) as tmp_dir:
        file_name = file_name or get_handler_file_from_name(LAMBDA_DEFAULT_HANDLER, runtime=runtime)
        script_file = os.path.join(tmp_dir, file_name)
        if os.path.sep in script_file:
            mkdir(os.path.dirname(script_file))
            # create __init__.py files along the path to allow Python imports
            path = file_name.split(os.path.sep)
            for i in range(1, len(path)):
                save_file(os.path.join(tmp_dir, *(path[:i] + ["__init__.py"])), "")
        save_file(script_file, script)
        chmod_r(script_file, 0o777)

        # copy libs
        for lib in libs:
            paths = [lib, "%s.py" % lib]
            try:
                module = importlib.import_module(lib)
                paths.append(module.__file__)
            except Exception:
                pass
            target_dir = tmp_dir
            root_folder = os.path.join(LOCALSTACK_VENV_FOLDER, "lib/python*/site-packages")
            if lib == "localstack":
                paths = ["localstack/*.py", "localstack/utils"]
                root_folder = LOCALSTACK_ROOT_FOLDER
                target_dir = os.path.join(tmp_dir, lib)
                mkdir(target_dir)
            for path in paths:
                file_to_copy = path if path.startswith("/") else os.path.join(root_folder, path)
                for file_path in glob.glob(file_to_copy):
                    name = os.path.join(target_dir, file_path.split(os.path.sep)[-1])
                    if os.path.isdir(file_path):
                        copy_dir(file_path, name)
                    else:
                        shutil.copyfile(file_path, name)

        if exclude_func:
            for dirpath, folders, files in os.walk(tmp_dir):
                for name in list(folders) + list(files):
                    full_name = os.path.join(dirpath, name)
                    relative = os.path.relpath(full_name, start=tmp_dir)
                    if exclude_func(relative):
                        rm_rf(full_name)

        # create zip file
        result = create_zip_file(tmp_dir, get_content=get_content)
        return result

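# Usage sketch (illustrative only; the handler body and runtime string below are
# placeholder values, not taken from this module). With get_content=True the helper
# returns the archive content as bytes instead of a path to the zip file.
def _example_create_lambda_archive_usage():
    zip_bytes = create_lambda_archive(
        script="def handler(event, context):\n    return event",
        get_content=True,
        runtime="python3.8",
    )
    return zip_bytes
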
def install_elasticsearch(version=None):
    # locally import to avoid having a dependency on ASF when starting the CLI
    from localstack.aws.api.opensearch import EngineType
    from localstack.services.opensearch import versions

    if not version:
        version = ELASTICSEARCH_DEFAULT_VERSION

    version = get_elasticsearch_install_version(version)
    install_dir = get_elasticsearch_install_dir(version)
    installed_executable = os.path.join(install_dir, "bin", "elasticsearch")
    if not os.path.exists(installed_executable):
        log_install_msg(f"Elasticsearch ({version})")
        es_url = versions.get_download_url(version, EngineType.Elasticsearch)
        install_dir_parent = os.path.dirname(install_dir)
        mkdir(install_dir_parent)

        # download and extract archive
        tmp_archive = os.path.join(config.dirs.tmp, f"localstack.{os.path.basename(es_url)}")
        download_and_extract_with_retry(es_url, tmp_archive, install_dir_parent)
        elasticsearch_dir = glob.glob(os.path.join(install_dir_parent, "elasticsearch*"))
        if not elasticsearch_dir:
            raise Exception(f"Unable to find Elasticsearch folder in {install_dir_parent}")
        shutil.move(elasticsearch_dir[0], install_dir)

        for dir_name in ("data", "logs", "modules", "plugins", "config/scripts"):
            dir_path = os.path.join(install_dir, dir_name)
            mkdir(dir_path)
            chmod_r(dir_path, 0o777)

        # install default plugins
        for plugin in ELASTICSEARCH_PLUGIN_LIST:
            plugin_binary = os.path.join(install_dir, "bin", "elasticsearch-plugin")
            plugin_dir = os.path.join(install_dir, "plugins", plugin)
            if not os.path.exists(plugin_dir):
                LOG.info("Installing Elasticsearch plugin %s", plugin)

                def try_install():
                    output = run([plugin_binary, "install", "-b", plugin])
                    LOG.debug("Plugin installation output: %s", output)

                # We're occasionally seeing javax.net.ssl.SSLHandshakeException -> add download retries
                download_attempts = 3
                try:
                    retry(try_install, retries=download_attempts - 1, sleep=2)
                except Exception:
                    LOG.warning(
                        "Unable to download Elasticsearch plugin '%s' after %s attempts",
                        plugin,
                        download_attempts,
                    )
                    if not os.environ.get("IGNORE_ES_DOWNLOAD_ERRORS"):
                        raise

    # delete some plugins to free up space
    for plugin in ELASTICSEARCH_DELETE_MODULES:
        module_dir = os.path.join(install_dir, "modules", plugin)
        rm_rf(module_dir)

    # disable x-pack-ml plugin (not working on Alpine)
    xpack_dir = os.path.join(install_dir, "modules", "x-pack-ml", "platform")
    rm_rf(xpack_dir)

    # patch JVM options file - replace hardcoded heap size settings
    jvm_options_file = os.path.join(install_dir, "config", "jvm.options")
    if os.path.exists(jvm_options_file):
        jvm_options = load_file(jvm_options_file)
        jvm_options_replaced = re.sub(
            r"(^-Xm[sx][a-zA-Z0-9.]+$)", r"# \1", jvm_options, flags=re.MULTILINE
        )
        if jvm_options != jvm_options_replaced:
            save_file(jvm_options_file, jvm_options_replaced)

def generate_processor_script(events_file, log_file=None):
    script_file = os.path.join(tempfile.gettempdir(), "kclipy.%s.processor.py" % short_uid())
    if log_file:
        log_file = "'%s'" % log_file
    else:
        log_file = "None"
    content = """#!/usr/bin/env python
import os, sys, glob, json, socket, time, logging, subprocess, tempfile
logging.basicConfig(level=logging.INFO)
for path in glob.glob('%s/lib/python*/site-packages'):
    sys.path.insert(0, path)
sys.path.insert(0, '%s')
from localstack.config import DEFAULT_ENCODING
from localstack.utils.kinesis import kinesis_connector
from localstack.utils.time import timestamp
events_file = '%s'
log_file = %s
error_log = os.path.join(tempfile.gettempdir(), 'kclipy.error.log')
if __name__ == '__main__':
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    num_tries = 3
    sleep_time = 2
    error = None
    for i in range(0, num_tries):
        try:
            sock.connect(events_file)
            error = None
            break
        except Exception as e:
            error = e
            if i < num_tries:
                msg = '%%s: Unable to connect to UNIX socket. Retrying.' %% timestamp()
                subprocess.check_output('echo "%%s" >> %%s' %% (msg, error_log), shell=True)
                time.sleep(sleep_time)
    if error:
        print("WARN: Unable to connect to UNIX socket after retrying: %%s" %% error)
        raise error

    def receive_msg(records, checkpointer, shard_id):
        try:
            # records is a list of amazon_kclpy.messages.Record objects -> convert to JSON
            records_dicts = [j._json_dict for j in records]
            message_to_send = {'shard_id': shard_id, 'records': records_dicts}
            string_to_send = '%%s\\n' %% json.dumps(message_to_send)
            bytes_to_send = string_to_send.encode(DEFAULT_ENCODING)
            sock.send(bytes_to_send)
        except Exception as e:
            msg = "WARN: Unable to forward event: %%s" %% e
            print(msg)
            subprocess.check_output('echo "%%s" >> %%s' %% (msg, error_log), shell=True)
    kinesis_connector.KinesisProcessor.run_processor(log_file=log_file, processor_func=receive_msg)
""" % (
        LOCALSTACK_VENV_FOLDER,
        LOCALSTACK_ROOT_FOLDER,
        events_file,
        log_file,
    )
    save_file(script_file, content)
    chmod_r(script_file, 0o755)
    TMP_FILES.append(script_file)
    return script_file

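# Usage sketch (illustrative only; the socket and log paths below are placeholders, and
# it is assumed that a UNIX domain socket server is already listening on the events
# file). The generated script is typically referenced as the record-processor executable
# of an amazon_kclpy (KCL MultiLangDaemon) worker.
def _example_generate_processor_script_usage():
    return generate_processor_script(
        "/tmp/kclipy.events.sock", log_file="/tmp/kclipy.log"
    )
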