def install_dynamodb_local():
    """Download DynamoDBLocal if missing, then (re)apply its log4j2 logging config.

    The download step is skipped when the jar is already present; the logging
    fix is applied unconditionally so an existing install also gets patched.
    """
    if not os.path.exists(INSTALL_PATH_DDB_JAR):
        log_install_msg("DynamoDB")
        # download and extract archive, picking the URL for the current platform
        archive_path = os.path.join(tempfile.gettempdir(), "localstack.ddb.zip")
        if is_alpine():
            source_url = DYNAMODB_JAR_URL_ALPINE
        else:
            source_url = DYNAMODB_JAR_URL
        download_and_extract_with_retry(source_url, archive_path, INSTALL_DIR_DDB)

    # fix logging configuration for DynamoDBLocal
    log4j2_config = """<Configuration status="WARN"> <Appenders> <Console name="Console" target="SYSTEM_OUT"> <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/> </Console> </Appenders> <Loggers> <Root level="WARN"><AppenderRef ref="Console"/></Root> </Loggers> </Configuration>"""
    config_path = os.path.join(INSTALL_DIR_DDB, "log4j2.xml")
    save_file(config_path, log4j2_config)
    # inject the config into the jar so DynamoDBLocal picks it up; `|| true`
    # keeps the install best-effort if `zip` is unavailable
    run('cd "%s" && zip -u DynamoDBLocal.jar log4j2.xml || true' % INSTALL_DIR_DDB)
def create_zip_file(file_path, zip_file=None, get_content=False, content_root=None, mode="w"):
    """
    Creates a zipfile to the designated file_path.

    By default, a new zip file is created but the mode parameter can be used to
    append to an existing zip file.
    """
    # zip a directory directly; stage a single file into a scratch dir first
    source_dir = file_path
    if not os.path.isdir(file_path):
        source_dir = tempfile.mkdtemp(prefix=ARCHIVE_DIR_PREFIX)
        shutil.copy(file_path, source_dir)
        TMP_FILES.append(source_dir)

    scratch_dir = tempfile.mkdtemp(prefix=ARCHIVE_DIR_PREFIX)
    target_zip = zip_file or os.path.join(scratch_dir, "archive.zip")

    # special case where target folder is empty -> create empty zip file
    if is_empty_dir(source_dir):
        # see https://stackoverflow.com/questions/25195495/how-to-create-an-empty-zip-file#25195628
        empty_zip_bytes = (
            b"PK\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
        )
        if get_content:
            return empty_zip_bytes
        save_file(target_zip, empty_zip_bytes)
        return target_zip

    # create zip file
    if is_alpine():
        # todo: extend CLI with the new parameters
        create_zip_file_cli(source_path=file_path, base_dir=source_dir, zip_file=target_zip)
    else:
        create_zip_file_python(
            source_path=file_path,
            base_dir=source_dir,
            zip_file=target_zip,
            content_root=content_root,
            mode=mode,
        )

    if not get_content:
        TMP_FILES.append(scratch_dir)
        return target_zip

    # read the archive back, clean up the scratch dir, and hand out raw bytes
    with open(target_zip, "rb") as fh:
        archive_bytes = fh.read()
    rm_dir(scratch_dir)
    return archive_bytes
def install_elasticsearch():
    """Install Elasticsearch into INSTALL_DIR_ES if not yet present.

    Always re-runs the space-saving cleanup and JVM-options patching so an
    existing installation is kept in the expected state as well.
    """
    if not os.path.exists(INSTALL_DIR_ES):
        log_install_msg('Elasticsearch')
        mkdir(INSTALL_DIR_INFRA)
        # download and extract archive
        archive_path = os.path.join(tempfile.gettempdir(), 'localstack.es.zip')
        download_and_extract_with_retry(ELASTICSEARCH_JAR_URL, archive_path, INSTALL_DIR_INFRA)
        extracted_dirs = glob.glob(os.path.join(INSTALL_DIR_INFRA, 'elasticsearch*'))
        if not extracted_dirs:
            raise Exception('Unable to find Elasticsearch folder in %s' % INSTALL_DIR_INFRA)
        shutil.move(extracted_dirs[0], INSTALL_DIR_ES)

        # create runtime directories and make them world-writable
        for sub_dir in ('data', 'logs', 'modules', 'plugins', 'config/scripts'):
            sub_path = '%s/%s' % (INSTALL_DIR_ES, sub_dir)
            mkdir(sub_path)
            chmod_r(sub_path, 0o777)

        # install default plugins
        for plugin in ELASTICSEARCH_PLUGIN_LIST:
            if is_alpine():
                # https://github.com/pires/docker-elasticsearch/issues/56
                os.environ['ES_TMPDIR'] = '/tmp'
            plugin_binary = os.path.join(INSTALL_DIR_ES, 'bin', 'elasticsearch-plugin')
            print('install elasticsearch-plugin %s' % (plugin))
            run('%s install -b %s' % (plugin_binary, plugin))

    # delete some plugins to free up space
    for plugin in ELASTICSEARCH_DELETE_MODULES:
        rm_rf(os.path.join(INSTALL_DIR_ES, 'modules', plugin))

    # disable x-pack-ml plugin (not working on Alpine)
    rm_rf(os.path.join(INSTALL_DIR_ES, 'modules', 'x-pack-ml', 'platform'))

    # patch JVM options file - replace hardcoded heap size settings
    jvm_options_file = os.path.join(INSTALL_DIR_ES, 'config', 'jvm.options')
    if os.path.exists(jvm_options_file):
        jvm_options = load_file(jvm_options_file)
        patched_options = re.sub(r'(^-Xm[sx][a-zA-Z0-9\.]+$)', r'# \1',
                                 jvm_options, flags=re.MULTILINE)
        if patched_options != jvm_options:
            save_file(jvm_options_file, patched_options)
def install_elasticsearch(version=None):
    """Install the requested Elasticsearch version (default when None).

    Downloads and unpacks the distribution only when its executable is
    missing; cleanup and JVM-options patching always run.
    """
    version = get_elasticsearch_install_version(version)
    install_dir = get_elasticsearch_install_dir(version)
    installed_executable = os.path.join(install_dir, 'bin', 'elasticsearch')
    if not os.path.exists(installed_executable):
        log_install_msg('Elasticsearch (%s)' % version)
        es_url = ELASTICSEARCH_URLS.get(version)
        if not es_url:
            raise Exception('Unable to find download URL for Elasticsearch version "%s"' % version)
        parent_dir = os.path.dirname(install_dir)
        mkdir(parent_dir)
        # download and extract archive
        archive_path = os.path.join(config.TMP_FOLDER, 'localstack.%s' % os.path.basename(es_url))
        download_and_extract_with_retry(es_url, archive_path, parent_dir)
        extracted_dirs = glob.glob(os.path.join(parent_dir, 'elasticsearch*'))
        if not extracted_dirs:
            raise Exception('Unable to find Elasticsearch folder in %s' % parent_dir)
        shutil.move(extracted_dirs[0], install_dir)

        # create runtime directories and make them world-writable
        for sub_dir in ('data', 'logs', 'modules', 'plugins', 'config/scripts'):
            sub_path = os.path.join(install_dir, sub_dir)
            mkdir(sub_path)
            chmod_r(sub_path, 0o777)

        # install default plugins
        for plugin in ELASTICSEARCH_PLUGIN_LIST:
            if is_alpine():
                # https://github.com/pires/docker-elasticsearch/issues/56
                os.environ['ES_TMPDIR'] = '/tmp'
            plugin_binary = os.path.join(install_dir, 'bin', 'elasticsearch-plugin')
            plugin_dir = os.path.join(install_dir, 'plugins', plugin)
            if not os.path.exists(plugin_dir):
                LOG.info('Installing Elasticsearch plugin %s' % (plugin))
                run('%s install -b %s' % (plugin_binary, plugin))

    # delete some plugins to free up space
    for plugin in ELASTICSEARCH_DELETE_MODULES:
        rm_rf(os.path.join(install_dir, 'modules', plugin))

    # disable x-pack-ml plugin (not working on Alpine)
    rm_rf(os.path.join(install_dir, 'modules', 'x-pack-ml', 'platform'))

    # patch JVM options file - replace hardcoded heap size settings
    jvm_options_file = os.path.join(install_dir, 'config', 'jvm.options')
    if os.path.exists(jvm_options_file):
        jvm_options = load_file(jvm_options_file)
        patched_options = re.sub(r'(^-Xm[sx][a-zA-Z0-9\.]+$)', r'# \1',
                                 jvm_options, flags=re.MULTILINE)
        if patched_options != jvm_options:
            save_file(jvm_options_file, patched_options)
def install_dynamodb_local():
    """Install DynamoDBLocal, applying the Alpine sqlite fix and log config.

    Inside Docker the install dir can be wiped first (OVERWRITE_DDB_FILES_IN_DOCKER)
    to force a fresh download.
    """
    if OVERWRITE_DDB_FILES_IN_DOCKER and in_docker():
        rm_rf(INSTALL_DIR_DDB)
    if not os.path.exists(INSTALL_PATH_DDB_JAR):
        log_install_msg('DynamoDB')
        # download and extract archive
        archive_path = os.path.join(tempfile.gettempdir(), 'localstack.ddb.zip')
        if in_docker():
            download_url = DYNAMODB_JAR_URL_ALPINE
        else:
            download_url = DYNAMODB_JAR_URL
        download_and_extract_with_retry(download_url, archive_path, INSTALL_DIR_DDB)

    # fix for Alpine, otherwise DynamoDBLocal fails with:
    # DynamoDBLocal_lib/libsqlite4java-linux-amd64.so: __memcpy_chk: symbol not found
    if is_alpine():
        libs_dir = '%s/DynamoDBLocal_lib' % INSTALL_DIR_DDB
        marker_file = '%s/alpine_fix_applied' % libs_dir
        if APPLY_DDB_ALPINE_FIX and not os.path.exists(marker_file):
            patched_lib = (
                'https://rawgit.com/bhuisgen/docker-alpine/master/alpine-dynamodb/' +
                'rootfs/usr/local/dynamodb/DynamoDBLocal_lib/libsqlite4java-linux-amd64.so'
            )
            patched_jar = (
                'https://rawgit.com/bhuisgen/docker-alpine/master/alpine-dynamodb/' +
                'rootfs/usr/local/dynamodb/DynamoDBLocal_lib/sqlite4java.jar'
            )
            run("curl -L -o %s/libsqlite4java-linux-amd64.so '%s'" % (libs_dir, patched_lib))
            run("curl -L -o %s/sqlite4java.jar '%s'" % (libs_dir, patched_jar))
            # leave a marker so the fix is only downloaded once
            save_file(marker_file, '')

    # fix logging configuration for DynamoDBLocal
    log4j2_config = """<Configuration status="WARN"> <Appenders> <Console name="Console" target="SYSTEM_OUT"> <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/> </Console> </Appenders> <Loggers> <Root level="WARN"><AppenderRef ref="Console"/></Root> </Loggers> </Configuration>"""
    config_path = os.path.join(INSTALL_DIR_DDB, 'log4j2.xml')
    save_file(config_path, log4j2_config)
    # inject the config into the jar; `|| true` keeps this best-effort
    run('cd "%s" && zip -u DynamoDBLocal.jar log4j2.xml || true' % INSTALL_DIR_DDB)
def create_zip_file(file_path, get_content=False):
    """Zip the given file or directory.

    Returns the archive path, or the raw archive bytes when get_content=True
    (in which case the scratch directory is removed immediately).
    """
    # zip a directory directly; stage a single file into a temp dir first
    source_dir = file_path
    if not os.path.isdir(file_path):
        source_dir = tempfile.mkdtemp(prefix=ARCHIVE_DIR_PREFIX)
        shutil.copy(file_path, source_dir)
        TMP_FILES.append(source_dir)

    scratch_dir = tempfile.mkdtemp(prefix=ARCHIVE_DIR_PREFIX)
    target_zip = os.path.join(scratch_dir, 'archive.zip')

    # create zip file
    if is_alpine():
        create_zip_file_cli(file_path, source_dir, zip_file=target_zip)
    else:
        create_zip_file_python(file_path, source_dir, zip_file=target_zip)

    if not get_content:
        TMP_FILES.append(scratch_dir)
        return target_zip

    with open(target_zip, 'rb') as fh:
        archive_bytes = fh.read()
    rm_dir(scratch_dir)
    return archive_bytes
def install_elasticsearch(version=None):
    """Install the requested Elasticsearch version (default when None).

    Plugin downloads are retried to work around transient SSL handshake
    failures; cleanup and JVM-options patching always run.
    """
    version = get_elasticsearch_install_version(version)
    install_dir = get_elasticsearch_install_dir(version)
    installed_executable = os.path.join(install_dir, "bin", "elasticsearch")
    if not os.path.exists(installed_executable):
        log_install_msg("Elasticsearch (%s)" % version)
        es_url = ELASTICSEARCH_URLS.get(version)
        if not es_url:
            raise Exception(
                'Unable to find download URL for Elasticsearch version "%s"' % version)
        parent_dir = os.path.dirname(install_dir)
        mkdir(parent_dir)
        # download and extract archive
        archive_path = os.path.join(config.TMP_FOLDER, "localstack.%s" % os.path.basename(es_url))
        download_and_extract_with_retry(es_url, archive_path, parent_dir)
        extracted_dirs = glob.glob(os.path.join(parent_dir, "elasticsearch*"))
        if not extracted_dirs:
            raise Exception("Unable to find Elasticsearch folder in %s" % parent_dir)
        shutil.move(extracted_dirs[0], install_dir)

        # create runtime directories and make them world-writable
        for sub_dir in ("data", "logs", "modules", "plugins", "config/scripts"):
            sub_path = os.path.join(install_dir, sub_dir)
            mkdir(sub_path)
            chmod_r(sub_path, 0o777)

        # install default plugins
        for plugin in ELASTICSEARCH_PLUGIN_LIST:
            if is_alpine():
                # https://github.com/pires/docker-elasticsearch/issues/56
                os.environ["ES_TMPDIR"] = "/tmp"
            plugin_binary = os.path.join(install_dir, "bin", "elasticsearch-plugin")
            plugin_dir = os.path.join(install_dir, "plugins", plugin)
            if not os.path.exists(plugin_dir):
                LOG.info("Installing Elasticsearch plugin %s" % plugin)

                def try_install():
                    safe_run([plugin_binary, "install", "-b", plugin])

                # We're occasionally seeing javax.net.ssl.SSLHandshakeException -> add download retries
                max_attempts = 3
                try:
                    retry(try_install, retries=max_attempts - 1, sleep=2)
                except Exception:
                    LOG.warning(
                        "Unable to download Elasticsearch plugin '%s' after %s attempts"
                        % (plugin, max_attempts))
                    if not os.environ.get("IGNORE_ES_DOWNLOAD_ERRORS"):
                        raise

    # delete some plugins to free up space
    for plugin in ELASTICSEARCH_DELETE_MODULES:
        rm_rf(os.path.join(install_dir, "modules", plugin))

    # disable x-pack-ml plugin (not working on Alpine)
    rm_rf(os.path.join(install_dir, "modules", "x-pack-ml", "platform"))

    # patch JVM options file - replace hardcoded heap size settings
    jvm_options_file = os.path.join(install_dir, "config", "jvm.options")
    if os.path.exists(jvm_options_file):
        jvm_options = load_file(jvm_options_file)
        patched_options = re.sub(r"(^-Xm[sx][a-zA-Z0-9\.]+$)", r"# \1",
                                 jvm_options, flags=re.MULTILINE)
        if patched_options != jvm_options:
            save_file(jvm_options_file, patched_options)
def rm_dir(dir):
    """Recursively delete the directory *dir*.

    On Alpine we shell out to `rm -r`, which can be an order of magnitude
    faster on Travis-CI; otherwise fall back to shutil.rmtree.

    :param dir: path of the directory to remove
    :return: result of run(...) on Alpine, otherwise None
    """
    if is_alpine():
        import shlex  # local import to leave module-level deps untouched
        # Using the native command can be an order of magnitude faster on Travis-CI.
        # Quote the path: the unquoted interpolation broke on paths containing
        # spaces or shell metacharacters (and risked accidental shell injection).
        return run('rm -r %s' % shlex.quote(dir))
    shutil.rmtree(dir)
def copy_dir(source, target):
    """Recursively copy directory *source* to *target*.

    On Alpine we shell out to `cp -r`, which can be an order of magnitude
    faster on Travis-CI; otherwise fall back to shutil.copytree.

    :param source: existing directory to copy
    :param target: destination path (must not already exist for copytree)
    :return: result of run(...) on Alpine, otherwise None
    """
    if is_alpine():
        import shlex  # local import to leave module-level deps untouched
        # Using the native command can be an order of magnitude faster on Travis-CI.
        # Quote both paths: the unquoted interpolation broke on paths containing
        # spaces or shell metacharacters (and risked accidental shell injection).
        return run('cp -r %s %s' % (shlex.quote(source), shlex.quote(target)))
    shutil.copytree(source, target)
def _has_stack_status(cfn_client, statuses: List[str]):
    """Return a factory producing zero-arg predicates that check whether the
    given stack currently has one of the expected *statuses*."""

    def _has_status(stack_id: str):
        def _inner():
            response = cfn_client.describe_stacks(StackName=stack_id)
            # since the lookup uses the id we can only get a single response
            stack = response["Stacks"][0]
            return stack.get("StackStatus") in statuses

        return _inner

    return _has_status


@pytest.fixture
def is_change_set_finished(cfn_client):
    """Fixture: factory of zero-arg predicates checking whether a change set
    has finished executing."""

    def _is_change_set_finished(change_set_id: str):
        def _inner():
            description = cfn_client.describe_change_set(ChangeSetName=change_set_id)
            return description.get("ExecutionStatus") == "EXECUTE_COMPLETE"

        return _inner

    return _is_change_set_finished


# marker to restrict a test to Alpine-based environments
only_in_alpine = pytest.mark.skipif(
    not is_alpine(),
    reason="test only applicable if run in alpine",
)