def download_and_extract(archive_url, target_dir, retries=0, sleep=3, tmp_archive=None):
    """Download an archive and extract it into `target_dir`.

    :param archive_url: URL of a .zip/.gz/.bz2 archive to fetch
    :param target_dir: directory to extract into (created if missing)
    :param retries: number of extra download attempts after the first failure
    :param sleep: seconds to wait between attempts
    :param tmp_archive: optional cache path for the downloaded archive
    :raises Exception: for unsupported archive extensions, or if every
        download attempt fails (fix: previously all failures were swallowed
        and the empty placeholder file was "extracted")
    """
    mkdir(target_dir)
    tmp_archive = tmp_archive or new_tmp_file()
    if not os.path.exists(tmp_archive):
        # create temporary placeholder file, to avoid duplicate parallel downloads
        save_file(tmp_archive, '')
        last_error = None
        for i in range(retries + 1):
            try:
                download(archive_url, tmp_archive)
                last_error = None
                break
            except Exception as e:
                last_error = e
                time.sleep(sleep)
        if last_error is not None:
            # all attempts failed - do not try to extract the empty placeholder
            raise last_error
    _, ext = os.path.splitext(tmp_archive)
    if ext == '.zip':
        unzip(tmp_archive, target_dir)
    elif ext == '.gz' or ext == '.bz2':
        untar(tmp_archive, target_dir)
    else:
        raise Exception('Unsupported archive format: %s' % ext)
def test_train_tensorflow():
    """Train a small TensorFlow MNIST estimator through the SageMaker API in local mode."""
    sagemaker_client = aws_stack.connect_to_service('sagemaker')
    iam_client = aws_stack.connect_to_service('iam')
    sagemaker_session = sagemaker.Session(boto_session=aws_stack.Boto3Session(),
        sagemaker_client=sagemaker_client)
    try:
        response = iam_client.create_role(RoleName='r1', AssumeRolePolicyDocument='{}')
    except Exception:
        # role already exists - reuse it
        response = iam_client.get_role(RoleName='r1')
    role_arn = response['Role']['Arn']
    test_data = 'testdata'
    if not os.path.exists(test_data):
        # convert the MNIST data sets into record files under the test data dir
        data_sets = input_data.read_data_sets(test_data, dtype=tf.uint8, reshape=False, validation_size=5000)
        convert_to(data_sets.train, 'train', test_data)
        convert_to(data_sets.validation, 'validation', test_data)
        convert_to(data_sets.test, 'test', test_data)
    # upload training data to the (local) S3 bucket used by the session
    inputs = sagemaker_session.upload_data(path=test_data, key_prefix='data/mnist')
    tmp_file = new_tmp_file()
    # fetch the entry-point training script
    download(TF_MNIST_URL, tmp_file)
    mnist_estimator = TensorFlow(entry_point=tmp_file, role=role_arn, framework_version='1.12.0',
        training_steps=10, evaluation_steps=10, sagemaker_session=sagemaker_session,
        train_instance_count=1, train_instance_type='local')
    mnist_estimator.fit(inputs, logs=False)
def install_lambda_java_libs():
    """Fetch the LocalStack "fat" JAR file (bundles all Java dependencies) if absent."""
    # nothing to do when the JAR is already installed
    if os.path.exists(INSTALL_PATH_LOCALSTACK_FAT_JAR):
        return
    LOGGER.info(
        'Downloading and installing LocalStack Java libraries. This may take some time.'
    )
    download(URL_LOCALSTACK_FAT_JAR, INSTALL_PATH_LOCALSTACK_FAT_JAR)
def download_and_extract(archive_url, target_dir, retries=0, sleep=3, tmp_archive=None):
    """Download an archive and extract it into `target_dir`.

    The archive type is dispatched on the file extension of `tmp_archive`
    (when given) or of `archive_url`. An existing non-empty `tmp_archive`
    is reused as a download cache.

    :param archive_url: URL of a .zip/.gz/.bz2 archive to fetch
    :param target_dir: directory to extract into (created if missing)
    :param retries: number of extra download attempts after the first failure
    :param sleep: seconds to wait between attempts
    :param tmp_archive: optional cache path for the downloaded archive
    :raises Exception: for unsupported archive extensions, or if every
        download attempt fails (fix: previously all failures were swallowed
        and extraction proceeded on the empty placeholder file)
    """
    mkdir(target_dir)
    if tmp_archive:
        _, ext = os.path.splitext(tmp_archive)
    else:
        _, ext = os.path.splitext(archive_url)
    tmp_archive = tmp_archive or new_tmp_file()
    if not os.path.exists(tmp_archive) or os.path.getsize(tmp_archive) <= 0:
        # create temporary placeholder file, to avoid duplicate parallel downloads
        save_file(tmp_archive, "")
        last_error = None
        for i in range(retries + 1):
            try:
                download(archive_url, tmp_archive)
                last_error = None
                break
            except Exception as e:
                last_error = e
                time.sleep(sleep)
        if last_error is not None:
            # all attempts failed - surface the error instead of extracting junk
            raise last_error
    if ext == ".zip":
        unzip(tmp_archive, target_dir)
    elif ext == ".gz" or ext == ".bz2":
        untar(tmp_archive, target_dir)
    else:
        raise Exception("Unsupported archive format: %s" % ext)
def install_stepfunctions_local():
    """Install the Step Functions local JAR by extracting it from the Docker image,
    then inject the LocalStack patch class into the JAR."""
    if not os.path.exists(INSTALL_PATH_STEPFUNCTIONS_JAR):
        # pull the JAR file from the Docker image, which is more up-to-date than the downloadable JAR file
        log_install_msg("Step Functions")
        mkdir(INSTALL_DIR_STEPFUNCTIONS)
        run("{dc} pull {img}".format(dc=config.DOCKER_CMD, img=IMAGE_NAME_SFN_LOCAL))
        docker_name = "tmp-ls-sfn"
        # start a short-lived throwaway container so we can `docker cp` out of it
        run(("{dc} run --name={dn} --entrypoint= -d --rm {img} sleep 15"
             ).format(dc=config.DOCKER_CMD, dn=docker_name, img=IMAGE_NAME_SFN_LOCAL))
        time.sleep(5)
        run("{dc} cp {dn}:/home/stepfunctionslocal/ {tgt}".format(
            dc=config.DOCKER_CMD, dn=docker_name, tgt=INSTALL_DIR_INFRA))
        run("mv %s/stepfunctionslocal/*.jar %s" % (INSTALL_DIR_INFRA, INSTALL_DIR_STEPFUNCTIONS))
        rm_rf("%s/stepfunctionslocal" % INSTALL_DIR_INFRA)
    # apply patches
    patch_class_file = os.path.join(INSTALL_DIR_STEPFUNCTIONS, SFN_PATCH_CLASS)
    if not os.path.exists(patch_class_file):
        download(SFN_PATCH_CLASS_URL, patch_class_file)
        # add the patch class into the JAR (a zip archive) in place
        cmd = 'cd "%s"; zip %s %s' % (
            INSTALL_DIR_STEPFUNCTIONS,
            INSTALL_PATH_STEPFUNCTIONS_JAR,
            SFN_PATCH_CLASS,
        )
        run(cmd)
def install_dynamodb_local():
    """Download and unpack DynamoDB Local; patch its sqlite libs when running on Alpine."""
    if not os.path.exists(INSTALL_DIR_DDB):
        LOGGER.info(
            'Downloading and installing local DynamoDB server. This may take some time.'
        )
        mkdir(INSTALL_DIR_DDB)
        # cache the downloaded archive so re-installs skip the download
        if not os.path.exists(TMP_ARCHIVE_DDB):
            download(DYNAMODB_JAR_URL, TMP_ARCHIVE_DDB)
        cmd = 'cd %s && cp %s ddb.zip && unzip -q ddb.zip && rm ddb.zip'
        run(cmd % (INSTALL_DIR_DDB, TMP_ARCHIVE_DDB))
        # fix for Alpine, otherwise DynamoDBLocal fails with:
        # DynamoDBLocal_lib/libsqlite4java-linux-amd64.so: __memcpy_chk: symbol not found
        if is_alpine():
            patched_lib = (
                'https://rawgit.com/bhuisgen/docker-alpine/master/alpine-dynamodb/' +
                'rootfs/usr/local/dynamodb/DynamoDBLocal_lib/libsqlite4java-linux-amd64.so'
            )
            patched_jar = (
                'https://rawgit.com/bhuisgen/docker-alpine/master/alpine-dynamodb/' +
                'rootfs/usr/local/dynamodb/DynamoDBLocal_lib/sqlite4java.jar')
            run("curl -L -o %s/DynamoDBLocal_lib/libsqlite4java-linux-amd64.so '%s'" % (INSTALL_DIR_DDB, patched_lib))
            run("curl -L -o %s/DynamoDBLocal_lib/sqlite4java.jar '%s'" % (INSTALL_DIR_DDB, patched_jar))
def setUpClass(cls): cls.lambda_client = aws_stack.connect_to_service('lambda') # deploy lambda - Java if not os.path.exists(TEST_LAMBDA_JAVA): mkdir(os.path.dirname(TEST_LAMBDA_JAVA)) download(TEST_LAMBDA_JAR_URL, TEST_LAMBDA_JAVA) # Lambda supports single JAR deployments without the zip, # so we upload the JAR directly. cls.test_java_jar = load_file(TEST_LAMBDA_JAVA, mode='rb') cls.test_java_zip = testutil.create_zip_file(TEST_LAMBDA_JAVA, get_content=True) testutil.create_lambda_function( func_name=TEST_LAMBDA_NAME_JAVA, zip_file=cls.test_java_jar, runtime=LAMBDA_RUNTIME_JAVA8, handler='cloud.localstack.sample.LambdaHandler') # deploy lambda - Java with stream handler testutil.create_lambda_function( func_name=TEST_LAMBDA_NAME_JAVA_STREAM, zip_file=cls.test_java_jar, runtime=LAMBDA_RUNTIME_JAVA8, handler='cloud.localstack.sample.LambdaStreamHandler') # deploy lambda - Java with serializable input object testutil.create_lambda_function( func_name=TEST_LAMBDA_NAME_JAVA_SERIALIZABLE, zip_file=cls.test_java_zip, runtime=LAMBDA_RUNTIME_JAVA8, handler='cloud.localstack.sample.SerializedInputLambdaHandler')
def install_lambda_java_libs():
    """Ensure every Maven JAR dependency is present in the local M2 repository."""
    for dependency in JAR_DEPENDENCIES:
        local_path = '%s/repository/%s' % (M2_HOME, dependency)
        if os.path.exists(local_path):
            continue  # already downloaded
        # fetch the artifact from Maven Central into the repository layout
        remote_url = ('http://central.maven.org/maven2/%s' % dependency)
        mkdir(os.path.dirname(local_path))
        download(remote_url, local_path)
def install_amazon_kinesis_libs():
    """Install KCL/STS JARs, the LocalStack JAR, extra pip libs, and compile Java helpers."""
    # install KCL/STS JAR files
    if not os.path.exists(INSTALL_DIR_KCL):
        mkdir(INSTALL_DIR_KCL)
        if not os.path.exists(TMP_ARCHIVE_STS):
            download(URL_STS_JAR, TMP_ARCHIVE_STS)
        shutil.copy(TMP_ARCHIVE_STS, INSTALL_DIR_KCL)
    # install LocalStack JAR file
    if not os.path.exists(INSTALL_PATH_LOCALSTACK_JAR):
        download(URL_LOCALSTACK_JAR, INSTALL_PATH_LOCALSTACK_JAR)
    # install extended libs, only when amazon_kclpy is not importable yet
    try:
        from amazon_kclpy import kcl  # noqa: F401 - import serves as availability probe
    except Exception:
        # fix: the exception object was bound (`as e`) but never used
        for lib in EXTENDED_PIP_LIBS:
            run('pip install %s' % lib)
    # Compile Java files
    from localstack.utils.kinesis import kclipy_helper
    classpath = kclipy_helper.get_kcl_classpath()
    java_files = '%s/utils/kinesis/java/com/atlassian/*.java' % ROOT_PATH
    class_files = '%s/utils/kinesis/java/com/atlassian/*.class' % ROOT_PATH
    # compile only when no .class files exist yet
    if not glob.glob(class_files):
        run('javac -cp "%s" %s' % (classpath, java_files))
    ext_java_dir = '%s/ext/java' % ROOT_PATH
    # build the extension JAR only once
    if not glob.glob('%s/target/*.jar' % ext_java_dir):
        run('cd "%s"; mvn -DskipTests package' % (ext_java_dir))
def install_elasticmq():
    """Download the ElasticMQ server JAR into its install directory (first run only)."""
    if os.path.exists(INSTALL_DIR_ELASTICMQ):
        return
    LOGGER.info('Downloading and installing local ElasticMQ server. This may take some time.')
    mkdir(INSTALL_DIR_ELASTICMQ)
    # download archive
    if not os.path.exists(TMP_ARCHIVE_ELASTICMQ):
        download(ELASTICMQ_JAR_URL, TMP_ARCHIVE_ELASTICMQ)
    shutil.copy(TMP_ARCHIVE_ELASTICMQ, INSTALL_DIR_ELASTICMQ)
def install_local_kms():
    """Install the local-KMS binary for the current architecture, if missing."""
    arch = get_arch()
    destination = INSTALL_PATH_KMS_BINARY_PATTERN.replace('<arch>', arch)
    if os.path.exists(destination):
        return
    log_install_msg('KMS')
    mkdir(INSTALL_DIR_KMS)
    download(KMS_URL_PATTERN.replace('<arch>', arch), destination)
    # make the downloaded binary executable for all users
    chmod_r(destination, 0o777)
def install_elasticmq():
    """Download the ElasticMQ server JAR into its install directory (first run only)."""
    if not os.path.exists(INSTALL_DIR_ELASTICMQ):
        LOGGER.info('Downloading and installing local ElasticMQ server. This may take some time.')
        # use the project's mkdir helper (consistent with the other installers)
        # instead of shelling out via `run('mkdir -p %s')`, which is unsafe for
        # paths containing spaces/shell metacharacters
        mkdir(INSTALL_DIR_ELASTICMQ)
        # download archive
        if not os.path.exists(TMP_ARCHIVE_ELASTICMQ):
            download(URL_ELASTICMQ_JAR, TMP_ARCHIVE_ELASTICMQ)
        shutil.copy(TMP_ARCHIVE_ELASTICMQ, INSTALL_DIR_ELASTICMQ)
def install_elasticmq():
    """Install the ElasticMQ server JAR, caching the download in the system temp dir."""
    if os.path.exists(INSTALL_DIR_ELASTICMQ):
        return
    log_install_msg('ElasticMQ')
    mkdir(INSTALL_DIR_ELASTICMQ)
    # download archive
    cached_jar = os.path.join(tempfile.gettempdir(), 'elasticmq-server.jar')
    if not os.path.exists(cached_jar):
        download(ELASTICMQ_JAR_URL, cached_jar)
    shutil.copy(cached_jar, INSTALL_DIR_ELASTICMQ)
def install_local_kms():
    """Install the local-KMS binary matching this OS/architecture, if missing."""
    local_arch = f"{platform.system().lower()}-{get_arch()}"
    destination = INSTALL_PATH_KMS_BINARY_PATTERN.replace("<arch>", local_arch)
    if os.path.exists(destination):
        return
    log_install_msg("KMS")
    mkdir(INSTALL_DIR_KMS)
    download(KMS_URL_PATTERN.replace("<arch>", local_arch), destination)
    # binary must be executable by any user
    chmod_r(destination, 0o777)
def start_kinesis_mock(port=None, asynchronous=False, update_listener=None):
    """Download (if needed) and start the kinesis-mock backend, fronted by a proxy.

    :param port: public Kinesis port (defaults to config.PORT_KINESIS)
    :param asynchronous: passed through to do_run
    :param update_listener: proxy listener for the Kinesis service
    :return: the result of do_run for the kinesis-mock process
    """
    target_dir = os.path.join(INSTALL_DIR_INFRA, 'kinesis-mock')
    machine = platform.machine().lower()
    system = platform.system().lower()
    # pick a native binary on amd64 platforms, fall back to the JAR elsewhere
    if machine == 'x86_64' or machine == 'amd64':
        if system == 'windows':
            target_file_name = 'kinesis-mock-mostly-static.exe'
        elif system == 'linux':
            target_file_name = 'kinesis-mock-linux-amd64-static'
        elif system == 'darwin':
            target_file_name = 'kinesis-mock-macos-amd64-dynamic'
        else:
            target_file_name = 'kinesis-mock.jar'
    else:
        target_file_name = 'kinesis-mock.jar'
    target_file = os.path.join(target_dir, target_file_name)
    if not os.path.exists(target_file):
        # resolve the download URL from the GitHub release assets
        response = requests.get(KINESIS_MOCK_RELEASES)
        content = json.loads(to_str(response.content))
        assets = content.get('assets', [])
        filtered = [x for x in assets if x['name'] == target_file_name]
        archive_url = filtered[0].get('browser_download_url')
        download(archive_url, target_file)
    port = port or config.PORT_KINESIS
    backend_port = get_free_tcp_port()
    kinesis_data_dir_param = ''
    if config.DATA_DIR:
        kinesis_data_dir = '%s/kinesis' % config.DATA_DIR
        mkdir(kinesis_data_dir)
        kinesis_data_dir_param = 'SHOULD_PERSIST_DATA=true PERSIST_PATH=%s' % kinesis_data_dir
    if not config.LS_LOG:
        log_level = 'INFO'
    elif config.LS_LOG == 'warning':
        log_level = 'WARN'
    else:
        # BUG FIX: call .upper() - the original assigned the bound method object
        # itself, yielding a LOG_LEVEL like "<built-in method upper ...>"
        log_level = config.LS_LOG.upper()
    log_level_param = 'LOG_LEVEL=%s' % (log_level)
    latency = config.KINESIS_LATENCY + 'ms'
    latency_param = 'CREATE_STREAM_DURATION=%s DELETE_STREAM_DURATION=%s REGISTER_STREAM_CONSUMER_DURATION=%s ' \
        'START_STREAM_ENCRYPTION_DURATION=%s STOP_STREAM_ENCRYPTION_DURATION=%s ' \
        'DEREGISTER_STREAM_CONSUMER_DURATION=%s MERGE_SHARDS_DURATION=%s SPLIT_SHARD_DURATION=%s ' \
        'UPDATE_SHARD_COUNT_DURATION=%s' \
        % (latency, latency, latency, latency, latency, latency, latency, latency, latency)
    if target_file_name.endswith('.jar'):
        cmd = 'KINESIS_MOCK_HTTP1_PLAIN_PORT=%s SHARD_LIMIT=%s %s %s %s java -XX:+UseG1GC -jar %s' \
            % (backend_port, config.KINESIS_SHARD_LIMIT, latency_param,
               kinesis_data_dir_param, log_level_param, target_file)
    else:
        # native binary - ensure it is executable
        chmod_r(target_file, 0o777)
        cmd = 'KINESIS_MOCK_HTTP1_PLAIN_PORT=%s SHARD_LIMIT=%s %s %s %s %s --gc=G1' \
            % (backend_port, config.KINESIS_SHARD_LIMIT, latency_param,
               kinesis_data_dir_param, log_level_param, target_file)
    start_proxy_for_service('kinesis', port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
def install_local_kms():
    """Install the local-KMS binary for the current OS, if not yet present."""
    local_arch = get_os()
    destination = INSTALL_PATH_KMS_BINARY_PATTERN.replace("<arch>", local_arch)
    if os.path.exists(destination):
        return
    log_install_msg("KMS")
    mkdir(INSTALL_DIR_KMS)
    # TODO ARM download platform specific binary
    download(KMS_URL_PATTERN.replace("<arch>", local_arch), destination)
    chmod_r(destination, 0o777)
def install_elasticmq():
    """Install the ElasticMQ server JAR unless it is already present."""
    # TODO remove this function if we stop using ElasticMQ entirely
    if os.path.exists(INSTALL_PATH_ELASTICMQ_JAR):
        return
    log_install_msg("ElasticMQ")
    mkdir(INSTALL_DIR_ELASTICMQ)
    # download archive (cached under the configured tmp dir across runs)
    cached_jar = os.path.join(config.dirs.tmp, "elasticmq-server.jar")
    if not os.path.exists(cached_jar):
        download(ELASTICMQ_JAR_URL, cached_jar)
    shutil.copy(cached_jar, INSTALL_DIR_ELASTICMQ)
def get_lambda_code(func_name, retries=1, cache_time=None, env=None, region=None):
    """Fetch and unzip the deployed code of a Lambda; return {relative_path: source}
    for all contained .py/.js files.

    :param func_name: name of the Lambda function
    :param retries: number of re-attempts after a download/extraction failure
    :param cache_time: cache timeout (defaulted below for non-local environments)
    :param env: environment, resolved via aws_stack.get_environment
    :param region: AWS region for the Lambda client
    """
    if MOCK_OBJ:
        return ""
    env = aws_stack.get_environment(env)
    if cache_time is None and not aws_stack.is_local_env(env):
        cache_time = AWS_LAMBDA_CODE_CACHE_TIMEOUT
    lambda_client = _connect("lambda", env=env, region=region)
    out = lambda_client.get_function(FunctionName=func_name)
    loc = out["Code"]["Location"]
    # cache folder keyed by the MD5 of the code location URL
    hash = md5(loc)
    folder = TMP_DOWNLOAD_FILE_PATTERN.replace("*", hash)
    filename = "archive.zip"
    archive = "%s/%s" % (folder, filename)
    try:
        mkdir(folder)
        if not os.path.isfile(archive):
            # NOTE(review): the pre-signed code URL is fetched without SSL verification
            download(loc, archive, verify_ssl=False)
        if len(os.listdir(folder)) <= 1:
            # only the archive itself is present - not yet extracted
            zip_path = os.path.join(folder, filename)
            unzip(zip_path, folder)
    except Exception as e:
        print("WARN: %s" % e)
        rm_rf(archive)
        if retries > 0:
            # remove the bad archive and retry with a short cache time
            return get_lambda_code(func_name, retries=retries - 1, cache_time=1, env=env)
        else:
            print("WARNING: Unable to retrieve lambda code: %s" % e)
    # traverse subdirectories and get script sources
    result = {}
    for root, subdirs, files in os.walk(folder):
        for file in files:
            prefix = root.split(folder)[-1]
            key = "%s/%s" % (prefix, file)
            if re.match(r".+\.py$", key) or re.match(r".+\.js$", key):
                codefile = "%s/%s" % (root, file)
                result[key] = load_file(codefile)
    # cleanup cache
    clean_cache(
        file_pattern=TMP_DOWNLOAD_FILE_PATTERN,
        last_clean_time=last_cache_cleanup_time,
        max_age=TMP_DOWNLOAD_CACHE_MAX_AGE,
    )
    # TODO: delete only if cache_time is over
    rm_rf(folder)
    return result
def install_elasticsearch():
    """Download and unpack a local Elasticsearch server; prepare writable runtime dirs."""
    if not os.path.exists(INSTALL_DIR_ES):
        LOGGER.info('Downloading and installing local Elasticsearch server. This may take some time.')
        run('mkdir -p %s' % INSTALL_DIR_INFRA)
        if not os.path.exists(TMP_ARCHIVE_ES):
            download(ELASTICSEARCH_JAR_URL, TMP_ARCHIVE_ES)
        # unpack under the infra dir, then normalize the versioned folder name
        cmd = 'cd %s && cp %s es.zip && unzip -q es.zip && mv elasticsearch* elasticsearch && rm es.zip'
        run(cmd % (INSTALL_DIR_INFRA, TMP_ARCHIVE_ES))
        # make runtime dirs world-writable (the server may run as a different user)
        for dir_name in ('data', 'logs', 'modules', 'plugins', 'config/scripts'):
            cmd = 'cd %s && mkdir -p %s && chmod -R 777 %s'
            run(cmd % (INSTALL_DIR_ES, dir_name, dir_name))
def install_kinesis_mock():
    """Resolve, download, and return the path of the kinesis-mock binary for this platform."""
    target_dir = INSTALL_PATH_KINESIS_MOCK
    machine = platform.machine().lower()
    system = platform.system().lower()
    version = platform.version().lower()
    # heuristic: presumably Apple Silicon surfaces "arm64"/"arm32" in platform.version() -- TODO confirm
    is_probably_m1 = system == "darwin" and ("arm64" in version or "arm32" in version)
    LOG.debug("getting kinesis-mock for %s %s", system, machine)
    if is_env_true("KINESIS_MOCK_FORCE_JAVA"):
        # sometimes the static binaries may have problems, and we want to fall back to Java
        bin_file = "kinesis-mock.jar"
    elif (machine == "x86_64" or machine == "amd64") and not is_probably_m1:
        if system == "windows":
            bin_file = "kinesis-mock-mostly-static.exe"
        elif system == "linux":
            bin_file = "kinesis-mock-linux-amd64-static"
        elif system == "darwin":
            bin_file = "kinesis-mock-macos-amd64-dynamic"
        else:
            bin_file = "kinesis-mock.jar"
    else:
        bin_file = "kinesis-mock.jar"
    bin_file_path = os.path.join(target_dir, bin_file)
    if os.path.exists(bin_file_path):
        LOG.debug("kinesis-mock found at %s", bin_file_path)
        return bin_file_path
    # look up the matching asset on the GitHub release
    response = requests.get(KINESIS_MOCK_RELEASE_URL)
    if not response.ok:
        raise ValueError("Could not get list of releases from %s: %s" % (KINESIS_MOCK_RELEASE_URL, response.text))
    github_release = response.json()
    download_url = None
    for asset in github_release.get("assets", []):
        # find the correct binary in the release
        if asset["name"] == bin_file:
            download_url = asset["browser_download_url"]
            break
    if download_url is None:
        raise ValueError("could not find required binary %s in release %s" % (bin_file, KINESIS_MOCK_RELEASE_URL))
    mkdir(target_dir)
    LOG.info("downloading kinesis-mock binary from %s", download_url)
    download(download_url, bin_file_path)
    # binary must be executable by any user
    chmod_r(bin_file_path, 0o777)
    return bin_file_path
def download_and_extract():
    """Download the archive (once) and extract it based on its file extension."""
    # NOTE(review): archive_url, tmp_archive and target_dir appear to be bound in an
    # enclosing scope not visible here -- confirm before refactoring
    if not os.path.exists(tmp_archive):
        download(archive_url, tmp_archive)
    # dispatch on the archive extension
    _, ext = os.path.splitext(tmp_archive)
    if ext == '.zip':
        unzip(tmp_archive, target_dir)
    elif ext == '.gz' or ext == '.bz2':
        untar(tmp_archive, target_dir)
    else:
        raise Exception('Unsupported archive format: %s' % ext)
def install_elasticmq():
    """Install the ElasticMQ JAR when SQS is backed by ElasticMQ."""
    if SQS_BACKEND_IMPL != 'elasticmq':
        return
    # TODO remove this function if we stop using ElasticMQ entirely
    if os.path.exists(INSTALL_PATH_ELASTICMQ_JAR):
        return
    log_install_msg('ElasticMQ')
    mkdir(INSTALL_DIR_ELASTICMQ)
    # download archive (cached in the system temp dir)
    cached_jar = os.path.join(tempfile.gettempdir(), 'elasticmq-server.jar')
    if not os.path.exists(cached_jar):
        download(ELASTICMQ_JAR_URL, cached_jar)
    shutil.copy(cached_jar, INSTALL_DIR_ELASTICMQ)
def download_and_extract():
    """Download the archive (once, with a placeholder guard) and extract it by extension."""
    # NOTE(review): archive_url, tmp_archive and target_dir appear to be bound in an
    # enclosing scope not visible here -- confirm before refactoring
    if not os.path.exists(tmp_archive):
        # create temporary placeholder file, to avoid duplicate parallel downloads
        save_file(tmp_archive, '')
        download(archive_url, tmp_archive)
    # dispatch on the archive extension
    _, ext = os.path.splitext(tmp_archive)
    if ext == '.zip':
        unzip(tmp_archive, target_dir)
    elif ext == '.gz' or ext == '.bz2':
        untar(tmp_archive, target_dir)
    else:
        raise Exception('Unsupported archive format: %s' % ext)
def install_amazon_kinesis_client_libs():
    """Install the KCL/STS JARs and compile the bundled Java helper classes."""
    # install KCL/STS JAR files
    if not os.path.exists(INSTALL_DIR_KCL):
        mkdir(INSTALL_DIR_KCL)
        if not os.path.exists(TMP_ARCHIVE_STS):
            download(STS_JAR_URL, TMP_ARCHIVE_STS)
        shutil.copy(TMP_ARCHIVE_STS, INSTALL_DIR_KCL)
    # Compile Java files
    from localstack.utils.kinesis import kclipy_helper
    classpath = kclipy_helper.get_kcl_classpath()
    java_sources = '%s/utils/kinesis/java/com/atlassian/*.java' % ROOT_PATH
    compiled_classes = '%s/utils/kinesis/java/com/atlassian/*.class' % ROOT_PATH
    # compile only when no .class files exist yet
    if not glob.glob(compiled_classes):
        run('javac -cp "%s" %s' % (classpath, java_sources))
def install_amazon_kinesis_client_libs():
    """Install the STS JAR for the KCL and prepare paths for compiling the Java helpers."""
    # install KCL/STS JAR files
    if not os.path.exists(INSTALL_DIR_KCL):
        mkdir(INSTALL_DIR_KCL)
        tmp_archive = os.path.join(tempfile.gettempdir(), 'aws-java-sdk-sts.jar')
        if not os.path.exists(tmp_archive):
            download(STS_JAR_URL, tmp_archive)
        shutil.copy(tmp_archive, INSTALL_DIR_KCL)
    # Compile Java files
    from localstack.utils.kinesis import kclipy_helper
    classpath = kclipy_helper.get_kcl_classpath()
    java_files = '%s/utils/kinesis/java/cloud/localstack/*.java' % ROOT_PATH
    # NOTE(review): class_files (and classpath/java_files) are unused in this view --
    # the compile step appears to be missing/truncated here; confirm against the full file
    class_files = '%s/utils/kinesis/java/cloud/localstack/*.class' % ROOT_PATH
def get_lambda_code(func_name, retries=1, cache_time=None, env=None):
    """Fetch and unzip a Lambda's deployed code; return {relative_path: source} for .py/.js files."""
    if MOCK_OBJ:
        return ''
    env = aws_stack.get_environment(env)
    if cache_time is None and not aws_stack.is_local_env(env):
        cache_time = AWS_LAMBDA_CODE_CACHE_TIMEOUT
    out = cmd_lambda('get-function --function-name %s' % func_name, env, cache_time)
    out = json.loads(out)
    loc = out['Code']['Location']
    # cache folder keyed by the MD5 of the code location URL
    hash = md5(loc)
    folder = TMP_DOWNLOAD_FILE_PATTERN.replace('*', hash)
    filename = 'archive.zip'
    archive = '%s/%s' % (folder, filename)
    try:
        mkdir(folder)
        if not os.path.isfile(archive):
            # NOTE(review): pre-signed URL fetched without SSL verification
            download(loc, archive, verify_ssl=False)
        if len(os.listdir(folder)) <= 1:
            # only the archive itself present - not yet extracted
            zip_path = os.path.join(folder, filename)
            unzip(zip_path, folder)
    except Exception as e:
        print('WARN: %s' % e)
        rm_rf(archive)
        if retries > 0:
            # remove the bad archive and retry with a short cache time
            return get_lambda_code(func_name, retries=retries - 1, cache_time=1, env=env)
        else:
            print('WARNING: Unable to retrieve lambda code: %s' % e)
    # traverse subdirectories and get script sources
    result = {}
    for root, subdirs, files in os.walk(folder):
        for file in files:
            prefix = root.split(folder)[-1]
            key = '%s/%s' % (prefix, file)
            if re.match(r'.+\.py$', key) or re.match(r'.+\.js$', key):
                codefile = '%s/%s' % (root, file)
                result[key] = load_file(codefile)
    # cleanup cache
    clean_cache(file_pattern=TMP_DOWNLOAD_FILE_PATTERN,
        last_clean_time=last_cache_cleanup_time,
        max_age=TMP_DOWNLOAD_CACHE_MAX_AGE)
    # TODO: delete only if cache_time is over
    rm_rf(folder)
    return result
def install_kinesis_mock():
    """Resolve, download, and return the path of the kinesis-mock binary for this platform."""
    target_dir = INSTALL_PATH_KINESIS_MOCK
    machine = platform.machine().lower()
    system = platform.system().lower()
    version = platform.version().lower()
    # heuristic: presumably Apple Silicon surfaces "arm64"/"arm32" in platform.version() -- TODO confirm
    is_probably_m1 = system == 'darwin' and ('arm64' in version or 'arm32' in version)
    LOG.debug('getting kinesis-mock for %s %s', system, machine)
    # native binaries exist only for amd64; everything else falls back to the JAR
    if ((machine == 'x86_64' or machine == 'amd64') and not is_probably_m1):
        if system == 'windows':
            bin_file = 'kinesis-mock-mostly-static.exe'
        elif system == 'linux':
            bin_file = 'kinesis-mock-linux-amd64-static'
        elif system == 'darwin':
            bin_file = 'kinesis-mock-macos-amd64-dynamic'
        else:
            bin_file = 'kinesis-mock.jar'
    else:
        bin_file = 'kinesis-mock.jar'
    bin_file_path = os.path.join(target_dir, bin_file)
    if os.path.exists(bin_file_path):
        LOG.debug('kinesis-mock found at %s', bin_file_path)
        return bin_file_path
    # look up the matching asset on the GitHub release
    response = requests.get(KINESIS_MOCK_RELEASE_URL)
    if not response.ok:
        raise ValueError('Could not get list of releases from %s: %s' % (KINESIS_MOCK_RELEASE_URL, response.text))
    github_release = response.json()
    download_url = None
    for asset in github_release.get('assets', []):
        # find the correct binary in the release
        if asset['name'] == bin_file:
            download_url = asset['browser_download_url']
            break
    if download_url is None:
        raise ValueError('could not find required binary %s in release %s' % (bin_file, KINESIS_MOCK_RELEASE_URL))
    mkdir(target_dir)
    LOG.info('downloading kinesis-mock binary from %s', download_url)
    download(download_url, bin_file_path)
    # binary must be executable by any user
    chmod_r(bin_file_path, 0o777)
    return bin_file_path
def install_amazon_kinesis_client_libs():
    """Install the STS JAR for the KCL and compile the bundled Java helper classes."""
    # install KCL/STS JAR files
    if not os.path.exists(INSTALL_PATH_KCL_JAR):
        mkdir(INSTALL_DIR_KCL)
        cached_sts_jar = os.path.join(tempfile.gettempdir(), 'aws-java-sdk-sts.jar')
        if not os.path.exists(cached_sts_jar):
            download(STS_JAR_URL, cached_sts_jar)
        shutil.copy(cached_sts_jar, INSTALL_DIR_KCL)
    # Compile Java files
    from localstack.utils.kinesis import kclipy_helper
    classpath = kclipy_helper.get_kcl_classpath()
    java_files = '%s/utils/kinesis/java/cloud/localstack/*.java' % ROOT_PATH
    class_files = '%s/utils/kinesis/java/cloud/localstack/*.class' % ROOT_PATH
    if not glob.glob(class_files):
        # pin -source/-target so the classes run on the configured JRE version
        run('javac -source %s -target %s -cp "%s" %s' % (
            JAVAC_TARGET_VERSION, JAVAC_TARGET_VERSION, classpath, java_files))
def get_lambda_code(func_name, retries=1, cache_time=None, env=None):
    """Fetch and unzip a Lambda's deployed code; return {relative_path: source} for .py/.js files."""
    if MOCK_OBJ:
        return ''
    env = aws_stack.get_environment(env)
    if cache_time is None and env.region != REGION_LOCAL:
        cache_time = AWS_LAMBDA_CODE_CACHE_TIMEOUT
    out = cmd_lambda('get-function --function-name %s' % func_name, env, cache_time)
    out = json.loads(out)
    loc = out['Code']['Location']
    # cache folder keyed by the MD5 of the code location URL
    hash = md5(loc)
    folder = TMP_DOWNLOAD_FILE_PATTERN.replace('*', hash)
    filename = 'archive.zip'
    archive = '%s/%s' % (folder, filename)
    try:
        mkdir(folder)
        if not os.path.isfile(archive):
            # NOTE(review): pre-signed URL fetched without SSL verification
            download(loc, archive, verify_ssl=False)
        if len(os.listdir(folder)) <= 1:
            # only the archive itself present - not yet extracted
            zip_path = os.path.join(folder, filename)
            unzip(zip_path, folder)
    except Exception as e:
        print('WARN: %s' % e)
        rm_rf(archive)
        if retries > 0:
            # remove the bad archive and retry with a short cache time
            return get_lambda_code(func_name, retries=retries - 1, cache_time=1, env=env)
        else:
            print('WARNING: Unable to retrieve lambda code: %s' % e)
    # traverse subdirectories and get script sources
    result = {}
    for root, subdirs, files in os.walk(folder):
        for file in files:
            prefix = root.split(folder)[-1]
            key = '%s/%s' % (prefix, file)
            if re.match(r'.+\.py$', key) or re.match(r'.+\.js$', key):
                codefile = '%s/%s' % (root, file)
                result[key] = load_file(codefile)
    # cleanup cache
    clean_cache(file_pattern=TMP_DOWNLOAD_FILE_PATTERN,
        last_clean_time=last_cache_cleanup_time,
        max_age=TMP_DOWNLOAD_CACHE_MAX_AGE)
    # TODO: delete only if cache_time is over
    rm_rf(folder)
    return result
def test_lambda_runtimes():
    """Deploy and invoke Lambdas across runtimes (Python 2.7/3.6, Java 8, Node.js)."""
    lambda_client = aws_stack.connect_to_service('lambda')
    # deploy and invoke lambda - Python 2.7
    zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON), get_content=True,
        libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_PY,
        zip_file=zip_file, runtime=LAMBDA_RUNTIME_PYTHON27)
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_PY, Payload=b'{}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert to_str(result_data).strip() == '{}'
    if use_docker():
        # deploy and invoke lambda - Python 3.6 (docker executor only)
        zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON3), get_content=True,
            libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON36)
        testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_PY3,
            zip_file=zip_file, runtime=LAMBDA_RUNTIME_PYTHON36)
        result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_PY3, Payload=b'{}')
        assert result['StatusCode'] == 200
        result_data = result['Payload'].read()
        assert to_str(result_data).strip() == '{}'
    # deploy and invoke lambda - Java (download the test JAR once)
    if not os.path.exists(TEST_LAMBDA_JAVA):
        mkdir(os.path.dirname(TEST_LAMBDA_JAVA))
        download(TEST_LAMBDA_JAR_URL, TEST_LAMBDA_JAVA)
    zip_file = testutil.create_zip_file(TEST_LAMBDA_JAVA, get_content=True)
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_JAVA, zip_file=zip_file,
        runtime=LAMBDA_RUNTIME_JAVA8, handler='cloud.localstack.sample.LambdaHandler')
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JAVA, Payload=b'{}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert to_str(result_data).strip() == '{}'
    if use_docker():
        # deploy and invoke lambda - Node.js (docker executor only)
        zip_file = testutil.create_zip_file(TEST_LAMBDA_NODEJS, get_content=True)
        testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_JS, zip_file=zip_file,
            handler='lambda_integration.handler', runtime=LAMBDA_RUNTIME_NODEJS)
        result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JS, Payload=b'{}')
        assert result['StatusCode'] == 200
        result_data = result['Payload'].read()
        assert to_str(result_data).strip() == '{}'
def install_go_lambda_runtime():
    """Install the golang Lambda runtime and mockserver binaries (first run only)."""
    install_glibc_for_alpine()
    if os.path.isfile(GO_LAMBDA_RUNTIME):
        return
    log_install_msg("Installing golang runtime")
    archive_path = os.path.join(config.TMP_FOLDER, GO_ZIP_NAME)
    download(GO_RUNTIME_DOWNLOAD_URL, archive_path)
    if not zipfile.is_zipfile(archive_path):
        raise ValueError("Downloaded file is not zip ")
    zipfile.ZipFile(archive_path).extractall(config.TMP_FOLDER)
    # mark both extracted binaries as executable for user/group/other
    exec_bits = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
    for binary in (GO_LAMBDA_RUNTIME, GO_LAMBDA_MOCKSERVER):
        os.chmod(binary, os.stat(binary).st_mode | exec_bits)
def test_download_with_timeout():
    """Verify download() succeeds against a fast endpoint and raises TimeoutError on a slow one."""
    class DownloadListener(ProxyListener):
        def forward_request(self, method, path, data, headers):
            # emulate a slow endpoint to trigger the client-side timeout
            if path == "/sleep":
                time.sleep(2)
            return {}

    port = get_free_tcp_port()
    proxy = start_proxy_server(port, update_listener=DownloadListener())
    tmp_file = new_tmp_file()
    # fast endpoint: download completes and writes the listener's '{}' response
    download(f"http://localhost:{port}/", tmp_file)
    assert load_file(tmp_file) == "{}"
    # slow endpoint: 2s server delay exceeds the 1s client timeout
    with pytest.raises(TimeoutError):
        download(f"http://localhost:{port}/sleep", tmp_file, timeout=1)
    # clean up
    proxy.stop()
    rm_rf(tmp_file)
def install_lambda_java_libs():
    """Download the LocalStack "fat" JAR file (contains all dependencies) if absent."""
    if os.path.exists(INSTALL_PATH_LOCALSTACK_FAT_JAR):
        return
    LOGGER.info('Downloading and installing LocalStack Java libraries. This may take some time.')
    download(URL_LOCALSTACK_FAT_JAR, INSTALL_PATH_LOCALSTACK_FAT_JAR)
def download_and_extract():
    """Download the archive (once) and unzip it into the target directory."""
    # NOTE(review): archive_url, tmp_archive and target_dir appear to be bound in an
    # enclosing scope not visible here -- confirm before refactoring
    if not os.path.exists(tmp_archive):
        download(archive_url, tmp_archive)
    unzip(tmp_archive, target_dir)
def test_lambda_runtimes():
    """End-to-end Lambda invocation checks across runtimes (Python, Java 8 + event types,
    Node.js, .NET Core 2.0)."""
    lambda_client = aws_stack.connect_to_service('lambda')
    # deploy and invoke lambda - Python 2.7
    zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON), get_content=True,
        libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_PY,
        zip_file=zip_file, runtime=LAMBDA_RUNTIME_PYTHON27)
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_PY, Payload=b'{}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert to_str(result_data).strip() == '{}'
    if use_docker():
        # deploy and invoke lambda - Python 3.6 (docker executor only)
        zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON3), get_content=True,
            libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON36)
        testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_PY3,
            zip_file=zip_file, runtime=LAMBDA_RUNTIME_PYTHON36)
        result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_PY3, Payload=b'{}')
        assert result['StatusCode'] == 200
        result_data = result['Payload'].read()
        assert to_str(result_data).strip() == '{}'
    # deploy and invoke lambda - Java (download the test JAR once)
    if not os.path.exists(TEST_LAMBDA_JAVA):
        mkdir(os.path.dirname(TEST_LAMBDA_JAVA))
        download(TEST_LAMBDA_JAR_URL, TEST_LAMBDA_JAVA)
    zip_file = testutil.create_zip_file(TEST_LAMBDA_JAVA, get_content=True)
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_JAVA, zip_file=zip_file,
        runtime=LAMBDA_RUNTIME_JAVA8, handler='cloud.localstack.sample.LambdaHandler')
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JAVA, Payload=b'{}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert 'LinkedHashMap' in to_str(result_data)
    # test SNSEvent
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JAVA, InvocationType='Event',
        Payload=b'{"Records": [{"Sns": {"Message": "{}"}}]}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert json.loads(to_str(result_data)) == {'async': 'True'}
    # test DDBEvent
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JAVA, InvocationType='Event',
        Payload=b'{"Records": [{"dynamodb": {"Message": "{}"}}]}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert json.loads(to_str(result_data)) == {'async': 'True'}
    # test KinesisEvent
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JAVA,
        Payload=b'{"Records": [{"Kinesis": {"Data": "data", "PartitionKey": "partition"}}]}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert 'KinesisEvent' in to_str(result_data)
    # deploy and invoke lambda - Java with stream handler
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_JAVA_STREAM, zip_file=zip_file,
        runtime=LAMBDA_RUNTIME_JAVA8, handler='cloud.localstack.sample.LambdaStreamHandler')
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JAVA_STREAM, Payload=b'{}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert to_str(result_data).strip() == '{}'
    # deploy and invoke lambda - Java with serializable input object
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_JAVA_SERIALIZABLE, zip_file=zip_file,
        runtime=LAMBDA_RUNTIME_JAVA8, handler='cloud.localstack.sample.SerializedInputLambdaHandler')
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JAVA_SERIALIZABLE,
        Payload=b'{"bucket": "test_bucket", "key": "test_key"}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert json.loads(to_str(result_data)) == {'validated': True, 'bucket': 'test_bucket', 'key': 'test_key'}
    if use_docker():
        # deploy and invoke lambda - Node.js (docker executor only)
        zip_file = testutil.create_zip_file(TEST_LAMBDA_NODEJS, get_content=True)
        testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_JS, zip_file=zip_file,
            handler='lambda_integration.handler', runtime=LAMBDA_RUNTIME_NODEJS)
        result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JS, Payload=b'{}')
        assert result['StatusCode'] == 200
        result_data = result['Payload'].read()
        assert to_str(result_data).strip() == '{}'
        # deploy and invoke - .NET Core 2.0. Its already a zip
        # NOTE(review): placement inside the use_docker() branch is inferred from the
        # collapsed source formatting -- confirm against the original file
        zip_file = TEST_LAMBDA_DOTNETCORE2
        zip_file_content = None
        with open(zip_file, 'rb') as file_obj:
            zip_file_content = file_obj.read()
        testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_DOTNETCORE2, zip_file=zip_file_content,
            handler='DotNetCore2::DotNetCore2.Lambda.Function::SimpleFunctionHandler',
            runtime=LAMBDA_RUNTIME_DOTNETCORE2)
        result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_DOTNETCORE2, Payload=b'{}')
        assert result['StatusCode'] == 200
        result_data = result['Payload'].read()
        assert to_str(result_data).strip() == '{}'