def start_sqs_elasticmq(port=None, asynchronous=False, update_listener=None):
    """Start the ElasticMQ-based SQS mock behind a local proxy.

    :param port: public (deprecated) service port; defaults to config.PORT_SQS
    :param asynchronous: passed through to do_run; when True the call returns immediately
    :param update_listener: proxy listener that intercepts/rewrites SQS requests
    :return: handle returned by do_run for the spawned ElasticMQ process
    """
    global PORT_SQS_BACKEND
    port = port or config.PORT_SQS
    install_elasticmq()
    # the actual ElasticMQ server binds a random free port; the proxy forwards to it
    PORT_SQS_BACKEND = get_free_tcp_port()
    # create config file (HOCON format consumed by elasticmq-server.jar)
    config_params = """
    include classpath("application.conf")
    node-address {
        protocol = http
        host = "%s"
        port = %s
        context-path = ""
    }
    rest-sqs {
        enabled = true
        bind-port = %s
        bind-hostname = "0.0.0.0"
        sqs-limits = strict
    }
    """ % (LOCALSTACK_HOSTNAME, port, PORT_SQS_BACKEND)
    config_file = os.path.join(TMP_FOLDER, 'sqs.%s.conf' % short_uid())
    # register the temp file so it gets cleaned up on shutdown
    TMP_FILES.append(config_file)
    save_file(config_file, config_params)
    # start process (heap capped via -Xmx to keep the JVM footprint bounded)
    cmd = ('java -Dconfig.file=%s -Xmx%s -jar %s/elasticmq-server.jar' % (
        config_file, MAX_HEAP_SIZE, INSTALL_DIR_ELASTICMQ))
    print('Starting mock SQS service in %s ports %s (recommended) and %s (deprecated)...' % (
        get_service_protocol(), config.EDGE_PORT, port))
    start_proxy_for_service('sqs', port, PORT_SQS_BACKEND, update_listener)
    return do_run(cmd, asynchronous)
def start_elasticsearch(port=None, delete_data=True, asynchronous=False, update_listener=None): port = port or config.PORT_ELASTICSEARCH # delete Elasticsearch data that may be cached locally from a previous test run delete_all_elasticsearch_data() install.install_elasticsearch() backend_port = DEFAULT_PORT_ELASTICSEARCH_BACKEND es_data_dir = '%s/infra/elasticsearch/data' % (ROOT_PATH) es_tmp_dir = '%s/infra/elasticsearch/tmp' % (ROOT_PATH) if config.DATA_DIR: es_data_dir = '%s/elasticsearch' % config.DATA_DIR # Elasticsearch 5.x cannot be bound to 0.0.0.0 in some Docker environments, # hence we use the default bind address 127.0.0.0 and put a proxy in front of it cmd = (('%s/infra/elasticsearch/bin/elasticsearch ' + '-E http.port=%s -E http.publish_port=%s -E http.compression=false -E path.data=%s') % (ROOT_PATH, backend_port, backend_port, es_data_dir)) env_vars = { 'ES_JAVA_OPTS': os.environ.get('ES_JAVA_OPTS', '-Xms200m -Xmx600m'), 'ES_TMPDIR': es_tmp_dir } print('Starting local Elasticsearch (%s port %s)...' % (get_service_protocol(), port)) if delete_data: run('rm -rf %s' % es_data_dir) # fix permissions chmod_r('%s/infra/elasticsearch' % ROOT_PATH, 0o777) mkdir(es_data_dir) chmod_r(es_data_dir, 0o777) # start proxy and ES process start_proxy_for_service('elasticsearch', port, backend_port, update_listener, quiet=True, params={'protocol_version': 'HTTP/1.0'}) if is_root(): cmd = "su -c '%s' localstack" % cmd thread = do_run(cmd, asynchronous, env_vars=env_vars) return thread
def start_sqs(port=PORT_SQS, asynchronous=False, update_listener=None):
    """Launch the ElasticMQ SQS mock and front it with a local proxy.

    :param port: public service port
    :param asynchronous: forwarded to do_run; True returns without blocking
    :param update_listener: proxy listener for SQS request interception
    :return: handle returned by do_run for the ElasticMQ JVM process
    """
    install_elasticmq()
    backend_port = DEFAULT_PORT_SQS_BACKEND
    # render the ElasticMQ HOCON configuration; named to avoid shadowing
    # the common `config` module name
    elasticmq_config = """
    include classpath("application.conf")
    node-address {
        protocol = http
        host = "%s"
        port = %s
        context-path = ""
    }
    rest-sqs {
        enabled = true
        bind-port = %s
        bind-hostname = "0.0.0.0"
        sqs-limits = strict
    }
    %s
    """ % (LOCALSTACK_HOSTNAME, port, backend_port, SQS_QUEUES)
    conf_path = os.path.join(TMP_FOLDER, 'sqs.%s.conf' % short_uid())
    TMP_FILES.append(conf_path)
    save_file(conf_path, elasticmq_config)
    # spawn the Java server and register the fronting proxy
    cmd = 'java -Dconfig.file=%s -jar %s/elasticmq-server.jar' % (conf_path, INSTALL_DIR_ELASTICMQ)
    print('Starting mock SQS (%s port %s)...' % (get_service_protocol(), port))
    start_proxy_for_service('sqs', port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
def start_cloudformation(port=PORT_CLOUDFORMATION, asynchronous=False, update_listener=None):
    """Start the mock CloudFormation service behind a local proxy.

    :param port: public service port
    :param asynchronous: forwarded to do_run; True returns without blocking
    :param update_listener: proxy listener for request interception
    :return: handle returned by do_run for the backend process
    """
    backend_port = DEFAULT_PORT_CLOUDFORMATION_BACKEND
    cmd = 'python "%s" cloudformation -p %s -H 0.0.0.0' % (__file__, backend_port)
    print('Starting mock CloudFormation (%s port %s)...' % (get_service_protocol(), port))
    # bug fix: the proxy was registered under 'dynamodb' (copy/paste error);
    # it must be registered for 'cloudformation'
    start_proxy_for_service('cloudformation', port, backend_port, update_listener)
    # propagate sys.path so the child process can import this module's packages
    env_vars = {'PYTHONPATH': ':'.join(sys.path)}
    return do_run(cmd, asynchronous, env_vars=env_vars)
def start_s3(port=None, backend_port=None, asynchronous=None, update_listener=None):
    """Start the mock S3 service behind a local proxy.

    :param port: public service port; defaults to config.PORT_S3
    :param backend_port: backend port; defaults to DEFAULT_PORT_S3_BACKEND
    :param asynchronous: forwarded to do_run; True returns without blocking
    :param update_listener: proxy listener for S3 request interception
    :return: handle returned by do_run for the backend process
    """
    port = port or config.PORT_S3
    # bug fix: honor an explicitly supplied backend_port — it was previously
    # overwritten unconditionally, silently ignoring the caller's argument
    backend_port = backend_port or DEFAULT_PORT_S3_BACKEND
    cmd = '%s "%s" s3 -p %s -H 0.0.0.0' % (sys.executable, __file__, backend_port)
    print('Starting mock S3 (%s port %s)...' % (get_service_protocol(), port))
    start_proxy_for_service('s3', port, backend_port, update_listener)
    # propagate sys.path so the child process can import this module's packages
    env_vars = {'PYTHONPATH': ':'.join(sys.path)}
    return do_run(cmd, asynchronous, env_vars=env_vars)
def start_elasticsearch(port=None, version=None, delete_data=True, asynchronous=False, update_listener=None):
    """Start (or return the already-running) local Elasticsearch instance.

    :param port: public port the proxy listens on; defaults to config.PORT_ELASTICSEARCH
    :param version: Elasticsearch version to install/run; None selects the default
    :param delete_data: wipe the data dir before starting (forced False when DATA_DIR is set)
    :param asynchronous: forwarded to do_run; True returns without blocking
    :param update_listener: proxy listener for request interception
    :return: thread handle returned by do_run (cached in STATE['_thread_'])
    """
    # idempotency guard: reuse the running instance if already started
    if STATE.get('_thread_'):
        return STATE['_thread_']
    port = port or config.PORT_ELASTICSEARCH
    # delete Elasticsearch data that may be cached locally from a previous test run
    delete_all_elasticsearch_data(version)
    install.install_elasticsearch(version)
    backend_port = get_free_tcp_port()
    base_dir = install.get_elasticsearch_install_dir(version)
    es_data_dir = os.path.join(base_dir, 'data')
    es_tmp_dir = os.path.join(base_dir, 'tmp')
    es_mods_dir = os.path.join(base_dir, 'modules')
    if config.DATA_DIR:
        # persistence enabled: keep data across restarts and never delete it
        delete_data = False
        es_data_dir = '%s/elasticsearch' % config.DATA_DIR
    # Elasticsearch 5.x cannot be bound to 0.0.0.0 in some Docker environments,
    # hence we use the default bind address 127.0.0.1 and put a proxy in front of it
    backup_dir = os.path.join(config.TMP_FOLDER, 'es_backup')
    cmd = (('%s/bin/elasticsearch ' +
        '-E http.port=%s -E http.publish_port=%s -E http.compression=false ' +
        '-E path.data=%s -E path.repo=%s') % (
        base_dir, backend_port, backend_port, es_data_dir, backup_dir))
    if os.path.exists(os.path.join(es_mods_dir, 'x-pack-ml')):
        # disable the ML plugin if present — avoids extra startup overhead/requirements
        cmd += ' -E xpack.ml.enabled=false'
    env_vars = {
        'ES_JAVA_OPTS': os.environ.get('ES_JAVA_OPTS', '-Xms200m -Xmx600m'),
        'ES_TMPDIR': es_tmp_dir
    }
    LOG.debug('Starting local Elasticsearch (%s port %s)' % (get_service_protocol(), port))
    if delete_data:
        rm_rf(es_data_dir)
    # fix permissions so the (possibly non-root) ES process can write everywhere it needs
    chmod_r(base_dir, 0o777)
    mkdir(es_data_dir)
    chmod_r(es_data_dir, 0o777)
    mkdir(es_tmp_dir)
    chmod_r(es_tmp_dir, 0o777)
    # start proxy and ES process
    proxy = start_proxy_for_service('elasticsearch', port, backend_port, update_listener,
        quiet=True, params={'protocol_version': 'HTTP/1.0'})
    STATE['_proxy_'] = proxy
    if is_root():
        # drop privileges to the 'localstack' user — presumably because
        # Elasticsearch refuses to start as root (TODO confirm)
        cmd = "su localstack -c '%s'" % cmd
    thread = do_run(cmd, asynchronous, env_vars=env_vars)
    # cache the handle so subsequent calls return the same instance
    STATE['_thread_'] = thread
    return thread
def start_stepfunctions(port=None, asynchronous=False, update_listener=None):
    """Start the Step Functions Local mock behind the edge proxy.

    :param port: public service port; defaults to config.PORT_STEPFUNCTIONS
    :param asynchronous: forwarded to do_run; True returns without blocking
    :param update_listener: proxy listener for request interception
    :return: handle returned by do_run for the backend process
    """
    if not port:
        port = config.PORT_STEPFUNCTIONS
    install.install_stepfunctions_local()
    # TODO: local port is currently hard coded in Step Functions Local :/
    backend_port = 8083
    startup_cmd = get_command()
    print('Starting mock StepFunctions service on %s port %s...' % (
        get_service_protocol(), config.EDGE_PORT))
    start_proxy_for_service('stepfunctions', port, backend_port, update_listener)
    return do_run(startup_cmd, asynchronous)
def start_dynamodb(port=PORT_DYNAMODB, asynchronous=False, update_listener=None):
    """Start DynamoDB Local and proxy the public port to it.

    :param port: public service port
    :param asynchronous: forwarded to do_run; True returns without blocking
    :param update_listener: proxy listener for request interception
    :return: handle returned by do_run for the JVM process
    """
    install.install_dynamodb_local()
    backend_port = DEFAULT_PORT_DYNAMODB_BACKEND
    # in-memory storage by default; persist under DATA_DIR when configured
    if DATA_DIR:
        ddb_data_dir = '%s/dynamodb' % DATA_DIR
        mkdir(ddb_data_dir)
        storage_arg = '-dbPath %s' % ddb_data_dir
    else:
        storage_arg = '-inMemory'
    cmd = ('cd %s/infra/dynamodb/; java -Djava.library.path=./DynamoDBLocal_lib '
           '-jar DynamoDBLocal.jar -sharedDb -port %s %s') % (ROOT_PATH, backend_port, storage_arg)
    print('Starting mock DynamoDB (%s port %s)...' % (get_service_protocol(), port))
    start_proxy_for_service('dynamodb', port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
def start_kinesis(port=PORT_KINESIS, asynchronous=False, shard_limit=100, update_listener=None):
    """Start the kinesalite Kinesis mock behind a local proxy.

    :param port: public service port
    :param asynchronous: forwarded to do_run; True returns without blocking
    :param shard_limit: maximum number of shards allowed by kinesalite
    :param update_listener: proxy listener for request interception
    :return: handle returned by do_run for the kinesalite process
    """
    install.install_kinesalite()
    backend_port = DEFAULT_PORT_KINESIS_BACKEND
    kinesis_data_dir_param = ''
    if DATA_DIR:
        # persist stream data under DATA_DIR when configured
        kinesis_data_dir = '%s/kinesis' % DATA_DIR
        mkdir(kinesis_data_dir)
        kinesis_data_dir_param = '--path %s' % kinesis_data_dir
    cmd = ('%s/node_modules/kinesalite/cli.js --shardLimit %s --port %s %s' % (
        ROOT_PATH, shard_limit, backend_port, kinesis_data_dir_param))
    print('Starting mock Kinesis (%s port %s)...' % (get_service_protocol(), port))
    start_proxy_for_service('kinesis', port, backend_port, update_listener)
    # bug fix: return the process handle like every other start_* helper —
    # previously do_run's result was discarded and the function returned None
    return do_run(cmd, asynchronous)
def start_kms(port=None, backend_port=None, asynchronous=None, update_listener=None):
    """Start the local KMS mock binary behind the edge proxy.

    :param port: public (deprecated) service port; defaults to config.PORT_KMS
    :param backend_port: backend port; defaults to a random free TCP port
    :param asynchronous: forwarded to do_run; True returns without blocking
    :param update_listener: proxy listener for request interception
    :return: handle returned by do_run for the KMS binary
    """
    port = port or config.PORT_KMS
    # bug fix: honor an explicitly supplied backend_port — it was previously
    # overwritten unconditionally, silently ignoring the caller's argument
    backend_port = backend_port or get_free_tcp_port()
    # pick the binary matching the current CPU architecture
    kms_binary = INSTALL_PATH_KMS_BINARY_PATTERN.replace('<arch>', get_arch())
    print('Starting mock KMS service in %s ports %s (recommended) and %s (deprecated)...' % (
        get_service_protocol(), config.EDGE_PORT, port))
    start_proxy_for_service('kms', port, backend_port, update_listener)
    # the KMS binary reads its configuration from environment variables
    env_vars = {
        'PORT': str(backend_port),
        'KMS_REGION': config.DEFAULT_REGION,
        'KMS_ACCOUNT_ID': TEST_AWS_ACCOUNT_ID
    }
    return do_run(kms_binary, asynchronous, env_vars=env_vars)
def start_cloudformation(port=None, asynchronous=False, update_listener=None):
    """Start the mock CloudFormation service behind a local proxy.

    Depending on RUN_SERVER_IN_PROCESS, the backend either runs as a child
    Python process or as an in-process thread.

    :param port: public service port; defaults to config.PORT_CLOUDFORMATION
    :param asynchronous: forwarded to do_run; True returns without blocking
    :param update_listener: proxy listener for request interception
    :return: process handle (do_run) or started FuncThread
    """
    if not port:
        port = config.PORT_CLOUDFORMATION
    backend_port = DEFAULT_PORT_CLOUDFORMATION_BACKEND
    print('Starting mock CloudFormation (%s port %s)...' % (get_service_protocol(), port))
    start_proxy_for_service('cloudformation', port, backend_port, update_listener)
    if RUN_SERVER_IN_PROCESS:
        # spawn this module as a separate child process
        cmd = 'python "%s" cloudformation -p %s -H 0.0.0.0' % (__file__, backend_port)
        env_vars = {'PYTHONPATH': ':'.join(sys.path)}
        return do_run(cmd, asynchronous, env_vars=env_vars)
    # otherwise run the backend in-process on a dedicated thread
    backend_args = ['cloudformation', '-p', str(backend_port), '-H', '0.0.0.0']
    worker = FuncThread(start_up, backend_args)
    worker.start()
    return worker
def start_kms(port=None, backend_port=None, asynchronous=None, update_listener=None):
    """Start the local KMS mock binary behind a local proxy.

    :param port: public service port; defaults to config.PORT_KMS
    :param backend_port: backend port; defaults to DEFAULT_PORT_KMS_BACKEND
    :param asynchronous: forwarded to do_run; True returns without blocking
    :param update_listener: proxy listener for request interception
    :return: handle returned by do_run for the KMS binary
    """
    port = port or config.PORT_KMS
    # bug fix: honor an explicitly supplied backend_port — it was previously
    # overwritten unconditionally, silently ignoring the caller's argument
    backend_port = backend_port or DEFAULT_PORT_KMS_BACKEND
    # pick the binary matching the current CPU architecture
    kms_binary = INSTALL_PATH_KMS_BINARY_PATTERN.replace('<arch>', get_arch())
    print('Starting mock KMS (%s port %s)...' % (get_service_protocol(), port))
    start_proxy_for_service('kms', port, backend_port, update_listener)
    # the KMS binary reads its configuration from environment variables
    env_vars = {
        'PORT': str(backend_port),
        'REGION': config.DEFAULT_REGION,
        'ACCOUNT_ID': TEST_AWS_ACCOUNT_ID
    }
    return do_run(kms_binary, asynchronous, env_vars=env_vars)
def start_stepfunctions(port=PORT_STEPFUNCTIONS, asynchronous=False, update_listener=None):
    """Start Step Functions Local (Java) behind a local proxy.

    Fix: removed the dead assignment `backend_port = DEFAULT_PORT_STEPFUNCTIONS_BACKEND`,
    which was immediately overwritten by the hard-coded 8083 below.

    :param port: public service port
    :param asynchronous: forwarded to do_run; True returns without blocking
    :param update_listener: proxy listener for request interception
    :return: handle returned by do_run for the JVM process
    """
    install.install_stepfunctions_local()
    # TODO: local port is currently hard coded in Step Functions Local :/
    backend_port = 8083
    # endpoints of the other local mock services that Step Functions integrates with
    lambda_endpoint = aws_stack.get_local_service_url('lambda')
    dynamodb_endpoint = aws_stack.get_local_service_url('dynamodb')
    sns_endpoint = aws_stack.get_local_service_url('sns')
    sqs_endpoint = aws_stack.get_local_service_url('sqs')
    cmd = ('cd %s; java -Dcom.amazonaws.sdk.disableCertChecking -jar StepFunctionsLocal.jar '
           '--lambda-endpoint %s --dynamodb-endpoint %s --sns-endpoint %s '
           '--sqs-endpoint %s --aws-region %s --aws-account %s') % (
        install.INSTALL_DIR_STEPFUNCTIONS, lambda_endpoint, dynamodb_endpoint,
        sns_endpoint, sqs_endpoint, DEFAULT_REGION, TEST_AWS_ACCOUNT_ID)
    print('Starting mock StepFunctions (%s port %s)...' % (get_service_protocol(), port))
    start_proxy_for_service('stepfunctions', port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
def start_dynamodb(port=None, asynchronous=False, update_listener=None):
    """Start DynamoDB Local on a random free backend port behind the proxy.

    :param port: public (deprecated) service port; defaults to config.PORT_DYNAMODB
    :param asynchronous: forwarded to do_run; True returns without blocking
    :param update_listener: proxy listener for request interception
    :return: handle returned by do_run for the JVM process
    """
    global PORT_DYNAMODB_BACKEND
    PORT_DYNAMODB_BACKEND = get_free_tcp_port()
    if not port:
        port = config.PORT_DYNAMODB
    install.install_dynamodb_local()
    # in-memory storage by default; persist under DATA_DIR when configured
    if config.DATA_DIR:
        ddb_data_dir = '%s/dynamodb' % config.DATA_DIR
        mkdir(ddb_data_dir)
        storage_arg = '-dbPath %s' % ddb_data_dir
    else:
        storage_arg = '-inMemory'
    cmd = ('cd %s/infra/dynamodb/; java -Djava.library.path=./DynamoDBLocal_lib '
           '-Xmx%s -jar DynamoDBLocal.jar -sharedDb -port %s %s') % (
        ROOT_PATH, MAX_HEAP_SIZE, PORT_DYNAMODB_BACKEND, storage_arg)
    print('Starting mock DynamoDB service in %s ports %s (recommended) and %s (deprecated)...' % (
        get_service_protocol(), config.EDGE_PORT, port))
    start_proxy_for_service('dynamodb', port, backend_port=PORT_DYNAMODB_BACKEND,
        update_listener=update_listener)
    return do_run(cmd, asynchronous)
def start_kinesis(port=None, asynchronous=False, update_listener=None):
    """Start the kinesalite Kinesis mock and proxy the public port to it.

    :param port: public service port; defaults to config.PORT_KINESIS
    :param asynchronous: forwarded to do_run; True returns without blocking
    :param update_listener: proxy listener for request interception
    :return: handle returned by do_run for the kinesalite process
    """
    if not port:
        port = config.PORT_KINESIS
    install.install_kinesalite()
    backend_port = DEFAULT_PORT_KINESIS_BACKEND
    # same configured latency applied to create/delete/update stream operations
    latency = config.KINESIS_LATENCY
    data_dir_arg = ''
    if config.DATA_DIR:
        # persist stream data under DATA_DIR when configured
        kinesis_data_dir = '%s/kinesis' % config.DATA_DIR
        mkdir(kinesis_data_dir)
        data_dir_arg = '--path %s' % kinesis_data_dir
    cmd = ('%s/node_modules/kinesalite/cli.js --shardLimit %s --port %s'
           ' --createStreamMs %s --deleteStreamMs %s --updateStreamMs %s %s') % (
        ROOT_PATH, config.KINESIS_SHARD_LIMIT, backend_port,
        latency, latency, latency, data_dir_arg)
    print('Starting mock Kinesis (%s port %s)...' % (get_service_protocol(), port))
    start_proxy_for_service('kinesis', port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
def check_dynamodb(expect_shutdown=False, print_error=False): out = None try: # wait for port to be opened wait_for_port_open(DEFAULT_PORT_DYNAMODB_BACKEND) # check DynamoDB out = aws_stack.connect_to_service(service_name='dynamodb').list_tables() except Exception as e: if print_error: LOGGER.error('DynamoDB health check failed: %s %s' % (e, traceback.format_exc())) if expect_shutdown: assert out is None else: assert isinstance(out['TableNames'], list) def start_dynamodb(port=PORT_DYNAMODB, async=False, update_listener=None): install.install_dynamodb_local() backend_port = DEFAULT_PORT_DYNAMODB_BACKEND ddb_data_dir_param = '-inMemory' if DATA_DIR: ddb_data_dir = '%s/dynamodb' % DATA_DIR mkdir(ddb_data_dir) ddb_data_dir_param = '-dbPath %s' % ddb_data_dir cmd = ('cd %s/infra/dynamodb/; java -Djava.library.path=./DynamoDBLocal_lib ' + '-jar DynamoDBLocal.jar -sharedDb -port %s %s') % (ROOT_PATH, backend_port, ddb_data_dir_param) print('Starting mock DynamoDB (%s port %s)...' % (get_service_protocol(), port)) start_proxy_for_service('dynamodb', port, backend_port, update_listener) return do_run(cmd, async)
# module-level logger, named after this module
LOGGER = logging.getLogger(__name__)


def start_sqs(port=PORT_SQS, asynchronous=False, update_listener=None):
    """Start the ElasticMQ SQS mock behind a local proxy.

    Fix: renamed the parameter `async` to `asynchronous` — `async` became a
    reserved keyword in Python 3.7, making the original a SyntaxError.

    :param port: public service port
    :param asynchronous: forwarded to do_run; True returns without blocking
    :param update_listener: proxy listener for SQS request interception
    :return: handle returned by do_run for the ElasticMQ JVM process
    """
    install_elasticmq()
    backend_port = DEFAULT_PORT_SQS_BACKEND
    # create config file (HOCON format consumed by elasticmq-server.jar)
    config = """
    include classpath("application.conf")
    node-address {
        protocol = http
        host = "%s"
        port = %s
        context-path = ""
    }
    rest-sqs {
        enabled = true
        bind-port = %s
        bind-hostname = "0.0.0.0"
        sqs-limits = strict
    }
    """ % (LOCALSTACK_HOSTNAME, port, backend_port)
    config_file = os.path.join(TMP_FOLDER, 'sqs.%s.conf' % short_uid())
    # register the temp file so it gets cleaned up on shutdown
    TMP_FILES.append(config_file)
    save_file(config_file, config)
    # start process
    cmd = ('java -Dconfig.file=%s -jar %s/elasticmq-server.jar' % (config_file, INSTALL_DIR_ELASTICMQ))
    print('Starting mock SQS (%s port %s)...' % (get_service_protocol(), port))
    start_proxy_for_service('sqs', port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
LOGGER = logging.getLogger(__name__) def check_dynamodb(expect_shutdown=False, print_error=False): out = None try: # check DynamoDB out = aws_stack.connect_to_service(service_name='dynamodb').list_tables() except Exception as e: if print_error: LOGGER.error('DynamoDB health check failed: %s %s' % (e, traceback.format_exc())) if expect_shutdown: assert out is None else: assert isinstance(out['TableNames'], list) def start_dynamodb(port=PORT_DYNAMODB, async=False, update_listener=None): install.install_dynamodb_local() backend_port = DEFAULT_PORT_DYNAMODB_BACKEND ddb_data_dir_param = '-inMemory' if DATA_DIR: ddb_data_dir = '%s/dynamodb' % DATA_DIR mkdir(ddb_data_dir) ddb_data_dir_param = '-dbPath %s' % ddb_data_dir cmd = ('cd %s/infra/dynamodb/; java -Djava.library.path=./DynamoDBLocal_lib ' + '-jar DynamoDBLocal.jar -sharedDb -port %s %s') % (ROOT_PATH, backend_port, ddb_data_dir_param) print('Starting mock DynamoDB (%s port %s)...' % (get_service_protocol(), port)) start_proxy_for_service('dynamodb', port, backend_port, update_listener) return do_run(cmd, async)
def start_sqs(port=PORT_SQS, asynchronous=False, update_listener=None):
    """Start the ElasticMQ SQS mock behind a local proxy.

    Fix: renamed the parameter `async` to `asynchronous` — `async` became a
    reserved keyword in Python 3.7, making the original a SyntaxError.

    :param port: public service port
    :param asynchronous: forwarded to do_run; True returns without blocking
    :param update_listener: proxy listener for SQS request interception
    :return: handle returned by do_run for the ElasticMQ JVM process
    """
    install_elasticmq()
    backend_port = DEFAULT_PORT_SQS_BACKEND
    # create config file (HOCON format consumed by elasticmq-server.jar)
    config = '''
    include classpath("application.conf")
    node-address {
        protocol = http
        host = "%s"
        port = %s
        context-path = ""
    }
    rest-sqs {
        enabled = true
        bind-port = %s
        bind-hostname = "0.0.0.0"
        sqs-limits = strict
    }
    ''' % (LOCALSTACK_HOSTNAME, port, backend_port)
    config_file = os.path.join(TMP_FOLDER, 'sqs.%s.conf' % short_uid())
    # register the temp file so it gets cleaned up on shutdown
    TMP_FILES.append(config_file)
    save_file(config_file, config)
    # start process
    cmd = ('java -Dconfig.file=%s -jar %s/elasticmq-server.jar' % (config_file, INSTALL_DIR_ELASTICMQ))
    print("Starting mock SQS (%s port %s)..." % (get_service_protocol(), port))
    start_proxy_for_service('sqs', port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
install.install_elasticsearch() backend_port = DEFAULT_PORT_ELASTICSEARCH_BACKEND es_data_dir = '%s/infra/elasticsearch/data' % (ROOT_PATH) es_tmp_dir = '%s/infra/elasticsearch/tmp' % (ROOT_PATH) if DATA_DIR: es_data_dir = '%s/elasticsearch' % DATA_DIR # Elasticsearch 5.x cannot be bound to 0.0.0.0 in some Docker environments, # hence we use the default bind address 127.0.0.0 and put a proxy in front of it cmd = (( 'ES_JAVA_OPTS=\"$ES_JAVA_OPTS -Xms200m -Xmx500m\" ES_TMPDIR="%s" ' + '%s/infra/elasticsearch/bin/elasticsearch ' + '-E http.port=%s -E http.publish_port=%s -E http.compression=false -E path.data=%s' ) % (es_tmp_dir, ROOT_PATH, backend_port, backend_port, es_data_dir)) print('Starting local Elasticsearch (%s port %s)...' % (get_service_protocol(), port)) if delete_data: run('rm -rf %s' % es_data_dir) # fix permissions chmod_r('%s/infra/elasticsearch' % ROOT_PATH, 0o777) mkdir(es_data_dir) chmod_r(es_data_dir, 0o777) # start proxy and ES process start_proxy_for_service('elasticsearch', port, backend_port, update_listener, quiet=True, params={'protocol_version': 'HTTP/1.0'}) if is_root(): cmd = "su -c '%s' localstack" % cmd