def start_kinesis(port=PORT_KINESIS, asynchronous=False, shard_limit=100, update_listener=None):
    """Install kinesalite, launch it on the backend port, then proxy the service port to it."""
    install.install_kinesalite()
    backend_port = DEFAULT_PORT_KINESIS_BACKEND
    # persist stream data under DATA_DIR/kinesis when persistence is configured
    persistence_arg = ''
    if DATA_DIR:
        persist_dir = '%s/kinesis' % DATA_DIR
        mkdir(persist_dir)
        persistence_arg = '--path %s' % persist_dir
    cmd = '%s/node_modules/kinesalite/cli.js --shardLimit %s --port %s %s' % (
        ROOT_PATH, shard_limit, backend_port, persistence_arg)
    print('Starting mock Kinesis (%s port %s)...' % (get_service_protocol(), port))
    do_run(cmd, asynchronous)
    start_proxy_for_service('kinesis', port, backend_port, update_listener)
def _run_elasticsearch(self, *args):  # *args is necessary for start_thread to work
    """Install, configure and run the Elasticsearch process, then block until it exits.

    Idempotent under the lifecycle lock: a second call returns immediately if the
    process thread already exists. Sets `_starting` once the process is launched
    and `_stopped` once it terminates.
    """
    with self._lifecycle_lock:
        if self._elasticsearch_thread:
            # already started by a previous call — nothing to do
            return
        # FIXME: if this fails the cluster could be left in a wonky state
        # FIXME: this is not a good place to run install, and it only works because we're
        #  assuming that there will only ever be one running Elasticsearch cluster
        install.install_elasticsearch(self.version)
        self._init_directories()
        cmd = self._create_run_command(additional_settings=self.command_settings)
        cmd = " ".join(cmd)
        user = constants.OS_USER_ELASTICSEARCH
        if is_root() and user:
            # run the elasticsearch process as a non-root user (when running in docker)
            cmd = f"su {user} -c '{cmd}'"
        env_vars = self._create_env_vars()
        LOG.info("starting elasticsearch: %s with env %s", cmd, env_vars)
        # use asynchronous=True to get a ShellCommandThread
        self._elasticsearch_thread = do_run(cmd, asynchronous=True, env_vars=env_vars)
        self._starting.set()
    # block until the thread running the command is done (outside the lock,
    # so other lifecycle operations are not blocked while ES runs)
    try:
        self._elasticsearch_thread.join()
    finally:
        LOG.info("elasticsearch process ended")
        self._stopped.set()
def start_dynamodb(port=None, asynchronous=False, update_listener=None):
    """Launch DynamoDB Local on a free backend port behind the edge proxy."""
    global PORT_DYNAMODB_BACKEND
    PORT_DYNAMODB_BACKEND = get_free_tcp_port()
    port = port or config.PORT_DYNAMODB
    install.install_dynamodb_local()

    # default to in-memory mode; switch to on-disk persistence when DATA_DIR is set
    persistence_arg = "-inMemory"
    if config.DATA_DIR:
        persist_dir = "%s/dynamodb" % config.DATA_DIR
        mkdir(persist_dir)
        # as the service command cds into a different directory, the absolute
        # path of the DATA_DIR is needed as the -dbPath
        persistence_arg = "-dbPath %s" % os.path.abspath(persist_dir)

    template = (
        "cd %s/infra/dynamodb/; java -Djava.library.path=./DynamoDBLocal_lib "
        "-Xmx%s -jar DynamoDBLocal.jar -port %s %s"
    )
    cmd = template % (
        MODULE_MAIN_PATH,
        config.DYNAMODB_HEAP_SIZE,
        PORT_DYNAMODB_BACKEND,
        persistence_arg,
    )
    log_startup_message("DynamoDB")
    start_proxy_for_service(
        "dynamodb",
        port,
        backend_port=PORT_DYNAMODB_BACKEND,
        update_listener=update_listener,
    )
    return do_run(cmd, asynchronous, auto_restart=True)
def start_kinesalite(port=None, asynchronous=False, update_listener=None):
    """Install and patch kinesalite, start it, and proxy the Kinesis port to it."""
    # install and apply patches
    install.install_kinesalite()
    apply_patches_kinesalite()
    # start up process
    port = port or config.PORT_KINESIS
    backend_port = get_free_tcp_port()
    delay = config.KINESIS_LATENCY
    persistence_arg = ""
    if config.DATA_DIR:
        persist_dir = "%s/kinesis" % config.DATA_DIR
        mkdir(persist_dir)
        persistence_arg = "--path %s" % persist_dir
    template = (
        "%s/node_modules/kinesalite/cli.js --shardLimit %s --port %s"
        " --createStreamMs %s --deleteStreamMs %s --updateStreamMs %s %s"
    )
    cmd = template % (
        MODULE_MAIN_PATH,
        config.KINESIS_SHARD_LIMIT,
        backend_port,
        delay,
        delay,
        delay,
        persistence_arg,
    )
    log_startup_message("Kinesis")
    start_proxy_for_service("kinesis", port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
def start_cloudformation(port=PORT_CLOUDFORMATION, asynchronous=False, update_listener=None):
    """Start the mock CloudFormation server as a subprocess behind a proxy.

    Bug fix: the proxy was registered under the 'dynamodb' service key; it must be
    registered as 'cloudformation' so routing and listeners apply to the right API.
    """
    backend_port = DEFAULT_PORT_CLOUDFORMATION_BACKEND
    cmd = 'python "%s" cloudformation -p %s -H 0.0.0.0' % (__file__, backend_port)
    print('Starting mock CloudFormation (%s port %s)...' % (get_service_protocol(), port))
    # register the proxy under the correct service name (was: 'dynamodb')
    start_proxy_for_service('cloudformation', port, backend_port, update_listener)
    # propagate sys.path so the spawned interpreter can import this module
    env_vars = {'PYTHONPATH': ':'.join(sys.path)}
    return do_run(cmd, asynchronous, env_vars=env_vars)
def _run_proxy_and_command(cmd, port, backend_port, update_listener, asynchronous):
    """Start the Kinesis proxy, then run `cmd`; ensure `kinesis_stopped` is set
    once the process ends (immediately for synchronous runs, via a watcher
    thread for asynchronous ones). Returns the process thread."""
    global PROCESS_THREAD
    log_startup_message("Kinesis")
    start_proxy_for_service("kinesis", port, backend_port, update_listener)
    # TODO: generalize into service manager once it is introduced
    try:
        PROCESS_THREAD = do_run(cmd, asynchronous)
    finally:
        if asynchronous:

            def _return_listener(*_):
                # wait for the process result and log a non-zero exit code
                try:
                    ret_code = PROCESS_THREAD.result_future.result()
                    if ret_code not in [0, None]:
                        LOGGER.error("kinesis terminated with return code %s", ret_code)
                finally:
                    # always mark the service as stopped, even if result() raised
                    kinesis_stopped.set()

            start_thread(_return_listener)
        else:
            # synchronous run: do_run has already returned, so the process is done
            kinesis_stopped.set()
    return PROCESS_THREAD
def start_sqs_elasticmq(port=None, asynchronous=False, update_listener=None):
    """Render an ElasticMQ config file, start the server, and proxy the SQS port to it."""
    global PORT_SQS_BACKEND
    port = port or config.PORT_SQS
    install_elasticmq()
    PORT_SQS_BACKEND = get_free_tcp_port()
    # render the ElasticMQ configuration file
    conf_text = """
    include classpath("application.conf")
    node-address {
        protocol = http
        host = "%s"
        port = %s
        context-path = ""
    }
    rest-sqs {
        enabled = true
        bind-port = %s
        bind-hostname = "0.0.0.0"
        sqs-limits = strict
    }
    """ % (LOCALSTACK_HOSTNAME, port, PORT_SQS_BACKEND)
    conf_path = os.path.join(TMP_FOLDER, 'sqs.%s.conf' % short_uid())
    TMP_FILES.append(conf_path)
    save_file(conf_path, conf_text)
    # launch the server and put the proxy in front of it
    cmd = 'java -Dconfig.file=%s -Xmx%s -jar %s/elasticmq-server.jar' % (
        conf_path, MAX_HEAP_SIZE, INSTALL_DIR_ELASTICMQ)
    print(
        'Starting mock SQS service in %s ports %s (recommended) and %s (deprecated)...'
        % (get_service_protocol(), config.EDGE_PORT, port))
    start_proxy_for_service('sqs', port, PORT_SQS_BACKEND, update_listener)
    return do_run(cmd, asynchronous)
def start_elasticsearch(port=None, delete_data=True, asynchronous=False, update_listener=None):
    """Start a local Elasticsearch node and put an HTTP/1.0 proxy in front of it."""
    port = port or config.PORT_ELASTICSEARCH
    # delete Elasticsearch data that may be cached locally from a previous test run
    delete_all_elasticsearch_data()
    install.install_elasticsearch()
    backend_port = DEFAULT_PORT_ELASTICSEARCH_BACKEND
    data_dir = '%s/infra/elasticsearch/data' % ROOT_PATH
    tmp_dir = '%s/infra/elasticsearch/tmp' % ROOT_PATH
    if config.DATA_DIR:
        data_dir = '%s/elasticsearch' % config.DATA_DIR
    # Elasticsearch 5.x cannot be bound to 0.0.0.0 in some Docker environments,
    # hence we use the default bind address 127.0.0.0 and put a proxy in front of it
    cmd = ('%s/infra/elasticsearch/bin/elasticsearch '
           '-E http.port=%s -E http.publish_port=%s -E http.compression=false -E path.data=%s'
           ) % (ROOT_PATH, backend_port, backend_port, data_dir)
    env_vars = {
        'ES_JAVA_OPTS': os.environ.get('ES_JAVA_OPTS', '-Xms200m -Xmx600m'),
        'ES_TMPDIR': tmp_dir
    }
    print('Starting local Elasticsearch (%s port %s)...' % (get_service_protocol(), port))
    if delete_data:
        run('rm -rf %s' % data_dir)
    # fix permissions
    chmod_r('%s/infra/elasticsearch' % ROOT_PATH, 0o777)
    mkdir(data_dir)
    chmod_r(data_dir, 0o777)
    # start proxy and ES process
    start_proxy_for_service('elasticsearch', port, backend_port, update_listener,
        quiet=True, params={'protocol_version': 'HTTP/1.0'})
    if is_root():
        # drop root privileges for the ES process (when running in docker)
        cmd = "su -c '%s' localstack" % cmd
    return do_run(cmd, asynchronous, env_vars=env_vars)
def start_sqs(port=PORT_SQS, asynchronous=False, update_listener=None):
    """Write an ElasticMQ config (including predefined queues) and start the server."""
    install_elasticmq()
    backend_port = DEFAULT_PORT_SQS_BACKEND
    # render the configuration, appending any preconfigured queues
    conf_text = """
    include classpath("application.conf")
    node-address {
        protocol = http
        host = "%s"
        port = %s
        context-path = ""
    }
    rest-sqs {
        enabled = true
        bind-port = %s
        bind-hostname = "0.0.0.0"
        sqs-limits = strict
    }
    %s
    """ % (LOCALSTACK_HOSTNAME, port, backend_port, SQS_QUEUES)
    conf_path = os.path.join(TMP_FOLDER, 'sqs.%s.conf' % short_uid())
    TMP_FILES.append(conf_path)
    save_file(conf_path, conf_text)
    # launch the server and proxy the service port to it
    cmd = 'java -Dconfig.file=%s -jar %s/elasticmq-server.jar' % (conf_path, INSTALL_DIR_ELASTICMQ)
    print('Starting mock SQS (%s port %s)...' % (get_service_protocol(), port))
    start_proxy_for_service('sqs', port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
def start_s3(port=None, backend_port=None, asynchronous=None, update_listener=None):
    """Start the mock S3 server as a subprocess and a proxy in front of it.

    Fix: the `backend_port` parameter was accepted but unconditionally overwritten;
    it is now honored when provided, falling back to DEFAULT_PORT_S3_BACKEND
    otherwise (backward compatible for all existing callers).
    """
    port = port or config.PORT_S3
    backend_port = backend_port or DEFAULT_PORT_S3_BACKEND
    cmd = '%s "%s" s3 -p %s -H 0.0.0.0' % (sys.executable, __file__, backend_port)
    print('Starting mock S3 (%s port %s)...' % (get_service_protocol(), port))
    start_proxy_for_service('s3', port, backend_port, update_listener)
    # the child process needs this module importable -> propagate sys.path
    env_vars = {'PYTHONPATH': ':'.join(sys.path)}
    return do_run(cmd, asynchronous, env_vars=env_vars)
def start_elasticsearch(port=None, version=None, delete_data=True, asynchronous=False, update_listener=None):
    """Start a local Elasticsearch (singleton) plus its proxy; returns the process thread."""
    # singleton: if ES was already started, return the existing thread
    if STATE.get('_thread_'):
        return STATE['_thread_']
    port = port or config.PORT_ELASTICSEARCH
    # delete Elasticsearch data that may be cached locally from a previous test run
    delete_all_elasticsearch_data(version)
    install.install_elasticsearch(version)
    backend_port = get_free_tcp_port()
    base_dir = install.get_elasticsearch_install_dir(version)
    es_data_dir = os.path.join(base_dir, 'data')
    es_tmp_dir = os.path.join(base_dir, 'tmp')
    es_mods_dir = os.path.join(base_dir, 'modules')
    if config.DATA_DIR:
        # persistent DATA_DIR configured -> keep data across restarts, never wipe it
        delete_data = False
        es_data_dir = '%s/elasticsearch' % config.DATA_DIR
    # Elasticsearch 5.x cannot be bound to 0.0.0.0 in some Docker environments,
    # hence we use the default bind address 127.0.0.0 and put a proxy in front of it
    backup_dir = os.path.join(config.TMP_FOLDER, 'es_backup')
    cmd = (
        ('%s/bin/elasticsearch ' +
            '-E http.port=%s -E http.publish_port=%s -E http.compression=false ' +
            '-E path.data=%s -E path.repo=%s') %
        (base_dir, backend_port, backend_port, es_data_dir, backup_dir))
    if os.path.exists(os.path.join(es_mods_dir, 'x-pack-ml')):
        # disable the X-Pack ML module when present
        cmd += ' -E xpack.ml.enabled=false'
    env_vars = {
        'ES_JAVA_OPTS': os.environ.get('ES_JAVA_OPTS', '-Xms200m -Xmx600m'),
        'ES_TMPDIR': es_tmp_dir
    }
    LOG.debug('Starting local Elasticsearch (%s port %s)' % (get_service_protocol(), port))
    if delete_data:
        rm_rf(es_data_dir)
    # fix permissions
    chmod_r(base_dir, 0o777)
    mkdir(es_data_dir)
    chmod_r(es_data_dir, 0o777)
    mkdir(es_tmp_dir)
    chmod_r(es_tmp_dir, 0o777)
    # start proxy and ES process
    proxy = start_proxy_for_service('elasticsearch', port, backend_port, update_listener,
        quiet=True, params={'protocol_version': 'HTTP/1.0'})
    STATE['_proxy_'] = proxy
    if is_root():
        # drop root privileges for the ES process (when running in docker)
        cmd = "su localstack -c '%s'" % cmd
    thread = do_run(cmd, asynchronous, env_vars=env_vars)
    STATE['_thread_'] = thread
    return thread
def start_stepfunctions(port=None, asynchronous=False, update_listener=None):
    """Install and launch Step Functions Local behind a proxy."""
    install.install_stepfunctions_local()
    port = port or config.PORT_STEPFUNCTIONS
    # TODO: local port is currently hard coded in Step Functions Local :/
    backend_port = 8083
    cmd = get_command()
    print('Starting mock StepFunctions service on %s port %s...' % (
        get_service_protocol(), config.EDGE_PORT))
    start_proxy_for_service('stepfunctions', port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
def start_stepfunctions(port=None, asynchronous=False, update_listener=None):
    """Launch Step Functions Local behind the edge proxy and return its thread."""
    install.install_stepfunctions_local()
    port = port or config.PORT_STEPFUNCTIONS
    backend_port = config.LOCAL_PORT_STEPFUNCTIONS
    cmd = get_command(backend_port)
    log_startup_message("StepFunctions")
    start_proxy_for_service("stepfunctions", port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
def start_stepfunctions(port=None, asynchronous=False, update_listener=None):
    """Launch Step Functions Local and proxy the service port to it."""
    install.install_stepfunctions_local()
    port = port or config.PORT_STEPFUNCTIONS
    backend_port = config.LOCAL_PORT_STEPFUNCTIONS
    cmd = get_command(backend_port)
    print('Starting mock StepFunctions service on %s ...' % edge_ports_info())
    start_proxy_for_service('stepfunctions', port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
def start_kinesis_mock(port=None, asynchronous=False, update_listener=None):
    """Download (if needed) and start kinesis-mock, proxying the Kinesis port to it.

    Bug fix: `config.LS_LOG.upper` was missing the call parentheses, so LOG_LEVEL
    was set to the repr of a bound method instead of the upper-cased log level.
    """
    target_dir = os.path.join(INSTALL_DIR_INFRA, 'kinesis-mock')

    # pick the release asset matching the current platform; fall back to the JVM build
    machine = platform.machine().lower()
    system = platform.system().lower()
    if machine == 'x86_64' or machine == 'amd64':
        if system == 'windows':
            target_file_name = 'kinesis-mock-mostly-static.exe'
        elif system == 'linux':
            target_file_name = 'kinesis-mock-linux-amd64-static'
        elif system == 'darwin':
            target_file_name = 'kinesis-mock-macos-amd64-dynamic'
        else:
            target_file_name = 'kinesis-mock.jar'
    else:
        target_file_name = 'kinesis-mock.jar'

    target_file = os.path.join(target_dir, target_file_name)
    if not os.path.exists(target_file):
        # fetch the matching release asset from the GitHub releases API
        response = requests.get(KINESIS_MOCK_RELEASES)
        content = json.loads(to_str(response.content))
        assets = content.get('assets', [])
        filtered = [x for x in assets if x['name'] == target_file_name]
        archive_url = filtered[0].get('browser_download_url')
        download(archive_url, target_file)

    port = port or config.PORT_KINESIS
    backend_port = get_free_tcp_port()
    kinesis_data_dir_param = ''
    if config.DATA_DIR:
        kinesis_data_dir = '%s/kinesis' % config.DATA_DIR
        mkdir(kinesis_data_dir)
        kinesis_data_dir_param = 'SHOULD_PERSIST_DATA=true PERSIST_PATH=%s' % kinesis_data_dir

    # map LocalStack's LS_LOG setting onto kinesis-mock's LOG_LEVEL
    if not config.LS_LOG:
        log_level = 'INFO'
    elif config.LS_LOG == 'warning':
        log_level = 'WARN'
    else:
        log_level = config.LS_LOG.upper()  # fixed: was `.upper` (bound method, not its result)
    log_level_param = 'LOG_LEVEL=%s' % (log_level)

    # apply the configured latency to every stream-mutating operation
    latency = config.KINESIS_LATENCY + 'ms'
    latency_param = 'CREATE_STREAM_DURATION=%s DELETE_STREAM_DURATION=%s REGISTER_STREAM_CONSUMER_DURATION=%s ' \
        'START_STREAM_ENCRYPTION_DURATION=%s STOP_STREAM_ENCRYPTION_DURATION=%s ' \
        'DEREGISTER_STREAM_CONSUMER_DURATION=%s MERGE_SHARDS_DURATION=%s SPLIT_SHARD_DURATION=%s ' \
        'UPDATE_SHARD_COUNT_DURATION=%s' \
        % (latency, latency, latency, latency, latency, latency, latency, latency, latency)

    if target_file_name.endswith('.jar'):
        # JVM build
        cmd = 'KINESIS_MOCK_HTTP1_PLAIN_PORT=%s SHARD_LIMIT=%s %s %s %s java -XX:+UseG1GC -jar %s' \
            % (backend_port, config.KINESIS_SHARD_LIMIT, latency_param, kinesis_data_dir_param,
                log_level_param, target_file)
    else:
        # native binary build
        chmod_r(target_file, 0o777)
        cmd = 'KINESIS_MOCK_HTTP1_PLAIN_PORT=%s SHARD_LIMIT=%s %s %s %s %s --gc=G1' \
            % (backend_port, config.KINESIS_SHARD_LIMIT, latency_param, kinesis_data_dir_param,
                log_level_param, target_file)
    start_proxy_for_service('kinesis', port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
def start_kms(port=None, backend_port=None, asynchronous=None, update_listener=None):
    """Run the local KMS binary on a free port and proxy the service port to it."""
    port = port or config.PORT_KMS
    backend_port = get_free_tcp_port()
    binary_path = INSTALL_PATH_KMS_BINARY_PATTERN.replace('<arch>', get_arch())
    print('Starting mock KMS service in %s ports %s (recommended) and %s (deprecated)...' % (
        get_service_protocol(), config.EDGE_PORT, port))
    start_proxy_for_service('kms', port, backend_port, update_listener)
    # the binary reads its listen port and account scope from the environment
    env_vars = dict(
        PORT=str(backend_port),
        KMS_REGION=config.DEFAULT_REGION,
        KMS_ACCOUNT_ID=TEST_AWS_ACCOUNT_ID,
    )
    return do_run(binary_path, asynchronous, env_vars=env_vars)
def start_dynamodb(port=PORT_DYNAMODB, asynchronous=False, update_listener=None):
    """Start DynamoDB Local (sharedDb mode) and proxy the service port to it."""
    install.install_dynamodb_local()
    backend_port = DEFAULT_PORT_DYNAMODB_BACKEND
    # in-memory by default; persist to DATA_DIR/dynamodb when configured
    persistence_arg = '-inMemory'
    if DATA_DIR:
        persist_dir = '%s/dynamodb' % DATA_DIR
        mkdir(persist_dir)
        persistence_arg = '-dbPath %s' % persist_dir
    cmd = ('cd %s/infra/dynamodb/; java -Djava.library.path=./DynamoDBLocal_lib '
           '-jar DynamoDBLocal.jar -sharedDb -port %s %s') % (
        ROOT_PATH, backend_port, persistence_arg)
    print('Starting mock DynamoDB (%s port %s)...' % (get_service_protocol(), port))
    start_proxy_for_service('dynamodb', port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
def start_cloudformation(port=None, asynchronous=False, update_listener=None):
    """Start mock CloudFormation, either in-process or as a subprocess."""
    port = port or config.PORT_CLOUDFORMATION
    backend_port = DEFAULT_PORT_CLOUDFORMATION_BACKEND
    print('Starting mock CloudFormation (%s port %s)...' % (get_service_protocol(), port))
    start_proxy_for_service('cloudformation', port, backend_port, update_listener)
    if not RUN_SERVER_IN_PROCESS:
        # run the backend in-process on a separate thread
        argv = ['cloudformation', '-p', str(backend_port), '-H', '0.0.0.0']
        thread = FuncThread(start_up, argv)
        thread.start()
        return thread
    # spawn a separate interpreter; propagate sys.path so it can import this module
    cmd = 'python "%s" cloudformation -p %s -H 0.0.0.0' % (__file__, backend_port)
    env_vars = {'PYTHONPATH': ':'.join(sys.path)}
    return do_run(cmd, asynchronous, env_vars=env_vars)
def start_kinesis_mock(port=None, asynchronous=False, update_listener=None):
    """Install and start the kinesis-mock backend with a 'kinesis' proxy in front of it."""
    kinesis_mock_bin = install.install_kinesis_mock()
    port = port or config.PORT_KINESIS
    backend_port = get_free_tcp_port()
    kinesis_data_dir_param = ''
    if config.DATA_DIR:
        kinesis_data_dir = '%s/kinesis' % config.DATA_DIR
        mkdir(kinesis_data_dir)
        # FIXME: workaround for https://github.com/localstack/localstack/issues/4227
        streams_file = os.path.join(kinesis_data_dir, 'kinesis-data.json')
        if not os.path.exists(streams_file):
            # seed an empty streams file so kinesis-mock can start with persistence on
            with open(streams_file, 'w') as fd:
                fd.write('{"streams":{}}')
        kinesis_data_dir_param = 'SHOULD_PERSIST_DATA=true PERSIST_PATH=%s' % kinesis_data_dir
    # map LocalStack's LS_LOG setting onto kinesis-mock's LOG_LEVEL
    if not config.LS_LOG:
        log_level = 'INFO'
    elif config.LS_LOG == 'warning':
        log_level = 'WARN'
    else:
        log_level = config.LS_LOG.upper()
    log_level_param = 'LOG_LEVEL=%s' % log_level
    # apply the configured latency to every stream-mutating operation
    latency = config.KINESIS_LATENCY + 'ms'
    latency_param = 'CREATE_STREAM_DURATION=%s DELETE_STREAM_DURATION=%s REGISTER_STREAM_CONSUMER_DURATION=%s ' \
        'START_STREAM_ENCRYPTION_DURATION=%s STOP_STREAM_ENCRYPTION_DURATION=%s ' \
        'DEREGISTER_STREAM_CONSUMER_DURATION=%s MERGE_SHARDS_DURATION=%s SPLIT_SHARD_DURATION=%s ' \
        'UPDATE_SHARD_COUNT_DURATION=%s' \
        % (latency, latency, latency, latency, latency, latency, latency, latency, latency)
    if config.KINESIS_INITIALIZE_STREAMS != '':
        initialize_streams_param = 'INITIALIZE_STREAMS=%s' % (config.KINESIS_INITIALIZE_STREAMS)
    else:
        initialize_streams_param = ''
    if kinesis_mock_bin.endswith('.jar'):
        # JVM build of kinesis-mock
        cmd = 'KINESIS_MOCK_PLAIN_PORT=%s SHARD_LIMIT=%s %s %s %s %s java -XX:+UseG1GC -jar %s' \
            % (backend_port, config.KINESIS_SHARD_LIMIT, latency_param, kinesis_data_dir_param,
                log_level_param, initialize_streams_param, kinesis_mock_bin)
    else:
        # native binary build
        chmod_r(kinesis_mock_bin, 0o777)
        cmd = 'KINESIS_MOCK_PLAIN_PORT=%s SHARD_LIMIT=%s %s %s %s %s %s --gc=G1' \
            % (backend_port, config.KINESIS_SHARD_LIMIT, latency_param, kinesis_data_dir_param,
                log_level_param, initialize_streams_param, kinesis_mock_bin)
    LOGGER.info('starting kinesis-mock proxy %d:%d with cmd: %s', port, backend_port, cmd)
    start_proxy_for_service('kinesis', port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
def start_kms(port=None, backend_port=None, asynchronous=None, update_listener=None):
    """Start the local KMS binary on the default backend port behind a proxy."""
    port = port or config.PORT_KMS
    backend_port = DEFAULT_PORT_KMS_BACKEND
    binary_path = INSTALL_PATH_KMS_BINARY_PATTERN.replace('<arch>', get_arch())
    print('Starting mock KMS (%s port %s)...' % (get_service_protocol(), port))
    start_proxy_for_service('kms', port, backend_port, update_listener)
    # the binary reads its listen port and account scope from the environment
    env_vars = dict(
        PORT=str(backend_port),
        REGION=config.DEFAULT_REGION,
        ACCOUNT_ID=TEST_AWS_ACCOUNT_ID,
    )
    return do_run(binary_path, asynchronous, env_vars=env_vars)
def start_dynamodb(port=None, asynchronous=False, update_listener=None):
    """Start DynamoDB Local (sharedDb) on a free backend port behind the edge proxy."""
    global PORT_DYNAMODB_BACKEND
    PORT_DYNAMODB_BACKEND = get_free_tcp_port()
    port = port or config.PORT_DYNAMODB
    install.install_dynamodb_local()
    # in-memory by default; persist to DATA_DIR/dynamodb when configured
    persistence_arg = '-inMemory'
    if config.DATA_DIR:
        persist_dir = '%s/dynamodb' % config.DATA_DIR
        mkdir(persist_dir)
        persistence_arg = '-dbPath %s' % persist_dir
    cmd = ('cd %s/infra/dynamodb/; java -Djava.library.path=./DynamoDBLocal_lib '
           '-Xmx%s -jar DynamoDBLocal.jar -sharedDb -port %s %s') % (
        ROOT_PATH, config.DYNAMODB_HEAP_SIZE, PORT_DYNAMODB_BACKEND, persistence_arg)
    print('Starting mock DynamoDB service on %s ...' % edge_ports_info())
    start_proxy_for_service('dynamodb', port, backend_port=PORT_DYNAMODB_BACKEND,
        update_listener=update_listener)
    return do_run(cmd, asynchronous)
def start_stepfunctions(port=PORT_STEPFUNCTIONS, asynchronous=False, update_listener=None):
    """Start Step Functions Local wired to the sibling local service endpoints."""
    install.install_stepfunctions_local()
    backend_port = DEFAULT_PORT_STEPFUNCTIONS_BACKEND
    # TODO: local port is currently hard coded in Step Functions Local :/
    backend_port = 8083
    # endpoints of the services Step Functions integrates with
    endpoints = {name: aws_stack.get_local_service_url(name)
                 for name in ('lambda', 'dynamodb', 'sns', 'sqs')}
    cmd = ('cd %s; java -Dcom.amazonaws.sdk.disableCertChecking -jar StepFunctionsLocal.jar '
        '--lambda-endpoint %s --dynamodb-endpoint %s --sns-endpoint %s '
        '--sqs-endpoint %s --aws-region %s --aws-account %s') % (
        install.INSTALL_DIR_STEPFUNCTIONS, endpoints['lambda'], endpoints['dynamodb'],
        endpoints['sns'], endpoints['sqs'], DEFAULT_REGION, TEST_AWS_ACCOUNT_ID)
    print('Starting mock StepFunctions (%s port %s)...' % (get_service_protocol(), port))
    start_proxy_for_service('stepfunctions', port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
def start_kinesis(port=None, asynchronous=False, update_listener=None):
    """Start kinesalite on a free backend port with the configured latencies."""
    port = port or config.PORT_KINESIS
    install.install_kinesalite()
    backend_port = get_free_tcp_port()
    delay = config.KINESIS_LATENCY
    persistence_arg = ''
    if config.DATA_DIR:
        persist_dir = '%s/kinesis' % config.DATA_DIR
        mkdir(persist_dir)
        persistence_arg = '--path %s' % persist_dir
    cmd = ('%s/node_modules/kinesalite/cli.js --shardLimit %s --port %s'
           ' --createStreamMs %s --deleteStreamMs %s --updateStreamMs %s %s') % (
        ROOT_PATH, config.KINESIS_SHARD_LIMIT, backend_port,
        delay, delay, delay, persistence_arg)
    log_startup_message('Kinesis')
    start_proxy_for_service('kinesis', port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
def start_kinesis(port=None, asynchronous=False, update_listener=None):
    """Start kinesalite on the default backend port with the configured latencies."""
    port = port or config.PORT_KINESIS
    install.install_kinesalite()
    backend_port = DEFAULT_PORT_KINESIS_BACKEND
    delay = config.KINESIS_LATENCY
    persistence_arg = ''
    if config.DATA_DIR:
        persist_dir = '%s/kinesis' % config.DATA_DIR
        mkdir(persist_dir)
        persistence_arg = '--path %s' % persist_dir
    cmd = ('%s/node_modules/kinesalite/cli.js --shardLimit %s --port %s'
           ' --createStreamMs %s --deleteStreamMs %s --updateStreamMs %s %s') % (
        ROOT_PATH, config.KINESIS_SHARD_LIMIT, backend_port,
        delay, delay, delay, persistence_arg)
    print('Starting mock Kinesis (%s port %s)...' % (get_service_protocol(), port))
    start_proxy_for_service('kinesis', port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
def start_kms(port=None, backend_port=None, asynchronous=None, update_listener=None):
    """Start local KMS and block until its backend port accepts connections."""
    port = port or config.PORT_KMS
    backend_port = get_free_tcp_port()
    binary_path = INSTALL_PATH_KMS_BINARY_PATTERN.replace('<arch>', get_arch())
    log_startup_message('KMS')
    start_proxy_for_service('kms', port, backend_port, update_listener)
    # pass region/account via both prefixed and unprefixed env var names
    env_vars = {
        'PORT': str(backend_port),
        'KMS_REGION': config.DEFAULT_REGION,
        'REGION': config.DEFAULT_REGION,
        'KMS_ACCOUNT_ID': TEST_AWS_ACCOUNT_ID,
        'ACCOUNT_ID': TEST_AWS_ACCOUNT_ID
    }
    if config.DATA_DIR:
        env_vars['KMS_DATA_PATH'] = config.DATA_DIR
    proc = do_run(binary_path, asynchronous, env_vars=env_vars)
    # block until the backend is actually listening
    wait_for_port_open(backend_port)
    return proc
def start_kms_local(port=None, backend_port=None, asynchronous=None, update_listener=None):
    """Start the local KMS binary and wait for its backend port to open."""
    port = port or config.PORT_KMS
    backend_port = get_free_tcp_port()
    binary_path = INSTALL_PATH_KMS_BINARY_PATTERN.replace("<arch>", get_os())
    log_startup_message("KMS")
    start_proxy_for_service("kms", port, backend_port, update_listener)
    # pass region/account via both prefixed and unprefixed env var names
    env_vars = {
        "PORT": str(backend_port),
        "KMS_REGION": config.DEFAULT_REGION,
        "REGION": config.DEFAULT_REGION,
        "KMS_ACCOUNT_ID": TEST_AWS_ACCOUNT_ID,
        "ACCOUNT_ID": TEST_AWS_ACCOUNT_ID,
    }
    if config.dirs.data:
        env_vars["KMS_DATA_PATH"] = config.dirs.data
    proc = do_run(binary_path, asynchronous, env_vars=env_vars)
    # block until the backend is actually listening
    wait_for_port_open(backend_port)
    return proc
def start_dynamodb(port=None, asynchronous=False, update_listener=None):
    """Start DynamoDB Local (sharedDb) on the default backend port behind a proxy."""
    port = port or config.PORT_DYNAMODB
    install.install_dynamodb_local()
    backend_port = DEFAULT_PORT_DYNAMODB_BACKEND
    # in-memory by default; persist to DATA_DIR/dynamodb when configured
    persistence_arg = '-inMemory'
    if config.DATA_DIR:
        persist_dir = '%s/dynamodb' % config.DATA_DIR
        mkdir(persist_dir)
        persistence_arg = '-dbPath %s' % persist_dir
    cmd = ('cd %s/infra/dynamodb/; java -Djava.library.path=./DynamoDBLocal_lib '
           '-Xmx%s -jar DynamoDBLocal.jar -sharedDb -port %s %s') % (
        ROOT_PATH, MAX_HEAP_SIZE, backend_port, persistence_arg)
    print(
        'Starting mock DynamoDB service in %s ports %s (recommended) and %s (deprecated)...'
        % (get_service_protocol(), config.EDGE_PORT, port))
    start_proxy_for_service('dynamodb', port, backend_port, update_listener)
    return do_run(cmd, asynchronous)
def start_kms(port=None, backend_port=None, asynchronous=None, update_listener=None):
    """Start the local KMS binary and wait until it is reachable."""
    port = port or config.PORT_KMS
    backend_port = get_free_tcp_port()
    binary_path = INSTALL_PATH_KMS_BINARY_PATTERN.replace('<arch>', get_arch())
    print('Starting mock KMS service on %s ...' % edge_ports_info())
    start_proxy_for_service('kms', port, backend_port, update_listener)
    # pass region/account via both prefixed and unprefixed env var names
    env_vars = {
        'PORT': str(backend_port),
        'KMS_REGION': config.DEFAULT_REGION,
        'REGION': config.DEFAULT_REGION,
        'KMS_ACCOUNT_ID': TEST_AWS_ACCOUNT_ID,
        'ACCOUNT_ID': TEST_AWS_ACCOUNT_ID
    }
    proc = do_run(binary_path, asynchronous, env_vars=env_vars)
    # block until the backend is actually listening
    wait_for_port_open(backend_port)
    return proc
def start_stepfunctions(asynchronous=True):
    """Start Step Functions Local; stores and returns the process thread."""
    # TODO: introduce Server abstraction for StepFunctions process
    global PROCESS_THREAD
    backend_port = config.LOCAL_PORT_STEPFUNCTIONS
    install.install_stepfunctions_local()
    cmd = get_command(backend_port)
    log_startup_message("StepFunctions")
    # TODO: change ports in stepfunctions.jar, then update here
    sfn_env = {
        "EDGE_PORT": config.EDGE_PORT_HTTP or config.EDGE_PORT,
        "EDGE_PORT_HTTP": config.EDGE_PORT_HTTP or config.EDGE_PORT,
        "DATA_DIR": config.DATA_DIR,
    }
    PROCESS_THREAD = do_run(cmd, asynchronous, strip_color=True, env_vars=sfn_env)
    return PROCESS_THREAD
def start_stepfunctions(port=None, asynchronous=False, update_listener=None):
    """Start Step Functions Local behind a proxy; stores the thread in PROCESS_THREAD."""
    global PROCESS_THREAD
    port = port or config.service_port("stepfunctions")
    backend_port = config.LOCAL_PORT_STEPFUNCTIONS
    install.install_stepfunctions_local()
    cmd = get_command(backend_port)
    log_startup_message("StepFunctions")
    start_proxy_for_service("stepfunctions", port, backend_port, update_listener)
    # TODO: change ports in stepfunctions.jar, then update here
    sfn_env = {
        "EDGE_PORT": config.EDGE_PORT_HTTP or config.EDGE_PORT,
        "EDGE_PORT_HTTP": config.EDGE_PORT_HTTP or config.EDGE_PORT,
        "DATA_DIR": config.DATA_DIR,
    }
    PROCESS_THREAD = do_run(cmd, asynchronous, strip_color=True, env_vars=sfn_env)
    return PROCESS_THREAD
LOGGER = logging.getLogger(__name__)


def start_sqs(port=PORT_SQS, asynchronous=False, update_listener=None):
    """Start ElasticMQ as the mock SQS backend, behind a proxy.

    Bug fix: the keyword parameter was named `async`, which is a reserved keyword
    since Python 3.7 and makes this module unparseable; it is renamed to
    `asynchronous`, consistent with the other service starters in this file.
    """
    install_elasticmq()
    backend_port = DEFAULT_PORT_SQS_BACKEND
    # create config file
    config = """
    include classpath("application.conf")
    node-address {
        protocol = http
        host = "%s"
        port = %s
        context-path = ""
    }
    rest-sqs {
        enabled = true
        bind-port = %s
        bind-hostname = "0.0.0.0"
        sqs-limits = strict
    }
    """ % (LOCALSTACK_HOSTNAME, port, backend_port)
    config_file = os.path.join(TMP_FOLDER, 'sqs.%s.conf' % short_uid())
    TMP_FILES.append(config_file)
    save_file(config_file, config)
    # start process
    cmd = ('java -Dconfig.file=%s -jar %s/elasticmq-server.jar' % (
        config_file, INSTALL_DIR_ELASTICMQ))
    print('Starting mock SQS (%s port %s)...' % (get_service_protocol(), port))
    start_proxy_for_service('sqs', port, backend_port, update_listener)
    return do_run(cmd, asynchronous)