def start_proxy(port, backend_port, update_listener, quiet=False, backend_host=DEFAULT_BACKEND_HOST):
    """Launch a GenericProxy that forwards traffic from `port` to the backend.

    :param port: local port the proxy listens on
    :param backend_port: port of the backend service to forward to
    :param update_listener: listener hook passed through to GenericProxy
    :param quiet: suppress proxy log output when True
    :param backend_host: host of the backend service (defaults to DEFAULT_BACKEND_HOST)
    """
    forward_address = '%s:%s' % (backend_host, backend_port)
    proxy = GenericProxy(
        port=port,
        forward_host=forward_address,
        update_listener=update_listener,
        quiet=quiet)
    proxy.start()
    # track the thread so it can be cleaned up on shutdown
    TMP_THREADS.append(proxy)
def serve(port, quiet=True):
    """Run the module-level Flask `app` on all interfaces at the given port.

    :param port: port to bind (coerced to int)
    :param quiet: when True, raise werkzeug's log level to ERROR to silence
        per-request access logging
    """
    if quiet:
        logging.getLogger('werkzeug').setLevel(logging.ERROR)
    app.run(
        port=int(port),
        threaded=True,
        host='0.0.0.0',
        ssl_context=GenericProxy.get_flask_ssl_context())
def test_api_gateway_http_integration():
    """Integration test: API Gateway proxying requests to an HTTP backend.

    Starts a local GenericProxy as the HTTP backend, connects an API Gateway
    to it, and verifies GET and POST requests round-trip through the gateway.

    Fix: the proxy is now stopped in a ``finally`` block — previously a
    failing assertion skipped ``proxy.stop()`` and leaked the proxy
    thread and its port for the rest of the test run.
    """
    test_port = 12123
    backend_url = 'http://localhost:%s%s' % (test_port, API_PATH_HTTP_BACKEND)

    # create target HTTP backend that echoes the posted body (or '{}' on GET)
    def listener(**kwargs):
        response = Response()
        response.status_code = 200
        response._content = json.dumps(kwargs['data']) if kwargs['data'] else '{}'
        return response

    proxy = GenericProxy(test_port, update_listener=listener)
    proxy.start()
    try:
        # create API Gateway and connect it to the HTTP backend
        result = connect_api_gateway_to_http('test_gateway2', backend_url, path=API_PATH_HTTP_BACKEND)

        # make test request to gateway
        url = INBOUND_GATEWAY_URL_PATTERN.format(
            api_id=result['id'], stage_name=TEST_STAGE_NAME, path=API_PATH_HTTP_BACKEND)
        result = requests.get(url)
        assert result.status_code == 200
        assert to_str(result.content) == '{}'

        data = {"data": 123}
        result = requests.post(url, data=json.dumps(data))
        assert result.status_code == 200
        assert json.loads(to_str(result.content)) == data
    finally:
        # clean up even if an assertion above failed
        proxy.stop()
def start_proxy(port, backend_port, update_listener, backend_host='127.0.0.1'):
    """Launch a GenericProxy that forwards traffic from `port` to the backend.

    Generalization: the backend host was previously hard-coded to
    '127.0.0.1'; it is now a keyword parameter with that same default,
    so existing callers are unaffected while non-local backends become
    reachable (matching the richer start_proxy variant elsewhere).

    :param port: local port the proxy listens on
    :param backend_port: port of the backend service to forward to
    :param update_listener: listener hook passed through to GenericProxy
    :param backend_host: host of the backend service (default '127.0.0.1')
    """
    proxy_thread = GenericProxy(
        port=port,
        forward_host='%s:%s' % (backend_host, backend_port),
        update_listener=update_listener)
    proxy_thread.start()
    # track the thread so it can be cleaned up on shutdown
    TMP_THREADS.append(proxy_thread)
t.start() TMP_THREADS.append(t) return t else: return run(cmd) def start_dynalite(port=DEFAULT_PORT_DYNAMODB, async=False, update_listener=None): backend_port = DEFAULT_PORT_DYNAMODB_BACKEND cmd = '%s/node_modules/dynalite/cli.js --port %s' % (root_path, backend_port) print("Starting mock DynamoDB (port %s)..." % port) proxy_thread = GenericProxy(port=port, forward_host='127.0.0.1:%s' % backend_port, update_listener=update_listener) proxy_thread.start() TMP_THREADS.append(proxy_thread) return do_run(cmd, async) def start_kinesalite(port=DEFAULT_PORT_KINESIS, async=False, shard_limit=100, update_listener=None): backend_port = DEFAULT_PORT_KINESIS_BACKEND cmd = ('%s/node_modules/kinesalite/cli.js --shardLimit %s --port %s' % (root_path, shard_limit, backend_port)) print("Starting mock Kinesis (port %s)..." % port) proxy_thread = GenericProxy(port=port,
install.install_elasticsearch() backend_port = DEFAULT_PORT_ELASTICSEARCH_BACKEND es_data_dir = '%s/infra/elasticsearch/data' % (ROOT_PATH) if DATA_DIR: es_data_dir = '%s/elasticsearch' % DATA_DIR # Elasticsearch 5.x cannot be bound to 0.0.0.0 in some Docker environments, # hence we use the default bind address 127.0.0.0 and put a proxy in front of it cmd = (( 'ES_JAVA_OPTS=\"$ES_JAVA_OPTS -Xms200m -Xmx500m\" %s/infra/elasticsearch/bin/elasticsearch ' + '-E http.port=%s -E http.publish_port=%s -E http.compression=false -E path.data=%s ' + '-E xpack.security.enabled=false') % (ROOT_PATH, backend_port, backend_port, es_data_dir)) if USE_SSL: # make sure we have a test cert generated and configured GenericProxy.create_ssl_cert() cmd += ((' -E xpack.ssl.key=%s.key -E xpack.ssl.certificate=%s.crt ' + '-E xpack.security.transport.ssl.enabled=true ' + '-E xpack.security.http.ssl.enabled=true') % (SERVER_CERT_PEM_FILE, SERVER_CERT_PEM_FILE)) print("Starting local Elasticsearch (%s port %s)..." % (get_service_protocol(), port)) if delete_data: run('rm -rf %s' % es_data_dir) # fix permissions run('chmod -R 777 %s/infra/elasticsearch' % ROOT_PATH) run('mkdir -p "%s"; chmod -R 777 "%s"' % (es_data_dir, es_data_dir)) # start proxy and ES process start_proxy(port, backend_port, update_listener,