def fix_error_codes(method, data, response):
    """Strip the <Errors> wrapper element from failed CreateRole responses.

    Some AWS Java SDKs cannot parse error responses where the <Error> element
    is nested inside an <Errors> wrapper (issue #2231), so the wrapper is
    removed by rewriting ``response._content`` in place.

    :param method: HTTP method of the request (only POST is handled)
    :param data: raw request data (bytes or str), used to detect CreateRole calls
    :param response: response object whose ``_content`` is patched in place
    """
    # "[\s\S]*" matches any character (incl. newlines) with the same greedy
    # semantics as the previous "(\s|.)*", but without the overlapping
    # alternation that can cause catastrophic regex backtracking on large,
    # non-matching payloads.
    regex = r"<Errors>\s*(<Error>[\s\S]*</Error>)\s*</Errors>"
    if method == "POST" and "Action=CreateRole" in to_str(data) and response.status_code >= 400:
        content = to_str(response.content)
        # remove the <Errors> wrapper element, as this breaks AWS Java SDKs (issue #2231)
        response._content = re.sub(regex, r"\1", content, flags=REGEX_FLAGS)
def return_response(self, method, path, data, headers, response):
    """Post-response hook: persist the API call if this listener deems it persist-worthy."""
    result = super(PersistingProxyListener, self).return_response(
        method, path, data, headers, response
    )
    # record the call for later replay/persistence, if applicable
    if self.should_persist(method, path, data, headers, response):
        record(self.api_name(), to_str(method), to_str(path), data, headers, response)
    return result
def booleans_to_lowercase(response, tag_names):
    """Lowercase XML boolean values ("True"/"False") for the given tags.

    Rewrites ``response._content`` in place, turning e.g. ``<Tag> True </Tag>``
    into ``<Tag>true</Tag>``.

    :param response: response object whose ``_content`` is patched in place
    :param tag_names: iterable of XML tag names to fix up
    """
    if not tag_names:
        # nothing to do - also avoids touching response._content unnecessarily
        return
    # decode once, instead of re-decoding response.content for every tag/pass
    content = to_str(response.content)
    for tag_name in tag_names:
        # one combined pattern replaces the two separate True/False passes
        regex = r"<{tag}>\s*(True|False)\s*</{tag}>".format(tag=tag_name)
        content = re.sub(
            regex,
            lambda match, tag=tag_name: "<{tag}>{value}</{tag}>".format(
                tag=tag, value=match.group(1).lower()
            ),
            content,
            flags=REGEX_FLAGS,
        )
    response._content = content
def _run_async_cmd(
    self, cmd: List[str], stdin: bytes, container_name: str, image_name=None
) -> Tuple[bytes, bytes]:
    """Run the given Docker CLI command asynchronously and wait for completion.

    :param cmd: command (argument list) to execute
    :param stdin: bytes passed to the process' stdin (if truthy)
    :param container_name: container name used for error reporting
    :param image_name: optional image name used for error reporting
    :return: tuple of (stdout, stderr) from the completed process
    :raises NoSuchImage: if the image could not be found
    :raises NoSuchContainer: if the container could not be found
    :raises ContainerException: for any other non-zero exit code
    """
    kwargs = {
        "inherit_env": True,
        "asynchronous": True,
        "stderr": subprocess.PIPE,
        "outfile": self.default_run_outfile or subprocess.PIPE,
    }
    if stdin:
        kwargs["stdin"] = True
    try:
        process = run(cmd, **kwargs)
        stdout, stderr = process.communicate(input=stdin)
        if process.returncode != 0:
            raise subprocess.CalledProcessError(process.returncode, cmd, stdout, stderr)
        return stdout, stderr
    except subprocess.CalledProcessError as e:
        stderr_str = to_str(e.stderr)
        # chain the original error ("from e") for debuggability, consistent
        # with push_image/tag_image in this class
        if "Unable to find image" in stderr_str:
            raise NoSuchImage(image_name or "", stdout=e.stdout, stderr=e.stderr) from e
        if "No such container" in stderr_str:
            raise NoSuchContainer(container_name, stdout=e.stdout, stderr=e.stderr) from e
        raise ContainerException(
            "Docker process returned with errorcode %s" % e.returncode, e.stdout, e.stderr
        ) from e
def __init__(
    self,
    method: str = "GET",
    path: str = "",
    headers: Union[Mapping, Headers] = None,
    body: Union[bytes, str] = None,
    scheme: str = "http",
    root_path: str = "/",
    query_string: Union[bytes, str] = b"",
    remote_addr: str = None,
    server: Optional[Tuple[str, Optional[int]]] = None,
    raw_path: str = None,
):
    """Create a Request backed by a synthetic WSGIEnvironment."""
    # decode query string if necessary (latin-1 is what werkzeug would expect)
    decoded_query = strings.to_str(query_string, "latin-1")

    # build the WSGIEnvironment dictionary representing this request
    wsgi_env = dummy_wsgi_environment(
        method=method,
        path=path,
        headers=headers,
        body=body,
        scheme=scheme,
        root_path=root_path,
        query_string=decoded_query,
        remote_addr=remote_addr,
        server=server,
        raw_uri=raw_path,
    )
    super(Request, self).__init__(wsgi_env)

    # werkzeug normally provides read-only access to headers set in the
    # WSGIEnvironment through the EnvironHeaders class; this makes them mutable
    self.headers = Headers(self.headers)
def data_as_string(self) -> Union[str, bytes]:
    """Return the payload as a string; fall back to base64 bytes if undecodable."""
    if isinstance(self.data, (dict, list)):
        # structured payloads are serialized as JSON
        return json.dumps(self.data)
    try:
        return to_str(self.data)
    except UnicodeDecodeError:
        # binary payloads that cannot be decoded are returned base64-encoded
        return base64.b64encode(self.data)
def push_image(self, docker_image: str) -> None:
    """Push the given Docker image via the docker CLI.

    :raises AccessDenied: if pushing to the registry is denied
    :raises NoSuchImage: if the image does not exist
    :raises RegistryConnectionError: if the registry is not reachable
    :raises ContainerException: for any other error
    """
    cmd = self._docker_cmd() + ["push", docker_image]
    LOG.debug("Pushing image with cmd: %s", cmd)
    try:
        run(cmd)
    except subprocess.CalledProcessError as e:
        output = to_str(e.stdout)
        if "is denied" in output:
            raise AccessDenied(docker_image)
        if "does not exist" in output:
            raise NoSuchImage(docker_image)
        if "connection refused" in output:
            raise RegistryConnectionError(e.stdout)
        raise ContainerException(
            f"Docker process returned with errorcode {e.returncode}", e.stdout, e.stderr
        ) from e
def import_rest_api(
    self,
    context: RequestContext,
    body: Blob,
    fail_on_warnings: Boolean = None,
    parameters: MapOfStringToString = None,
) -> RestApi:
    """Create a new REST API from the given OpenAPI specification body."""
    # the API name is taken from the OpenAPI "info.title" attribute
    spec = parse_json_or_yaml(to_str(body))
    api_name = spec.get("info").get("title")

    create_response = _call_moto(
        context,
        "CreateRestApi",
        CreateRestApiRequest(name=api_name),
    )

    # import the full specification into the freshly created API
    put_request = PutRestApiRequest(
        restApiId=create_response.get("id"),
        failOnWarnings=str_to_bool(fail_on_warnings) or False,
        parameters=parameters or {},
        body=body,
    )
    return _call_moto(context, "PutRestApi", put_request)
def connect_container_to_network(
    self, network_name: str, container_name_or_id: str, aliases: Optional[List] = None
) -> None:
    """Connect a container to the given Docker network via the docker CLI.

    :param network_name: name of the network to connect to
    :param container_name_or_id: container name or ID
    :param aliases: optional network-scoped aliases for the container
    :raises NoSuchNetwork: if the network does not exist
    :raises NoSuchContainer: if the container does not exist
    :raises ContainerException: for any other error
    """
    LOG.debug(
        "Connecting container '%s' to network '%s' with aliases '%s'",
        container_name_or_id,
        network_name,
        aliases,
    )
    cmd = self._docker_cmd()
    cmd += ["network", "connect"]
    if aliases:
        # NOTE(review): docker expects one --alias flag per alias; confirm the
        # CLI accepts a comma-separated list as a single alias value
        cmd += ["--alias", ",".join(aliases)]
    cmd += [network_name, container_name_or_id]
    try:
        run(cmd)
    except subprocess.CalledProcessError as e:
        # chain the original error ("from e") for debuggability, consistent
        # with push_image/tag_image in this class
        stdout_str = to_str(e.stdout)
        if re.match(r".*network (.*) not found.*", stdout_str):
            raise NoSuchNetwork(network_name=network_name) from e
        elif "No such container" in stdout_str:
            raise NoSuchContainer(container_name_or_id, stdout=e.stdout, stderr=e.stderr) from e
        else:
            raise ContainerException(
                "Docker process returned with errorcode %s" % e.returncode, e.stdout, e.stderr
            ) from e
def send_raw_email(
    self,
    context: RequestContext,
    raw_message: RawMessage,
    source: Address = None,
    destinations: AddressList = None,
    from_arn: AmazonResourceName = None,
    source_arn: AmazonResourceName = None,
    return_path_arn: AmazonResourceName = None,
    tags: MessageTagList = None,
    configuration_set_name: ConfigurationSetName = None,
) -> SendRawEmailResponse:
    """Send a raw email, extracting the source address from the message if absent."""
    raw_data = to_str(raw_message["Data"])

    # if no (non-blank) source is given, try to parse it from the raw message
    if source is None or not source.strip():
        LOGGER.debug("Raw email:\n%s\nEOT", raw_data)
        source = self.get_source_from_raw(raw_data)
        if not source:
            LOGGER.warning("Source not specified. Rejecting message.")
            raise MessageRejected()

    message = ses_backend.send_raw_email(source, destinations, raw_data, context.region)
    save_for_retrospection(
        message.id,
        context.region,
        Source=source,
        Destination=destinations,
        RawData=raw_data,
    )
    return SendRawEmailResponse(MessageId=message.id)
def get_template_body(req_data):
    """Return the CloudFormation template body from the given request data.

    The body is taken directly from "TemplateBody" if present, otherwise it is
    downloaded from "TemplateURL" (with a fallback to local S3 for URLs that
    are unreachable or redirect).
    """
    body = req_data.get("TemplateBody")
    if body:
        return body

    url = req_data.get("TemplateURL")
    if not url:
        raise Exception("Unable to get template body from input: %s" % req_data)

    response = run_safe(lambda: safe_requests.get(url, verify=False))
    # check error codes, and code 301 - fixes https://github.com/localstack/localstack/issues/1884
    status_code = 0 if response is None else response.status_code
    if response is not None and status_code != 301 and status_code < 400:
        return response.content

    # check if this is an S3 URL, then get the file directly from there
    local_url = convert_s3_to_local_url(url)
    if is_local_service_url(local_url):
        parsed_path = urlparse(local_url).path.lstrip("/")
        bucket, _, key = parsed_path.partition("/")
        client = aws_stack.connect_to_service("s3")
        LOG.debug(
            "Download CloudFormation template content from local S3: %s - %s", bucket, key
        )
        result = client.get_object(Bucket=bucket, Key=key)
        return to_str(result["Body"].read())
    raise Exception(
        "Unable to fetch template body (code %s) from URL %s" % (status_code, local_url)
    )
def fix_account_id_in_arns(response, colon_delimiter=":", existing=None, replace=None):
    """Fix the account ID in the ARNs returned in the given Flask response or string"""
    existing = existing or ["123456789", "1234567890", "123456789012", MOTO_ACCOUNT_ID]
    if not isinstance(existing, list):
        existing = [existing]
    replace = replace or TEST_AWS_ACCOUNT_ID
    is_str_obj = is_string_or_bytes(response)
    content = to_str(response if is_str_obj else response._content)

    replacement = r"arn{col}aws{col}\1{col}\2{col}{acc}{col}".format(
        col=colon_delimiter, acc=replace
    )
    for account_id in existing:
        pattern = r"arn{col}aws{col}([^:%]+){col}([^:%]*){col}{acc}{col}".format(
            col=colon_delimiter, acc=account_id
        )
        content = re.sub(pattern, replacement, content)

    if is_str_obj:
        return content
    # patch the response in place and keep Content-Length consistent
    response._content = content
    response.headers["Content-Length"] = len(response._content)
    return response
def _adjust_partition(self, source, static_partition: str = None):
    """Recursively adjust the AWS partition in ARNs within the given value."""
    # recurse into containers, adjusting each value individually
    if isinstance(source, dict):
        return {key: self._adjust_partition(value, static_partition) for key, value in source.items()}
    if isinstance(source, list):
        return [self._adjust_partition(value, static_partition) for value in source]
    if isinstance(source, bytes):
        try:
            decoded = unquote(to_str(source))
            return to_bytes(self._adjust_partition(decoded, static_partition))
        except UnicodeDecodeError:
            # if the body can't be decoded to a string, return the initial source
            return source
    if not isinstance(source, str):
        # ignore any other types
        return source
    return self.arn_regex.sub(lambda m: self._adjust_match(m, static_partition), source)
def on_get(self, request):
    """Serve the CloudFormation deploy UI, optionally pre-filled from a template URL."""
    from localstack.utils.aws.aws_stack import get_valid_regions

    path = request.path
    data = request.data
    headers = request.headers
    deploy_html_file = os.path.join(
        constants.MODULE_MAIN_PATH, "services", "cloudformation", "deploy.html"
    )
    deploy_html = load_file(deploy_html_file)
    req_params = parse_request_data("GET", path, data, headers)
    params = {
        "stackName": "stack1",
        "templateBody": "{}",
        "errorMessage": "''",
        "regions": json.dumps(sorted(list(get_valid_regions()))),
    }

    download_url = req_params.get("templateURL")
    if download_url:
        try:
            LOG.debug("Attempting to download CloudFormation template URL: %s", download_url)
            template_body = parse_json_or_yaml(to_str(requests.get(download_url).content))
            params["templateBody"] = json.dumps(template_body)
        except Exception as e:
            msg = f"Unable to download CloudFormation template URL: {e}"
            LOG.info(msg)
            params["errorMessage"] = json.dumps(msg.replace("\n", " - "))

    # using simple string replacement here, for simplicity (could be replaced with, e.g., jinja)
    for key, value in params.items():
        deploy_html = deploy_html.replace(f"<{key}>", value)
    return deploy_html
def fix_xml_empty_boolean(response, tag_names):
    """Replace empty boolean XML values ("None"/"null") with "false" for the given tags."""
    for tag in tag_names:
        # matches e.g. "<Tag> None </Tag>" or "<Tag>null</Tag>"
        pattern = r"<{tag}>\s*([Nn]one|null)\s*</{tag}>".format(tag=tag)
        substitute = r"<{tag}>false</{tag}>".format(tag=tag)
        response._content = re.sub(
            pattern, substitute, to_str(response.content), flags=REGEX_FLAGS
        )
def push_image(self, docker_image: str) -> None:
    """Push the given image via the Docker SDK client.

    :raises NoSuchImage: if the image does not exist locally
    :raises AccessDenied: if pushing to the registry is denied
    :raises RegistryConnectionError: if the registry cannot be reached
    :raises ContainerException: for any other error
    """
    LOG.debug("Pushing Docker image: %s", docker_image)
    try:
        result = self.client().images.push(docker_image)
        # some SDK clients (e.g., 5.0.0) seem to return an error string, instead of raising
        if isinstance(result, (str, bytes)):
            result_str = to_str(result)
            if '"errorDetail"' in result_str:
                if "image does not exist locally" in result_str:
                    raise NoSuchImage(docker_image)
                if "is denied" in result_str:
                    raise AccessDenied(docker_image)
                if "connection refused" in result_str:
                    raise RegistryConnectionError(result)
                raise ContainerException(result)
    except ImageNotFound:
        raise NoSuchImage(docker_image)
    except APIError as e:
        raise ContainerException() from e
def _tail(self, file):
    """Generator that yields lines appended to the given file (via `tail -f`)."""
    process = subprocess.Popen(["tail", "-f", file], stdout=subprocess.PIPE)
    # readline() returns b"" only on EOF, which terminates the stream
    for raw_line in iter(process.stdout.readline, b""):
        yield to_str(raw_line).replace("\n", "")
def is_data_base64_encoded(self):
    """Return True if the payload is binary, i.e., cannot be decoded as unicode."""
    try:
        # dicts/lists are JSON-serializable; anything else must decode as a string
        if isinstance(self.data, (dict, list)):
            json.dumps(self.data)
        else:
            to_str(self.data)
        return False
    except UnicodeDecodeError:
        return True
def get_response_payload(response, as_json=False):
    """Extract the payload from a requests or Flask response object.

    Returns the raw payload ("" for unknown response types), or the parsed
    JSON object if `as_json` is set (empty payloads parse as "{}").
    """
    if isinstance(response, RequestsResponse):
        result = response.content
    elif isinstance(response, FlaskResponse):
        result = response.data
    else:
        result = None
    result = "" if result is None else result
    if as_json:
        return json.loads(to_str(result or "{}"))
    return result
def get_recordable_data(request_data):
    """Convert request data into a base64-encoded string suitable for recording.

    Empty strings/bytes are still encoded; None is passed through unchanged.
    """
    if not request_data and request_data not in ("", b""):
        return request_data
    try:
        request_data = to_bytes(request_data)
    except Exception as ex:
        LOG.warning("Unable to call to_bytes: %s", ex)
    return to_str(base64.b64encode(request_data))
def auth_keys_from_connection(connection: Dict):
    """Build the HTTP auth headers for an EventBridge API destination connection.

    Dispatches on the connection's "AuthorizationType" (BASIC, API_KEY, OAUTH)
    and returns a dict of headers to attach to outgoing requests.

    :param connection: connection definition with AuthorizationType/AuthParameters
    :return: dict of header name -> value
    """
    headers = {}
    auth_type = connection.get("AuthorizationType").upper()
    auth_parameters = connection.get("AuthParameters")
    if auth_type == AUTH_BASIC:
        headers.update(_basic_auth_headers(auth_parameters))
    if auth_type == AUTH_API_KEY:
        headers.update(_api_key_auth_headers(auth_parameters))
    if auth_type == AUTH_OAUTH:
        headers.update(_oauth_auth_headers(auth_parameters))
    return headers


def _basic_auth_headers(auth_parameters) -> Dict:
    """Headers for HTTP basic auth: "Basic " + base64("username:password")."""
    params = auth_parameters.get("BasicAuthParameters", {})
    username = params.get("Username", "")
    password = params.get("Password", "")
    token = base64.b64encode("{}:{}".format(username, password).encode("ascii"))
    return {"authorization": "Basic " + to_str(token)}


def _api_key_auth_headers(auth_parameters) -> Dict:
    """Headers carrying a static API key name/value pair."""
    params = auth_parameters.get("ApiKeyAuthParameters", {})
    return {params.get("ApiKeyName", ""): params.get("ApiKeyValue", "")}


def _oauth_auth_headers(auth_parameters) -> Dict:
    """Fetch an OAuth token from the configured endpoint and return it as header."""
    oauth_parameters = auth_parameters.get("OAuthParameters", {})
    oauth_method = oauth_parameters.get("HttpMethod")
    oauth_http_parameters = oauth_parameters.get("OAuthHttpParameters", {})
    oauth_endpoint = oauth_parameters.get("AuthorizationEndpoint", "")
    query_object = list_of_parameters_to_object(
        oauth_http_parameters.get("QueryStringParameters", [])
    )
    oauth_endpoint = add_query_params_to_url(oauth_endpoint, query_object)

    client_parameters = oauth_parameters.get("ClientParameters", {})
    client_id = client_parameters.get("ClientID", "")
    client_secret = client_parameters.get("ClientSecret", "")

    oauth_body = list_of_parameters_to_object(
        oauth_http_parameters.get("BodyParameters", [])
    )
    # the client credentials are always sent in the token request body
    oauth_body.update({"client_id": client_id, "client_secret": client_secret})
    oauth_header = list_of_parameters_to_object(
        oauth_http_parameters.get("HeaderParameters", [])
    )
    oauth_result = requests.request(
        method=oauth_method,
        url=oauth_endpoint,
        data=json.dumps(oauth_body),
        headers=oauth_header,
    )
    oauth_data = json.loads(oauth_result.text)
    token_type = oauth_data.get("token_type", "")
    access_token = oauth_data.get("access_token", "")
    return {"authorization": "{} {}".format(token_type, access_token)}
def dynamodb_table_exists(table_name, client=None):
    """Return True if the given DynamoDB table exists."""
    client = client or connect_to_service("dynamodb")
    paginator = client.get_paginator("list_tables")
    pages = paginator.paginate(PaginationConfig={"PageSize": 100})
    table_name = to_str(table_name)
    # scan all pages until the table name is found
    return any(table_name in page["TableNames"] for page in pages)
def _handle_test_invoke_method(self, invocation_context):
    """Adjust the invocation context for test-invoke-method API calls."""
    # if call is from test_invoke_api then use http_method to find the integration,
    # as test_invoke_api makes a POST call to request the test invocation
    match = re.match(PATH_REGEX_TEST_INVOKE_API, invocation_context.path)
    invocation_context.method = match[3]
    data = parse_json_or_yaml(to_str(invocation_context.data or b""))
    if data:
        path_with_query_string = data.get("pathWithQueryString", None)
        if path_with_query_string:
            invocation_context.path_with_query_string = path_with_query_string
def download_s3_object(s3, bucket, path):
    """Download an S3 object; return its content as str if decodable, else bytes."""
    with tempfile.SpooledTemporaryFile() as tmpfile:
        s3.Bucket(bucket).download_fileobj(path, tmpfile)
        tmpfile.seek(0)
        raw = tmpfile.read()
    try:
        # best effort: return text content as a string
        return to_str(raw)
    except Exception:
        return raw
def invoke(self, invocation_event: "QueuedInvocation") -> None:
    """Dispatch the given queued invocation to the runtime executor.

    Atomically transitions this environment from READY to RUNNING under the
    status lock before dispatching.

    :param invocation_event: queued invocation holding the invocation id and payload
    :raises InvalidStatusException: if the environment is not in READY state
    """
    with self.status_lock:
        # claim the environment while holding the lock, so two concurrent
        # invokes cannot both observe READY
        if self.status != RuntimeStatus.READY:
            raise InvalidStatusException("Invoke can only happen if status is ready")
        self.status = RuntimeStatus.RUNNING
    # NOTE(review): payload is assumed to be bytes/str decodable to unicode — confirm
    invoke_payload = {
        "invoke-id": invocation_event.invocation_id,
        "payload": to_str(invocation_event.invocation.payload),
    }
    self.runtime_executor.invoke(payload=invoke_payload)
def store_cloudwatch_logs(
    log_group_name,
    log_stream_name,
    log_output,
    start_time=None,
    auto_create_group: Optional[bool] = True,
):
    """Store the given log output as CloudWatch log events.

    :param log_group_name: target log group (created if missing and auto_create_group is set)
    :param log_stream_name: log stream created for this invocation
    :param log_output: raw log output (bytes or str) to split into events
    :param start_time: epoch millis of the first event (default: now)
    :param auto_create_group: whether to ensure the log group exists
    """
    if not is_api_enabled("logs"):
        return
    start_time = start_time or int(time.time() * 1000)
    logs_client = aws_stack.connect_to_service("logs")
    log_output = to_str(log_output)

    if auto_create_group:
        # make sure that the log group exists, create it if not
        _ensure_log_group_exists(logs_client, log_group_name)

    # create a new log stream for this lambda invocation
    try:
        logs_client.create_log_stream(logGroupName=log_group_name, logStreamName=log_stream_name)
    except Exception:
        # TODO: narrow down
        pass

    # store new log events under the log stream
    finish_time = int(time.time() * 1000)
    log_events = _build_log_events(log_output, start_time, finish_time)
    if not log_events:
        return
    logs_client.put_log_events(
        logGroupName=log_group_name, logStreamName=log_stream_name, logEvents=log_events
    )


def _ensure_log_group_exists(logs_client, log_group_name):
    """Create the given log group if it does not exist yet."""
    log_groups = logs_client.describe_log_groups()["logGroups"]
    log_group_names = [lg["logGroupName"] for lg in log_groups]
    if log_group_name not in log_group_names:
        try:
            logs_client.create_log_group(logGroupName=log_group_name)
        except Exception as e:
            if "ResourceAlreadyExistsException" in str(e):
                # this can happen in certain cases, possibly due to a race condition
                pass
            else:
                raise e


def _build_log_events(log_output, start_time, finish_time):
    """Split raw log output into timestamped CloudWatch log events."""
    # fix for log lines that were merged into a single line, e.g., "log line 1 ... \x1b[32mEND RequestId ..."
    log_output = log_output.replace("\\x1b", "\n\\x1b")
    log_output = log_output.replace("\x1b", "\n\x1b")
    log_lines = log_output.split("\n")
    time_diff_per_line = float(finish_time - start_time) / float(len(log_lines))
    log_events = []
    for i, line in enumerate(log_lines):
        if not line:
            continue
        # simple heuristic: assume log lines were emitted in regular intervals
        log_time = start_time + float(i) * time_diff_per_line
        log_events.append({"timestamp": int(log_time), "message": line})
    return log_events
def test_expiration_date_format(self):
    """Check that the STS GetSessionToken Expiration field is numeric (as on AWS)."""
    headers = aws_stack.mock_aws_request_headers("sts")
    headers["Accept"] = APPLICATION_JSON
    payload = {"Action": "GetSessionToken", "Version": "2011-06-15"}
    response = requests.post(config.get_edge_url(), data=payload, headers=headers)
    assert response
    parsed = json.loads(to_str(response.content))
    # Expiration field should be numeric (tested against AWS)
    credentials = parsed["GetSessionTokenResponse"]["GetSessionTokenResult"]["Credentials"]
    assert is_number(credentials["Expiration"])
def extract_version_and_action(path: str, data_bytes: bytes) -> Tuple[str, str]:
    """Extract Version=... and Action=... info from request path and/or data bytes."""

    def _find(str_regex, bytes_regex):
        # prefer a match in the request path, fall back to the request body
        match = str_regex.match(path)
        if not match:
            match = bytes_regex.match(data_bytes)
        return match.group(2) if match else None

    version = to_str(_find(REGEX_VERSION, REGEXB_VERSION) or "") or None
    action = to_str(_find(REGEX_ACTION, REGEXB_ACTION) or "") or None
    return version, action
def tag_image(self, source_ref: str, target_name: str) -> None:
    """Tag an existing Docker image under a new name via the docker CLI.

    :raises NoSuchImage: if the source image does not exist
    :raises ContainerException: for any other error
    """
    cmd = self._docker_cmd() + ["tag", source_ref, target_name]
    LOG.debug("Tagging Docker image %s as %s", source_ref, target_name)
    try:
        run(cmd)
    except subprocess.CalledProcessError as e:
        if "No such image" in to_str(e.stdout):
            raise NoSuchImage(source_ref)
        raise ContainerException(
            f"Docker process returned with error code {e.returncode}", e.stdout, e.stderr
        ) from e
def pull_image(self, docker_image: str) -> None:
    """Pull the given Docker image via the docker CLI.

    :raises NoSuchImage: if pull access is denied (image missing or private)
    :raises ContainerException: for any other error
    """
    cmd = self._docker_cmd()
    cmd += ["pull", docker_image]
    LOG.debug("Pulling image with cmd: %s", cmd)
    try:
        run(cmd)
    except subprocess.CalledProcessError as e:
        if "pull access denied" in to_str(e.stdout):
            raise NoSuchImage(docker_image)
        # chain the original error ("from e") for debuggability, consistent with
        # push_image/tag_image; the f-string renders the identical message text
        raise ContainerException(
            f"Docker process returned with errorcode {e.returncode}", e.stdout, e.stderr
        ) from e