def _cleanup_changesets(changesets: List[str]) -> None:
    """Delete each of the given CloudFormation change sets, best-effort.

    Deletion errors are logged at debug level and never propagated, so this
    is safe to call from teardown/cleanup code.
    """
    for cs in ensure_list(changesets):
        try:
            cfn_client.delete_change_set(ChangeSetName=cs)
        except Exception:
            LOG.debug(f"Failed to cleanup changeset '{cs}'")
def _process_sent_message(path: str, req_data: Dict[str, str], headers: Dict, response: Response):
    """Extract one or multiple messages sent via SendMessage/SendMessageBatch from the
    given request/response data and forward them to the Lambda EventSourceListener
    for further processing"""
    queue_url = _queue_url(path, req_data, headers)
    action = req_data.get("Action")
    # extract data from XML response - assume data is wrapped in 2 parent elements
    response_data = xmltodict.parse(response.content)
    messages = []
    if action == "SendMessage":
        response_data = response_data["SendMessageResponse"]["SendMessageResult"]
        # combine the request attributes (message body etc.) with the response
        # attributes (message ID, MD5 checksums) into a single message dict
        message = clone(req_data)
        message.update(response_data)
        messages.append(message)
    elif action == "SendMessageBatch":
        response_data = response_data["SendMessageBatchResponse"]["SendMessageBatchResult"]
        # reconstruct the per-entry request payloads from the urlencoded form data
        messages = parse_urlencoded_data(req_data, "SendMessageBatchRequestEntry")
        # Note: only forwarding messages from 'Successful', not from 'Failed' list entries
        entries = response_data.get("SendMessageBatchResultEntry") or []
        entries = ensure_list(entries)
        for successful in entries:
            # merge each successful result into its request entry, matched by Id
            # NOTE(review): assumes every successful result Id has a matching
            # request entry - an unmatched Id would raise IndexError here
            msg = [m for m in messages if m["Id"] == successful["Id"]][0]
            msg.update(successful)
    event = {
        "QueueUrl": queue_url,
        "Messages": messages,
    }
    EventSourceListener.process_event_via_listener("sqs", event)
def _cleanup_stacks(stacks: List[str]) -> None:
    """Delete each of the given CloudFormation stacks, best-effort.

    Deletion errors are logged at debug level and never propagated, so this
    is safe to call from teardown/cleanup code.
    """
    for stack in ensure_list(stacks):
        try:
            cfn_client.delete_stack(StackName=stack)
        except Exception:
            LOG.debug(f"Failed to cleanup stack '{stack}'")
def delete_all_s3_objects(buckets):
    """Delete all objects in the given S3 bucket(s).

    :param buckets: a single bucket name, or a list of bucket names
    """
    s3_client = aws_stack.connect_to_service("s3")
    for bucket in ensure_list(buckets):
        keys = all_s3_object_keys(bucket)
        deletes = [{"Key": key} for key in keys]
        # the S3 DeleteObjects API accepts at most 1000 keys per request,
        # so send the deletions in chunks (no-op for an empty bucket)
        for start in range(0, len(deletes), 1000):
            chunk = deletes[start : start + 1000]
            s3_client.delete_objects(Bucket=bucket, Delete={"Objects": chunk})
def _post_create(resource_id, resources, resource_type, func, stack_name):
    """attaches managed policies from the template to the role"""
    from localstack.utils.cloudformation.template_deployer import (
        find_stack,
        resolve_refs_recursively,
    )

    iam = aws_stack.connect_to_service("iam")
    resource = resources[resource_id]
    props = resource["Properties"]
    role_name = props["RoleName"]

    # attach managed policies
    policy_arns = props.get("ManagedPolicyArns", [])
    for arn in policy_arns:
        iam.attach_role_policy(RoleName=role_name, PolicyArn=arn)

    # TODO: to be removed once we change the method signature to pass in the stack object directly!
    stack = find_stack(stack_name)

    # add inline policies
    inline_policies = props.get("Policies", [])
    for policy in inline_policies:
        assert not isinstance(
            policy, list
        )  # remove if this doesn't make any problems for a while
        if policy == PLACEHOLDER_AWS_NO_VALUE:
            # skip entries marked with the AWS::NoValue placeholder
            continue
        if not isinstance(policy, dict):
            LOG.info('Invalid format of policy for IAM role "%s": %s', props.get("RoleName"), policy)
            continue
        pol_name = policy.get("PolicyName")
        # get policy document - make sure we're resolving references in the policy doc
        doc = dict(policy["PolicyDocument"])
        doc = resolve_refs_recursively(stack, doc)
        # IAM requires a Version field; fall back to the default policy version
        doc["Version"] = doc.get("Version") or IAM_POLICY_VERSION
        statements = ensure_list(doc["Statement"])
        for statement in statements:
            if isinstance(statement.get("Resource"), list):
                # filter out empty resource strings
                statement["Resource"] = [r for r in statement["Resource"] if r]
        doc = json.dumps(doc)
        iam.put_role_policy(
            RoleName=props["RoleName"],
            PolicyName=pol_name,
            PolicyDocument=doc,
        )
def _post_create(resource_id, resources, resource_type, func, stack_name):
    """attaches managed policies from the template to the role"""
    iam = aws_stack.connect_to_service("iam")
    resource = resources[resource_id]
    props = resource["Properties"]
    role_name = props["RoleName"]

    # attach managed policies
    policy_arns = props.get("ManagedPolicyArns", [])
    for arn in policy_arns:
        iam.attach_role_policy(RoleName=role_name, PolicyArn=arn)

    # add inline policies
    inline_policies = props.get("Policies", [])
    for policy in inline_policies:
        assert not isinstance(
            policy, list
        )  # remove if this doesn't make any problems for a while
        if policy == PLACEHOLDER_AWS_NO_VALUE:
            # skip entries marked with the AWS::NoValue placeholder
            continue
        if not isinstance(policy, dict):
            # use lazy %-style logger args (consistent with the sibling
            # implementation above; avoids formatting when INFO is disabled)
            LOG.info('Invalid format of policy for IAM role "%s": %s', props.get("RoleName"), policy)
            continue
        pol_name = policy.get("PolicyName")
        doc = dict(policy["PolicyDocument"])
        # IAM requires a Version field; fall back to the default policy version
        doc["Version"] = doc.get("Version") or IAM_POLICY_VERSION
        statements = ensure_list(doc["Statement"])
        for statement in statements:
            if isinstance(statement.get("Resource"), list):
                # filter out empty resource strings
                statement["Resource"] = [r for r in statement["Resource"] if r]
        doc = json.dumps(doc)
        iam.put_role_policy(
            RoleName=props["RoleName"],
            PolicyName=pol_name,
            PolicyDocument=doc,
        )
def map_all_s3_objects(to_json: bool = True, buckets: List[str] = None) -> Dict[str, Any]:
    """Return a dict mapping '<bucket>/<key>' to object content for all S3 objects.

    If `to_json` is set, object bodies are parsed as JSON; objects whose bodies
    fail to parse (non-JSON or binary content) are silently skipped. If
    `buckets` is not given, all buckets are scanned.
    """
    s3 = aws_stack.connect_to_resource("s3")
    result = {}
    bucket_names = ensure_list(buckets)
    if bucket_names:
        bucket_iter = [s3.Bucket(name) for name in bucket_names]
    else:
        bucket_iter = s3.buckets.all()
    for bucket in bucket_iter:
        for obj in bucket.objects.all():
            value = download_s3_object(s3, obj.bucket_name, obj.key)
            try:
                if to_json:
                    value = json.loads(value)
                separator = "" if obj.key.startswith("/") else "/"
                result["%s%s%s" % (obj.bucket_name, separator, obj.key)] = value
            except Exception:
                # skip non-JSON or binary objects
                pass
    return result