def init_horey_cached_type(self, attr_name, value):
    """
    Restore an automatically cached value from its cache representation.

    @param attr_name: name of the attribute being restored
    @param value: {self.SELF_CACHED_TYPE_KEY_NAME: datetime/region/ip..., "value": value_to_init}
    @return: None
    """
    cached_type = value.get(self.SELF_CACHED_TYPE_KEY_NAME)

    if cached_type == "datetime":
        # Cached format example: '2017-07-26 15:54:10.000000+0000'
        restored = datetime.datetime.strptime(value["value"], "%Y-%m-%d %H:%M:%S.%f%z")
    elif cached_type == "ip":
        restored = IP(value["value"], from_dict=True)
    elif cached_type == "region":
        cached_region = Region()
        cached_region.init_from_dict(value["value"])
        restored = Region.get_region(cached_region.region_mark)
        # Propagate the cached human-readable name, refusing a conflicting one.
        if cached_region.region_name is not None:
            if restored.region_name is None:
                restored.region_name = cached_region.region_name
            elif restored.region_name != cached_region.region_name:
                raise ValueError(f"{restored.region_name} != {cached_region.region_name}")
    else:
        raise ValueError(f"{attr_name} : {value}")

    self.init_default_attr(attr_name, restored)
def test_provision_s3_bucket():
    """Provision the test bucket: private ACL plus a public-read object policy."""
    s3_client = S3Client()

    bucket = S3Bucket({})
    bucket.region = Region.get_region("us-west-2")
    bucket.name = TEST_BUCKET_NAME
    bucket.acl = "private"

    policy = S3Bucket.Policy({})
    policy.version = "2012-10-17"
    policy.statement = [
        {
            "Sid": "AllowReadAny",
            "Effect": "Allow",
            "Principal": "*",
            "Action": "s3:GetObject",
            "Resource": f"arn:aws:s3:::{bucket.name}/*",
        }
    ]
    bucket.policy = policy

    s3_client.provision_bucket(bucket)
def __init__(self, dict_src):
    """
    Build a connection step from its source dict.

    Recognized keys: region_mark, profile, assume_role, external_id.
    Raises NotImplementedError for 'credentials' and for unknown step shapes.
    """
    for attr in ("aws_access_key_id", "aws_secret_access_key", "profile_name",
                 "role_arn", "region", "type", "external_id"):
        setattr(self, attr, None)

    if "region_mark" in dict_src:
        self.region = Region()
        self.region.region_mark = dict_src["region_mark"]

    if "credentials" in dict_src:
        raise NotImplementedError()

    if "profile" in dict_src:
        self.type = AWSAccount.ConnectionStep.Type.PROFILE
        self.profile_name = dict_src["profile"]
    elif "assume_role" in dict_src:
        self.type = AWSAccount.ConnectionStep.Type.ASSUME_ROLE
        self.role_arn = dict_src["assume_role"]
    else:
        raise NotImplementedError(f"Unknown {dict_src}")

    if "external_id" in dict_src:
        self.external_id = dict_src["external_id"]
def region(self):
    """Return the region, lazily resolving it from the ARN's fourth field and caching the result."""
    if self._region is None and self.arn is not None:
        self._region = Region.get_region(self.arn.split(":")[3])
    return self._region
def region(self):
    """
    Return the pre-set region.

    Resolving the region from self.arn is intentionally disabled here:
    calling this without _region already set raises NotImplementedError.
    (The original body kept the ARN-parsing code after the raise, where it
    was unreachable; that dead code is removed.)
    """
    if self._region is not None:
        return self._region
    raise NotImplementedError()
def test_copy_db_cluster_snapshot():
    """Copy a DB cluster snapshot from us-east-1 to us-west-2 under a test id."""
    client = RDSClient()

    source = RDSDBClusterSnapshot({})
    source.region = Region.get_region("us-east-1")
    source.db_cluster_identifier = mock_values["snapshot_src.db_cluster_identifier"]

    destination = RDSDBClusterSnapshot({})
    destination.region = Region.get_region("us-west-2")
    destination.id = "horey-test-snapshot-id"
    destination.tags = [
        {"Key": "lvl", "Value": "tst"},
        {"Key": "name", "Value": destination.id},
    ]

    client.copy_db_cluster_snapshot(source, destination)
def test_provision_lambda_event_source_mapping():
    """Provision an enabled event-source mapping for the test lambda and check its state."""
    mapping = LambdaEventSourceMapping({})
    mapping.region = Region.get_region("us-west-2")
    mapping.function_identification = "horey-test-lambda"
    mapping.event_source_arn = mock_values["lambda_event_source_mapping:event_source_arn"]
    mapping.enabled = True

    aws_api.provision_lambda_event_source_mapping(mapping)
    assert mapping.state == "Enabled"
def main():
    """Return the accounts map: one 'horey_account' reachable through the default AWS profile."""
    account = AWSAccount()
    account.name = "horey_account"  # Human readable name
    account.id = "12345678910"  # Unique ID. Used to name a directory storing the cache data. I use the AWS Account ID.

    # profile name in ~/.aws/credentials
    step = AWSAccount.ConnectionStep({
        "profile": "default",
        "region_mark": "us-east-1"
    })
    account.connection_steps.append(step)

    region = Region()
    region.region_mark = "us-east-1"
    account.regions[region.region_mark] = region

    return {account.id: account}
def set_aws_region(value):
    """
    Set current region to work against.

    Accepts either a Region instance or a region-mark string (resolved
    via Region.get_region).
    :return:
    """
    region = Region.get_region(value) if isinstance(value, str) else value
    if not isinstance(region, Region):
        raise ValueError(f"{region} is not of type Region")
    AWSAccount._CURRENT_REGION = region
def init_role_last_used_attr(self, _, dict_src):
    """
    Init RoleLastUsed - split into its time and region components.

    @param _: unused attribute name (init-options callback signature)
    @param dict_src: AWS RoleLastUsed dict; may be empty/None
    @return: None
    """
    if not dict_src:
        return

    for key, val in dict_src.items():
        if key == "LastUsedDate":
            self.role_last_used_time = val
        elif key == "Region":
            self.role_last_used_region = Region.get_region(val)
        else:
            raise NotImplementedError(key)
def test_provision_certificate():
    """Provision a DNS-validated ACM certificate in us-east-1 and check it was issued."""
    certificate = ACMCertificate({})
    certificate.region = Region.get_region("us-east-1")
    certificate.domain_name = "front.horey.com"
    certificate.validation_method = "DNS"
    certificate.tags = [
        {"Key": "lvl", "Value": "tst"},
        # Wildcard chars are not valid in tag values, so '*' becomes 'star'.
        {"Key": "name", "Value": certificate.domain_name.replace("*", "star")},
    ]

    aws_api.provision_acm_certificate(certificate, "horey.com")
    assert certificate.status == "ISSUED"
def test_provision_aws_lambda_from_filelist():
    """Provision the test lambda from two local source files and check it becomes Active."""
    function = AWSLambda({})
    function.region = Region.get_region("us-west-2")
    function.name = "horey-test-lambda"
    function.role = mock_values["lambda:execution_role"]
    function.handler = "lambda_test.lambda_handler"
    function.runtime = "python3.8"
    function.tags = {"lvl": "tst", "name": "horey-test"}

    here = os.path.dirname(os.path.abspath(__file__))
    files_paths = [os.path.join(here, filename) for filename in ("lambda_test.py", "lambda_test_2.py")]

    aws_api.provision_aws_lambda_from_filelist(function, files_paths, force=True)
    assert function.state == "Active"
def test_provision_lambda():
    """Zip two source files, provision the lambda with an events-invoke policy, check it is Active."""
    packer = Packer()
    aws_api = AWSAPI()

    build_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "build", "files_list_test")
    files_paths = [os.path.join(build_dir, file_name) for file_name in ("dependency_1.py", "entrypoint.py")]
    packer.add_files_to_zip(f"{ZIP_FILE_NAME}.zip", files_paths)

    function = AWSLambda({})
    function.region = Region.get_region("us-west-2")
    function.name = "horey-test-lambda"
    function.role = mock_values["lambda:execution_role"]
    function.handler = "entrypoint.main"
    function.runtime = "python3.8"
    function.tags = {"lvl": "tst", "name": "horey-test"}
    # Resource-based policy letting EventBridge invoke this function.
    function.policy = {
        "Version": "2012-10-17",
        "Id": "default",
        "Statement": [
            {
                "Sid": function.name + "_" + "sid",
                "Effect": "Allow",
                "Principal": {"Service": "events.amazonaws.com"},
                "Action": "lambda:InvokeFunction",
                "Resource": None,
                "Condition": {
                    "ArnLike": {"AWS:SourceArn": mock_values["lambda:policy_events_rule_arn"]}
                },
            }
        ],
    }

    aws_api.provision_aws_lambda(function, force=True)
    assert function.state == "Active"
def test_provision_rule():
    """Provision an enabled 1-minute scheduled rule with one lambda target."""
    rule = EventBridgeRule({})
    rule.name = "rule-alexey-test-trigger-lambda"
    rule.description = "rule-alexey-test-trigger-lambda"
    rule.region = Region.get_region("us-west-2")
    rule.schedule_expression = "rate(1 minute)"
    rule.event_bus_name = "default"
    rule.state = "ENABLED"
    rule.tags = [
        {"Key": "string", "Value": "string"},
    ]

    target = EventBridgeTarget({})
    target.id = "test-alexey-target"
    target.arn = "arn:aws:lambda:us-west-2:xxxxxx:function:horey-test-lambda"
    rule.targets = [target]

    EventsClient().provision_rule(rule)
def __init__(self, dict_src):
    """
    Build a connection step from its source dict.

    Recognized keys: region_mark, profile, assume_role, role ('current' only),
    external_id. Raises NotImplementedError for 'credentials' and for unknown
    step shapes.
    """
    for attr in ("aws_access_key_id", "aws_secret_access_key", "profile_name",
                 "role_arn", "region", "type", "external_id"):
        setattr(self, attr, None)

    if "region_mark" in dict_src:
        self.region = Region.get_region(dict_src["region_mark"])

    if "credentials" in dict_src:
        raise NotImplementedError()

    if "profile" in dict_src:
        logger.info(
            f"Setting connection step type to AWSAccount.ConnectionStep.Type.PROFILE: {dict_src}"
        )
        self.type = AWSAccount.ConnectionStep.Type.PROFILE
        self.profile_name = dict_src["profile"]
    elif "assume_role" in dict_src:
        self.type = AWSAccount.ConnectionStep.Type.ASSUME_ROLE
        self.role_arn = dict_src["assume_role"]
    elif "role" in dict_src:
        # Only the currently assumed role is supported.
        if dict_src["role"] != "current":
            raise ValueError(dict_src["role"])
        logger.info(
            f"Setting connection step type to AWSAccount.ConnectionStep.Type.CURRENT_ROLE: {dict_src}"
        )
        self.type = AWSAccount.ConnectionStep.Type.CURRENT_ROLE
    else:
        raise NotImplementedError(f"Unknown {dict_src}")

    if "external_id" in dict_src:
        self.external_id = dict_src["external_id"]
import pdb from horey.aws_api.aws_clients.events_client import EventsClient from horey.aws_api.aws_services_entities.event_bridge_rule import EventBridgeRule from horey.aws_api.aws_services_entities.event_bridge_target import EventBridgeTarget from horey.aws_api.base_entities.aws_account import AWSAccount from horey.aws_api.base_entities.region import Region AWSAccount.set_aws_region(Region.get_region("us-west-2")) def test_init_lambda_client(): assert isinstance(EventsClient(), EventsClient) def test_get_region_events(): events_client = EventsClient() region_rules = events_client.get_region_rules( Region.get_region("us-east-1")) assert isinstance(region_rules, list) def test_provision_rule(): rule = EventBridgeRule({}) rule.name = "rule-alexey-test-trigger-lambda" rule.description = "rule-alexey-test-trigger-lambda" rule.region = Region.get_region("us-west-2") rule.schedule_expression = "rate(1 minute)" rule.event_bus_name = "default" rule.state = "ENABLED" rule.tags = [
def main():
    """Return the accounts map: a default account plus a staging account reached via role chaining."""
    accounts = {}

    # DEFAULT
    default_account = AWSAccount()
    default_account.name = "horey_account"
    default_account.id = "12345678910"
    default_account.connection_steps.append(
        AWSAccount.ConnectionStep({
            "profile": "default",
            "region_mark": "us-east-1"
        })
    )
    for mark in ("us-east-1", "us-west-2"):
        region = Region()
        region.region_mark = mark
        default_account.regions[region.region_mark] = region
    accounts[default_account.id] = default_account

    # STAGING: profile -> management role -> ec2-management role (with external id)
    staging_account = AWSAccount()
    staging_account.name = "staging_account"
    staging_account.id = "109876543210"
    staging_account.connection_steps.extend([
        AWSAccount.ConnectionStep({
            "profile": "horey_account",
            "region_mark": "us-east-1"
        }),
        AWSAccount.ConnectionStep(
            {"assume_role": "arn:aws:iam::109876543210:role/sts-management-role"}),
        AWSAccount.ConnectionStep({
            "assume_role": f"arn:aws:iam::{staging_account.id}:role/sts-ec2-management-role",
            "external_id": "ABCDE123456"
        }),
    ])
    for mark in ("us-east-1", "us-west-2"):
        region = Region()
        region.region_mark = mark
        staging_account.regions[region.region_mark] = region
    accounts[staging_account.id] = staging_account

    return accounts
import pdb

from horey.aws_api.aws_clients.lambda_client import LambdaClient
from horey.aws_api.aws_services_entities.aws_lambda import AWSLambda
from horey.aws_api.base_entities.aws_account import AWSAccount
from horey.aws_api.base_entities.region import Region

# All tests in this module run against eu-central-1.
AWSAccount.set_aws_region(Region.get_region("eu-central-1"))


def test_init_lambda_client():
    """The client must be constructible."""
    assert isinstance(LambdaClient(), LambdaClient)


def test_get_region_lambdas():
    """Listing the region's lambdas returns a list."""
    client = LambdaClient()
    assert isinstance(client.get_region_lambdas(Region.get_region("eu-central-1")), list)


if __name__ == "__main__":
    # test_init_lambda_client()
    test_get_region_lambdas()
    #test_provision_lambda()
def test_get_region_events():
    """Listing us-east-1 rules returns a list."""
    client = EventsClient()
    rules = client.get_region_rules(Region.get_region("us-east-1"))
    assert isinstance(rules, list)
class S3Bucket(AwsObject):
    """
    Class representing S3 bucket.

    Holds the bucket identity plus lazily populated sub-entities:
    ACL, policy, contained objects, website configuration and location.
    """

    def __init__(self, dict_src, from_cache=False):
        # Sub-entities; filled later by the update_* helpers or cache init.
        self.acl = None
        self.policy = None
        self.bucket_objects = []
        self.region = None
        self.index_document = None
        self.error_document = None
        self.redirect_all_requests_to = None
        self.location = None
        super().__init__(dict_src)
        if from_cache:
            self._init_bucket_from_cache(dict_src)
            return

        init_options = {
            "Name": lambda x, y: self.init_default_attr(x, y, formatted_name="name"),
            "CreationDate": self.init_default_attr
        }

        self.init_attrs(dict_src, init_options)

    def _init_bucket_from_cache(self, dict_src):
        """
        Init the object from saved cache dict.

        :param dict_src: previously cached representation of the bucket
        :return:
        """
        options = {
            "acl": self._init_acl_from_cache,
            "policy": self._init_policy_from_cache,
            "region": self._init_region_from_cache,
        }

        self._init_from_cache(dict_src, options)

    def _init_region_from_cache(self, _, dict_src):
        # Restore self.region from its cached dict; no-op when nothing was cached.
        if dict_src is None:
            return
        self.region = Region()
        self.region.init_from_dict(dict_src)

    def _init_acl_from_cache(self, _, dict_src):
        """
        Init bucket ACL from previously cached dict.

        :param _: unused attribute name (cache-init callback signature)
        :param dict_src:
        :return:
        """
        if dict_src is None:
            return
        # Re-initializing an already-set ACL is not supported.
        if self.acl is None:
            self.acl = S3Bucket.ACL(dict_src, from_cache=True)
        else:
            raise NotImplementedError

    def _init_policy_from_cache(self, _, dict_src):
        """
        Init policy object from previously cached dict.

        :param _: unused attribute name (cache-init callback signature)
        :param dict_src:
        :return:
        """
        # Re-initializing an already-set policy is not supported.
        if self.policy is not None:
            raise NotImplementedError

        if dict_src is not None:
            self.policy = S3Bucket.Policy(dict_src, from_cache=True)

    def update_objects(self, lst_src, from_cache=False):
        """
        Append bucket objects built from the given list of dicts.

        :param lst_src: list of object dicts (API response or cache format)
        :param from_cache: True when lst_src came from the cache
        :return:
        """
        for dict_object in lst_src:
            bucket_object = S3Bucket.BucketObject(dict_object, from_cache=from_cache)
            self.bucket_objects.append(bucket_object)

    def update_acl(self, lst_src):
        """
        Update ACL from AWS API response list.

        :param lst_src:
        :return:
        """
        if self.acl is None:
            self.acl = S3Bucket.ACL(lst_src)
        else:
            raise NotImplementedError()

    def update_policy(self, str_src):
        """
        Update Policy from AWS API response str.

        :param str_src: JSON policy document as a string
        :return:
        """
        if self.policy is None:
            self.policy = S3Bucket.Policy(str_src)
        else:
            raise NotImplementedError()

    def update_website(self, lst_src):
        # Init website-hosting attributes from the (single-element) API response.
        if len(lst_src) > 1:
            raise ValueError(lst_src)

        init_options = {
            "IndexDocument": self.init_default_attr,
            "ErrorDocument": self.init_default_attr,
            "RedirectAllRequestsTo": self.init_default_attr,
            "ResponseMetadata": lambda x, y: 0  # deliberately ignored
        }

        for dict_src in lst_src:
            self.init_attrs(dict_src, init_options)

    def update_location(self, lst_src):
        """
        For more info about this ugly stuff check get_dns_records docstring.

        A None LocationConstraint means the bucket lives in us-east-1.
        """
        if len(lst_src) > 1:
            raise ValueError(lst_src)

        self.location = lst_src[0] if lst_src[0] is not None else "us-east-1"
        return

    def get_dns_records(self):
        """
        If while reading this you say "WHAT????", read here and cry:
        https://docs.aws.amazon.com/general/latest/gr/s3.html#s3_website_region_endpoints
        and this:
        " LocationConstraint (string) -- Specifies the Region where the bucket resides.
        For a list of all the Amazon S3 supported location constraints by Region,
        see Regions and Endpoints . Buckets in Region us-east-1 have a
        LocationConstraint of null . "

        Get all self dns records.
        :return: list with the website endpoint, or [] when website hosting is off
        """
        # Per-region separator between "s3-website" and the region mark
        # in the website endpoint hostname.
        mappings = {
            "us-east-2": ".",
            "us-east-1": "-",
            "us-west-1": "-",
            "us-west-2": "-",
            "af-south-1": ".",
            "ap-east-1": ".",
            "ap-south-1": ".",
            "ap-northeast-3": ".",
            "ap-northeast-2": ".",
            "ap-southeast-1": "-",
            "ap-southeast-2": "-",
            "ap-northeast-1": "-",
            "eu-west-1": "-",
            "sa-east-1": "-",
            "us-gov-west-1": "-",
            "ca-central-1": ".",
            "cn-northwest-1": ".",
            "eu-central-1": ".",
            "eu-west-2": ".",
            "eu-south-1": ".",
            "eu-west-3": ".",
            "eu-north-1": ".",
            "me-south-1": ".",
            "us-gov-east-1": "."
        }

        # No website configuration at all -> no DNS records.
        if self.index_document is None and self.error_document is None and self.redirect_all_requests_to is None:
            return []
        return [
            f"{self.name}.s3-website{mappings[self.location]}{self.location}.amazonaws.com"
        ]

    class ACL(AwsObject):
        """
        Class representing S3 Bucket's ACL
        """

        def __init__(self, src_data, from_cache=False):
            super(S3Bucket.ACL, self).__init__(src_data)
            self.grants = []

            if from_cache:
                # Cache format is a dict; anything else is unsupported.
                if not isinstance(src_data, dict):
                    raise TypeError(
                        "Not implemented - replacement of pdb.set_trace")
                self._init_acl_from_cache(src_data)
                return

            # API format is a list of grant dicts; anything else is unsupported.
            if not isinstance(src_data, list):
                raise TypeError(
                    "Not implemented - replacement of pdb.set_trace")

            for dict_grant in src_data:
                grant = self.Grant(dict_grant)
                self.grants.append(grant)

        def _init_acl_from_cache(self, dict_src):
            """
            Init ACL from previously cached dict.

            :param dict_src:
            :return:
            """
            options = {
                'grants': self._init_grants_from_cache,
            }
            self._init_from_cache(dict_src, options)

        def _init_grants_from_cache(self, _, lst_src):
            """
            Init grants from previously cached list.

            :param _: unused attribute name (cache-init callback signature)
            :param lst_src:
            :return:
            """
            if self.grants:
                raise NotImplementedError("Can reinit yet")
            for dict_grant in lst_src:
                grant = self.Grant(dict_grant, from_cache=True)
                self.grants.append(grant)

        class Grant(AwsObject):
            """
            Class representing S3 bucket policy Grant.
            """

            def __init__(self, dict_src, from_cache=False):
                super(S3Bucket.ACL.Grant, self).__init__(dict_src)

                if from_cache:
                    self._init_grant_from_cache(dict_src)
                    return

                init_options = {
                    "Grantee": self.init_default_attr,
                    "Permission": self.init_default_attr
                }

                self.init_attrs(dict_src, init_options)

            def _init_grant_from_cache(self, dict_src):
                """
                Init grant from previously cached dict.

                :param dict_src:
                :return:
                """
                options = {}

                self._init_from_cache(dict_src, options)

    class Policy(AwsObject):
        """
        Class representing S3 Bucket policy
        """

        def __init__(self, src_, from_cache=False):
            # API format: the policy arrives as a JSON string.
            if isinstance(src_, str):
                dict_src = json.loads(src_)
            else:
                if from_cache:
                    # NOTE(review): this path returns before super().__init__
                    # runs — base-class attributes are never set here; confirm
                    # _init_from_cache covers them.
                    self._init_policy_from_cache(src_)
                    return
                raise NotImplementedError("Not yet implemented")

            super(S3Bucket.Policy, self).__init__(dict_src)
            if from_cache:
                # from_cache with a str source is an unsupported combination.
                raise NotImplementedError("Not yet implemented")

            init_options = {
                "Version": self.init_default_attr,
                "Statement": self.init_default_attr,
                "Id": self.init_default_attr,
            }

            self.init_attrs(dict_src, init_options)

        def _init_policy_from_cache(self, dict_src):
            """
            Init policy from previously cached dict.

            :param dict_src:
            :return:
            """
            options = {}
            try:
                self._init_from_cache(dict_src, options)
            except Exception:
                # Dump the offending cache payload before re-raising.
                print(dict_src)
                raise

    class BucketObject(AwsObject):
        """
        Class representing one saved object in S3 bucket.
        """

        def __init__(self, src_data, from_cache=False):
            self.key = None
            super(S3Bucket.BucketObject, self).__init__(src_data)
            if from_cache:
                self._init_bucket_object_from_cache(src_data)
                return

            init_options = {
                "Key": self.init_default_attr,
                "LastModified": self.init_default_attr,
                "ETag": self.init_default_attr,
                "Size": self.init_default_attr,
                "StorageClass": self.init_default_attr,
            }
            self.init_attrs(src_data, init_options)

        def _init_bucket_object_from_cache(self, dict_src):
            """
            Init object from previously cached dict.

            :param dict_src:
            :return:
            """
            options = {}
            self._init_from_cache(dict_src, options)

            # NOTE(review): presumably the cache nests the raw API dict under
            # "dict_src"; 'size' is only set on this path — verify against the
            # cache writer.
            self.size = dict_src["dict_src"]["Size"]
def _init_region_from_cache(self, _, dict_src): if dict_src is None: return self.region = Region() self.region.init_from_dict(dict_src)
def test_get_region_lambdas():
    """Listing eu-central-1 lambdas returns a list."""
    client = LambdaClient()
    region_lambdas = client.get_region_lambdas(Region.get_region("eu-central-1"))
    assert isinstance(region_lambdas, list)
def test_add_managed_region():
    """A region added as managed must show up in the managed-regions list."""
    managed = Region.get_region("us-west-2")
    aws_api.add_managed_region(managed)
    assert managed in aws_api.get_managed_regions()
import datetime import json import os import pdb from unittest.mock import Mock from horey.aws_api.aws_clients.s3_client import S3Client from horey.aws_api.aws_services_entities.s3_bucket import S3Bucket from horey.aws_api.base_entities.aws_account import AWSAccount from horey.aws_api.base_entities.region import Region AWSAccount.set_aws_region(Region.get_region('us-west-2')) TEST_BUCKET_NAME = "horey-test-bucket" def test_init_s3_client(): assert isinstance(S3Client(), S3Client) def test_provision_s3_bucket(): region = Region.get_region("us-west-2") s3_client = S3Client() s3_bucket = S3Bucket({}) s3_bucket.region = region s3_bucket.name = TEST_BUCKET_NAME s3_bucket.acl = "private" s3_bucket.policy = S3Bucket.Policy({}) s3_bucket.policy.version = "2012-10-17"
def get_cluster_from_arn(cluster_arn):
    """Build a minimal ECSCluster from its ARN: name from the resource suffix, region from field 3."""
    arn_fields = cluster_arn.split(":")
    cluster = ECSCluster({})
    cluster.arn = cluster_arn
    cluster.name = arn_fields[-1].split("/")[-1]
    cluster.region = Region.get_region(arn_fields[3])
    return cluster
def test_add_managed_region():
    """Registering us-west-2 as a managed region must not raise."""
    region = Region.get_region("us-west-2")
    aws_api.add_managed_region(region)