class Athena:
    """Helper around the boto3 Athena client: start queries, poll for completion
    and fetch results (as a CSV location, as row dicts, or as JSON)."""

    def __init__(self):
        self.aws_athena = Session().client('athena')

    def execute_query_and_return_csv(self, query, database, s3_output):
        """Run `query`, block until it finishes, and return the S3 location of the result CSV."""
        query_id = self.start_query(query, database, s3_output)
        self.wait_for_query(query_id)
        return self.query_csv(query_id)

    def query_csv(self, query_id):
        """Return the S3 OutputLocation (the CSV file) of an executed query."""
        details = self.query_details(query_id)
        return details.get('QueryExecution').get('ResultConfiguration').get('OutputLocation')

    def query_details(self, query_id):
        """Return the raw get_query_execution response for `query_id`."""
        return self.aws_athena.get_query_execution(QueryExecutionId=query_id)

    def query_status(self, query_id):
        """Return the query state: QUEUED, RUNNING, SUCCEEDED, FAILED or CANCELLED."""
        execution = self.aws_athena.get_query_execution(QueryExecutionId=query_id)   # fixed local-variable typo 'exectution'
        return execution.get('QueryExecution').get('Status').get('State')

    def start_query(self, query, database, s3_output):
        """Start an Athena query and return its QueryExecutionId."""
        response = self.aws_athena.start_query_execution(
                        QueryString           = query,
                        QueryExecutionContext = {'Database': database},
                        ResultConfiguration   = {'OutputLocation': s3_output})
        return response.get('QueryExecutionId')

    def query_as_json(self, query_id):
        """Return query_results(query_id) serialised as JSON.

        Note: query_results returns a (rows, next_token) tuple, so the JSON
        produced is a 2-element array."""
        return json.dumps(self.query_results(query_id))

    def query_results(self, query_id, next_Token=None):
        """Return one page of results as (list of {column: value} dicts, NextToken).

        The parameter name `next_Token` is kept as-is for callers that pass it
        as a keyword argument."""
        if next_Token is None:
            results = self.aws_athena.get_query_results(QueryExecutionId=query_id)
        else:
            results = self.aws_athena.get_query_results(QueryExecutionId=query_id, NextToken=next_Token)
        headers = [h['Name'] for h in results['ResultSet']['ResultSetMetadata']['ColumnInfo']]
        query_results = []
        for i, row in enumerate(results['ResultSet']['Rows']):
            if i == 0 and next_Token is None:       # first row of the first page is the header row
                continue
            row_data = {}
            for j, value in enumerate(row['Data']):
                row_data[headers[j]] = value['VarCharValue']
            query_results.append(row_data)
        return query_results, results.get('NextToken')

    def wait_for_query(self, query_id):
        """Block until the query reaches a terminal state, polling every 200 ms."""
        while True:
            status = self.query_status(query_id)
            if status in ['SUCCEEDED', 'FAILED', 'CANCELLED']:
                return self
            time.sleep(0.2)                         # 200 ms between polls (comment fixed: was '200,s')
class test_Session(TestCase):
    # Tests for the Session wrapper.
    # NOTE: test_client_boto mutates the global profile name, so the order of
    # its statements matters (the last line restores the global state).

    def setUp(self):
        # fresh Session wrapper for every test
        self.session = Session()

    def test_client(self):
        # unknown service names return None; known ones return the project wrapper class
        assert self.session.client('aaaa') is None
        assert type(self.session.client('s3')).__name__ == 'S3'

    def test_client_boto(self):
        # an invalid service name surfaces boto3's error text in the 'data' field
        assert "Unknown service: 'aaaa'. Valid service names are:" in self.session.client_boto3(
            'aaaa').get('data')
        Globals.aws_session_profile_name = 'bad_profile'  # NOTE(review): presumably falls back to default credentials — confirm
        assert type(
            self.session.client_boto3('s3').get('client')).__name__ == 'S3'
        Globals.aws_session_profile_name = 'default'      # restore global state for other tests
class Ec2:
    """Thin wrapper over the EC2 client and resource APIs."""

    def __init__(self):
        self.ec2          = Session().client('ec2')
        self.ec2_resource = boto3.resource('ec2')

    def instances_details(self):
        """Return {instance_id: summary dict} for every EC2 instance."""
        details = {}
        for instance in self.ec2_resource.instances.all():
            details[instance.instance_id] = {'cpus'         : instance.cpu_options['CoreCount'],
                                             'image_id'     : instance.image_id               ,
                                             'instance_type': instance.instance_type          ,
                                             'public_ip'    : instance.public_ip_address      ,
                                             'state'        : instance.state                  ,
                                             'tags'         : instance.tags                   }
        return details

    def security_groups(self):
        """Return security groups indexed by GroupId."""
        items = self.ec2.describe_security_groups().get('SecurityGroups')
        return {item.get('GroupId'): item for item in items}

    def subnets(self):
        """Return subnets indexed by SubnetId."""
        items = self.ec2.describe_subnets().get('Subnets')
        return {item.get('SubnetId'): item for item in items}

    def vpcs(self):
        """Return VPCs indexed by VpcId."""
        items = self.ec2.describe_vpcs().get('Vpcs')
        return {item.get('VpcId'): item for item in items}
class test_Session(Unit_Test):
    # Tests for the Session wrapper.
    # NOTE: test_client_boto3 mutates the global AWS profile via AWS_Config, so
    # statement order matters (the last line restores the global state).

    def setUp(self):
        super().setUp()
        self.session = Session()

    def test_client(self):
        # unknown service names return None; known ones return the project wrapper class
        assert self.session.client('aaaa') is None
        assert type(self.session.client('s3')).__name__ == 'S3'

    def test_client_boto3(self):
        # an invalid service name surfaces boto3's error text in the 'data' field
        assert "Unknown service: 'aaaa'. Valid service names are:" in self.session.client_boto3(
            'aaaa').get('data')
        AWS_Config().set_aws_session_profile_name('bad_profile')  # NOTE(review): presumably falls back to default credentials — confirm
        assert type(
            self.session.client_boto3('s3').get('client')).__name__ == 'S3'
        AWS_Config().set_aws_session_profile_name('default')      # restore global state for other tests

    def test_profiles(self):
        profiles = self.session.profiles()
        if len(set(profiles)) > 0:
            pass
        # todo: add tests when there are credentials configured at ~/.aws/credentials
        #assert set(self.session.profiles().get('default')) == {'aws_access_key_id', 'aws_secret_access_key', 'region'}

    @pytest.mark.skip('Fix test')
    def test_session(self):
        # 1) the session must honour the configured profile and region;
        # 2) a temp IAM user's freshly-created access key must yield a boto3 session
        #    whose STS caller identity matches that user.
        assert self.session.session().profile_name == AWS_Config(
        ).aws_session_profile_name()
        assert self.session.session().region_name == AWS_Config(
        ).aws_session_region_name()
        with Temp_User() as temp_user:
            iam = temp_user.iam
            user_info = iam.user_info()
            access_key = iam.user_access_key_create(wait_for_key_working=True)
            aws_access_key_id = access_key.get('AccessKeyId')
            aws_secret_access_key = access_key.get('SecretAccessKey')
            session = boto3.Session(
                aws_access_key_id=aws_access_key_id,
                aws_secret_access_key=aws_secret_access_key)
            user_identity = session.client('sts').get_caller_identity()
            assert user_identity.get('UserId') == user_info.get('UserId')
            assert user_identity.get('Arn') == user_info.get('Arn')
            self.result = user_identity
def s3_resource(self):
    """Return a boto3 S3 resource obtained through the project Session."""
    session = Session()
    return session.resource('s3')
class Cloud_Trail(Boto_Helpers):
    """Helper around the CloudTrail client: query events, read trail log files
    from S3, and create/configure/start/stop trails."""

    def __init__(self):
        self.cloudtrail = Session().client('cloudtrail')
        self.s3         = S3()

    # helpers
    def date_now(self):
        """Current UTC time (naive datetime, as expected by the lookup APIs)."""
        return datetime.datetime.utcnow()

    def date_minutes_ago(self, minutes):
        """UTC time `minutes` minutes in the past."""
        return datetime.datetime.utcnow() - datetime.timedelta(minutes=minutes)

    def event_selectors(self, trail_name):
        """Return the EventSelectors configured on the trail."""
        return self.cloudtrail.get_event_selectors(TrailName=trail_name).get('EventSelectors')

    def events(self, start_time, end_time, max_results=50):
        """Return up to `max_results` events in the [start_time, end_time] window."""
        return self.cloudtrail.lookup_events(LookupAttributes=[], StartTime=start_time,
                                             EndTime=end_time, MaxResults=max_results).get('Events')

    def events_all(self, start_time, end_time):
        """Return all events in the window, walking every page via the paginator."""
        return Boto_Helpers.invoke_using_paginator(self.cloudtrail, 'lookup_events', 'Events',
                                                   LookupAttributes=[],
                                                   StartTime=start_time, EndTime=end_time)

    def events_in_last(self, minutes):
        """Return all events from the last `minutes` minutes."""
        end_time   = self.date_now()
        start_time = self.date_minutes_ago(minutes)
        return self.events_all(start_time, end_time)

    def tags(self, resource_ids=None):
        """List tags for the given resource ids.

        (fixed: mutable default argument `resource_ids=[]`)"""
        return self.cloudtrail.list_tags(ResourceIdList=resource_ids or [])

    def log_files(self, trail_name, account_id, region, year, month, day):
        """Return the S3 keys of the trail's CloudTrail log files for one day."""
        trail         = self.trail(trail_name)
        log_type      = 'CloudTrail'
        s3_bucket     = trail.get('S3BucketName')
        s3_key_prefix = trail.get('S3KeyPrefix')
        s3_prefix     = f'{s3_key_prefix}/AWSLogs/{account_id}/' \
                        f'{log_type}/{region}/{year}/{month}/{day}/'
        s3_files      = self.s3.find_files(s3_bucket, prefix=s3_prefix)
        return s3_files

    def log_files_records(self, trail_name, log_files):
        """Download the gzipped log files and return the combined list of Records."""
        trail     = self.trail(trail_name)
        s3_bucket = trail.get('S3BucketName')
        records   = []
        for log_file in log_files:
            contents = self.s3.file_contents_from_gzip(s3_bucket, log_file)
            records.extend(json.loads(contents).get('Records'))
        return records

    def trail(self, trail_name):
        """Return the trail's details merged with its current status."""
        trail        = self.cloudtrail.get_trail(Name=trail_name).get('Trail')
        trail_status = self.cloudtrail.get_trail_status(Name=trail_name)   # also get the trail status
        trail.update(trail_status)                                         # add its values to the trail object
        trail.pop('ResponseMetadata', None)   # drop unneeded metadata (fixed: `del` raised KeyError when absent)
        return trail

    def trail_create(self, trail_name, account_id, region, s3_bucket, s3_key_prefix):
        """Create+configure the S3 bucket, create the trail, start logging, return details."""
        self.trail_s3_bucket_configure(trail_name, s3_bucket, account_id, region)  # create and configure bucket
        self.trail_create_raw(trail_name, s3_bucket, s3_key_prefix)                # create trail
        self.trail_start(trail_name)                                               # enable it
        return self.trail(trail_name)                                              # return trail details

    def trail_create_raw(self, trail_name, s3_bucket, s3_key_prefix, sns_topic_name=None,
                         global_service_events=True, multi_region_trail=True,
                         log_file_verification=True):
        """Call create_trail; returns the raw response or {'error': ...} on failure."""
        params = {'Name'                      : trail_name,
                  'S3BucketName'              : s3_bucket,
                  'S3KeyPrefix'               : s3_key_prefix,
                  'IncludeGlobalServiceEvents': global_service_events,
                  'IsMultiRegionTrail'        : multi_region_trail,
                  'EnableLogFileValidation'   : log_file_verification}
        if sns_topic_name:
            params['SnsTopicName'] = sns_topic_name
        try:
            return self.cloudtrail.create_trail(**params)
        except Exception as error:
            return {'error': f'{error}'}

    def trail_delete(self, trail_name):
        """Delete the trail; returns self for chaining."""
        self.cloudtrail.delete_trail(Name=trail_name)
        return self

    def trail_status(self, trail_name):
        return self.cloudtrail.get_trail_status(Name=trail_name)

    def trail_s3_bucket_configure(self, trail_name, s3_bucket, account_id, region):
        """Create the bucket (if needed) and attach the bucket policy CloudTrail requires.

        see https://docs.aws.amazon.com/awscloudtrail/latest/userguide/create-s3-bucket-policy-for-cloudtrail.html"""
        self.s3.bucket_create(s3_bucket, region)                                       # make sure bucket exists
        resource_arn = self.s3.policy_statements__resource_arn(s3_bucket, trail_name, account_id)
        statements   = self.s3.policy_statements__without(s3_bucket, 'Resource',
                                                          resource_arn)                # avoid adding the same resource_arn twice
        statement    = self.s3.policy_statements__new('s3:PutObject', 'Allow',
                                                      'cloudtrail.amazonaws.com', resource_arn)
        statements.append(statement)
        if 's3:GetBucketAcl' not in self.s3.policy_statements(s3_bucket, index_by='Action'):
            statements.append(self.s3.policy_statements__new('s3:GetBucketAcl', 'Allow',
                                                             'cloudtrail.amazonaws.com',
                                                             f"arn:aws:s3:::{s3_bucket}"))
        self.s3.policy_create(s3_bucket, statements)                                   # add cloud trail policy
        return self

    def trail_start(self, trail_name):
        """Start logging on the trail; returns self for chaining."""
        self.cloudtrail.start_logging(Name=trail_name)
        return self

    def trail_stop(self, trail_name):
        return self.cloudtrail.stop_logging(Name=trail_name)

    def trails(self):
        return self.cloudtrail.list_trails().get('Trails')
def __init__(self):
    """Create the boto3 API Gateway client via the project Session."""
    session = Session()
    self.api_gateway = session.client('apigateway')
def firehose(self):
    """Return a boto3 Firehose client created via the project Session."""
    session = Session()
    return session.client('firehose')
def acm(self):
    """Return the cached ACM client, creating it on first use."""
    if self._acm is not None:
        return self._acm
    self._acm = Session().client('acm')
    return self._acm
def __init__(self, account_id):
    """Store the target account id and create the ECS client."""
    self.account_id = account_id
    self.ecs        = Session().client('ecs')
class Fargate:
    """Helper for running Fargate tasks on ECS: clusters, task definitions,
    task execution/polling, and the IAM roles the tasks need."""

    def __init__(self, account_id):
        self.ecs        = Session().client('ecs')
        self.account_id = account_id

    def cluster_create(self, cluster_name):
        return self.ecs.create_cluster(clusterName=cluster_name)

    def cluster_delete(self, cluster_arn):
        return self.ecs.delete_cluster(cluster=cluster_arn)

    def clusters(self):
        return self.ecs.list_clusters().get('clusterArns')

    def task_get_log_details(self, task_name):
        """Return (log_group_name, region, stream_prefix) for the task, creating
        the CloudWatch log group if it does not exist.

        note: region and stream prefix are hardcoded — consider parameterising."""
        log_group_name          = "awslogs-{0}".format(task_name)
        log_group_region        = "eu-west-2"
        log_group_stream_prefix = "awslogs-example"
        cloud_watch = Cloud_Watch()
        if cloud_watch.log_group_exists(log_group_name) is False:
            cloud_watch.log_group_create(log_group_name)
        return log_group_name, log_group_region, log_group_stream_prefix

    def task_create(self, task_name, task_role_arn, execution_role_arn):
        """Register a Fargate task definition; returns it, or the error string on failure."""
        (log_group_name, log_group_region,
         log_group_stream_prefix) = self.task_get_log_details(task_name)
        kwargs = {'family'          : task_name,
                  'taskRoleArn'     : task_role_arn,
                  'executionRoleArn': execution_role_arn,
                  'cpu'             : "1024",
                  'memory'          : "2048",
                  'networkMode'     : 'awsvpc',
                  "containerDefinitions": [{
                      'name' : 'gs-docker-codebuild',
                      'image': '{0}.dkr.ecr.eu-west-2.amazonaws.com/gs-docker-codebuild:latest'
                               .format(self.account_id),
                      "logConfiguration": {
                          "logDriver": "awslogs",
                          "options"  : {"awslogs-group"        : log_group_name,
                                        "awslogs-region"       : log_group_region,
                                        "awslogs-stream-prefix": log_group_stream_prefix}},
                  }],
                  "requiresCompatibilities": ['FARGATE']}
        try:
            return self.ecs.register_task_definition(**kwargs).get('taskDefinition')
        except Exception as error:
            return "{0}".format(error)

    def task_run(self, cluster, task_arn, subnet_id, security_group):
        """Run the task on Fargate with a public IP; returns the task dict, or the error string."""
        kwargs = {'cluster'       : cluster,
                  'taskDefinition': task_arn,
                  'launchType'    : 'FARGATE',
                  'networkConfiguration': {
                      'awsvpcConfiguration': {'subnets'       : [subnet_id],
                                              'securityGroups': [security_group],
                                              'assignPublicIp': 'ENABLED'}},
                  }
        try:
            return self.ecs.run_task(**kwargs).get('tasks')[0]
        except Exception as error:
            return "{0}".format(error)

    def task_delete(self, task_arn):
        return self.ecs.deregister_task_definition(taskDefinition=task_arn)

    def task_details(self, cluster, task_arn):
        result = self.ecs.describe_tasks(cluster=cluster, tasks=[task_arn])
        return result.get('tasks').pop()

    def task_wait_for_completion(self, cluster, task_arn, sleep_for=1, max_attempts=30, log_status=False):
        """Poll until the task reaches DEPROVISIONING or STOPPED (or attempts run out);
        returns the last task details seen."""
        build_info = None                                       # fixed: NameError when max_attempts <= 0
        for i in range(0, max_attempts):
            build_info  = self.task_details(cluster, task_arn)
            last_status = build_info.get('lastStatus')          # fixed: local name was 'last_Status'
            if log_status:
                Dev.pprint("[{0}] {1}".format(i, last_status))
            if last_status in ('DEPROVISIONING', 'STOPPED'):    # membership test instead of or-chain
                return build_info
            sleep(sleep_for)
        return build_info

    def tasks(self):
        return self.ecs.list_task_definitions().get('taskDefinitionArns')

    def policy_create_for_task_role(self, role_name):
        """Create a role that ecs-tasks can assume (the task role)."""
        policy = {"Version"  : "2008-10-17",
                  "Statement": [{"Effect"   : "Allow",
                                 "Principal": {"Service": "ecs-tasks.amazonaws.com"},
                                 "Action"   : "sts:AssumeRole"}]}
        IAM(role_name=role_name).role_create(policy)

    def policy_create_for_execution_role(self, role_name):
        """Create the execution role with ECR pull + CloudWatch Logs permissions."""
        region          = 'eu-west-2'                           # note: hardcoded region
        cloud_watch_arn = "arn:aws:logs:{0}:{1}:log-group:awslogs-*".format(region, self.account_id)
        role_policy = {"Version"  : "2008-10-17",
                       "Statement": [{"Effect"   : "Allow",
                                      "Principal": {"Service": "ecs-tasks.amazonaws.com"},
                                      "Action"   : "sts:AssumeRole"}]}
        policy = {"Version"  : "2012-10-17",
                  "Statement": [{"Effect": "Allow",
                                 "Action": ["ecr:GetAuthorizationToken",
                                            "ecr:BatchCheckLayerAvailability",
                                            "ecr:GetDownloadUrlForLayer",
                                            "ecr:GetRepositoryPolicy",
                                            "ecr:DescribeRepositories",
                                            "ecr:ListImages",
                                            "ecr:DescribeImages",
                                            "ecr:BatchGetImage"],
                                 "Resource": "*"},
                                {"Effect"  : "Allow",
                                 "Action"  : ["logs:CreateLogStream", "logs:PutLogEvents"],
                                 "Resource": [cloud_watch_arn]}]}
        policy_name = 'policy_for_{0}'.format(role_name)
        iam = IAM(role_name=role_name)
        iam.role_create(role_policy)
        iam.policy_delete(policy_name)      # recreate the policy from scratch
        policy_arn = iam.policy_create(policy_name, policy).get('policy_arn')
        iam.role_policy_attach(policy_arn)
def client(self):
    """Return the cached CloudWatch Logs client, creating it on first use."""
    if self._logs is not None:
        return self._logs
    self._logs = Session().client('logs')
    return self._logs
def route_53_domains(self):
    """Return the cached Route53 Domains client (us-east-1 only service), creating it on first use."""
    if self._route_53_domains is not None:
        return self._route_53_domains
    self._route_53_domains = Session().client('route53domains', region_name='us-east-1')
    return self._route_53_domains
def route_53(self):
    """Return the cached Route53 client (pinned to us-east-1), creating it on first use."""
    if self._route_53 is not None:
        return self._route_53
    self._route_53 = Session().client('route53', region_name='us-east-1')
    return self._route_53
def s3_resource(self):                      # todo refactor this method to be called resource()
    """Return a boto3 S3 resource obtained through the project Session."""
    session = Session()
    return session.resource('s3')
def s3(self):                               # todo refactor this method to be called client()
    """Return a boto3 S3 client obtained through the project Session."""
    session = Session()
    return session.client('s3')
def setUp(self):
    # shared fixture: run the base-class setup, then give every test a fresh Session wrapper
    super().setUp()
    self.session = Session()
class CodeBuild:
    # Helper around the CodeBuild client, bound to one project name and one IAM role.

    def __init__(self, project_name, role_name):
        self.codebuild    = Session().client('codebuild')
        self.iam          = IAM(role_name=role_name)
        self.project_name = project_name
        return

    def _invoke_via_paginator(self, method, field_id, use_paginator, **kwargs):
        # Generator over the values of `field_id` across pages of `method`.
        # When use_paginator is False it stops after the FIRST page.
        paginator = self.codebuild.get_paginator(method)
        for page in paginator.paginate(**kwargs):
            for id in page.get(field_id):
                yield id
            if use_paginator is False:
                return

    def all_builds_ids(self, use_paginator = False):
        # ids of all builds (first page only unless use_paginator is True)
        return self._invoke_via_paginator('list_builds','ids',use_paginator)

    def build_info(self, build_id):
        # details for one build id (None when not found)
        builds = self.codebuild.batch_get_builds(ids=[build_id]).get('builds')
        return Misc.array_pop(builds,0)

    def build_start(self):
        # start a build of self.project_name and return the new build's arn
        kvargs = { 'projectName': self.project_name }
        return self.codebuild.start_build(**kvargs).get('build').get('arn')

    def build_wait_for_completion(self, build_id, sleep_for=0.5, max_attempts=20, log_status=False):
        # poll until the build leaves IN_PROGRESS; returns its info, or None on timeout
        for i in range(0,max_attempts):
            build_info    = self.build_info(build_id)
            build_status  = build_info.get('buildStatus')
            current_phase = build_info.get('currentPhase')
            if log_status:
                Dev.pprint("[{0}] {1} {2}".format(i,build_status,current_phase))
            if build_status != 'IN_PROGRESS':
                return build_info
            sleep(sleep_for)
        return None

    def policies_create(self, policies):                # does not update, only add new ones
        # create each policy not already attached to the role; returns the new policy arns
        policies_arns = []
        role_policies = list(self.iam.role_policies().keys())
        for base_name, policy in policies.items():
            policy_name = "{0}_{1}".format(base_name, self.project_name)
            if policy_name in role_policies:
                continue
            policies_arns.append(self.iam.policy_create(policy_name,policy).get('policy_arn'))
        return policies_arns

    def project_builds(self,ids):
        return self.codebuild.batch_get_builds(ids=ids)

    def project_create(self, project_repo, service_role):
        # create the CodeBuild project from a GitHub repo, no artifacts, Linux python image
        kvargs = { 'name'       : self.project_name,
                   'source'     : {'type': 'GITHUB', 'location': project_repo},
                   'artifacts'  : {'type': 'NO_ARTIFACTS'},
                   'environment': {'type': 'LINUX_CONTAINER',
                                   'image': 'aws/codebuild/python:3.7.1-1.7.0',
                                   'computeType': 'BUILD_GENERAL1_SMALL'},
                   'serviceRole': service_role }
        return self.codebuild.create_project(**kvargs)

    def project_delete(self):
        # returns True only when the project existed and is now gone
        if self.project_exists() is False:
            return False
        self.codebuild.delete_project(name=self.project_name)
        return self.project_exists() is False

    def project_exists(self):
        return self.project_name in self.projects()

    def project_info(self):
        projects = Misc.get_value(self.codebuild.batch_get_projects(names=[self.project_name]),'projects',[])
        return Misc.array_pop(projects,0)

    def project_builds_ids(self, project_name, use_paginator=False):
        # build ids for a project; without the paginator only the newest page, sorted DESCENDING
        if use_paginator:
            kwargs = { 'projectName' : project_name }
        else:
            kwargs = { 'projectName' : project_name ,
                       'sortOrder'   : 'DESCENDING' }
        return self._invoke_via_paginator('list_builds_for_project', 'ids',use_paginator, **kwargs)

    def projects(self):
        return self.codebuild.list_projects().get('projects')
def setUp(self):
    # shared fixture: give every test a fresh Session wrapper
    self.session = Session()
def __init__(self, project_name, role_name):
    """Create the CodeBuild client and the IAM helper bound to role_name."""
    self.codebuild    = Session().client('codebuild')
    self.iam          = IAM(role_name=role_name)
    self.project_name = project_name
def __init__(self):
    """Create the boto3 Athena client via the project Session."""
    session = Session()
    self.aws_athena = session.client('athena')
def sqs(self):
    """Return a boto3 SQS client created via the project Session."""
    session = Session()
    return session.client('sqs')
def client(self):
    """Return an STS client without checking credentials.

    The import stays local on purpose: importing Session at module level
    creates a circular dependency."""
    from osbot_aws.apis.Session import Session  # recursive dependency
    session = Session()
    return session.client('sts', check_credentials=False)
def client(self):
    """Return a boto3 EventBridge/CloudWatch Events client via the project Session."""
    session = Session()
    return session.client('events')
def client(self):
    """Return a boto3 Lambda client via the project Session."""
    session = Session()
    return session.client('lambda')
def __init__(self):
    """Create the boto3 CloudWatch client via the project Session."""
    session = Session()
    self.cloudwatch = session.client('cloudwatch')
class API_Gateway:
    """Helper around the boto3 'apigateway' client: rest apis, resources, methods,
    integrations, deployments, stages, api keys, usage plans and custom domains.

    Most calls go through small wrappers that convert boto3 exceptions into
    {'error': '...'} dicts instead of raising."""

    def __init__(self):
        self.api_gateway = Session().client('apigateway')

    # helper methods
    def _call_method(self, method_name, params):
        """Invoke a client method by name; returns the response or {'error': ...}."""
        try:
            return getattr(self.api_gateway, method_name)(**params)
        except Exception as error:
            return {'error': f'{error}'}

    # see this tweet to understand the use of the 'data_key' param https://twitter.com/DinisCruz/status/1226113182240038912
    # see this tweet thread to see more info about performance issues with this api https://twitter.com/DinisCruz/status/1226504297439023104
    # todo: add support for getting all the data using the position field
    def _call_method_return_items(self, method_name, params=None, index_by='id', data_key='items'):
        """Invoke a client method and index its `data_key` list by the `index_by` field.

        (fixed: mutable default argument `params={}`)"""
        try:
            raw_data = getattr(self.api_gateway, method_name)(**(params or {}))
            data = {}
            for item in raw_data.get(data_key):
                data[item.get(index_by)] = item
            return data
        except Exception as error:
            return {'error': f'{error}'}

    def _call_method_return_arrays(self, method_name, params=None, index_by='id', data_key='items'):
        """Invoke a client method and return a shallow copy of its `data_key` dict.

        (fixed: mutable default argument `params={}`)"""
        try:
            raw_data = getattr(self.api_gateway, method_name)(**(params or {}))
            data = {}
            for key, value in raw_data.get(data_key).items():
                data[key] = value
            return data
        except Exception as error:
            return {'error': f'{error}'}

    def _get(self, target, params=None, index_by='id'):
        """Call get_{target} and index the result items. (fixed: mutable default `params={}`)"""
        return self._call_method_return_items(method_name=f"get_{target}",
                                              params=params or {}, index_by=index_by)

    def _get_using_api_id(self, target, api_id, index_by='id', data_key='items'):
        """Call get_{target} scoped to one rest api id."""
        return self._call_method_return_items(method_name=f"get_{target}",
                                              params={'restApiId': api_id},
                                              index_by=index_by, data_key=data_key)

    def _index_by(self, values, index_by=None):
        """Index a list of dicts by `index_by`; return the plain list when index_by is None."""
        if index_by is None:
            return list(values)
        results = {}
        for item in values:
            results[item.get(index_by)] = item
        return results

    # main methods
    def account(self):
        return self.api_gateway.get_account()

    def api_keys(self, index_by='id', include_values=False):
        """Return api keys (optionally with their values), indexed by `index_by`."""
        return self._call_method_return_items(method_name="get_api_keys",
                                              params={'includeValues': include_values},
                                              index_by=index_by)

    def api_key(self, api_key, include_value=False):
        """Return one api key by id, or None when it does not exist."""
        try:
            return self.api_gateway.get_api_key(apiKey=api_key, includeValue=include_value)
        except Exception:                               # fixed: was a bare `except:`
            return None

    def api_key_create(self, key_name, enabled=True):
        return self.api_gateway.create_api_key(name=key_name, enabled=enabled)

    def api_key_delete(self, key_id_or_name):
        """Delete an api key by id or by name; returns True when something was deleted."""
        if self.api_exists(key_id_or_name):             # see if it is an api_key id
            self.api_gateway.delete_api_key(apiKey=key_id_or_name)
            return True
        for key_id, value in self.api_keys().items():   # try to find api_key via its name
            if value.get('name') == key_id_or_name:
                self.api_gateway.delete_api_key(apiKey=key_id)
                return True
        return False

    def api_exists(self, api_key):
        """True when `api_key` resolves to an existing api key id."""
        return bool(self.api_key(api_key))

    def deployments(self, api_id):
        return self._call_method_return_items(method_name="get_deployments",
                                              params={'restApiId': api_id})

    def deployment_create(self, api_id, stage):
        params = {'restApiId': api_id, 'stageName': stage}
        return self._call_method('create_deployment', params)

    def domain_name_add_path_mapping(self, rest_api_id, domain_name, base_path, stage='Prod'):
        """Map base_path on a custom domain to a rest api stage."""
        try:
            params = {'domainName': domain_name,
                      'basePath'  : base_path,
                      'restApiId' : rest_api_id,
                      'stage'     : stage}
            return self.api_gateway.create_base_path_mapping(**params)
        except Exception as error:
            return {'error': f'{error}'}

    def domain_name_create__regional(self, domain_name, certificate_arn):
        """Create a REGIONAL custom domain with TLS 1.2."""
        try:
            params = {'domainName'            : domain_name,
                      'regionalCertificateArn': certificate_arn,
                      'endpointConfiguration' : {'types': ['REGIONAL']},
                      'securityPolicy'        : 'TLS_1_2'}
            return self.api_gateway.create_domain_name(**params)
        except Exception as error:
            return {'error': f'{error}'}

    def domain_name_delete(self, domain_name):
        try:
            return self.api_gateway.delete_domain_name(domainName=domain_name)
        except Exception as error:
            return {'error': f'{error}'}

    def domain_names(self, index_by=None):
        return self._index_by(self.api_gateway.get_domain_names().get('items'), index_by=index_by)

    def integration(self, api_id, resource_id, http_method):
        return self.api_gateway.get_integration(restApiId=api_id, resourceId=resource_id,
                                                httpMethod=http_method)

    def integration_create__http(self, api_id, resource_id, uri, http_method='GET',
                                 integration_http_method='GET'):
        """Attach an HTTP backend to a resource/method; returns the response or the error string."""
        input_type = 'HTTP'
        try:
            return self.api_gateway.put_integration(restApiId=api_id, resourceId=resource_id,
                                                    httpMethod=http_method,
                                                    integrationHttpMethod=integration_http_method,
                                                    type=input_type, uri=uri)
        except Exception as error:
            return f'{error}'

    def integration_create__lambda(self, api_id, resource_id, lambda_name, http_method):
        """Attach a lambda (AWS_PROXY) backend to a resource/method."""
        iam         = IAM()
        aws_acct_id = iam.account_id()
        aws_region  = iam.region()
        input_type  = 'AWS_PROXY'
        uri         = f'arn:aws:apigateway:{aws_region}:lambda:path/2015-03-31/functions/arn:aws:lambda:{aws_region}:{aws_acct_id}:function:{lambda_name}/invocations'
        integration_http_method = 'POST'    # lambda invocations are always POSTed
        try:
            return self.api_gateway.put_integration(restApiId=api_id, resourceId=resource_id,
                                                    httpMethod=http_method,
                                                    integrationHttpMethod=integration_http_method,
                                                    type=input_type, uri=uri)
        except Exception as error:
            return f'{error}'

    def integration_add_permission_to_lambda(self, api_id, lambda_name):
        """Allow API Gateway to invoke the lambda (recreates the permission if it exists)."""
        iam          = IAM()
        aws_acct_id  = iam.account_id()
        aws_region   = iam.region()
        aws_lambda   = Lambda(lambda_name)
        function_arn = aws_lambda.function_Arn()
        statement_id = 'allow-api-gateway-invoke'
        action       = 'lambda:InvokeFunction'
        principal    = 'apigateway.amazonaws.com'
        source_arn   = f'arn:aws:execute-api:{aws_region}:{aws_acct_id}:{api_id}/*/GET/'
        aws_lambda.permission_delete(function_arn, statement_id)   # remove any existing permission with this name
        return aws_lambda.permission_add(function_arn, statement_id, action, principal, source_arn)

    def integration_response(self, api_id, resource_id, http_method, status_code):
        params = {'restApiId' : api_id, 'resourceId': resource_id,
                  'httpMethod': http_method, 'statusCode': status_code}
        return self._call_method('get_integration_response', params)

    def integration_response_create(self, api_id, resource_id, http_method, status_code,
                                    response_templates):
        params = {'restApiId'        : api_id,
                  'resourceId'       : resource_id,
                  'httpMethod'       : http_method,
                  'statusCode'       : status_code,
                  'responseTemplates': response_templates}
        return self._call_method('put_integration_response', params)

    def method(self, api_id, resource_id, http_method):
        try:
            return self.api_gateway.get_method(restApiId=api_id, resourceId=resource_id,
                                               httpMethod=http_method)
        except Exception as error:
            return {'error': f'{error}'}

    def method_create(self, api_id, resource_id, http_method, authorization_type='NONE'):
        return self._call_method("put_method", {'restApiId'        : api_id,
                                                'resourceId'       : resource_id,
                                                'httpMethod'       : http_method,
                                                'authorizationType': authorization_type})

    def method_delete(self, api_id, resource_id, http_method):
        return self.api_gateway.delete_method(restApiId=api_id, resourceId=resource_id,
                                              httpMethod=http_method)

    def method_invoke_test(self, api_id, resource_id, http_method, path_with_query_string='',
                           body='', headers=None):
        """Invoke a method via test_invoke_method. (fixed: mutable default `headers={}`)"""
        params = {'restApiId'          : api_id,
                  'resourceId'         : resource_id,
                  'httpMethod'         : http_method,
                  'pathWithQueryString': path_with_query_string,
                  'body'               : body,
                  'headers'            : headers or {}}
        return self._call_method("test_invoke_method", params)

    def method_response(self, api_id, resource_id, http_method, status_code):
        params = {'restApiId' : api_id, 'resourceId': resource_id,
                  'httpMethod': http_method, 'statusCode': status_code}
        return self._call_method('get_method_response', params)

    def method_response_create(self, api_id, resource_id, http_method, status_code, response_models):
        params = {'restApiId'     : api_id,
                  'resourceId'    : resource_id,
                  'httpMethod'    : http_method,
                  'statusCode'    : status_code,
                  'responseModels': response_models}
        return self._call_method('put_method_response', params)

    def models(self, api_id):
        return self._get_using_api_id('models', api_id)

    def stage(self, api_id, stage_name):
        return self._call_method("get_stage", {'restApiId': api_id, 'stageName': stage_name})

    def stage_url(self, api_id, region, stage, resource=''):
        """Return the public invoke URL for a stage (and optional resource path)."""
        return f'https://{api_id}.execute-api.{region}.amazonaws.com/{stage}/{resource}'

    def stages(self, api_id, index_by='deploymentId'):
        # note: get_stages returns its list under 'item' (not 'items')
        return self._get_using_api_id('stages', api_id, index_by=index_by, data_key='item')

    def resource(self, api_id, path):
        return self.resources(api_id, index_by='path').get(path, {})

    def resource_id(self, api_id, path):
        return self.resource(api_id, path).get('id')

    def resource_methods(self, api_id, path):
        return list(set(self.resource(api_id, path).get('resourceMethods', [])))

    def resource_create(self, api_id, parent_id, path):
        return self.api_gateway.create_resource(restApiId=api_id, parentId=parent_id, pathPart=path)

    def resource_delete(self, api_id, resource_id):
        return self.api_gateway.delete_resource(restApiId=api_id, resourceId=resource_id)

    def resources(self, api_id_or_name, index_by='id'):
        """Return the resources of an api, accepting either its id or its name."""
        result = self._get_using_api_id('resources', api_id=api_id_or_name, index_by=index_by)
        if result.get('error') is None:
            return result
        rest_apis = self.rest_apis(index_by='name')     # lookup failed: try by name
        if api_id_or_name in rest_apis:
            api_id = rest_apis.get(api_id_or_name).get('id')
            return self._get_using_api_id('resources', api_id=api_id, index_by=index_by)
        return {'error': f'API not found: {api_id_or_name}'}

    def rest_api(self, api_name):
        return self.rest_apis(index_by='name').get(api_name, {})

    def rest_api_create(self, api_name):
        """Create a REGIONAL rest api, or return the existing one with that name."""
        rest_apis = self.rest_apis(index_by='name')                 # get existing Rest APIs
        if api_name in rest_apis:                                   # see if it already exists
            return rest_apis.get(api_name)                          # return it if it does
        params = {'name': api_name, 'endpointConfiguration': {"types": ["REGIONAL"]}}
        return self.api_gateway.create_rest_api(**params)           # if not, create it

    def rest_api_delete(self, api_id):
        if self.rest_api_exists(api_id):
            self.api_gateway.delete_rest_api(restApiId=api_id)

    def rest_api_exists(self, api_id):
        return self.rest_api_info(api_id).get('id') == api_id

    def rest_api_id(self, api_name):
        return self.rest_api(api_name).get('id')

    def rest_api_info(self, api_id):
        try:
            return self.api_gateway.get_rest_api(restApiId=api_id)
        except Exception as error:
            return {'error': f'{error}'}

    def rest_apis(self, index_by='id'):
        return self._call_method_return_items(method_name="get_rest_apis", index_by=index_by)

    def usage_raw(self, usage_plan_id, days):
        """Return the raw usage data for the last `days` days."""
        start_date = (date.today() - timedelta(days=days)).strftime("%Y-%m-%d")
        end_date   = date.today().strftime("%Y-%m-%d")
        raw_data   = self._call_method_return_arrays(method_name='get_usage',
                                                     params={'usagePlanId': usage_plan_id,
                                                             'startDate'  : start_date,
                                                             'endDate'    : end_date})
        return raw_data

    def usage(self, usage_plan_id, days):
        """Return {api_key: {date: count}} for the last `days` days (capped at 90)."""
        if days > 90:
            days = 90                   # the API only returns up to 90 days of usage
        results  = {}
        raw_data = self.usage_raw(usage_plan_id, days)
        for key, value in raw_data.items():
            key_results = {}
            for i in range(0, days):
                row_date = (date.today() - timedelta(days=i)).strftime("%Y-%m-%d")
                key_results[row_date] = value[days - i][0]
            results[key] = key_results
        return results

    # this method helps to create a table that is usable by graph engines like Google Charts
    def usage__as_chart_data(self, usage_plan_id, days):
        """Return rows: header row + one row per day with per-key counts and a total column."""
        api_keys = self.api_keys()
        usage    = self.usage(usage_plan_id, days)
        headers  = ['Days', 'Totals']
        keys     = sorted(list(set(usage)))
        for key in keys:
            name = api_keys[key].get('name')
            headers.append(name)
        rows = [headers]
        if len(keys) > 0:
            days = usage[keys[0]].keys()
            for day in days:
                row_key = day.replace('2020-', '')   # NOTE(review): hardcoded year — only strips '2020-' prefixes
                row     = [row_key]
                total   = 0
                for key in keys:
                    value = usage[key][day]
                    row.append(value)
                    total += value
                row.insert(1, total)
                rows.insert(1, row)
        return rows

    def usage_plan_keys(self, usage_plan_id):
        return self._get('usage_plan_keys', {'usagePlanId': usage_plan_id})

    def usage_plan_add_key(self, usage_plan_id, key_id):
        return self._call_method('create_usage_plan_key', {'usagePlanId': usage_plan_id,
                                                           'keyId'      : key_id,
                                                           'keyType'    : 'API_KEY'})

    def usage_plan_id(self, usage_plan_name):
        return self.usage_plans(index_by='name').get(usage_plan_name, {}).get('id')

    def usage_plan_remove_key(self, usage_plan_id, key_id):
        return self._call_method('delete_usage_plan_key', {'usagePlanId': usage_plan_id,
                                                           'keyId'      : key_id})

    def usage_plans(self, index_by='id'):
        return self._get('usage_plans', index_by=index_by)
def kinesis(self):
    """Return a boto3 Kinesis client created via the project Session."""
    session = Session()
    return session.client('kinesis')
def __init__(self):
    """Create the CloudTrail client and the S3 helper used to read log files."""
    session = Session()
    self.cloudtrail = session.client('cloudtrail')
    self.s3         = S3()
def ssm(self):
    """Return the cached SSM client, creating it on first use."""
    if self._ssm is not None:
        return self._ssm
    self._ssm = Session().client('ssm')
    return self._ssm