def test__list_build_ids(self):
    code_build = CodeBuild(None, None)
    self.last_10_builds = list(code_build.project_builds_ids('OSBot-Jupyter'))[0:10]
    print()
    for build_id in self.last_10_builds:
        build_info = code_build.build_info(build_id)
        print(build_id, build_info.get('buildStatus'))
def run(event, context):
    project_name = 'Code_Build_Test'
    build_id     = CodeBuild(project_name=project_name, role_name=project_name).build_start()
    return {'headers'   : {'Content-Type': 'application/json'},
            'statusCode': 200,
            'body'      : Misc.json_format({'build_id': build_id})}
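# Hedged usage sketch for the handler above: a local smoke test that calls run()
# directly with an empty event and no context. It assumes AWS credentials are
# configured and that the 'Code_Build_Test' CodeBuild project and role already
# exist; the printed values mirror the handler's own return shape, nothing more.
if __name__ == '__main__':
    response = run({}, None)
    print(response['statusCode'])   # expected: 200
    print(response['body'])         # JSON string such as {"build_id": "..."}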
class CodeBuild_Jupyter:

    def __init__(self, build_id, build_info=None):
        self.project_name = 'OSBot-Jupyter'
        self.code_build   = CodeBuild(project_name=self.project_name, role_name=None)
        self.build_id     = build_id
        self._build_info  = build_info

    def build_info(self, reload_data=False):
        if reload_data or self._build_info is None:
            self._build_info = self.code_build.build_info(self.build_id)
        return self._build_info

    def build_environment_variables(self):
        variables  = {}
        build_info = self.build_info()
        if build_info:
            for variable in build_info.get('environment').get('environmentVariables'):
                variables[variable.get('name')] = variable.get('value')
        return variables

    def build_status(self):
        return self.build_info(reload_data=True).get('buildStatus')

    def build_phase(self):
        return self.build_info(reload_data=True).get('currentPhase')

    def build_log_messages(self):
        build_info  = self.build_info()
        group_name  = build_info.get('logs').get('groupName')
        stream_name = build_info.get('logs').get('streamName')
        logs        = Logs(group_name=group_name, stream_name=stream_name)
        return logs.messages()

    def build_stop(self):
        self.code_build.codebuild.stop_build(id=self.build_id).get('build')
        return self.build_status()

    def get_server_details_from_logs(self):
        def find_in(array, text):
            return [item for item in array if text in item]
        try:
            messages      = self.build_log_messages()
            ngrok_url     = find_in(messages, 'name=command_line addr')[0].split('url=')[1].strip()
            jupyter_token = find_in(messages, 'token=')[0].split('token=')[1].strip()
            return ngrok_url, jupyter_token
        except:
            return None, None

    def url(self):
        ngrok_url, jupyter_token = self.get_server_details_from_logs()
        if ngrok_url:
            return "{0}?token={1}".format(ngrok_url, jupyter_token)
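# Hedged usage sketch for CodeBuild_Jupyter: given an existing build id (the value
# below is a hypothetical placeholder), poll the build and try to recover the Jupyter
# URL from its CloudWatch logs. Assumes the OSBot-Jupyter project and its log group exist.
build = CodeBuild_Jupyter(build_id='OSBot-Jupyter:00000000-0000-0000-0000-000000000000')
print(build.build_status())                  # e.g. 'IN_PROGRESS' or 'SUCCEEDED'
print(build.build_environment_variables())   # dict of the build's environment variable overrides
print(build.url())                           # '<ngrok url>?token=<jupyter token>' once the server is up, else None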
class Create_Code_Build:

    def __init__(self, account_id, project_name):
        self.account_id    = account_id
        self.project_name  = project_name
        self.project_repo  = 'https://github.com/pbx-gs/{0}'.format(self.project_name)
        self.service_role  = 'arn:aws:iam::{0}:role/{1}'.format(self.account_id, self.project_name)
        self.project_arn   = 'arn:aws:codebuild:eu-west-2:{0}:project/{1}'.format(self.account_id, self.project_name)
        self.assume_policy = {'Statement': [{'Action'   : 'sts:AssumeRole',
                                             'Effect'   : 'Allow',
                                             'Principal': {'Service': 'codebuild.amazonaws.com'}}]}
        self.code_build    = CodeBuild(role_name=self.project_name, project_name=self.project_name)

    def create_role_and_policies(self, policies):
        self.code_build.iam.role_create(self.assume_policy)
        policies_arn = self.code_build.policies_create(policies)
        self.code_build.iam.role_policies_attach(policies_arn)
        return self

    def create_project_with_container__docker(self):
        kvargs = {'name'       : self.project_name,
                  'source'     : {'type': 'GITHUB', 'location': self.project_repo},
                  'artifacts'  : {'type': 'NO_ARTIFACTS'},
                  'environment': {'type'       : 'LINUX_CONTAINER',
                                  'image'      : 'aws/codebuild/docker:18.09.0',
                                  'computeType': 'BUILD_GENERAL1_LARGE'},
                  'serviceRole': self.service_role}
        return self.code_build.codebuild.create_project(**kvargs)

    def create_project_with_container__gs_docker_codebuild(self):
        kvargs = {'name'       : self.project_name,
                  'source'     : {'type': 'GITHUB', 'location': self.project_repo},
                  'artifacts'  : {'type': 'NO_ARTIFACTS'},
                  'environment': {'type'                    : 'LINUX_CONTAINER',
                                  'image'                   : '{0}.dkr.ecr.eu-west-2.amazonaws.com/gs-docker-codebuild:latest'.format(self.account_id),
                                  'computeType'             : 'BUILD_GENERAL1_LARGE',
                                  'imagePullCredentialsType': 'SERVICE_ROLE'},
                  'serviceRole': self.service_role}
        return self.code_build.codebuild.create_project(**kvargs)

    def policies__for_docker_build(self):
        cloud_watch_arn = "arn:aws:logs:eu-west-2:{0}:log-group:/aws/codebuild/{1}:log-stream:*".format(self.account_id, self.project_name)
        policies = {"Cloud-Watch-Policy" : {"Version"  : "2012-10-17",
                                            "Statement": [{"Sid"     : "GsBotPolicy",
                                                           "Effect"  : "Allow",
                                                           "Action"  : ["logs:CreateLogGroup", "logs:CreateLogStream", "logs:PutLogEvents"],
                                                           "Resource": [cloud_watch_arn]}]},
                    "Secret-Manager"     : {"Version"  : "2012-10-17",
                                            "Statement": [{"Sid"     : "GsBotPolicy",
                                                           "Effect"  : "Allow",
                                                           "Action"  : ["secretsmanager:GetSecretValue", "secretsmanager:DescribeSecret"],
                                                           "Resource": ["arn:aws:secretsmanager:eu-west-2:244560807427:secret:slack-gs-bot-*",
                                                                        "arn:aws:secretsmanager:eu-west-2:244560807427:secret:elastic_gsuite_data-*"]}]},
                    "Create-Docker-Image": {"Version"  : "2012-10-17",
                                            "Statement": [{"Effect"  : "Allow",
                                                           "Action"  : ["ecr:BatchCheckLayerAvailability", "ecr:CompleteLayerUpload",
                                                                        "ecr:GetAuthorizationToken", "ecr:InitiateLayerUpload",
                                                                        "ecr:PutImage", "ecr:UploadLayerPart"],
                                                           "Resource": "*"}]},
                    "Create-Repository"  : {"Version"  : "2012-10-17",
                                            "Statement": [{"Effect"  : "Allow",
                                                           "Action"  : ["ecr:CreateRepository"],
                                                           "Resource": "*"}]}}
        return policies

    def policies__with_ecr(self):
        cloud_watch_arn = "arn:aws:logs:eu-west-2:{0}:log-group:/aws/codebuild/{1}:log-stream:*".format(self.account_id, self.project_name)
        policies = {"Cloud-Watch-Policy": {"Version"  : "2012-10-17",
                                           "Statement": [{"Effect"  : "Allow",
                                                          "Action"  : ["logs:CreateLogGroup", "logs:CreateLogStream", "logs:PutLogEvents"],
                                                          "Resource": [cloud_watch_arn]}]},
                    "Download_Image"    : {"Version"  : "2012-10-17",
                                           "Statement": [{"Effect"  : "Allow",
                                                          "Action"  : ["ecr:GetAuthorizationToken", "ecr:BatchCheckLayerAvailability",
                                                                       "ecr:GetDownloadUrlForLayer", "ecr:GetRepositoryPolicy",
                                                                       "ecr:DescribeRepositories", "ecr:ListImages",
                                                                       "ecr:DescribeImages", "ecr:BatchGetImage"],
                                                          "Resource": "*"}]}}
        return policies

    def policies__with_ecr_and_3_secrets(self):
        cloud_watch_arn = "arn:aws:logs:eu-west-2:{0}:log-group:/aws/codebuild/{1}:log-stream:*".format(self.account_id, self.project_name)
        policies = {"Cloud-Watch-Policy": {"Version"  : "2012-10-17",
                                           "Statement": [{"Sid"     : "GsBotPolicy",
                                                          "Effect"  : "Allow",
                                                          "Action"  : ["logs:CreateLogGroup", "logs:CreateLogStream", "logs:PutLogEvents"],
                                                          "Resource": [cloud_watch_arn]}]},
                    "Download_Image"    : {"Version"  : "2012-10-17",
                                           "Statement": [{"Effect"  : "Allow",
                                                          "Action"  : ["ecr:GetAuthorizationToken", "ecr:BatchCheckLayerAvailability",
                                                                       "ecr:GetDownloadUrlForLayer", "ecr:GetRepositoryPolicy",
                                                                       "ecr:DescribeRepositories", "ecr:ListImages",
                                                                       "ecr:DescribeImages", "ecr:BatchGetImage"],
                                                          "Resource": "*"}]},
                    "Secret-Manager"    : {"Version"  : "2012-10-17",
                                           "Statement": [{"Effect"  : "Allow",
                                                          "Action"  : ["secretsmanager:GetSecretValue", "secretsmanager:DescribeSecret"],
                                                          "Resource": ["arn:aws:secretsmanager:eu-west-2:244560807427:secret:slack-gs-bot-*",
                                                                       "arn:aws:secretsmanager:eu-west-2:244560807427:secret:elastic*",
                                                                       "arn:aws:secretsmanager:eu-west-2:244560807427:secret:gsuite*"]}]}}
        return policies

    def delete_project_role_and_policies(self):
        if self.code_build.iam.role_exists():
            policies_arns = list(self.code_build.iam.role_policies().values())
            self.code_build.iam.role_policies_detach(policies_arns)   # first detach the policies
            self.code_build.iam.policies_delete(policies_arns)        # then delete the policies
            self.code_build.iam.role_delete()                         # then delete the role
        if self.code_build.project_exists():
            self.code_build.project_delete()                          # finally delete the project
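# Hedged sketch of the module-level fixtures that the test_CodeBuild class below relies
# on (project_name, project_repo, project_arn, service_role, assume_policy and the two
# delete_* flags). The concrete values are assumptions for illustration, inferred from
# the ARNs and project names used inside the tests, not confirmed from the source module.
delete_on_setup    = False
delete_on_teardown = False
account_id         = '244560807427'
project_name       = 'GSBot_code_build'
project_repo       = 'https://github.com/pbx-gs/{0}'.format(project_name)
service_role       = 'arn:aws:iam::{0}:role/{1}'.format(account_id, project_name)
project_arn        = 'arn:aws:codebuild:eu-west-2:{0}:project/{1}'.format(account_id, project_name)
assume_policy      = {'Statement': [{'Action'   : 'sts:AssumeRole',
                                     'Effect'   : 'Allow',
                                     'Principal': {'Service': 'codebuild.amazonaws.com'}}]}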
class test_CodeBuild(TestCase):

    @classmethod
    def setUpClass(cls):
        code_build = CodeBuild(project_name=project_name, role_name=project_name)
        iam        = IAM(role_name=project_name)
        if delete_on_setup:
            code_build.project_delete()
            iam.role_delete()
        if code_build.project_exists() is False:
            assert code_build.project_exists() is False
            iam.role_create(assume_policy)                          # create role
            assert iam.role_info().get('Arn') == service_role       # confirm the role exists
            sleep(1)
            code_build.project_create(project_repo, service_role)   # in a non-deterministic way, this sometimes throws the error: CodeBuild is not authorized to perform: sts:AssumeRole

    @classmethod
    def tearDownClass(cls):
        code_build = CodeBuild(project_name=project_name, role_name=project_name)
        iam        = IAM(role_name=project_name)
        assert code_build.project_exists() is True
        assert iam.role_exists() is True
        if delete_on_teardown:
            code_build.project_delete()
            iam.role_delete()
            assert code_build.project_exists() is False
            assert iam.role_exists() is False

    def setUp(self):
        self.code_build = CodeBuild(project_name=project_name, role_name=project_name)
        self.iam        = IAM(role_name=project_name)

    def test_all_builds_ids(self):                                  # LONG running test
        ids = list(self.code_build.all_builds_ids())
        Assert(ids).size_is(100)
        ids = list(self.code_build.all_builds_ids(use_paginator=True))
        Assert(ids).is_bigger_than(1000)

    #@unittest.skip("Reduce the number of policies created and improve tests")
    def test_create_policies(self):
        policies = {"Download_Image"                : {"Version"  : "2012-10-17",
                                                       "Statement": [{"Effect"  : "Allow",
                                                                      "Action"  : ["ecr:GetAuthorizationToken", "ecr:BatchCheckLayerAvailability",
                                                                                   "ecr:GetDownloadUrlForLayer", "ecr:GetRepositoryPolicy",
                                                                                   "ecr:DescribeRepositories", "ecr:ListImages",
                                                                                   "ecr:DescribeImages", "ecr:BatchGetImage"],
                                                                      "Resource": "*"}]},
                    "CodeBuildBasePolicy"           : {"Version"  : "2012-10-17",
                                                       "Statement": [{"Effect"  : "Allow",
                                                                      "Resource": ["arn:aws:logs:eu-west-2:244560807427:log-group:/aws/codebuild/GSBot_code_build",
                                                                                   "arn:aws:logs:eu-west-2:244560807427:log-group:/aws/codebuild/GSBot_code_build:*"],
                                                                      "Action"  : ["logs:CreateLogGroup", "logs:CreateLogStream", "logs:PutLogEvents"]},
                                                                     {"Effect"  : "Allow",
                                                                      "Resource": ["arn:aws:s3:::codepipeline-eu-west-2-*"],
                                                                      "Action"  : ["s3:PutObject", "s3:GetObject", "s3:GetObjectVersion",
                                                                                   "s3:GetBucketAcl", "s3:GetBucketLocation"]}]},
                    "Cloud_Watch_Policy"            : {"Version"  : "2012-10-17",
                                                       "Statement": [{"Effect"  : "Allow",
                                                                      "Action"  : ["logs:CreateLogGroup", "logs:CreateLogStream", "logs:PutLogEvents"],
                                                                      "Resource": ["*"]},
                                                                     {"Effect"  : "Allow",
                                                                      "Action"  : ["codecommit:GitPull"],
                                                                      "Resource": ["*"]},
                                                                     {"Effect"  : "Allow",
                                                                      "Action"  : ["s3:GetObject", "s3:GetObjectVersion"],
                                                                      "Resource": ["*"]},
                                                                     {"Sid"     : "S3PutObjectPolicy",
                                                                      "Effect"  : "Allow",
                                                                      "Action"  : ["s3:PutObject"],
                                                                      "Resource": ["*"]}]},
                    "Access_Secret_Manager"         : {"Version"  : "2012-10-17",
                                                       "Statement": [{"Sid"     : "VisualEditor0",
                                                                      "Effect"  : "Allow",
                                                                      "Action"  : ["secretsmanager:GetResourcePolicy", "secretsmanager:GetSecretValue",
                                                                                   "secretsmanager:DescribeSecret", "secretsmanager:ListSecretVersionIds"],
                                                                      "Resource": "arn:aws:secretsmanager:*:*:secret:*"}]},
                    "Invoke_Lambda_Functions"       : {"Version"  : "2012-10-17",
                                                       "Statement": [{"Sid"     : "VisualEditor0",
                                                                      "Effect"  : "Allow",
                                                                      "Action"  : "lambda:InvokeFunction",
                                                                      "Resource": "arn:aws:lambda:*:*:function:*"}]},
                    "Create_Update_Lambda_Functions": {"Version"  : "2012-10-17",
                                                       "Statement": [{"Effect"  : "Allow",
                                                                      "Action"  : ["lambda:ListFunctions", "lambda:GetFunction",
                                                                                   "lambda:CreateFunction", "lambda:UpdateFunctionCode"],
                                                                      "Resource": "arn:aws:lambda:*:*:function:*"}]},
                    "ECS_Management"                : {"Version"  : "2012-10-17",
                                                       "Statement": [{"Effect"  : "Allow",
                                                                      "Action"  : ["ecs:ListClusters", "ecs:ListTaskDefinitions"],
                                                                      "Resource": "*"}]},
                    "Pass_Role"                     : {"Version"  : "2012-10-17",
                                                       "Statement": [{"Effect"  : "Allow",
                                                                      "Action"  : ["iam:GetRole", "iam:PassRole"],
                                                                      "Resource": "arn:aws:iam::244560807427:role/lambda_with_s3_access"}]}}
        policies_arns  = list(self.code_build.iam.role_policies().values())
        policies_names = list(self.code_build.iam.role_policies().keys())
        self.code_build.iam.role_policies_detach(policies_arns)
        for policies_name in policies_names:
            self.code_build.iam.policy_delete(policies_name)
        self.code_build.policies_create(policies)

    def test_build_start(self):
        build_id     = self.code_build.build_start()
        build_info   = self.code_build.build_wait_for_completion(build_id, 1, 60)
        build_phases = build_info.get('phases')
        phase        = build_phases.pop(-2)
        Dev.pprint(phase.get('phaseType'), phase.get('phaseStatus'), phase.get('contexts')[0].get('message'))

    def test_project_builds(self):
        ids = list(self.code_build.project_builds_ids('GSBot_code_build'))
        assert len(self.code_build.project_builds(ids[0:2])) == 3

    def test_project_builds_ids(self):                              # LONG running test
        assert len(list(self.code_build.project_builds_ids('GSBot_code_build'              ))) > 20
        assert len(list(self.code_build.project_builds_ids('pbx-group-security-site'       ))) == 100
        assert len(list(self.code_build.project_builds_ids('GSBot_code_build'        , True))) > 20
        assert len(list(self.code_build.project_builds_ids('pbx-group-security-site' , True))) > 1200

    def test_project_info(self):
        project = self.code_build.project_info()
        (Assert(project).field_is_equal('arn'        , project_arn )
                        .field_is_equal('name'       , project_name)
                        .field_is_equal('serviceRole', service_role))

    def test_projects(self):
        result = self.code_build.projects()
        Assert(result).is_bigger_than(2).is_smaller_than(100)
class CodeBuild_Jupyter_Helper:

    def __init__(self):
        self.project_name  = 'OSBot-Jupyter'
        self.code_build    = CodeBuild(project_name=self.project_name, role_name=None)
        self.max_builds    = 10
        self.build_timeout = 240
        self.server_sizes  = {'small' : 'BUILD_GENERAL1_SMALL',
                              'medium': 'BUILD_GENERAL1_MEDIUM',
                              'large' : 'BUILD_GENERAL1_LARGE'}

    def get_active_build_id(self, project_name=None):
        builds = self.get_active_builds(project_name=project_name, stop_when_match=True)
        return Misc.array_pop(list(set(builds)))

    def get_active_builds(self, project_name=None, stop_when_match=False):
        build_ids   = list(self.code_build.project_builds_ids(self.project_name))[0:self.max_builds]
        build_infos = self.code_build.codebuild.batch_get_builds(ids=build_ids).get('builds')
        builds      = {}
        for build_info in build_infos:
            build_id = build_info.get('id')
            if build_info.get('currentPhase') != 'COMPLETED':
                if project_name is None or project_name == build_info.get('projectName'):
                    builds[build_id] = CodeBuild_Jupyter(build_id=build_id, build_info=build_info)
                    if stop_when_match:
                        return builds
        return builds

    def get_active_server_details(self):
        build_id = self.get_active_build_id()
        if build_id is None:
            return None
        #self.start_build_and_wait_for_jupyter_load()
        #build_id = self.get_active_build_id()
        code_build = CodeBuild_Jupyter(build_id)
        return code_build.get_server_details_from_logs()

    def start_build(self):
        build_arn = self.code_build.build_start()
        build_id  = build_arn.split('build/').pop()
        return CodeBuild_Jupyter(build_id=build_id)

    def start_build_and_wait_for_jupyter_load(self, max_seconds=60):
        build = self.start_build()
        return self.wait_for_jupyter_load(build, max_seconds)

    def start_build_for_repo(self, repo_name, user='******', server_size='small'):
        aws_secret = "git__{0}".format(repo_name)
        data       = Secrets(aws_secret).value_from_json_string()
        if not data:
            return None
        repo_url = data['repo_url']
        kvargs   = {'projectName'                 : self.project_name,
                    'timeoutInMinutesOverride'    : self.build_timeout,
                    'sourceLocationOverride'      : repo_url,
                    'computeTypeOverride'         : self.server_sizes[server_size],
                    'environmentVariablesOverride': [{'name': 'repo_name', 'value': repo_name, 'type': 'PLAINTEXT'},
                                                     {'name': 'user'     , 'value': user     , 'type': 'PLAINTEXT'}]}
        build_id = self.code_build.codebuild.start_build(**kvargs).get('build').get('arn')
        return {'status': 'ok', 'data': build_id}

    def start_build_for_repo_and_wait_for_jupyter_load(self, repo_name, user='******', server_size='small'):
        result = self.start_build_for_repo(repo_name=repo_name, user=user, server_size=server_size)
        if result:
            build_id = result.get('data')
            build    = CodeBuild_Jupyter(build_id=build_id)
            return self.wait_for_jupyter_load(build)

    def stop_all_active(self):
        available_builds = self.get_active_builds()
        stopped          = []
        for build_id in available_builds.keys():
            self.code_build.codebuild.stop_build(id=build_id).get('build')
            stopped.append(build_id)
        return stopped

    def save_active_server_details(self, file):
        build_id      = self.get_active_build_id()
        server, token = CodeBuild_Jupyter(build_id).get_server_details_from_logs()
        config        = {'build_id': build_id, 'server': server, 'token': token}
        Json.save_file(file, config)
        return config

    def wait_for_jupyter_load(self, build, max_seconds=150):        # make it 2.5 minutes since sometimes it takes 90 secs for aws to fetch (was 90 seconds originally)
        seconds_sleep = 5
        for i in range(0, max_seconds, seconds_sleep):
            sleep(seconds_sleep)
            (ngrok_url, jupyter_token) = build.get_server_details_from_logs()
            if ngrok_url is not None:
                return "{0}?token={1}".format(ngrok_url, jupyter_token)
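# Hedged usage sketch for CodeBuild_Jupyter_Helper: start a Jupyter build for a named
# repo and wait until the ngrok URL appears in the build logs. 'my-notebooks' is a
# placeholder repo name; it assumes a Secrets Manager entry called git__my-notebooks
# holding a 'repo_url' value, which is what start_build_for_repo expects.
helper = CodeBuild_Jupyter_Helper()
url    = helper.start_build_for_repo_and_wait_for_jupyter_load(repo_name='my-notebooks',
                                                               server_size='medium')
print(url)                       # '<ngrok url>?token=<jupyter token>' or None on timeout
print(helper.stop_all_active())  # ids of any builds that were stopped afterwards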