def setUp(self):
    """Per-test fixtures: an S3 helper plus the temp-file/bucket names used by the tests."""
    self.s3 = S3()
    assert type(self.s3).__name__ == 'S3'       # sanity check that the helper wired up correctly
    self.temp_file_name     = "aaa.txt"
    self.temp_file_contents = "some contents"
    self.test_bucket        = "gs-lambda-tests"
    self.test_folder        = "unit_tests"
def setUpClass(cls) -> None:
    """One-time fixtures: the S3 helper plus the account id, bucket and region under test."""
    # STS().check_current_session_credentials()        # intentionally disabled in the original
    cls.s3         = S3()
    config         = AWS_Config()
    cls.aws_config = config
    cls.account_id = config.aws_session_account_id()
    cls.s3_bucket  = config.lambda_s3_bucket()
    cls.region     = config.aws_session_region_name()
def run(event, context):
    """Post a PNG image to a Slack channel.

    The image arrives either inline (base64 text in 'png_data') or as an S3
    object reference ('s3_bucket' + 's3_key'); returns None when the event
    carries neither image source.
    """
    channel   = event.get('channel')
    png_data  = event.get('png_data')
    s3_bucket = event.get('s3_bucket')
    s3_key    = event.get('s3_key')
    title     = event.get('title')
    team_id   = event.get('team_id')        # currently unused; kept from the original event contract
    aws_secrets_id = 'slack-bot-oauth'      # need to move to special function
    bot_token = Secrets(aws_secrets_id).value()
    if png_data:
        tmp_file = Files.temp_file('.png')
        with open(tmp_file, "wb") as fh:
            fh.write(base64.decodebytes(png_data.encode()))
    elif s3_bucket and s3_key:
        tmp_file = S3().file_download_and_delete(s3_bucket, s3_key)
    else:
        return None
    return send_file_to_slack(tmp_file, title, bot_token, channel)
def load_dependency(target):
    """Download a zipped dependency bundle from S3, unpack it under /tmp and
    add it to sys.path so subsequent imports resolve.

    Returns None when not running inside AWS (no AWS_REGION env var),
    otherwise True/False for whether the unpacked folder exists.
    Raises Exception when the bundle is missing from S3.

    Fix: sys.path is now updated on every call (guarded against duplicates),
    not only on first download — a warm Lambda container can have the folder
    unpacked from a previous invocation while the current interpreter's
    sys.path no longer contains it. Matches the sibling implementation that
    uses AWS_Config for the bucket.
    """
    if os.getenv('AWS_REGION') is None:         # outside AWS there is nothing to load
        return
    from osbot_aws.apis.S3 import S3
    import shutil
    import sys
    s3        = S3()
    s3_bucket = 'gw-bot-lambdas'
    s3_key    = 'lambdas-dependencies/{0}.zip'.format(target)
    tmp_dir   = Files.path_combine('/tmp/lambdas-dependencies', target)
    if s3.file_exists(s3_bucket, s3_key) is False:
        raise Exception("In Lambda load_dependency, could not find dependency for: {0}".format(target))
    if Files.not_exists(tmp_dir):                       # first use on this execution environment
        zip_file = s3.file_download(s3_bucket, s3_key, False)   # download zip file with dependencies
        shutil.unpack_archive(zip_file, extract_dir=tmp_dir)    # unpack them
    if tmp_dir not in sys.path:                         # avoid duplicate entries on warm invocations
        sys.path.append(tmp_dir)                        # make the unpacked code importable
    return Files.exists(tmp_dir)
def setUp(self):
    """Per-test fixtures for the S3 unit tests."""
    super().setUp()
    self.s3 = S3()
    self.test_bucket, self.test_folder = "gs-lambda-tests", "unit_tests"
    self.temp_file_name = "aaa.txt"
    self.temp_file_contents = "some contents"
def check_access_key_for_user(self):
    """Exercise the user's existing access key, then list the S3 buckets."""
    # key material is retrieved but its values are currently unused here
    access_key, secret_key = self.get_access_key_for_user(new_key=False)
    return S3().buckets()
def upload_dependency(target):
    """Zip-upload a local dependency folder to the configured lambdas bucket.

    Raises Exception when the local folder cannot be found; returns True/False
    for whether the uploaded object exists afterwards.
    """
    s3        = S3()
    s3_bucket = AWS_Config().lambda_s3_bucket()
    s3_file   = f'lambdas-dependencies/{target}.zip'
    path_libs = Files.path_combine('../../../_lambda_dependencies/', target)
    if Files.not_exists(path_libs):
        raise Exception(f"In Lambda upload_dependency, could not find dependency for: {target} , which resolved to {path_libs}")
    s3.folder_upload(path_libs, s3_bucket, s3_file)
    return s3.file_exists(s3_bucket, s3_file)
def __init__(self):
    """Hard-coded deployment settings for the oss_bot account."""
    self.bot_name     = 'oss_bot'
    self.profile_name = 'gs-detect-aws'          # account 654386450934
    self.region_name  = 'eu-west-2'
    self.account_id   = '654386450934'
    self.role_lambdas = f"arn:aws:iam::{self.account_id}:role/service-role/osbot-lambdas"
    bucket_name       = f"{self.bot_name}-lambdas".replace('_', '-')   # bucket names cannot contain '_'
    self.s3_bucket_lambdas = bucket_name
    self.s3 = S3()
def get_graph_data(self, graph_name):
    """Fetch graph data (with details) via the gsbot_graph lambda.

    When the lambda returns a string, that string is an S3 key pointing at the
    real payload; download it, delete the S3 object and parse the JSON.

    Fixes: idiomatic isinstance check instead of `type(...) is str`, and the
    redundant duplicated `return data` collapsed into one.
    """
    params = {'params': ['raw_data', graph_name, 'details'], 'data': {}}
    data = Lambda('lambdas.gsbot.gsbot_graph').invoke(params)
    if isinstance(data, str):                   # string result == S3 key of the payload
        s3_key    = data
        s3_bucket = 'gs-lambda-tests'
        tmp_file  = S3().file_download_and_delete(s3_bucket, s3_key)
        data      = Json.load_json_and_delete(tmp_file)
    return data
def get_graph_data(self, graph_name):
    """Fetch graph data (with details) via the osbot_jira graph lambda.

    When the lambda returns a string, that string is an S3 key pointing at the
    real payload; download it, delete the S3 object and parse the file.

    Fixes: idiomatic isinstance check instead of `type(...) is str`, and the
    redundant duplicated `return data` collapsed into one.
    """
    params = {'params': ['raw_data', graph_name, 'details'], 'data': {}}
    data = Lambda('osbot_jira.lambdas.graph').invoke(params)
    if isinstance(data, str):                   # string result == S3 key of the payload
        s3_key    = data
        s3_bucket = Globals.lambda_s3_bucket
        tmp_file  = S3().file_download_and_delete(s3_bucket, s3_key)
        data      = Json.load_file_and_delete(tmp_file)
    return data
def send_png_file_to_slack(self, team_id, channel, target, png_file):
    """Deliver a PNG either asynchronously to Slack or inline as base64.

    With both team_id and channel set: stage the file in S3 and fire the
    'utils.png_to_slack' lambda asynchronously, returning (None, None).
    Otherwise: return the file's contents base64-encoded as a str.

    Fix: the fallback branch used `open(...).read()` without closing the
    handle; now uses a context manager.
    """
    if team_id and channel:
        s3_bucket = 'gs-lambda-tests'
        s3_key    = S3().file_upload_as_temp_file(png_file, s3_bucket)
        png_to_slack = Lambda('utils.png_to_slack')
        payload = {'s3_bucket': s3_bucket,
                   's3_key'   : s3_key,
                   'team_id'  : team_id,
                   'channel'  : channel,
                   'title'    : target}
        png_to_slack.invoke_async(payload)
        return None, None
    else:
        with open(png_file, 'rb') as f:         # was leaked: open(...).read() with no close
            return base64.b64encode(f.read()).decode()
def __init__(self, file_name):
    """Temp-lambda test helper: prepares names, folders and the source file."""
    lambda_name      = 'tmp_lambda_dev_test'
    self.file_name   = file_name
    self.s3          = S3()
    self.folder      = Files.temp_folder('tmp_lambda_')
    self.lambda_code = "def run(event, context): return 'hello {0}'.format(event.get('name'))"
    self.tmp_file    = None
    self.s3_bucket   = Globals.lambda_s3_bucket
    self.lambda_name = lambda_name
    self.s3_key      = f'unit_tests/lambdas/{lambda_name}.zip'
    self.create_temp_file()
def upload_dependency(target):
    """Zip-upload a local dependency folder to the gw-bot-lambdas bucket.

    Raises Exception when the local folder cannot be found; returns True/False
    for whether the uploaded object exists afterwards.
    """
    s3        = S3()
    s3_bucket = 'gw-bot-lambdas'
    s3_file   = f'lambdas-dependencies/{target}.zip'
    path_libs = Files.path_combine('../../../_lambda_dependencies/', target)
    if Files.not_exists(path_libs):
        raise Exception("In Lambda upload_dependency, could not find dependency for: {0}".format(target))
    s3.folder_upload(path_libs, s3_bucket, s3_file)
    return s3.file_exists(s3_bucket, s3_file)
def __init__(self, region='eu-west-2'):
    """Customer-A environment settings for OSBot setup.

    Fix: the original assigned self.profile_name twice in a row; the first
    value ('gw-customer-a') was dead code, immediately overwritten, and has
    been removed.
    """
    self.profile_name     = '832789828058_AdministratorAccess'
    # NOTE(review): this looks like an account *name*, not a 12-digit id — confirm with OSBot_Setup's expectations
    self.account_id       = 'cloudsdkcustomera-glasswall'
    self.lambda_s3_bucket = f'{self.profile_name}-osbot-lambdas'
    self.region           = region
    self.osbot_setup      = OSBot_Setup(profile_name     = self.profile_name,
                                        account_id       = self.account_id,
                                        region_name      = self.region,
                                        lambda_s3_bucket = self.lambda_s3_bucket)
    self.s3 = S3()
def setUp(self):
    """Create the AWS helpers and the expected values used by these tests."""
    account_id = '785217600689'
    s3_prefix  = 'lambdas'
    self.aws_config = AWS_Config()
    self.lambda_    = Lambda()
    self.s3         = S3()
    self.sts        = STS()
    self.expected_account_id = account_id
    self.expected_region     = 'eu-west-1'
    self.expected_s3_prefix  = s3_prefix
    self.expected_role_name  = None
    self.expected_s3_bucket  = f'{account_id}-osbot-{s3_prefix}'
    self.expected_module     = 'osbot_aws.lambdas.dev.hello_world'
    self.function_name       = 'osbot_aws_lambdas_dev_hello_world'
    self.lambda_handler      = run
def raw_data(team_id=None, channel=None, params=None, data=None):
    """Slack `data` command: resolve a graph (or single issue) by name and
    return its raw nodes/edges, posting a status message to the channel.

    NOTE(review): the `data` parameter is immediately overwritten with None —
    it is accepted only to satisfy the command-handler signature; confirm
    against the dispatcher before removing it.
    """
    data = None                  # reset: incoming `data` argument is not used
    text = None                  # error text for the slack message (set on failure paths)
    attachments = []
    if len(params) < 1:
        # no graph name supplied
        text = ':red_circle: Hi, for the `data` command, you need to provide a graph name'
    else:
        graph_name = params.pop(0)
        graph = Lambda_Graph().get_gs_graph___by_name(graph_name)
        if graph:
            data = {'graph_name': graph_name, 'nodes': graph.nodes, 'edges': graph.edges}
            if len(params) == 1 and params.pop(0) == 'details':
                # 'details' flag: expand nodes into full issue data, then stage the
                # (potentially large) payload in S3 and return only its S3 key
                data['nodes'] = graph.get_nodes_issues()
                s3_bucket = 'gw-bot-lambdas'
                import tempfile
                with tempfile.NamedTemporaryFile(suffix='.json') as temp:
                    temp.write(str.encode(json.dumps(data)))
                    temp.flush()                        # ensure bytes are on disk before upload
                    data = S3().file_upload_as_temp_file(temp.name, s3_bucket)
            attachments = [{"color": "good", "text": "{0}".format(pprint.pformat(data))}]
        else:
            from osbot_jira.api.API_Issues import API_Issues
            # if a graph wasn't found try to get the issue with that name
            issue = API_Issues().issue(graph_name)
            if issue:
                data = {'nodes': {graph_name: issue},   # return as the first node
                        'edges': []}                    # with no edges
            else:
                text = ':red_circle: Graph or issue with name `{0}` not found! Use the command `graph last` to see a list of the latest graphs generated'.format(graph_name)
    slack_message(text, attachments, channel, team_id)
    return data
def _test_s3_bucket_to_sqs(self):
    """Exploratory (disabled, leading-underscore) test: wire an S3 bucket
    notification to a Lambda that forwards event data to an SQS queue.

    NOTE(review): most wiring steps are commented out — as written this only
    deploys the Lambda package, computes ARNs and polls the queue twice.
    """
    s3     = S3()
    queue  = Queue('unit_tests_temp_queue')  #.create()
    bucket_name = 'bucket-42-temp'
    region      = 'eu-west-2'                # unused in the active code path
    lambda_obj  = Lambda_Package('osbot_aws.lambdas.pocs.send_event_data_to_queue').update_with_root_folder()
    lambda_arn  = lambda_obj._lambda.function_Arn()
    bucket_arn  = s3.bucket_arn(bucket_name)
    queue_arn   = queue.arn()                # unused in the active code path
    #lambda_obj.invoke({'a':43})
    #s3.bucket_create(s3_bucket, region)

    # helper: grant the S3 service permission to invoke the Lambda for events from source_arn
    def add_notification(source_arn):
        result = lambda_obj._lambda.client().permission_add(FunctionName=lambda_arn, StatementId='1', Action='lambda:InvokeFunction', Principal='s3.amazonaws.com', SourceArn=source_arn, SourceAccount=IAM().account_id())
        Dev.pprint(result)
        #Dev.pprint(result == { 'Statement': '{"Sid":"1","Effect":"Allow","Principal":{"Service":"s3.amazonaws.com"},"Action":"lambda:InvokeFunction","Resource":"arn:aws:lambda:eu-west-2:244560807427:function:osbot_aws_lambdas_pocs_send_event_data_to_queue","Condition":{"StringEquals":{"AWS:SourceAccount":"244560807427"},"ArnLike":{"AWS:SourceArn":"arn:aws:sqs:eu-west-2:244560807427:unit_tests_temp_queue"}}}'})

    #add_notification(bucket_arn)
    #return
    # resource = s3.s3_bucket_notification(bucket_name)
    # config = { 'LambdaFunctionConfigurations': [{ 'LambdaFunctionArn': lambda_arn ,
    # 'Events' : ['s3:ObjectCreated:*']}]}
    # Dev.pprint(config)
    # Dev.pprint(resource.put(NotificationConfiguration=config))
    #Dev.pprint(resource.lambda_function_configurations)
    #Dev.pprint(s3.s3().get_bucket_notification_configuration(Bucket=bucket_name))
    #bucket_notification = s3.BucketNotification('bucket_name')
    #Dev.pprint(bucket_notification)
    #s3.boto_client_s3().bucket_notification()

    # poll the queue twice and dump whatever arrived
    Dev.pprint(queue.pull())
    Dev.pprint(queue.pull())
def __init__(self, bot_name=None, profile_name=None, account_id=None, region_name=None, lambda_s3_bucket=None, lambda_role_name=None):
    """Apply any supplied overrides to AWS_Config, then snapshot the resolved settings."""
    aws_config = AWS_Config()
    overrides = ((bot_name,         aws_config.set_bot_name),
                 (profile_name,     aws_config.set_aws_session_profile_name),
                 (account_id,       aws_config.set_aws_session_account_id),
                 (region_name,      aws_config.set_aws_session_region_name),
                 (lambda_s3_bucket, aws_config.set_lambda_s3_bucket),
                 (lambda_role_name, aws_config.set_lambda_role_name))
    for value, setter in overrides:             # only truthy overrides are applied
        if value:
            setter(value)
    self.bot_name          = aws_config.bot_name()
    self.profile_name      = aws_config.aws_session_profile_name()
    self.region_name       = aws_config.aws_session_region_name()
    self.account_id        = aws_config.aws_session_account_id()
    self.s3_bucket_lambdas = aws_config.lambda_s3_bucket()
    self.lambda_role_name  = aws_config.lambda_role_name()
    self.lambda_role_arn   = f"arn:aws:iam::{self.account_id}:role/{self.lambda_role_name}"
    self.s3                = S3()
def load_dependency(target):
    """Download a zipped dependency bundle from S3, unpack it under /tmp and
    make it importable via sys.path.

    Returns None when not running inside AWS (no AWS_REGION env var),
    otherwise True/False for whether the unpacked folder exists.
    Raises Exception when the bundle is missing from S3.
    """
    if os.getenv('AWS_REGION') is None:         # outside AWS there is nothing to load
        return
    from osbot_aws.apis.S3 import S3
    import shutil
    import sys
    s3       = S3()
    bucket   = AWS_Config().lambda_s3_bucket()
    key      = f'lambdas-dependencies/{target}.zip'
    tmp_dir  = Files.path_combine('/tmp/lambdas-dependencies', target)
    if s3.file_exists(bucket, key) is False:
        raise Exception("In Lambda load_dependency, could not find dependency for: {0}".format(target))
    if file_not_exists(tmp_dir):                                # first use on this execution environment
        archive = s3.file_download(bucket, key, False)          # download zip file with dependencies
        shutil.unpack_archive(archive, extract_dir=tmp_dir)     # unpack them
    if tmp_dir not in sys.path:                                 # avoid duplicate path entries
        sys.path.append(tmp_dir)
    return Files.exists(tmp_dir)
def __init__(self):
    """Set up a CloudTrail client and the S3 API wrapper."""
    session         = Session()
    self.cloudtrail = session.client('cloudtrail')
    self.s3         = S3()
def test_setup_aws_environment(self):
    """After running setup_aws_environment, the customer lambdas bucket must exist."""
    self.aws_setup.setup_aws_environment()
    bucket_names = S3().buckets()
    assert 'gw-customer-a-osbot-lambdas' in bucket_names
def s3(self):
    """Return a fresh S3 helper instance (one per call, no caching)."""
    helper = S3()
    return helper
def test_second(self):
    """Smoke test: listing buckets on the current session works and prints them."""
    print(S3().buckets())
def s3(self):
    """Lazily create the S3 helper on first access; reuse it afterwards."""
    cached = self._s3
    if cached is not None:
        return cached
    self._s3 = S3()
    return self._s3