def cli(context, args):
    """Populate the local environment from deployed stack resources, then run main()."""
    util.set_logger(args.verbose)
    # Only importable when executing through the CLI tool chain.
    from resource_manager_common import constant

    credentials = context.aws.load_credentials()
    resources = util.get_resources(context)
    profile = args.profile if args.profile else context.config.user_default_profile

    os.environ[c.ENV_SHARED_BUCKET] = context.config.configuration_bucket_name
    os.environ[c.ENV_S3_STORAGE] = resources[c.RES_S3_STORAGE]
    os.environ[c.ENV_DB_TABLE_CONTEXT] = resources[c.RES_DB_TABLE_CONTEXT]
    os.environ[c.ENV_VERBOSE] = str(args.verbose) if args.verbose else ""
    os.environ[c.ENV_SERVICE_ROLE] = resources[c.RES_SERVICE_ROLE]
    os.environ[c.ENV_REGION] = context.config.project_region
    os.environ[c.ENV_DEPLOYMENT_STACK_ARN] = resources[c.ENV_STACK_ID]
    os.environ[c.ENV_EVENT_EMITTER] = resources[c.RES_EVENT_EMITTER]
    os.environ[c.IS_LOCALLY_RUN] = "True"

    # Explicit CLI credentials win; otherwise fall back to the stored profile.
    if args.aws_access_key:
        os.environ["AWS_ACCESS_KEY"] = args.aws_access_key
    else:
        os.environ["AWS_ACCESS_KEY"] = credentials.get(profile, constant.ACCESS_KEY_OPTION)
    if args.aws_secret_key:
        os.environ["AWS_SECRET_KEY"] = args.aws_secret_key
    else:
        os.environ["AWS_SECRET_KEY"] = credentials.get(profile, constant.SECRET_KEY_OPTION)

    main({c.ENV_STACK_ID: resources[c.ENV_STACK_ID]}, type('obj', (object,), {}))
def cli(context, args):
    """Set up a minimal local environment and dispatch a synthetic lifecycle event."""
    util.set_logger(args.verbose)
    # Only importable when executing through the CLI tool chain.
    from resource_manager_common import constant

    credentials = context.aws.load_credentials()
    resources = util.get_resources(context)
    profile = args.profile if args.profile else context.config.user_default_profile

    os.environ[c.ENV_REGION] = context.config.project_region
    os.environ[c.ENV_VERBOSE] = str(args.verbose) if args.verbose else ""
    os.environ[c.IS_LOCALLY_RUN] = "True"
    if args.aws_access_key:
        os.environ["AWS_ACCESS_KEY"] = args.aws_access_key
    else:
        os.environ["AWS_ACCESS_KEY"] = credentials.get(profile, constant.ACCESS_KEY_OPTION)
    if args.aws_secret_key:
        os.environ["AWS_SECRET_KEY"] = args.aws_secret_key
    else:
        os.environ["AWS_SECRET_KEY"] = credentials.get(profile, constant.SECRET_KEY_OPTION)

    event = {
        'RequestType': args.event_type,
        c.ENV_STACK_ID: resources[c.ENV_STACK_ID]
    }
    lambda_context = type('obj', (object,),
                          {'function_name': resources[c.RES_LAMBDA_FIFOCONSUMER]})
    # NOTE: eval() dispatches a caller-supplied function name; acceptable only
    # because this is trusted local tooling.
    eval(args.function)(event, lambda_context)
def cli(context, args):
    """Configure the environment and run the FIFO producer load generator (with compression)."""
    util.set_logger(args.verbose)
    # Only importable when executing through the CLI tool chain.
    from resource_manager_common import constant

    credentials = context.aws.load_credentials()
    resources = util.get_resources(context)
    profile = args.profile if args.profile else context.config.user_default_profile
    producer = resources[c.RES_LAMBDA_FIFOPRODUCER]

    os.environ[c.ENV_DB_TABLE_CONTEXT] = resources[c.RES_DB_TABLE_CONTEXT]
    os.environ[c.ENV_VERBOSE] = str(args.verbose) if args.verbose else ""
    os.environ['err'] = str(args.erroneous_metrics) if args.erroneous_metrics else ""
    os.environ[c.ENV_REGION] = context.config.project_region
    os.environ["AWS_LAMBDA_FUNCTION_NAME"] = producer
    if args.aws_access_key:
        os.environ["AWS_ACCESS_KEY"] = args.aws_access_key
    else:
        os.environ["AWS_ACCESS_KEY"] = credentials.get(profile, constant.ACCESS_KEY_OPTION)
    if args.aws_secret_key:
        os.environ["AWS_SECRET_KEY"] = args.aws_secret_key
    else:
        os.environ["AWS_SECRET_KEY"] = credentials.get(profile, constant.SECRET_KEY_OPTION)
    os.environ[c.ENV_LAMBDA_PRODUCER] = producer
    os.environ[c.ENV_DEPLOYMENT_STACK_ARN] = resources[c.ENV_STACK_ID]

    generate_threads(producer, args.threads, args.iterations_per_thread,
                     args.events_per_iteration, args.sleep_duration_between_jobs,
                     args.use_lambda, args.event_type, args.sensitivity_type,
                     args.compression_type)
def cli(context, args):
    """Export stack resources into the environment and print the result of the named function."""
    util.set_logger(args.verbose)
    # Only importable when executing through the CLI tool chain.
    from resource_manager_common import constant

    credentials = context.aws.load_credentials()
    resources = util.get_resources(context)
    profile = args.profile if args.profile else context.config.user_default_profile

    os.environ[c.ENV_DB_TABLE_CONTEXT] = resources[c.RES_DB_TABLE_CONTEXT]
    # NOTE(review): DEPLOYMENT_STACK_ARN is populated with the FIFO consumer lambda
    # resource here, whereas sibling commands use the stack id -- confirm intended.
    os.environ[c.ENV_DEPLOYMENT_STACK_ARN] = resources[c.RES_LAMBDA_FIFOCONSUMER]
    os.environ[c.ENV_LAMBDA_PRODUCER] = resources[c.RES_LAMBDA_FIFOPRODUCER]
    os.environ[c.ENV_AMOEBA] = resources[c.RES_AMOEBA]
    os.environ[c.ENV_VERBOSE] = str(args.verbose) if args.verbose else ""
    os.environ[c.ENV_REGION] = context.config.project_region
    os.environ[c.ENV_S3_STORAGE] = resources[c.RES_S3_STORAGE]
    if args.aws_access_key:
        os.environ["AWS_ACCESS_KEY"] = args.aws_access_key
    else:
        os.environ["AWS_ACCESS_KEY"] = credentials.get(profile, constant.ACCESS_KEY_OPTION)
    if args.aws_secret_key:
        os.environ["AWS_SECRET_KEY"] = args.aws_secret_key
    else:
        os.environ["AWS_SECRET_KEY"] = credentials.get(profile, constant.SECRET_KEY_OPTION)

    # NOTE: eval() dispatches a caller-supplied function name; trusted local tooling only.
    fake_context = type('obj', (object,), {c.ENV_STACK_ID: resources[c.ENV_STACK_ID]})
    print(eval(args.function)(fake_context))
def cli(context, args):
    """Configure the full pipeline environment and run a synchronous SQL query."""
    # util is only importable when executing via the CLI tool chain.
    import util
    util.set_logger(args.verbose)
    from resource_manager_common import constant

    credentials = context.aws.load_credentials()
    resources = util.get_resources(context)
    profile = args.profile if args.profile else context.config.user_default_profile
    producer = resources[c.RES_LAMBDA_FIFOPRODUCER]

    os.environ[c.ENV_DB_TABLE_CONTEXT] = resources[c.RES_DB_TABLE_CONTEXT]
    os.environ[c.ENV_LAMBDA_CONSUMER] = resources[c.RES_LAMBDA_FIFOCONSUMER]
    os.environ[c.ENV_LAMBDA_PRODUCER] = producer
    os.environ[c.ENV_AMOEBA] = resources[c.RES_AMOEBA]
    os.environ[c.ENV_VERBOSE] = str(args.verbose) if args.verbose else ""
    os.environ[c.ENV_REGION] = context.config.project_region
    os.environ[c.ENV_S3_STORAGE] = resources[c.RES_S3_STORAGE]
    os.environ[c.ENV_DEPLOYMENT_STACK_ARN] = resources[c.ENV_STACK_ID]
    os.environ["AWS_LAMBDA_FUNCTION_NAME"] = os.environ[c.ENV_LAMBDA_PRODUCER]
    if args.aws_access_key:
        os.environ["AWS_ACCESS_KEY"] = args.aws_access_key
    else:
        os.environ["AWS_ACCESS_KEY"] = credentials.get(profile, constant.ACCESS_KEY_OPTION)
    if args.aws_secret_key:
        os.environ["AWS_SECRET_KEY"] = args.aws_secret_key
    else:
        os.environ["AWS_SECRET_KEY"] = credentials.get(profile, constant.SECRET_KEY_OPTION)

    fake_context = type('obj', (object,), {c.ENV_STACK_ID: resources[c.ENV_STACK_ID]})
    print(query(fake_context, {"sql": args.sql}, sync=True))
def cli(context, args):
    """Export S3/table resources into the environment and invoke main() locally."""
    resources = util.get_resources(context)

    os.environ[c.ENV_DB_TABLE_CONTEXT] = resources[c.RES_DB_TABLE_CONTEXT]
    os.environ[c.ENV_VERBOSE] = str(args.verbose) if args.verbose else ""
    os.environ[c.ENV_REGION] = context.config.project_region
    os.environ[c.ENV_S3_STORAGE] = resources[c.RES_S3_STORAGE]
    # NOTE(review): AWS_LAMBDA_FUNCTION_NAME is populated from the S3 storage
    # resource -- looks like a copy/paste oddity; confirm downstream expectations.
    os.environ["AWS_LAMBDA_FUNCTION_NAME"] = resources[c.RES_S3_STORAGE]

    lambda_context = type('obj', (object,),
                          {'function_name': resources[c.RES_DB_TABLE_CONTEXT]})
    main({c.ENV_STACK_ID: resources[c.ENV_STACK_ID]}, lambda_context)
def cli(context, args):
    """Export region/table env vars and print the result of the named function."""
    # util is only importable when executing via the CLI tool chain.
    import util

    resources = util.get_resources(context)
    os.environ[c.ENV_DB_TABLE_CONTEXT] = resources[c.RES_DB_TABLE_CONTEXT]
    os.environ[c.ENV_REGION] = context.config.project_region

    # NOTE: eval() dispatches a caller-supplied function name; trusted local tooling only.
    fake_context = type('obj', (object,), {c.ENV_STACK_ID: resources[c.ENV_STACK_ID]})
    print(eval(args.function)(fake_context))
def cli(context, args):
    """Prepare a locally-run environment and call launch() with a synthetic request id."""
    resources = util.get_resources(context)

    os.environ[c.ENV_S3_STORAGE] = resources[c.RES_S3_STORAGE]
    os.environ[c.ENV_DB_TABLE_CONTEXT] = resources[c.RES_DB_TABLE_CONTEXT]
    os.environ[c.ENV_VERBOSE] = str(args.verbose) if args.verbose else ""
    os.environ[c.ENV_REGION] = context.config.project_region
    os.environ[c.IS_LOCALLY_RUN] = "True"
    os.environ[c.ENV_DEPLOYMENT_STACK_ARN] = resources[c.ENV_STACK_ID]

    launch({}, {'aws_request_id': 123})
def debug_file(context, args):
    """Print a debug file: from local disk when --file-path is set, else from the S3 storage bucket.

    The S3 key is normalized by stripping a single leading slash before the read.
    """
    if args.file_path:
        debug_local_file(context, args)
        return

    s3 = s3fs.S3FileSystem()
    resources = util.get_resources(context)
    bucket = resources[c.RES_S3_STORAGE]
    key = args.s3_key
    # FIX: the original used key.index("/") == 0, which raises ValueError when the
    # key contains no slash at all; startswith() safely strips only a leading slash.
    if key.startswith("/"):
        key = key[1:]
    print(read(s3, bucket, key))
def main(context, args):
    """Drive the FIFO consumer lambda handler locally with a synthetic CloudFormation event."""
    import util

    resources = util.get_resources(context)
    os.environ[c.ENV_REGION] = context.config.project_region

    event = {
        'RequestType': args.event_type,
        'LogicalResourceId': "1234"
    }
    lambda_context = type('obj', (object,),
                          {'function_name': resources[c.RES_LAMBDA_FIFOCONSUMER]})
    handler(event, lambda_context)
def cli(context, args):
    """Export region/table env vars plus AWS credentials from the selected profile."""
    # Only importable when executing through the CLI tool chain.
    from resource_manager_common import constant

    credentials = context.aws.load_credentials()
    resources = util.get_resources(context)
    profile = args.profile if args.profile else context.config.user_default_profile

    os.environ[c.ENV_REGION] = context.config.project_region
    os.environ[c.ENV_DB_TABLE_CONTEXT] = resources[c.RES_DB_TABLE_CONTEXT]
    os.environ["AWS_ACCESS_KEY"] = credentials.get(profile, constant.ACCESS_KEY_OPTION)
    os.environ["AWS_SECRET_KEY"] = credentials.get(profile, constant.SECRET_KEY_OPTION)
def debug_file(context, args):
    """Print a debug file: from local disk when --file-path is set, else from the S3 storage bucket."""
    if args.file_path:
        debug_local_file(context, args)
        return

    s3 = s3fs.S3FileSystem()
    resources = util.get_resources(context)
    bucket = resources[c.RES_S3_STORAGE]
    key = args.s3_key
    # This reader expects the key rooted with a slash; prepend one when missing.
    if not key.startswith('/'):
        key = "/{}".format(key)
    print(read(s3, bucket, key))
def cli(context, args):
    """Export env vars and credentials, then print the result of the named function on args.param."""
    util.set_logger(args.verbose)
    # Only importable when executing through the CLI tool chain.
    from resource_manager_common import constant

    credentials = context.aws.load_credentials()
    resources = util.get_resources(context)
    profile = args.profile if args.profile else context.config.user_default_profile

    os.environ[c.ENV_REGION] = context.config.project_region
    os.environ[c.ENV_DB_TABLE_CONTEXT] = resources[c.RES_DB_TABLE_CONTEXT]
    if args.aws_access_key:
        os.environ["AWS_ACCESS_KEY"] = args.aws_access_key
    else:
        os.environ["AWS_ACCESS_KEY"] = credentials.get(profile, constant.ACCESS_KEY_OPTION)
    if args.aws_secret_key:
        os.environ["AWS_SECRET_KEY"] = args.aws_secret_key
    else:
        os.environ["AWS_SECRET_KEY"] = credentials.get(profile, constant.SECRET_KEY_OPTION)

    # NOTE: eval() dispatches a caller-supplied function name; trusted local tooling only.
    print(eval(args.function)({}, args.param))
def cli(context, args):
    """Export env vars and credentials, then print the named function's result for this stack."""
    # util is only importable when executing via the CLI tool chain.
    import util
    util.set_logger(args.verbose)
    from resource_manager_common import constant

    credentials = context.aws.load_credentials()
    resources = util.get_resources(context)
    profile = args.profile if args.profile else context.config.user_default_profile

    os.environ[c.ENV_DB_TABLE_CONTEXT] = resources[c.RES_DB_TABLE_CONTEXT]
    os.environ[c.ENV_REGION] = context.config.project_region
    if args.aws_access_key:
        os.environ["AWS_ACCESS_KEY"] = args.aws_access_key
    else:
        os.environ["AWS_ACCESS_KEY"] = credentials.get(profile, constant.ACCESS_KEY_OPTION)
    if args.aws_secret_key:
        os.environ["AWS_SECRET_KEY"] = args.aws_secret_key
    else:
        os.environ["AWS_SECRET_KEY"] = credentials.get(profile, constant.SECRET_KEY_OPTION)

    # NOTE: eval() dispatches a caller-supplied function name; trusted local tooling only.
    fake_context = type('obj', (object,), {c.ENV_STACK_ID: resources[c.ENV_STACK_ID]})
    print(eval(args.function)(fake_context))
def cli(context, args):
    """Export the full environment (including credentials) and invoke main() locally."""
    # Only importable when executing through the CLI tool chain.
    from resource_manager_common import constant

    credentials = context.aws.load_credentials()
    resources = util.get_resources(context)
    profile = args.profile if args.profile else context.config.user_default_profile

    os.environ[c.ENV_DB_TABLE_CONTEXT] = resources[c.RES_DB_TABLE_CONTEXT]
    os.environ[c.ENV_VERBOSE] = str(args.verbose) if args.verbose else ""
    os.environ[c.ENV_REGION] = context.config.project_region
    os.environ[c.ENV_S3_STORAGE] = resources[c.RES_S3_STORAGE]
    os.environ[c.ENV_DEPLOYMENT_STACK_ARN] = resources[c.ENV_STACK_ID]
    # NOTE(review): AWS_LAMBDA_FUNCTION_NAME is populated from the S3 storage
    # resource -- confirm downstream expectations.
    os.environ["AWS_LAMBDA_FUNCTION_NAME"] = resources[c.RES_S3_STORAGE]
    if args.aws_access_key:
        os.environ["AWS_ACCESS_KEY"] = args.aws_access_key
    else:
        os.environ["AWS_ACCESS_KEY"] = credentials.get(profile, constant.ACCESS_KEY_OPTION)
    if args.aws_secret_key:
        os.environ["AWS_SECRET_KEY"] = args.aws_secret_key
    else:
        os.environ["AWS_SECRET_KEY"] = credentials.get(profile, constant.SECRET_KEY_OPTION)

    lambda_context = type('obj', (object,),
                          {'function_name': resources[c.RES_DB_TABLE_CONTEXT]})
    main({c.ENV_STACK_ID: resources[c.ENV_STACK_ID]}, lambda_context)
def cli(context, args):
    """Configure the environment and run the FIFO producer load generator (no compression arg)."""
    resources = util.get_resources(context)
    producer = resources[c.RES_LAMBDA_FIFOPRODUCER]

    os.environ[c.ENV_DB_TABLE_CONTEXT] = resources[c.RES_DB_TABLE_CONTEXT]
    os.environ[c.ENV_VERBOSE] = str(args.verbose) if args.verbose else ""
    os.environ['err'] = str(args.erroneous_metrics) if args.erroneous_metrics else ""
    os.environ[c.ENV_REGION] = context.config.project_region
    os.environ["AWS_LAMBDA_FUNCTION_NAME"] = producer
    os.environ[c.ENV_LAMBDA_PRODUCER] = producer
    os.environ[c.ENV_DEPLOYMENT_STACK_ARN] = resources[c.ENV_STACK_ID]

    generate_threads(producer, args.threads, args.iterations_per_thread,
                     args.events_per_iteration, args.sleep_duration_between_jobs,
                     args.use_lambda, args.event_type, args.sensitivity_type)
def cli(context, args):
    """Export pipeline env vars and print the result of a SQL query against the stack."""
    # util is only importable when executing via the CLI tool chain.
    import util

    resources = util.get_resources(context)
    os.environ[c.ENV_DB_TABLE_CONTEXT] = resources[c.RES_DB_TABLE_CONTEXT]
    os.environ[c.ENV_LAMBDA_CONSUMER] = resources[c.RES_LAMBDA_FIFOCONSUMER]
    os.environ[c.ENV_LAMBDA_PRODUCER] = resources[c.RES_LAMBDA_FIFOPRODUCER]
    os.environ[c.ENV_AMOEBA_1] = resources[c.RES_AMOEBA_1]
    os.environ[c.ENV_VERBOSE] = str(args.verbose) if args.verbose else ""
    os.environ[c.ENV_REGION] = context.config.project_region
    os.environ[c.ENV_S3_STORAGE] = resources[c.RES_S3_STORAGE]
    os.environ["AWS_LAMBDA_FUNCTION_NAME"] = os.environ[c.ENV_LAMBDA_PRODUCER]

    fake_context = type('obj', (object,), {c.ENV_STACK_ID: resources[c.ENV_STACK_ID]})
    print(query(fake_context, {"sql": args.sql}))
def cli(context, args):
    """Export the amoeba pipeline env vars and print the named function's result."""
    resources = util.get_resources(context)

    os.environ[c.ENV_DB_TABLE_CONTEXT] = resources[c.RES_DB_TABLE_CONTEXT]
    # NOTE(review): DEPLOYMENT_STACK_ARN is populated with the FIFO consumer lambda
    # resource here, whereas sibling commands use the stack id -- confirm intended.
    os.environ[c.ENV_DEPLOYMENT_STACK_ARN] = resources[c.RES_LAMBDA_FIFOCONSUMER]
    os.environ[c.ENV_LAMBDA_PRODUCER] = resources[c.RES_LAMBDA_FIFOPRODUCER]
    os.environ[c.ENV_AMOEBA_1] = resources[c.RES_AMOEBA_1]
    os.environ[c.ENV_AMOEBA_2] = resources[c.RES_AMOEBA_2]
    os.environ[c.ENV_AMOEBA_3] = resources[c.RES_AMOEBA_3]
    os.environ[c.ENV_AMOEBA_4] = resources[c.RES_AMOEBA_4]
    os.environ[c.ENV_AMOEBA_5] = resources[c.RES_AMOEBA_5]
    os.environ[c.ENV_VERBOSE] = str(args.verbose) if args.verbose else ""
    os.environ[c.ENV_REGION] = context.config.project_region
    os.environ[c.ENV_S3_STORAGE] = resources[c.RES_S3_STORAGE]

    # NOTE: eval() dispatches a caller-supplied function name; trusted local tooling only.
    fake_context = type('obj', (object,), {c.ENV_STACK_ID: resources[c.ENV_STACK_ID]})
    print(eval(args.function)(fake_context))
def write_geo_files(context, args):
    """Upload the bundled GeoLite2/OpenStreetMap reference files to the project's S3 storage bucket.

    Each file is read from the gem's source tree (decompressing .gz payloads first)
    and handed to write_file() along with its dot-free base path and extension.
    """
    resources = util.get_resources(context)
    s3_bucket = resources[c.RES_S3_STORAGE]
    s3 = s3fs.S3FileSystem()
    geo_data_files = [
        "GeoLite2/GeoLite2_Blocks/IPv4/GeoLite2-Country-Blocks-IPv4.csv",
        "GeoLite2/GeoLite2_Blocks/IPv6/GeoLite2-Country-Blocks-IPv6.csv",
        "GeoLite2/GeoLite2_Locations/GeoLite2-Country-Locations-de.csv",
        "GeoLite2/GeoLite2_Locations/GeoLite2-Country-Locations-en.csv",
        "GeoLite2/GeoLite2_Locations/GeoLite2-Country-Locations-es.csv",
        "GeoLite2/GeoLite2_Locations/GeoLite2-Country-Locations-fr.csv",
        "GeoLite2/GeoLite2_Locations/GeoLite2-Country-Locations-ja.csv",
        "GeoLite2/GeoLite2_Locations/GeoLite2-Country-Locations-pt-BR.csv",
        "GeoLite2/GeoLite2_Locations/GeoLite2-Country-Locations-ru.csv",
        "GeoLite2/GeoLite2_Locations/GeoLite2-Country-Locations-zh-CN.csv",
        "GeoLite2/GeoLite2-COPYRIGHT.txt",
        "GeoLite2/GeoLite2-LICENSE.txt",
        "OpenStreetMap/level_2_polygons.json.gz"
    ]
    bucket = "s3://{}".format(s3_bucket)
    for path in geo_data_files:
        parts = path.split(".")
        path_with_filename = parts[0]
        # Last dotted component is the outer extension ("csv", "txt", or "gz").
        extension = parts[1]
        if len(parts) == 3:
            extension = parts[2]
        rel_path = "Gems/CloudGemMetric/v1/{}".format(path)
        # FIX: use context managers so handles are closed even if read() raises;
        # the original opened each file and never closed it (handle leak).
        # Also removed the unused locals `cwd` and `s3_open` from the original.
        if extension == 'gz':
            with gzip.open(rel_path) as file_object:
                content = file_object.read()
            # Report the inner extension of the compressed payload, e.g. "json".
            extension = parts[1]
        else:
            with open(rel_path, "r") as file_object:
                content = file_object.read()
        write_file(content, bucket, path, s3, path_with_filename, extension)
def cli(context, args):
    """Export region/table env vars and print the named function's result on args.param."""
    resources = util.get_resources(context)
    os.environ[c.ENV_REGION] = context.config.project_region
    os.environ[c.ENV_DB_TABLE_CONTEXT] = resources[c.RES_DB_TABLE_CONTEXT]
    # NOTE: eval() dispatches a caller-supplied function name; trusted local tooling only.
    print(eval(args.function)({}, args.param))
def cli(context, args):
    """Export the deployment's region and context table name into the environment."""
    resources = util.get_resources(context)
    config = context.config
    os.environ[c.ENV_REGION] = config.project_region
    os.environ[c.ENV_DB_TABLE_CONTEXT] = resources[c.RES_DB_TABLE_CONTEXT]
def cli(context, args):
    """Mark the run as local, export basic env vars, and dispatch a synthetic lifecycle event."""
    resources = util.get_resources(context)

    os.environ[c.ENV_REGION] = context.config.project_region
    os.environ[c.ENV_VERBOSE] = str(args.verbose) if args.verbose else ""
    os.environ[c.IS_LOCALLY_RUN] = "True"

    event = {
        'RequestType': args.event_type,
        c.ENV_STACK_ID: resources[c.ENV_STACK_ID]
    }
    lambda_context = type('obj', (object,),
                          {'function_name': resources[c.RES_LAMBDA_FIFOCONSUMER]})
    # NOTE: eval() dispatches a caller-supplied function name; trusted local tooling only.
    eval(args.function)(event, lambda_context)