def lambda_handler(event, context):
    """Terminate every running or stopped EC2 instance in each target region.

    Credentials and the region list are extracted from the incoming event via
    the shared ``common`` helpers.  Returns the event unchanged so the
    handler can be chained (e.g. in a Step Functions workflow).
    """
    aws_access_key_id = common.get_access_key(event)
    aws_secret_access_key = common.get_secret_access_key(event)
    aws_session_token = common.get_session_token(event)
    regions = common.get_regions(event)
    for region in regions:
        region_name = region['RegionName']
        ec2client = boto3.client(
            'ec2',
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            region_name=region_name,
            aws_session_token=aws_session_token)
        # Defect fix: a bare describe_instances() returns at most one page,
        # silently missing instances in accounts with many reservations.
        paginator = ec2client.get_paginator('describe_instances')
        for page in paginator.paginate():
            for reservation in page['Reservations']:
                for instance in reservation['Instances']:
                    instance_id = instance['InstanceId']
                    state = instance['State']['Name']
                    # Transient states (pending/stopping/shutting-down) are
                    # skipped, exactly as before.
                    if state in ('running', 'stopped'):
                        print('Terminating instance ' + instance_id)
                        try:
                            ec2client.terminate_instances(
                                InstanceIds=[instance_id])
                        except Exception as e:
                            # One protected instance (termination protection)
                            # must not abort the rest of the sweep.
                            print(e)
    return event
def lambda_handler(event, context):
    """Delete every RDS/Aurora DB cluster and DB subnet group in each region.

    Credentials and the region list come from the event via the shared
    ``common`` helpers.  Returns the event unchanged.
    """
    aws_access_key_id = common.get_access_key(event)
    aws_secret_access_key = common.get_secret_access_key(event)
    aws_session_token = common.get_session_token(event)
    regions = common.get_regions(event)
    for region in regions:
        region_name = region['RegionName']
        rds = boto3.client(
            'rds',
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            region_name=region_name,
            aws_session_token=aws_session_token)
        for cluster in rds.describe_db_clusters()['DBClusters']:
            cluster_id = cluster['DBClusterIdentifier']
            try:
                rds.delete_db_cluster(DBClusterIdentifier=cluster_id,
                                      SkipFinalSnapshot=True)
            except Exception as e:
                # Defect fix: a cluster with deletion protection or member
                # instances raises; previously that aborted every remaining
                # cluster, subnet group, and region.
                print(e)
        for subnet in rds.describe_db_subnet_groups()['DBSubnetGroups']:
            subnet_group_name = subnet['DBSubnetGroupName']
            try:
                rds.delete_db_subnet_group(
                    DBSubnetGroupName=subnet_group_name)
            except Exception as e:
                # Subnet groups still referenced by a (still-deleting)
                # cluster cannot be removed yet; keep sweeping.
                print(e)
    return event
def lambda_handler(event, context):
    """Delete every SageMaker endpoint and notebook instance in each region.

    Credentials and the region list come from the event via the shared
    ``common`` helpers.  Returns the event unchanged.
    """
    aws_access_key_id = common.get_access_key(event)
    aws_secret_access_key = common.get_secret_access_key(event)
    aws_session_token = common.get_session_token(event)
    regions = common.get_regions(event)
    for region in regions:
        region_name = region['RegionName']
        try:
            sagemaker = boto3.client(
                'sagemaker',
                aws_access_key_id=aws_access_key_id,
                aws_secret_access_key=aws_secret_access_key,
                region_name=region_name,
                aws_session_token=aws_session_token)
            endpoints = sagemaker.list_endpoints()
            for endpoint in endpoints['Endpoints']:
                endpoint_name = endpoint['EndpointName']
                try:
                    sagemaker.delete_endpoint(EndpointName=endpoint_name)
                except Exception as e:
                    # Defect fix: previously the first failing delete was
                    # caught by the region-wide except and silently skipped
                    # every remaining endpoint and notebook in the region.
                    print(e)
            instances = sagemaker.list_notebook_instances()
            for instance in instances['NotebookInstances']:
                instance_name = instance['NotebookInstanceName']
                try:
                    # NOTE(review): delete fails while a notebook is
                    # InService -- it may need stopping first; confirm the
                    # intended ordering with the companion stop handler.
                    sagemaker.delete_notebook_instance(
                        NotebookInstanceName=instance_name)
                except Exception as e:
                    print(e)
        except Exception as e:
            # Keeps unsupported/unreachable regions from aborting the sweep.
            print(e)
    return event
def lambda_handler(event, context):
    """Delete every EBS volume and owned EBS snapshot in each target region.

    The account id (from STS) scopes describe_snapshots to snapshots this
    account owns -- without it, public snapshots would be listed too.
    Returns the event unchanged.
    """
    aws_access_key_id = common.get_access_key(event)
    aws_secret_access_key = common.get_secret_access_key(event)
    aws_session_token = common.get_session_token(event)
    sts = boto3.client(
        'sts',
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        aws_session_token=aws_session_token)
    account_id = sts.get_caller_identity()["Account"]
    regions = common.get_regions(event)
    for region in regions:
        region_name = region['RegionName']
        ec2client = boto3.client(
            'ec2',
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            region_name=region_name,
            aws_session_token=aws_session_token)
        volumes = ec2client.describe_volumes()
        for volume in volumes['Volumes']:
            print('Deleting EBS (' + volume['VolumeId'] + ')')
            try:
                ec2client.delete_volume(VolumeId=volume['VolumeId'],
                                        DryRun=False)
            except Exception as e:
                # Defect fix: an attached volume raises VolumeInUse;
                # previously that aborted all remaining volumes, every
                # snapshot, and every remaining region.
                print(e)
        snapshots = ec2client.describe_snapshots(OwnerIds=[account_id])
        for snapshot in snapshots['Snapshots']:
            print('Deleting Snapshot (' + snapshot['SnapshotId'] + ')')
            try:
                ec2client.delete_snapshot(SnapshotId=snapshot['SnapshotId'],
                                          DryRun=False)
            except Exception as e:
                # A snapshot backing a registered AMI cannot be deleted;
                # keep sweeping the rest.
                print(e)
    return event
def get_data():
    """Return the region-wise, mass-weighted RMS of the merged run fields.

    Loads U, V, SLI and QT for RUNS, derives vorticity from the horizontal
    wind components, labels each latitude with its region, then reduces each
    region group with the global mass-weighted RMS.
    """
    merged = get_merged_data(['U', 'V', 'SLI', 'QT'], RUNS)
    # Vorticity is derived from the horizontal winds before grouping.
    merged['VORT'] = vorticity(merged.U, merged.V)
    region_labels = common.get_regions(merged.y)
    return merged.groupby(region_labels).apply(global_mass_weighted_rms)
def lambda_handler(event, context):
    """Stop every in-service SageMaker notebook instance in each region.

    Credentials and the region list come from the event via the shared
    ``common`` helpers.  Returns the event unchanged.
    """
    aws_access_key_id = common.get_access_key(event)
    aws_secret_access_key = common.get_secret_access_key(event)
    aws_session_token = common.get_session_token(event)
    # Defect fix: the original fetched the account id from STS and never
    # used it -- the dead variable and extra API round-trip are removed.
    regions = common.get_regions(event)
    for region in regions:
        region_name = region['RegionName']
        try:
            sagemaker = boto3.client(
                'sagemaker',
                aws_access_key_id=aws_access_key_id,
                aws_secret_access_key=aws_secret_access_key,
                region_name=region_name,
                aws_session_token=aws_session_token)
            print(region_name)
            # Defect fix: only InService notebooks can be stopped; asking to
            # stop one in any other state raises ValidationException, and
            # the region-wide except then skipped the remaining notebooks.
            instances = sagemaker.list_notebook_instances(
                StatusEquals='InService')
            for instance in instances['NotebookInstances']:
                instance_name = instance['NotebookInstanceName']
                try:
                    sagemaker.stop_notebook_instance(
                        NotebookInstanceName=instance_name)
                    print("Stopping (SageMaker) :" + instance_name)
                except Exception as e:
                    print(e)
        except Exception as e:
            # Keeps unsupported/unreachable regions from aborting the sweep.
            print(e)
    return event
def lambda_handler(event, context):
    """Drop every Athena database (and each of its tables) in every region.

    For each region: run ``show databases``, read the result file back from
    S3, then for each database run ``show tables``, drop each listed table,
    and finally drop the database itself.  Returns the event unchanged.
    """
    aws_access_key_id = common.get_access_key(event)
    aws_secret_access_key = common.get_secret_access_key(event)
    aws_session_token = common.get_session_token(event)
    regions = common.get_regions(event)
    for region in regions:
        try:
            region_name = region['RegionName']
            client = boto3.client(
                'athena',
                aws_access_key_id=aws_access_key_id,
                aws_secret_access_key=aws_secret_access_key,
                region_name=region_name,
                aws_session_token=aws_session_token)
            # NOTE(review): this bucket name is a fresh UUID that is never
            # created here; Athena can only write results to an existing
            # bucket -- confirm the intended setup with the caller.
            s3_bucket = str(uuid.uuid4())
            s3 = boto3.resource(
                's3',
                aws_access_key_id=aws_access_key_id,
                aws_secret_access_key=aws_secret_access_key,
                region_name=region_name,
                aws_session_token=aws_session_token)
            databases = _query_result_lines(
                client, s3, s3_bucket, 'show databases;', None)
            for database in databases:
                tables = _query_result_lines(
                    client, s3, s3_bucket, 'show tables', database)
                for table in tables:
                    # Drops are fire-and-forget: their output is never read.
                    _start_query(client, s3_bucket,
                                 'drop table ' + table, database)
                    print('Deleting table (' + table + ')')
                _start_query(client, s3_bucket,
                             'drop database ' + database, None)
                print('Deleting database (' + database + ')')
        except EndpointConnectionError:
            # Athena is not available in every region; that is expected.
            print("Endpoint Connection Error - OK")
    return event


def _start_query(client, s3_bucket, query_string, database):
    """Start an Athena query with SSE-S3 encrypted output; return the response.

    ``database`` is the optional query-execution context (None for queries
    that need no database, like ``show databases``).
    """
    kwargs = {
        'QueryString': query_string,
        'ResultConfiguration': {
            'OutputLocation': 's3://' + s3_bucket,
            'EncryptionConfiguration': {
                'EncryptionOption': 'SSE_S3',
                'KmsKey': 'string'
            }
        }
    }
    if database is not None:
        kwargs['QueryExecutionContext'] = {'Database': database}
    return client.start_query_execution(**kwargs)


def _wait_for_query(client, query_execution_id, timeout=30):
    """Poll until the query leaves QUEUED/RUNNING; return the final state.

    Defect fix: the original slept a fixed 2 seconds, so any slower query
    made the subsequent result download fail on a not-yet-written object.
    """
    deadline = time.time() + timeout
    while time.time() < deadline:
        status = client.get_query_execution(
            QueryExecutionId=query_execution_id)
        state = status['QueryExecution']['Status']['State']
        if state not in ('QUEUED', 'RUNNING'):
            return state
        time.sleep(1)
    return 'TIMEOUT'


def _query_result_lines(client, s3, s3_bucket, query_string, database):
    """Run a query, download its .txt result from S3, return it as lines."""
    response = _start_query(client, s3_bucket, query_string, database)
    query_id = response['QueryExecutionId']
    _wait_for_query(client, query_id)
    result_file = query_id + ".txt"
    local_path = "/tmp/" + result_file
    s3.meta.client.download_file(s3_bucket, result_file, local_path)
    with open(local_path, 'r') as result:
        return result.read().splitlines()
from jacobian import * from src.data import open_data import common import pandas as pd # bootstrap sample size n = 20 hatch_threshold = 10 # compute jacobians training = open_data('training') training['region'] = common.get_regions(training.y) tropics = training.isel(y=slice(30,34)).load() tropics['time_of_day'] = tropics.time % 1 p = tropics.p[0].values model = common.get_model('NN-All') samples = list(bootstrap_samples(tropics, n)) jacobians = [get_jacobian(model, sample) for sample in samples] # make plot fig, axs = plt.subplots( 4, 5, figsize=(common.textwidth, common.textwidth-2), sharex=True, sharey=True) plt.rcParams['hatch.color'] = '0.5' axs[0,0].invert_yaxis() axs[0,0].invert_xaxis() norm = SymLogNorm(1, 2, vmin=-1e5, vmax=1e5) for ax, jac in zip(axs.flat, jacobians): qt_qt = jac['QT']['QT'].detach().numpy()