def test_CreateDataBucket(self):
    s3client = AwsHelper.boto3_client('s3', region_name='ap-southeast-2')
    sts = AwsHelper.boto3_client('sts')
    account_id = str(sts.get_caller_identity()['Account'])
    os.environ['SHELVERY_MONO_THREAD'] = '1'
    share_with_id = destination_account
    os.environ['shelvery_share_aws_account_ids'] = str(share_with_id)
    engine = ShelveryEngine()

    print("Creating Data Buckets")
    engine.create_data_buckets()

    bucket_name = f"shelvery.data.{account_id}-ap-southeast-2.base2tools"
    response = s3client.get_bucket_policy(Bucket=bucket_name)['Policy']
    policy = json.loads(response)['Statement']
    print("Policy: " + str(policy))

    valid = False
    # TODO: add other checks on the policy?
    for statement in policy:
        if statement['Effect'] == 'Allow' and str(share_with_id) in statement['Principal']['AWS']:
            valid = True

    self.assertTrue(valid)
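# For reference, the statement the assertion above looks for would be shaped
# roughly like the following. This is a sketch only: the Action and Resource
# values are assumptions for illustration, not taken from the actual policy
# shelvery writes.
#
# {
#     "Effect": "Allow",
#     "Principal": {"AWS": "arn:aws:iam::<share_with_id>:root"},
#     "Action": ["s3:Get*", "s3:List*"],          # assumed
#     "Resource": "arn:aws:s3:::<bucket_name>/*"  # assumed
# }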
def test_CreateSharingInfo(self):
    ebs_backups_engine = ShelveryEBSBackup()
    try:
        os.environ['shelvery_share_aws_account_ids'] = str(self.share_with_id)
        backups = ebs_backups_engine.create_backups()
    except Exception as e:
        print(f"Failed with {e}")
        traceback.print_exc(file=sys.stdout)
        raise e
    finally:
        del os.environ['shelvery_share_aws_account_ids']

    valid = False
    for backup in backups:
        if backup.entity_id == self.volume['VolumeId']:
            s3path = f"{S3_DATA_PREFIX}/shared/{self.share_with_id}/{ebs_backups_engine.get_engine_type()}/{backup.name}.yaml"
            s3bucket = ShelveryEngine.get_local_bucket_name()
            bucket = boto3.resource('s3').Bucket(s3bucket)
            s3_object = bucket.Object(s3path)
            content = s3_object.get()['Body'].read()
            # backup metadata is serialized as a python object, so the full loader is required
            restored_br = yaml.load(content, Loader=yaml.Loader)
            engine_backup = ebs_backups_engine.get_backup_resource(backup.region, backup.backup_id)
            self.assertEqual(restored_br.backup_id, engine_backup.backup_id)
            self.assertEqual(restored_br.name, engine_backup.name)
            self.assertEqual(restored_br.region, engine_backup.region)
            for tag in ['name', 'date_created', 'entity_id', 'region', 'retention_type']:
                self.assertEqual(
                    restored_br.tags[f"{RuntimeConfig.get_tag_prefix()}:{tag}"],
                    engine_backup.tags[f"{RuntimeConfig.get_tag_prefix()}:{tag}"]
                )
            valid = True

    self.assertTrue(valid)
def test_CreateBackupData(self):
    ebs_backups_engine = ShelveryEBSBackup()
    try:
        backups = ebs_backups_engine.create_backups()
    except Exception as e:
        print(f"Failed with {e}")
        traceback.print_exc(file=sys.stdout)
        raise e

    valid = False
    # validate a backup was created for the test volume
    for backup in backups:
        if backup.entity_id == self.volume['VolumeId']:
            snapshot_id = backup.backup_id
            self.created_snapshots.append(snapshot_id)

            # wait for snapshot to become available
            ebs_backups_engine.wait_backup_available(backup.region, backup.backup_id, None, None)

            # allow buffer period for engine to write data to s3
            time.sleep(20)

            # this is the backup that gets stored in s3
            engine_backup = ebs_backups_engine.get_backup_resource(backup.region, backup.backup_id)

            # verify the s3 data
            s3path = f"{S3_DATA_PREFIX}/{ebs_backups_engine.get_engine_type()}/{engine_backup.name}.yaml"
            s3bucket = ShelveryEngine.get_local_bucket_name()
            print(f"Using bucket {s3bucket}")
            print(f"Using path {s3path}")
            bucket = boto3.resource('s3').Bucket(s3bucket)
            s3_object = bucket.Object(s3path)
            content = s3_object.get()['Body'].read()
            # backup metadata is serialized as a python object, so the full loader is required
            restored_br = yaml.load(content, Loader=yaml.Loader)

            self.assertEqual(restored_br.backup_id, engine_backup.backup_id)
            self.assertEqual(restored_br.name, engine_backup.name)
            self.assertEqual(restored_br.region, engine_backup.region)

            print(f"Tags restored: \n{yaml.dump(restored_br.tags)}\n")
            print(f"Tags backup: \n{yaml.dump(engine_backup.tags)}\n")
            self.assertEqual(restored_br.tags['Name'], engine_backup.tags['Name'])
            for tag in ['name', 'date_created', 'entity_id', 'region', 'retention_type']:
                self.assertEqual(
                    restored_br.tags[f"{RuntimeConfig.get_tag_prefix()}:{tag}"],
                    engine_backup.tags[f"{RuntimeConfig.get_tag_prefix()}:{tag}"]
                )
            valid = True

    self.assertTrue(valid)
def test_CleanBackupData(self):
    ebs_backups_engine = ShelveryEBSBackup()
    try:
        backups = ebs_backups_engine.create_backups()
    except Exception as e:
        print(f"Failed with {e}")
        traceback.print_exc(file=sys.stdout)
        raise e

    ec2client = boto3.client('ec2')
    valid = False
    # validate a backup was created for the test volume, then clean it up
    for backup in backups:
        if backup.entity_id == self.volume['VolumeId']:
            snapshot_id = backup.backup_id
            snapshots = ec2client.describe_snapshots(SnapshotIds=[snapshot_id])['Snapshots']
            self.assertEqual(len(snapshots), 1)

            # backdate the snapshot so clean_backups() treats it as expired
            ec2client.create_tags(
                Resources=[snapshot_id],
                Tags=[{
                    'Key': f"{RuntimeConfig.get_tag_prefix()}:date_created",
                    'Value': datetime(2000, 1, 1).strftime(BackupResource.TIMESTAMP_FORMAT)
                }]
            )
            ebs_backups_engine.clean_backups()

            # the snapshot must be gone
            with self.assertRaises(botocore.exceptions.ClientError) as context:
                ec2client.describe_snapshots(SnapshotIds=[snapshot_id])
            self.assertIn('does not exist', context.exception.response['Error']['Message'])
            self.assertEqual('InvalidSnapshot.NotFound', context.exception.response['Error']['Code'])

            # the backup metadata must be moved under the 'removed' prefix
            s3path = f"{S3_DATA_PREFIX}/{ebs_backups_engine.get_engine_type()}/removed/{backup.name}.yaml"
            s3bucket = ShelveryEngine.get_local_bucket_name()
            bucket = boto3.resource('s3').Bucket(s3bucket)
            s3_object = bucket.Object(s3path)
            content = s3_object.get()['Body'].read()
            # backup metadata is serialized as a python object, so the full loader is required
            restored_br = yaml.load(content, Loader=yaml.Loader)

            self.assertEqual(restored_br.backup_id, backup.backup_id)
            self.assertEqual(restored_br.name, backup.name)
            self.assertEqual(restored_br.region, backup.region)
            self.assertIsNotNone(restored_br.date_deleted)
            self.assertEqual(restored_br.date_created, datetime(2000, 1, 1))
            valid = True

    self.assertTrue(valid)
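# Taken together, the three data tests above exercise shelvery's S3 key layout
# under the local bucket (shelvery.data.<account>-<region>.base2tools):
#
#   {S3_DATA_PREFIX}/{engine_type}/{backup_name}.yaml                      - live backup metadata
#   {S3_DATA_PREFIX}/{engine_type}/removed/{backup_name}.yaml              - metadata of cleaned-up backups
#   {S3_DATA_PREFIX}/shared/{account_id}/{engine_type}/{backup_name}.yaml  - metadata shared with another account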
def __init__(self):
    ShelveryEngine.__init__(self)
    # default region will be picked up in AwsHelper.boto3_client call
    self.region = boto3.session.Session().region_name
def __init__(self):
    ShelveryEngine.__init__(self)
    self.redshift_client = AwsHelper.boto3_client(
        'redshift',
        arn=self.role_arn,
        external_id=self.role_external_id
    )
    # default region will be picked up in AwsHelper.boto3_client call
    self.region = boto3.session.Session().region_name
def __init__(self):
    ShelveryEngine.__init__(self)
    self.ec2client = boto3.client('ec2')
    # default region will be picked up in boto3.client call
    self.region = boto3.session.Session().region_name
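# The `arn`/`external_id` arguments passed to AwsHelper.boto3_client in the
# Redshift constructor above presumably trigger an STS assume-role before the
# client is created. A minimal sketch of that pattern (hypothetical: the real
# AwsHelper may differ, e.g. by caching or refreshing credentials):

import boto3

def boto3_client_for_role(service, arn=None, external_id=None, **kwargs):
    # no role given: fall back to the ambient credentials
    if arn is None:
        return boto3.client(service, **kwargs)
    assume_args = {'RoleArn': arn, 'RoleSessionName': 'shelvery'}
    if external_id is not None:
        assume_args['ExternalId'] = external_id
    # exchange the current identity for temporary role credentials
    credentials = boto3.client('sts').assume_role(**assume_args)['Credentials']
    return boto3.client(
        service,
        aws_access_key_id=credentials['AccessKeyId'],
        aws_secret_access_key=credentials['SecretAccessKey'],
        aws_session_token=credentials['SessionToken'],
        **kwargs
    )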