def _connect_to_region(self, **kwargs):
    """Open an :class:`RDSConnection` bound to this object's region.

    If ``self._isRegionInfo`` is set, ``self.region`` is assumed to
    already be usable as-is and no region is passed explicitly (the
    caller supplies any needed region via ``kwargs``).  Otherwise
    ``self.region`` is treated as a region-name string: it is resolved
    against ``self.all_region(self.name)`` and the matching RegionInfo
    object is cached back onto ``self.region`` before connecting.

    :rtype: RDSConnection
    :return: a connection using this object's stored credentials
    """
    if self._isRegionInfo:
        return RDSConnection(
            aws_access_key_id=self.aws_access_key_id,
            aws_secret_access_key=self.aws_secret_access_key,
            **kwargs)
    # Resolve the region-name string into a RegionInfo object.
    for region in self.all_region(self.name):
        if region.name == self.region:
            self.region = region
            # BUG FIX: stop at the first match.  The original kept
            # scanning; after self.region is rebound to a RegionInfo,
            # further `region.name == self.region` comparisons could
            # never match anyway, so the remaining iterations were
            # wasted work.
            break
    return RDSConnection(
        aws_access_key_id=self.aws_access_key_id,
        aws_secret_access_key=self.aws_secret_access_key,
        region=self.region,
        **kwargs)
def test_db_subnet_group(self):
    """Exercise the full RDS DB-subnet-group lifecycle.

    Builds a throwaway VPC with one /24 subnet per available AZ, then
    creates, inspects, modifies, and deletes a DB subnet group against
    it, raising on any unexpected result.  All VPC resources are
    deleted at the end.
    """
    vpc_api = VPCConnection()
    rds_api = RDSConnection()
    vpc = vpc_api.create_vpc('10.0.0.0/16')
    az_list = vpc_api.get_all_zones(filters={'state': 'available'})
    subnet = list()
    n = 0
    for az in az_list:
        try:
            subnet.append(
                vpc_api.create_subnet(vpc.id, '10.0.' + str(n) + '.0/24',
                                      availability_zone=az.name))
            n = n + 1
        except Exception:
            # Best effort: some AZs may refuse subnet creation; skip them.
            pass
    grp_name = 'db_subnet_group' + str(int(time.time()))
    subnet_group = rds_api.create_db_subnet_group(
        grp_name, grp_name, [subnet[0].id, subnet[1].id])
    if not _is_ok(subnet_group, vpc.id, grp_name,
                  [subnet[0].id, subnet[1].id]):
        raise Exception("create_db_subnet_group returned bad values")
    rds_api.modify_db_subnet_group(grp_name, description='new description')
    subnet_grps = rds_api.get_all_db_subnet_groups(name=grp_name)
    if not _is_ok(subnet_grps[0], vpc.id, 'new description',
                  [subnet[0].id, subnet[1].id]):
        raise Exception(
            "modifying the subnet group desciption returned bad values")
    # NOTE(review): this step requires at least three subnets, i.e. three
    # usable AZs; with fewer, subnet[2] raises IndexError.
    rds_api.modify_db_subnet_group(grp_name,
                                   subnet_ids=[subnet[1].id, subnet[2].id])
    subnet_grps = rds_api.get_all_db_subnet_groups(name=grp_name)
    if not _is_ok(subnet_grps[0], vpc.id, 'new description',
                  [subnet[1].id, subnet[2].id]):
        raise Exception(
            "modifying the subnet group subnets returned bad values")
    rds_api.delete_db_subnet_group(subnet_group.name)
    try:
        rds_api.get_all_db_subnet_groups(name=grp_name)
    except Exception:
        # Expected: the group is gone, so the lookup should fail.
        pass
    else:
        # BUG FIX: the original raised this inside the try block, so its
        # own bare ``except: pass`` swallowed the failure and this check
        # could never fire.  Raising from ``else`` makes it effective.
        raise Exception(subnet_group.name +
                        " still accessible after delete_db_subnet_group")
    while n > 0:
        n = n - 1
        vpc_api.delete_subnet(subnet[n].id)
    vpc_api.delete_vpc(vpc.id)
def connect_rds(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """
    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.rds.RDSConnection`
    :return: A connection to RDS
    """
    # Imported lazily so that importing this module alone does not pull
    # in the RDS machinery.
    from boto.rds import RDSConnection as _RDSConnection
    return _RDSConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
def snapshot_rds():
    """Create a dated snapshot of every RDS instance on the account.

    NOTE(review): the original docstring described "cleaning up duplicate
    ebs snapshots" from redis backups, which does not match this code --
    it only *creates* one RDS snapshot per instance, named
    ``snapshot-backup-YYYY-MM-DD``.  Re-running on the same day will
    attempt to reuse the same snapshot name.
    """
    # ``aws`` is a (key, secret) pair defined elsewhere in this module.
    (key, secret) = aws
    conn = RDSConnection(key, secret)
    for db in conn.get_all_dbinstances():
        print "backing up rds", db.id, "..."  # Python 2 print statement
        now = datetime.datetime.now()
        conn.create_dbsnapshot(
            "snapshot-backup-{0}".format(now.strftime("%Y-%m-%d")),
            db.id)
@author: Puneeth U Bharadwaj
'''
import time
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from boto.rds import RDSConnection

# AWS ACCESS DETAILS
# NOTE(review): these are self-assignments and will raise NameError at
# import time unless the names already exist -- presumably placeholders
# where real credentials were redacted; confirm before running.
AWSAccessKeyId = AWSAccessKeyId
AWSSecretKey = AWSSecretKey
DefaultRegionName = DefaultRegionName

# Module-level connections opened at import time (side effect).
s3_conn = S3Connection(AWSAccessKeyId, AWSSecretKey)
rds_conn = RDSConnection(AWSAccessKeyId, AWSSecretKey)


def s3_stuff():
    """Upload ``all_month.csv`` to a new public S3 key, timing the transfer.

    Uses the module-level ``s3_conn``; ``start``/``end`` bracket only the
    upload + make_public calls.
    """
    # Create a new bucket. Buckets must have a globally unique name (not just
    # unique to your account).
    bucket = s3_conn.create_bucket('your-bucket-name')
    k = Key(bucket)
    k.key = 'all_month.csv'
    start = time.time()
    k.set_contents_from_filename('all_month.csv')
    k.make_public()
    # k.get_contents_to_filename('testdl.txt')
    end = time.time()
def setUp(self):
    """Open an RDS connection and derive unique, timestamped names for
    the master DB, its read replica, and the replica's renamed form."""
    self.conn = RDSConnection()
    stamp = str(int(time.time()))
    self.masterDB_name = "boto-db-%s" % stamp
    self.replicaDB_name = "replica-%s" % self.masterDB_name
    self.renamedDB_name = "renamed-replica-%s" % self.masterDB_name