def _createExternalStore(self):
    """Create a throwaway S3 bucket in this test's AWS region.

    On success the caller owns both the returned bucket and the underlying
    S3 connection; on any failure the connection is closed while the
    original exception propagates (via the panic() context manager).

    :return: the newly created boto S3 bucket
    """
    import boto.s3
    connection = boto.s3.connect_to_region(self.awsRegion())
    try:
        # A UUID suffix keeps concurrent test runs from colliding.
        name = 'import-export-test-%s' % uuid.uuid4()
        location = region_to_bucket_location(self.awsRegion())
        return connection.create_bucket(bucket_name=name, location=location)
    except:
        # panic() logs and re-raises; we only clean up the connection here.
        with panic(log=logger):
            connection.close()
def _createExternalStore(self):
    """Create a throwaway S3 bucket in the configured test region.

    On success the caller owns both the returned bucket and the underlying
    S3 connection; on any failure the connection is closed while the
    original exception propagates (via the panic() context manager).

    :return: the newly created boto S3 bucket
    """
    import boto.s3
    connection = boto.s3.connect_to_region(self.testRegion)
    try:
        # A UUID suffix keeps concurrent test runs from colliding.
        name = 'import-export-test-%s' % uuid.uuid4()
        location = region_to_bucket_location(self.testRegion)
        return connection.create_bucket(bucket_name=name, location=location)
    except:
        # panic() logs and re-raises; we only clean up the connection here.
        with panic(log=logger):
            connection.close()
def test_key_pair(self, aws_access_key_id, aws_secret_access_key):
    """Check whether the given AWS credential pair can open an S3 connection.

    :param aws_access_key_id: the AWS access key ID to test
    :param aws_secret_access_key: the matching secret access key
    :return: True if an S3 connection could be established, False otherwise
    """
    s3 = None
    try:
        s3 = boto.connect_s3(aws_access_key_id=aws_access_key_id,
                             aws_secret_access_key=aws_secret_access_key)
        return True
    except Exception:
        # BUG FIX: the original called s3.close() in this branch, which
        # raised NameError whenever connect_s3 itself failed, because s3
        # was never bound. Cleanup now lives in the finally clause.
        print("AWS Access Key ID and Access Key are incorrect!")
        return False
    finally:
        # Close the connection only if it was actually created.
        if s3 is not None:
            s3.close()
def _getKeyForUrl(url, existing=None):
    """Extract a key from a given s3:// URL.

    On return, but not on exceptions, this method leaks an S3Connection
    object. The caller is responsible for closing it via
    key.bucket.connection.close().

    :param bool existing: If True, the key must already exist. If False, it
           must not exist and will be created. If None, it is created only
           if absent.
    :rtype: Key
    """
    # Resolve the bucket's region up front so later requests avoid a
    # per-request redirect; this probe connection is closed immediately.
    with closing(boto.connect_s3()) as probe:
        region = bucket_location_to_region(probe.get_bucket(url.netloc).get_location())
    # This connection is deliberately left open — see docstring.
    s3 = boto.s3.connect_to_region(region)
    try:
        bucket = s3.get_bucket(url.netloc)
        key_path = url.path[1:]
        key = bucket.get_key(key_path)
        if existing is True:
            if key is None:
                raise RuntimeError('Key does not exist.')
        elif existing is False:
            if key is not None:
                raise RuntimeError('Key exists.')
        else:
            # Identity check on purpose: any value other than the three
            # literals True/False/None is a programming error.
            assert existing is None
        if key is None:
            key = bucket.new_key(key_path)
    except:
        # panic() re-raises the active exception after cleanup.
        with panic():
            s3.close()
    else:
        return key
# Candidate course configurations; the first bucket this account can access
# wins. NOTE(review): `s3`, `s3_bucket`, `key_id`, `secret_key` and `pem`
# are presumably defined earlier in this script, outside this excerpt —
# confirm before moving this code.
course_properties = [
    {'log_bucket': "cse255-emr", 'pem_name': "May2015HadoopKeyPair"},
    {'log_bucket': "mas-dse-emr", 'pem_name': "sachin_student_sachin-Aspire-E5-571P_1426883088"}
]
for course_property in course_properties:
    try:
        # get_bucket raises if the bucket is missing or inaccessible,
        # in which case we fall through to the next candidate.
        log_bucket = s3.get_bucket(course_property["log_bucket"]).name
        pem_name = course_property["pem_name"]
        break
    except Exception, e:
        # No access to this course's bucket — blank the settings and
        # try the next entry.
        log_bucket = ""
        pem_name = ""
        continue
s3.close()
# Create ~/.mrjob.conf with AWS credentials
s3_scratch_uri = "%stmp/" % s3_bucket
s3_log_uri = "s3://%s/log/" % log_bucket
logging.info("Creating ~/.mrjob.conf")
# Fill the mrjob config template positionally; the template must contain
# exactly six %s placeholders in this order.
template = open('mrjob.conf.template').read()
filled_template = template % (key_id, secret_key, s3_scratch_uri, s3_log_uri, pem_name, pem)
logging.info("~/.mrjob.conf template filled")
home = os.environ['HOME']
mrjob_outfile = "%s/.mrjob.conf" % home
# NOTE(review): this try block's body continues beyond this excerpt.
try:
"mas-dse-emr", 'pem_name':
    # NOTE(review): this span begins mid-expression — the opening
    # `course_properties = [{'log_bucket':` must appear just before this
    # excerpt; confirm against the full file.
    "sachin_student_sachin-Aspire-E5-571P_1426883088" }]
# Probe each candidate course bucket; the first accessible one wins.
for course_property in course_properties:
    try:
        # get_bucket raises if the bucket is missing or inaccessible,
        # in which case we fall through to the next candidate.
        log_bucket = s3.get_bucket(course_property["log_bucket"]).name
        pem_name = course_property["pem_name"]
        break
    except Exception, e:
        # No access to this course's bucket — blank the settings and
        # try the next entry.
        log_bucket = ""
        pem_name = ""
        continue
s3.close()
# Create ~/.mrjob.conf with AWS credentials
s3_scratch_uri = "%stmp/" % s3_bucket
s3_log_uri = "s3://%s/log/" % log_bucket
logging.info("Creating ~/.mrjob.conf")
# Fill the mrjob config template positionally; the template must contain
# exactly six %s placeholders in this order.
template = open('mrjob.conf.template').read()
filled_template = template % (key_id, secret_key, s3_scratch_uri, s3_log_uri, pem_name, pem)
logging.info("~/.mrjob.conf template filled")
home = os.environ['HOME']
mrjob_outfile = "%s/.mrjob.conf" % home