def get_bucket(self, bucket_name):
    """Get the bucket, connecting through the appropriate endpoint."""
    conn = self.make_s3_conn()
    bucket = conn.get_bucket(bucket_name)

    # an explicit endpoint override means no region lookup is needed
    if self._s3_endpoint:
        return bucket

    try:
        region = bucket.get_location()
    except boto.exception.S3ResponseError as ex:
        # it's possible to have access to a bucket but not access
        # to its location metadata. This happens on the
        # 'elasticmapreduce' bucket, for example (see #1170)
        if ex.status != 403:
            raise
        log.warning('Could not infer endpoint for bucket %s; '
                    'assuming %s', bucket_name, conn.host)
        return bucket

    # reconnect through the region's own endpoint if ours doesn't match
    if s3_endpoint_for_region(region) != conn.host:
        bucket = self.make_s3_conn(region).get_bucket(bucket_name)

    return bucket
def make_s3_conn(self, region=''):
    """Create a connection to S3.

    :param region: region to use to choose S3 endpoint. If you are doing
                   anything with buckets other than creating them or
                   fetching basic metadata (name and location), it's best
                   to use :py:meth:`get_bucket` because it chooses the
                   appropriate S3 endpoint automatically.

    :return: a :py:class:`boto.s3.connection.S3Connection`, wrapped in a
             :py:class:`mrjob.retry.RetryWrapper`

    :raises ImportError: if :py:mod:`boto` is not installed
    """
    # give a non-cryptic error message if boto isn't installed
    if boto is None:
        raise ImportError('You must install boto to connect to S3')

    # self._s3_endpoint overrides region
    host = self._s3_endpoint or s3_endpoint_for_region(region)

    # lazy %-args: the message is only formatted if DEBUG logging is on
    log.debug('creating S3 connection (to %s)', host)

    raw_s3_conn = boto.connect_s3(
        aws_access_key_id=self._aws_access_key_id,
        aws_secret_access_key=self._aws_secret_access_key,
        host=host,
        security_token=self._aws_security_token)

    return wrap_aws_conn(raw_s3_conn)
def get_bucket(self, bucket_name):
    """Get the bucket, connecting through the appropriate endpoint."""
    default_conn = self.make_s3_conn()
    bucket = default_conn.get_bucket(bucket_name)

    if self._s3_endpoint:
        # a hard-wired endpoint wins; skip region inference entirely
        return bucket

    try:
        location = bucket.get_location()
    except boto.exception.S3ResponseError as err:
        # it's possible to have access to a bucket but not access
        # to its location metadata. This happens on the
        # 'elasticmapreduce' bucket, for example (see #1170)
        if err.status == 403:
            log.warning(
                'Could not infer endpoint for bucket %s; '
                'assuming %s', bucket_name, default_conn.host)
            return bucket
        raise

    if s3_endpoint_for_region(location) == default_conn.host:
        return bucket

    # re-open the bucket through its home region's endpoint
    regional_conn = self.make_s3_conn(location)
    return regional_conn.get_bucket(bucket_name)
def get_bucket(self, bucket_name):
    """Get the bucket, connecting through the appropriate endpoint.

    :param bucket_name: name of the S3 bucket to fetch
    :return: a bucket object from :py:meth:`make_s3_conn`'s connection,
             re-fetched through the endpoint for the bucket's own region
             unless ``self._s3_endpoint`` overrides endpoint choice
    """
    s3_conn = self.make_s3_conn()
    bucket = s3_conn.get_bucket(bucket_name)

    try:
        location = bucket.get_location()
    except boto.exception.S3ResponseError as e:
        # it's possible to have access to a bucket but not access to its
        # location metadata. This happens on the 'elasticmapreduce'
        # bucket, for example (see #1170). Fall back to the current
        # connection's endpoint rather than crashing.
        if e.status == 403:
            log.warning('Could not infer endpoint for bucket %s; '
                        'assuming %s', bucket_name, s3_conn.host)
            return bucket
        raise

    # connect to bucket on proper endpoint
    if (not self._s3_endpoint and
            s3_endpoint_for_region(location) != s3_conn.host):
        s3_conn = self.make_s3_conn(location)
        bucket = s3_conn.get_bucket(bucket_name)

    return bucket
def get_bucket(self, bucket_name):
    """Get the bucket, connecting through the appropriate endpoint."""
    initial_conn = self.make_s3_conn()
    bucket = initial_conn.get_bucket(bucket_name)

    try:
        aws_region = bucket.get_location()
    except boto.exception.S3ResponseError as exc:
        if exc.status != 403:
            raise
        # we can read the bucket but not its location metadata
        log.warning("Could not infer aws region for bucket %s; "
                    "assuming it's %s", bucket_name, initial_conn.host)
        return bucket

    # connect to bucket on proper endpoint
    if not self._s3_endpoint:
        if s3_endpoint_for_region(aws_region) != initial_conn.host:
            bucket = self.make_s3_conn(aws_region).get_bucket(bucket_name)

    return bucket
def get_bucket(self, bucket_name):
    """Get the bucket, connecting through the appropriate endpoint."""
    conn = self.make_s3_conn()
    bucket = conn.get_bucket(bucket_name)

    try:
        location = bucket.get_location()
    except boto.exception.S3ResponseError as e:
        if e.status != 403:
            raise
        # accessible bucket, but its location metadata is off-limits
        log.warning("Could not infer aws region for bucket %s; "
                    "assuming it's %s", bucket_name, conn.host)
        return bucket

    # connect to bucket on proper endpoint
    if (not self._s3_endpoint
            and conn.host != s3_endpoint_for_region(location)):
        regional_conn = self.make_s3_conn(location)
        bucket = regional_conn.get_bucket(bucket_name)
    return bucket