def get_dist_name_for_bucket(self, uri): if (uri.type == "cf"): return uri if (uri.type != "s3"): raise ParameterError("CloudFront or S3 URI required instead of: %s" % uri) debug("_get_dist_name_for_bucket(%r)" % uri) if CloudFront.dist_list is None: response = self.GetList() CloudFront.dist_list = {} for d in response['dist_list'].dist_summs: if d.info.has_key("S3Origin"): CloudFront.dist_list[getBucketFromHostname(d.info['S3Origin']['DNSName'])[0]] = d.uri() elif d.info.has_key("CustomOrigin"): # Aral: This used to skip over distributions with CustomOrigin, however, we mustn't # do this since S3 buckets that are set up as websites use custom origins. # Thankfully, the custom origin URLs they use start with the URL of the # S3 bucket. Here, we make use this naming convention to support this use case. distListIndex = getBucketFromHostname(d.info['CustomOrigin']['DNSName'])[0]; distListIndex = distListIndex[:len(uri.bucket())] CloudFront.dist_list[distListIndex] = d.uri() else: # Aral: I'm not sure when this condition will be reached, but keeping it in there. continue debug("dist_list: %s" % CloudFront.dist_list) try: return CloudFront.dist_list[uri.bucket()] except Exception, e: debug(e) raise ParameterError("Unable to translate S3 URI to CloudFront distribution name: %s" % uri)
def get_dist_name_for_bucket(self, uri): if (uri.type == "cf"): return uri if (uri.type != "s3"): raise ParameterError( "CloudFront or S3 URI required instead of: %s" % uri) debug("_get_dist_name_for_bucket(%r)" % uri) if CloudFront.dist_list is None: response = self.GetList() CloudFront.dist_list = {} for d in response['dist_list'].dist_summs: if d.info.has_key("S3Origin"): CloudFront.dist_list[getBucketFromHostname( d.info['S3Origin']['DNSName'])[0]] = d.uri() elif d.info.has_key("CustomOrigin"): # Aral: This used to skip over distributions with CustomOrigin, however, we mustn't # do this since S3 buckets that are set up as websites use custom origins. # Thankfully, the custom origin URLs they use start with the URL of the # S3 bucket. Here, we make use this naming convention to support this use case. distListIndex = getBucketFromHostname( d.info['CustomOrigin']['DNSName'])[0] distListIndex = distListIndex[:len(uri.bucket())] CloudFront.dist_list[distListIndex] = d.uri() else: # Aral: I'm not sure when this condition will be reached, but keeping it in there. continue debug("dist_list: %s" % CloudFront.dist_list) try: return CloudFront.dist_list[uri.bucket()] except Exception, e: debug(e) raise ParameterError( "Unable to translate S3 URI to CloudFront distribution name: %s" % uri)
def parse(self, tree):
    ## Normalise a DistributionConfig XML tree into self.info:
    ## booleanise Enabled, force CNAME to a lower-cased list, default
    ## optional scalars to "", and resolve the Logging sub-node.
    self.info = getDictFromTree(tree)
    info = self.info
    info['Enabled'] = (info['Enabled'].lower() == "true")
    if not info.has_key("CNAME"):
        info['CNAME'] = []
    if type(info['CNAME']) != list:
        # Single CNAME arrives as a bare string - wrap it.
        info['CNAME'] = [info['CNAME']]
    info['CNAME'] = [cname.lower() for cname in info['CNAME']]
    for optional_key in ("Comment", "DefaultRootObject"):
        if not info.has_key(optional_key):
            info[optional_key] = ""
    ## Figure out logging - complex node not parsed by getDictFromTree()
    logging_nodes = tree.findall(".//Logging")
    if logging_nodes:
        logging_dict = getDictFromTree(logging_nodes[0])
        logging_dict['Bucket'], success = getBucketFromHostname(logging_dict['Bucket'])
        if not success:
            warning("Logging to unparsable bucket name: %s" % logging_dict['Bucket'])
        info['Logging'] = S3UriS3(u"s3://%(Bucket)s/%(Prefix)s" % logging_dict)
    else:
        info['Logging'] = None
def _https_connection(hostname, port=None):
    """Return an httplib.HTTPSConnection to hostname:port.

    Tries progressively older HTTPSConnection signatures: first with
    both context= and check_hostname=, then context= only, then
    neither - so the same code works across Python builds where those
    keyword parameters may not exist (each unsupported keyword raises
    TypeError, which triggers the next fallback).
    """
    check_hostname = True
    try:
        context = http_connection._ssl_context()
        # Wildcard certificates do not work with DNS-style named buckets.
        bucket_name, success = getBucketFromHostname(hostname)
        if success and '.' in bucket_name:
            # this merely delays running the hostname check until
            # after the connection is made and we get control
            # back. We then run the same check, relaxed for S3's
            # wildcard certificates.
            debug(u'Bucket name contains "." character, disabling initial SSL hostname check')
            check_hostname = False
            if context:
                context.check_hostname = False
        conn = httplib.HTTPSConnection(hostname, port, context=context, check_hostname=check_hostname)
        debug(u'httplib.HTTPSConnection() has both context and check_hostname')
    except TypeError:
        try:
            # in case check_hostname parameter is not present try again
            conn = httplib.HTTPSConnection(hostname, port, context=context)
            debug(u'httplib.HTTPSConnection() has only context')
        except TypeError:
            # in case even context parameter is not present try one last time
            conn = httplib.HTTPSConnection(hostname, port)
            debug(u'httplib.HTTPSConnection() has neither context nor check_hostname')
    return conn
def get_dist_name_for_bucket(self, uri): if (uri.type == "cf"): return uri if (uri.type != "s3"): raise ParameterError( "CloudFront or S3 URI required instead of: %s" % arg) debug("_get_dist_name_for_bucket(%r)" % uri) if CloudFront.dist_list is None: response = self.GetList() CloudFront.dist_list = {} for d in response['dist_list'].dist_summs: if d.info.has_key("S3Origin"): CloudFront.dist_list[getBucketFromHostname( d.info['S3Origin']['DNSName'])[0]] = d.uri() else: # Skip over distributions with CustomOrigin continue debug("dist_list: %s" % CloudFront.dist_list) try: return CloudFront.dist_list[uri.bucket()] except Exception, e: debug(e) raise ParameterError( "Unable to translate S3 URI to CloudFront distribution name: %s" % arg)
def _get_dist_name_for_bucket(uri):
    ## Resolve an S3 URI to the CloudFront distribution URI fronting
    ## the same bucket, caching the full listing on first use.
    cf = CloudFront(Config())
    debug("_get_dist_name_for_bucket(%r)" % uri)
    assert(uri.type == "s3")
    if Cmd.dist_list is None:
        ## One-time fetch of all distributions, keyed by origin bucket.
        listing = cf.GetList()
        Cmd.dist_list = {}
        for summary in listing['dist_list'].dist_summs:
            origin_bucket = getBucketFromHostname(summary.info['Origin'])[0]
            Cmd.dist_list[origin_bucket] = summary.uri()
        debug("dist_list: %s" % Cmd.dist_list)
    return Cmd.dist_list[uri.bucket()]
def get_dist_name_for_bucket(self, uri): if (uri.type == "cf"): return uri if (uri.type != "s3"): raise ParameterError("CloudFront or S3 URI required instead of: %s" % arg) debug("_get_dist_name_for_bucket(%r)" % uri) if CloudFront.dist_list is None: response = self.GetList() CloudFront.dist_list = {} for d in response['dist_list'].dist_summs: CloudFront.dist_list[getBucketFromHostname(d.info['S3Origin']['DNSName'])[0]] = d.uri() debug("dist_list: %s" % CloudFront.dist_list) try: return CloudFront.dist_list[uri.bucket()] except Exception, e: debug(e) raise ParameterError("Unable to translate S3 URI to CloudFront distribution name: %s" % arg)
def parse(self, tree):
    ## Normalise a distribution XML tree into self.info: booleanise
    ## Enabled, force CNAME into a lower-cased list, default Comment
    ## to "", and resolve the Logging sub-node separately.
    self.info = getDictFromTree(tree)
    info = self.info
    info['Enabled'] = (info['Enabled'].lower() == "true")
    if not info.has_key("CNAME"):
        info['CNAME'] = []
    if type(info['CNAME']) != list:
        # A single CNAME is parsed as a bare string - wrap it in a list.
        info['CNAME'] = [info['CNAME']]
    info['CNAME'] = [cname.lower() for cname in info['CNAME']]
    if not info.has_key("Comment"):
        info['Comment'] = ""
    ## Figure out logging - complex node not parsed by getDictFromTree()
    logging_nodes = tree.findall(".//Logging")
    if logging_nodes:
        logging_dict = getDictFromTree(logging_nodes[0])
        logging_dict['Bucket'], success = getBucketFromHostname(logging_dict['Bucket'])
        if not success:
            warning("Logging to unparsable bucket name: %s" % logging_dict['Bucket'])
        info['Logging'] = S3UriS3("s3://%(Bucket)s/%(Prefix)s" % logging_dict)
    else:
        info['Logging'] = None