def adjust_cors(s3wrapper, clobber=False):
    """Set CORS headers on a bucket, removing pre-existing headers set by
    the OSF. Optionally clear all pre-existing headers.

    :param S3Wrapper s3wrapper: S3 wrapper instance
    :param bool clobber: Remove all pre-existing rules. Note: if this
        option is set to True, remember to warn or prompt the user first!
    """
    existing = s3wrapper.get_cors_rules()

    # Decide which pre-existing rules survive.
    if clobber:
        kept = []
    else:
        # Drop only the rules previously installed by the OSF.
        kept = [r for r in existing if 'osf-s3' not in (r.id or '')]
    config = CORSConfiguration(kept)

    # Install a fresh OSF rule with a unique id.
    config.add_rule(
        ['PUT', 'GET'],
        s3_settings.ALLOWED_ORIGIN,
        allowed_header=['*'],
        id='osf-s3-{0}'.format(ObjectId()),
    )

    # Persist the updated configuration.
    s3wrapper.set_cors_rules(config)
def test_bucket_cors():
    """Set a CORS config on one bucket per zone and verify round-trip XML."""
    buckets, zone_bucket = create_bucket_per_zone_in_realm()
    for _, bucket in zone_bucket:
        config = CORSConfiguration()
        config.add_rule(['DELETE'],
                        'https://www.example.com',
                        allowed_header='*',
                        max_age_seconds=3000)
        bucket.set_cors(config)
        # What S3 stored must serialize identically to what we sent.
        assert bucket.get_cors().to_xml() == config.to_xml()
def get_or_create_bucket(name, public=True, cors=None):
    """Look up an S3 bucket by name, creating it if absent.

    :param name: bucket name
    :param public: grant 'public-read' ACL on a newly created bucket
    :param cors: if truthy, install permissive GET/POST CORS rules and a
        bucket policy
    :return: the boto Bucket object
    """
    import boto
    from boto.s3.cors import CORSConfiguration
    conn = boto.connect_s3()  # read AWS env vars
    bucket = conn.lookup(name)
    if bucket is None:
        print('Creating bucket %s' % name)
        bucket = conn.create_bucket(name)
        if public:
            bucket.set_acl('public-read')
        if cors:
            week = 604800  # preflight cache: one week
            config = CORSConfiguration()
            for origin in ('http://*', 'https://*'):
                config.add_rule(['GET', 'POST'], origin,
                                allowed_header='*',
                                max_age_seconds=week)
            config.add_rule('GET', '*', allowed_header='*',
                            max_age_seconds=week)
            bucket.set_cors(config)
            bucket.set_policy(get_bucket_policy(name, cors), headers=None)
    return bucket
def __init__(self): """ Constructor. """ self.default_cors = CORSConfiguration() self.default_cors.add_rule('GET', '*', allowed_header='*') self.main()
def test_cors(self):
    """Set a CORS rule, verify every field round-trips, then delete the
    config and verify it is gone."""
    self.cfg = CORSConfiguration()
    self.cfg.add_rule(['PUT', 'POST', 'DELETE'],
                      'http://www.example.com',
                      allowed_header='*',
                      max_age_seconds=3000,
                      expose_header='x-amz-server-side-encryption',
                      id='foobar_rule')
    assert self.bucket.set_cors(self.cfg)
    time.sleep(5)  # allow the config to propagate
    retrieved = self.bucket.get_cors()
    for i, rule in enumerate(retrieved):
        expected = self.cfg[i]
        self.assertEqual(rule.id, expected.id)
        self.assertEqual(rule.max_age_seconds, expected.max_age_seconds)
        # Each of these attributes is a sequence; compare element-wise.
        for attr in ('allowed_method', 'allowed_origin',
                     'allowed_header', 'expose_header'):
            for got, want in zip(getattr(rule, attr), getattr(expected, attr)):
                self.assertEqual(got, want)
    self.bucket.delete_cors()
    time.sleep(5)  # allow the deletion to propagate
    try:
        self.bucket.get_cors()
        self.fail('CORS configuration should not be there')
    except S3ResponseError:
        pass
def _create_user_s3_bucket_internal(self, real_bucket_name, location=None):
    """Create the bucket and open it up to cross-origin requests for all
    common HTTP methods, from any origin, with any header."""
    bucket = self.s3_conn.create_bucket(real_bucket_name, location=location)
    config = CORSConfiguration()
    config.add_rule(['PUT', 'POST', 'DELETE', 'GET', 'HEAD'],
                    allowed_origin=['*'],
                    allowed_header=['*'])
    bucket.set_cors(config)
def __create_bucket(self):
    """Create the configured bucket and allow cross-origin GETs."""
    from boto.s3.cors import CORSConfiguration
    self.bucket = self.conn.create_bucket(config.bucket)
    cfg = CORSConfiguration()
    cfg.add_rule(['GET'], '*', allowed_header='*')
    # Second GET rule kept from the original (no allowed_header variant).
    cfg.add_rule('GET', '*')
    self.bucket.set_cors(cfg)
def test_one_rule_with_id(self):
    """A single fully-specified rule (with explicit id) serializes to the
    expected XML body."""
    config = CORSConfiguration()
    config.add_rule(
        ['PUT', 'POST', 'DELETE'],
        'http://www.example.com',
        allowed_header='*',
        max_age_seconds=3000,
        expose_header='x-amz-server-side-encryption',
        id='foobar_rule',
    )
    self.assertEqual(config.to_xml(), CORS_BODY_1)
def test_two_rules(self):
    """Two rules (one write-oriented, one open GET) serialize to the
    expected XML body."""
    config = CORSConfiguration()
    config.add_rule(
        ['PUT', 'POST', 'DELETE'],
        'http://www.example.com',
        allowed_header='*',
        max_age_seconds=3000,
        expose_header='x-amz-server-side-encryption',
    )
    config.add_rule('GET', '*', allowed_header='*', max_age_seconds=3000)
    self.assertEqual(config.to_xml(), CORS_BODY_2)
def handle(self, *args, **options):
    """Ensure the storage bucket exists and allows cross-origin GETs."""
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID,
                        settings.AWS_SECRET_ACCESS_KEY)
    # lookup() returns None when the bucket is missing; create it then.
    bucket = (conn.lookup(settings.AWS_STORAGE_BUCKET_NAME)
              or conn.create_bucket(settings.AWS_STORAGE_BUCKET_NAME))
    config = CORSConfiguration()
    config.add_rule('GET', '*')
    bucket.set_cors(config)
def handle(self, *args, **options):
    """Ensure the storage bucket exists, is publicly readable, and allows
    cross-origin GET/POST/PUT from any origin.

    Previously the set_acl/set_cors calls were duplicated in both the
    ``try`` and ``except`` branches; the try block is now narrowed to the
    single call that can legitimately fail (get_bucket on a missing
    bucket), and the shared configuration runs once afterwards.
    """
    conn = boto.connect_s3(settings.AWS_ACCESS_KEY_ID,
                           settings.AWS_SECRET_ACCESS_KEY)
    cors_cfg = CORSConfiguration()
    cors_cfg.add_rule(['GET', 'POST', 'PUT'], '*', allowed_header='*')
    try:
        b = conn.get_bucket(settings.AWS_STORAGE_BUCKET_NAME)
    except boto.exception.S3ResponseError:
        # Bucket does not exist (or is inaccessible) -- create it.
        b = conn.create_bucket(settings.AWS_STORAGE_BUCKET_NAME)
    b.set_acl('public-read')
    b.set_cors(cors_cfg)
def set_cors_config(self, bucket):
    ''' Set up a CORS config on the given bucket.

    Builds a four-rule CORS configuration, applies it to *bucket* via
    set_cors(), and returns the configuration object so callers can
    compare it against what the service later reports.

    :param bucket: boto Bucket to configure
    :return: the CORSConfiguration that was applied
    '''
    bucket_cors_set = CORSConfiguration()
    # Rule 1: one origin may write (PUT/POST/DELETE) with any header.
    bucket_rule_id = "Rule 1: Origin example1 can write, with all headers allowed"
    # NOTE(review): plain parentheses below -- these are str values, not
    # tuples; boto accepts either form here. Confirm this is intentional.
    bucket_allowed_origins = ('http://www.example1.com')
    bucket_allowed_methods = ('PUT', 'POST', 'DELETE')
    bucket_allowed_headers = ('*')
    # add_rule's third positional argument is the rule id.
    bucket_cors_set.add_rule(bucket_allowed_methods,
                             bucket_allowed_origins,
                             bucket_rule_id,
                             bucket_allowed_headers)
    # Rule 2: a second origin restricted to GET.
    bucket_rule_id = "Rule 2: Origin example2 can GET only"
    bucket_allowed_origins = ('http://www.example2.com')
    bucket_allowed_methods = ('GET')
    bucket_cors_set.add_rule(bucket_allowed_methods,
                             bucket_allowed_origins,
                             bucket_rule_id)
    # Rule 3: anyone may HEAD.
    bucket_rule_id = "Rule 3: Any origin can HEAD"
    bucket_allowed_origins = ('*')
    bucket_allowed_methods = ('HEAD')
    bucket_cors_set.add_rule(bucket_allowed_methods,
                             bucket_allowed_origins,
                             bucket_rule_id)
    # Rule 4: wildcarded origins, all methods, restricted request headers,
    # 50-minute preflight cache, and several exposed response headers.
    # (Adjacent string literals joined via line continuations.)
    bucket_rule_id = "Rule 4: Either of these wildcarded origins can do any method, " \
        "can cache the response for 50 minutes, " \
        "can only send request headers that begin x-amz- or Content-, " \
        "and can expose the listed ExposeHeaders to clients."
    bucket_allowed_origins = ('http://www.corstest*.com', 'http://*.sample.com')
    bucket_allowed_methods = ('GET', 'HEAD', 'PUT', 'POST', 'DELETE')
    bucket_allowed_headers = ('x-amz-*', 'Content-*')
    bucket_max_age_seconds = 3000
    bucket_expose_headers = ("x-amz-server-side-encryption",
                             "x-amz-request-id",
                             "x-amz-id-2")
    bucket_cors_set.add_rule(bucket_allowed_methods,
                             bucket_allowed_origins,
                             bucket_rule_id,
                             bucket_allowed_headers,
                             bucket_max_age_seconds,
                             bucket_expose_headers)
    bucket.set_cors(bucket_cors_set)
    # Uncomment the below to make set-vs-retrieved configs different,
    # to test the comparison test code.
    # bucket_cors_set.add_rule(bucket_allowed_methods,
    #                          bucket_allowed_origins,
    #                          bucket_rule_id,
    #                          bucket_allowed_headers,
    #                          bucket_max_age_seconds,
    #                          bucket_expose_headers)
    return bucket_cors_set
def sync_s3(self): """ Walks the media directory and syncs files to S3 """ bucket, key = self.open_s3() os.path.walk( self.DIRECTORY, self.upload_s3, (bucket, key, self.AWS_STORAGE_BUCKET_NAME, self.DIRECTORY)) from boto.s3.cors import CORSConfiguration cors_cfg = CORSConfiguration() cors_cfg.add_rule(['GET', 'POST', 'PUT'], '*', allowed_header='*') bucket.set_cors(cors_cfg) bucket.set_acl('public-read')
def setup_s3(bucket="bucket"):
    """Fetch the configured S3 bucket, enable open cross-origin GETs, and
    return the bucket plus the standard upload headers.

    :param bucket: key into the environment's S3 config naming the bucket
    :return: (bucket, headers) tuple
    """
    from splice.environment import Environment
    from boto.s3.cors import CORSConfiguration
    env = Environment.instance()
    s3_bucket = env.s3.get_bucket(env.config.S3[bucket])
    cors_config = CORSConfiguration()
    cors_config.add_rule("GET", "*", allowed_header="*")
    s3_bucket.set_cors(cors_config)
    # Long-lived cache; serve inline rather than as attachment.
    headers = {
        'Cache-Control': 'public, max-age=31536000',
        'Content-Disposition': 'inline',
    }
    return s3_bucket, headers
def sync_slides(workers): """Tile openslide-testdata and synchronize into S3.""" # Initialize metadata metadata = { 'openslide': openslide.__library_version__, 'openslide_python': openslide.__version__, 'stamp': sha256('%s %s %s' % (openslide.__library_version__, openslide.__version__, STAMP_VERSION)).hexdigest()[:8], 'groups': [], } print 'OpenSlide %(openslide)s, OpenSlide Python %(openslide_python)s' % metadata # Get openslide-testdata index r = requests.get(urljoin(DOWNLOAD_BASE_URL, DOWNLOAD_INDEX)) r.raise_for_status() slides = r.json() # Connect to S3 bucket = connect_bucket() # Set bucket configuration print "Configuring bucket..." cors = CORSConfiguration() cors.add_rule(['GET'], CORS_ORIGINS) bucket.set_cors(cors) # Store static files print "Storing static files..." for relpath, opts in BUCKET_STATIC.iteritems(): key = bucket.new_key(relpath) key.set_contents_from_string(opts.get('data', ''), headers=opts.get('headers', {}), policy='public-read') # If the stamp is changing, mark bucket dirty try: old_stamp = json.loads( bucket.new_key(METADATA_NAME).get_contents_as_string()).get( 'stamp') except S3ResponseError, e: if e.status == 404: old_stamp = None else: raise
def create_bucket(site_name): """ Creates a bucket for the project/env """ bucket_name = '%s-%s' % (site_name, PROJECT_NAME) print 'Trying to create bucket %s' % bucket_name try: s3 = boto.connect_s3() s3.create_bucket(bucket_name) from boto.s3.cors import CORSConfiguration cors_cfg = CORSConfiguration() #cors_cfg.add_rule(['PUT', 'POST', 'DELETE'], 'https://www.example.com', allowed_header='*', max_age_seconds=3000, expose_header='x-amz-server-side-encryption') cors_cfg.add_rule('GET', '*') bucket = s3.lookup(bucket_name) bucket.set_cors(cors_cfg) except boto.exception.S3CreateError: print 'AWS returned 409 Conflict. Does the bucket already exist?'
def test_set_cors(): """ PUTs arbitraty CORS Rule and checks whether GET CORS API call returns 200 and other CORS metadata is as set in PUT call """ bucket = helpers.get_bucket() cors_cfg = CORSConfiguration() # Setting arbitrary CORS Rule which allows cross-origin GET requests from all origins. cors_cfg.add_rule('POST', 'https://www.example.com', allowed_header='*', max_age_seconds=3000, expose_header='x-amz-server-side-encryption') bucket.set_cors(cors_cfg) response = bucket.get_cors() assert 'https://www.example.com' in response[0].allowed_origin
def send_to_s3(data, aws_access_key, aws_secret_key, s3_bucket, file_name='data.json'):
    """Sends the reporter data to S3

    :param data: list of dicts
    :param aws_access_key:
    :param aws_secret_key:
    :param s3_bucket:
    :param file_name:
    """
    conn = S3Connection(aws_access_key, aws_secret_key)
    bucket = conn.get_bucket(s3_bucket)
    # Allow cross-origin GETs so browsers can fetch the JSON directly.
    cors = CORSConfiguration()
    cors.add_rule('GET', '*')
    bucket.set_cors(cors)
    key = Key(bucket)
    key.key = file_name
    key.set_contents_from_string(json.dumps(data))
    key.set_acl('public-read')
def get_or_create_bucket(name, public=True, cors=None):
    """Look up an S3 bucket by name inside the app's virtualenv context,
    creating and configuring it if absent.

    :param name: bucket name
    :param public: grant 'public-read' ACL on a newly created bucket
    :param cors: origin allowed to PUT/POST/DELETE; if falsy, no CORS
        rules are installed
    :return: the boto Bucket object
    """
    with cd(env.app_path), prefix(venv()):
        import boto
        from boto.s3.cors import CORSConfiguration
        conn = boto.connect_s3()  # read AWS env vars
        bucket = conn.lookup(name)
        if bucket is None:
            print('Creating bucket %s' % name)
            bucket = conn.create_bucket(name)
            if public:
                bucket.set_acl('public-read')
            if cors:
                config = CORSConfiguration()
                # Write access only from the given origin; GET from anywhere.
                config.add_rule(['PUT', 'POST', 'DELETE'], cors,
                                allowed_header='*',
                                max_age_seconds=3000,
                                expose_header='x-amz-server-side-encryption')
                config.add_rule('GET', '*')
                bucket.set_cors(config)
        return bucket
def enable_bucket_cors(bucket):
    """
    For direct upload to work, the bucket needs to enable cross-origin
    request scripting.
    """
    try:
        config = bucket.get_cors()
    except S3ResponseError:
        # No CORS config on the bucket yet -- start from scratch.
        config = CORSConfiguration()
    # Snapshot existing rule ids before we add anything.
    existing_ids = set(rule.id for rule in config)
    dirty = False
    if 'spendb_put' not in existing_ids:
        config.add_rule(['PUT', 'POST'], '*',
                        allowed_header='*',
                        id='spendb_put',
                        max_age_seconds=3000,
                        expose_header='x-amz-server-side-encryption')
        dirty = True
    if 'spendb_get' not in existing_ids:
        config.add_rule('GET', '*', id='spendb_get')
        dirty = True
    # Only round-trip to S3 when something actually changed.
    if dirty:
        bucket.set_cors(config)
def test_minimal(self):
    """A single bare GET-from-anywhere rule serializes to CORS_BODY_3."""
    config = CORSConfiguration()
    config.add_rule('GET', '*')
    self.assertEqual(config.to_xml(), CORS_BODY_3)
def get_cors_rules(self):
    """Return the bucket's CORS configuration, or an empty
    CORSConfiguration if it cannot be fetched (e.g. none is set).

    :rtype: CORSConfiguration
    """
    try:
        return self.bucket.get_cors()
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt and
        # SystemExit are no longer swallowed. Still deliberately
        # best-effort: any S3/connection error maps to "no rules".
        return CORSConfiguration()
# Authenticate against Swift; XSU is the storage URL, XST the auth token.
XSU, XST = swift_api_auth(user, key)
headers = {"X-Auth-Token": XST, "limit": 10, "offset": 0, "format": "json"}
# Disabled experiment: populate a Swift container hierarchy via raw PUTs.
'''
out_storage_info(XSU,headers)
requests.put(XSU+"/abc",headers=headers)
requests.put(XSU+"/abc/def",headers=headers)
requests.put(XSU+"/abc/def/a",headers=headers,data="wo")
requests.put(XSU+"/abc/def/b",headers=headers,data="go")
requests.put(XSU+"/abc/def/c",headers=headers,data="op")
requests.put(XSU+"/abc/def/d",headers=headers,data="no")
out_storage_info(XSU,headers)
'''
# S3 credentials come from the first entry of info_s3 (defined elsewhere;
# presumably the same account's S3 keystone credentials -- verify).
access_key = info_s3[0]["access_key"]
secret_key = info_s3[0]["secret_key"]
s3_conn = s3_connect(access_key, secret_key)
#import pdb;pdb.set_trace()
# Create a bucket through the S3 API, then attach a minimal CORS config.
s3_conn.create_bucket("eisoo")
bkt = s3_conn.get_bucket('eisoo')
#cors=bkt.get_cors()
config = CORSConfiguration()
config.add_rule('POST', '*')
bkt.set_cors(config)
out_storage_info(XSU, headers)
# Disabled experiment: write key contents across all buckets.
'''
for bucket in s3_conn.get_all_buckets():
key=bucket.get_key("def")
key.set_contents_from_filename("/etc/hosts")
#print "{name}\t{created}".format(name=bucket.name,created=bucket.creation_date)
out_storage_info(XSU,headers)
'''
def distribute(data, channel_id, deploy, scheduled_dt=None):
    """Upload tile data to S3

    :param data: tile data
    :param channel_id: channel id for which to distribute tile data
    :param deploy: whether to deploy tiles to firefox immediately
    :param scheduled_dt: an optional scheduled date in the future for
        deploy; overrides deploy
    :return: list of [url, uploaded] pairs, one per artifact
    :raises ScheduleError: if the scheduled date is in the past, or both
        deploy and a schedule are given
    """
    command_logger.info("Generating Data")

    from splice.models import Channel
    from splice.environment import Environment

    env = Environment.instance()

    if scheduled_dt:
        now = datetime.utcnow()
        if now > scheduled_dt:
            raise ScheduleError("scheduled date needs to be in the future")
        elif deploy:
            raise ScheduleError(
                "cannot specify deploy and schedule at the same time")

    channel = (env.db.session.query(Channel).filter(
        Channel.id == channel_id).one())
    artifacts = generate_artifacts(data, channel.name, deploy)

    command_logger.info("Uploading to S3 for channel {0}".format(channel.name))

    bucket = Environment.instance().s3.get_bucket(
        Environment.instance().config.S3["bucket"])
    cors = CORSConfiguration()
    cors.add_rule("GET", "*", allowed_header="*")
    bucket.set_cors(cors)

    distributed = []
    headers = {
        'Cache-Control': 'public, max-age=31536000',
        'Content-Disposition': 'inline',
    }

    # upload individual files
    for file in artifacts:
        if "mime" in file:
            headers['Content-Type'] = file["mime"]
        else:
            # default to JSON for artifacts
            headers['Content-Type'] = "application/json"

        key = bucket.get_key(file["key"])
        uploaded = False

        # Only upload when the key is new, unless the artifact forces it.
        if key is None or file.get("force_upload"):
            key = Key(bucket)
            key.name = file["key"]
            key.set_contents_from_string(file["data"], headers=headers)
            key.set_acl("public-read")
            uploaded = True

        url = key.generate_url(expires_in=0, query_auth=False)

        # remove x-amz-security-token, which is inserted even if query_auth=False
        # ref: https://github.com/boto/boto/issues/1477
        uri = furl(url)
        try:
            uri.args.pop('x-amz-security-token')
        except KeyError:
            # Token absent -- nothing to strip. (Narrowed from a bare
            # ``except:`` which also swallowed KeyboardInterrupt.)
            pass
        url = uri.url

        if uploaded:
            command_logger.info("UPLOADED {0}".format(url))
        else:
            command_logger.info("SKIPPED {0}".format(url))

        distributed.append([url, uploaded])

        if file.get("dist", False):
            insert_distribution(url, channel_id, deploy, scheduled_dt)

    return distributed
def set_bucket_policy(bucket):
    """Allow cross-origin PUT/POST/GET from any origin, exposing ETag so
    browser uploads can verify responses."""
    from boto.s3.cors import CORSConfiguration
    config = CORSConfiguration()
    config.add_rule(['PUT', 'POST', 'GET'], '*',
                    allowed_header='*',
                    max_age_seconds=3000,
                    expose_header='ETag')
    bucket.set_cors(config)
def test_cors_config_mgmt(self):
    '''
    Method: Tests setting, getting, and deleting the CORS config on a bucket

    Fix: ``bucket == None`` replaced with the identity check
    ``bucket is None`` (PEP 8; ``==`` invokes __eq__ on arbitrary objects).
    '''
    test_bucket = self.bucket_prefix + "-simple-test-bucket"
    self.buckets_used.add(test_bucket)
    self.tester.debug(
        "Starting CORS config management tests, using bucket name: " +
        test_bucket)

    # Create the bucket used for the rest of the test.
    try:
        bucket = self.tester.s3.create_bucket(test_bucket)
        if bucket is None:
            self.tester.s3.delete_bucket(test_bucket)
            self.fail(test_bucket + " was not created correctly")
    except (S3ResponseError, S3CreateError) as e:
        self.fail(test_bucket + " create caused exception: " + str(e))

    # Get the CORS config (none yet).
    # Should get 404 Not Found, with "NoSuchCORSConfiguration" in the body.
    try:
        self.tester.debug("Getting (empty) CORS config")
        bucket.get_cors()
        #self.tester.s3.delete_bucket(test_bucket) #LPT
        self.fail("Did not get an S3ResponseError getting CORS config when none exists yet.")
    except S3ResponseError as e:
        if (e.status == 404 and e.reason == "Not Found" and
                e.code == "NoSuchCORSConfiguration"):
            self.tester.debug(
                "Caught S3ResponseError with expected contents, " +
                "getting CORS config when none exists yet.")
        else:
            self.tester.s3.delete_bucket(test_bucket)
            self.fail(
                "Caught S3ResponseError getting CORS config when none exists yet," +
                "but exception contents were unexpected: " + str(e))

    # Set a simple CORS config.
    try:
        self.tester.debug("Setting a CORS config")
        bucket_cors_set = CORSConfiguration()
        bucket_rule_id = "ManuallyAssignedId1"
        bucket_allowed_methods = ['GET', 'PUT']
        bucket_allowed_origins = ['*']
        bucket_allowed_headers = ['*']
        bucket_max_age_seconds = 3000
        #bucket_expose_headers = []
        bucket_cors_set.add_rule(bucket_allowed_methods,
                                 bucket_allowed_origins,
                                 bucket_rule_id,
                                 bucket_allowed_headers,
                                 bucket_max_age_seconds)
        bucket.set_cors(bucket_cors_set)
    except S3ResponseError as e:
        self.tester.s3.delete_bucket(test_bucket)
        self.fail("Caught S3ResponseError setting CORS config: " + str(e))

    # Get the CORS config. Should get the config we just set.
    try:
        self.tester.debug("Getting the CORS config we just set")
        bucket_cors_retrieved = bucket.get_cors()
        assert (bucket_cors_retrieved.to_xml() == bucket_cors_set.to_xml(
        )), 'Bucket CORS config: Expected ' + bucket_cors_set.to_xml(
        ) + ', Retrieved ' + bucket_cors_retrieved.to_xml()
    except S3ResponseError as e:
        self.tester.s3.delete_bucket(test_bucket)
        self.fail(
            "Caught S3ResponseError getting CORS config, after setting it successfully: "
            + str(e))

    # Delete the CORS config.
    try:
        self.tester.debug("Deleting the CORS config")
        bucket.delete_cors()
    except S3ResponseError as e:
        self.tester.s3.delete_bucket(test_bucket)
        self.fail(
            "Caught S3ResponseError deleting CORS config, after setting and validating it successfully: "
            + str(e))

    # Get the CORS config (none anymore).
    # Should get 404 Not Found, with "NoSuchCORSConfiguration" in the body.
    try:
        self.tester.debug("Getting (empty again) CORS config")
        bucket.get_cors()
        self.tester.s3.delete_bucket(test_bucket)
        self.fail(
            "Did not get an S3ResponseError getting CORS config after being deleted."
        )
    except S3ResponseError as e:
        self.tester.s3.delete_bucket(test_bucket)
        if (e.status == 404 and e.reason == "Not Found" and
                e.code == "NoSuchCORSConfiguration"):
            self.tester.debug(
                "Caught S3ResponseError with expected contents, " +
                "getting CORS config after being deleted.")
        else:
            self.fail(
                "Caught S3ResponseError getting CORS config after being deleted," +
                "but exception contents were unexpected: " + str(e))
max_age_seconds = module.params.get("max_age_seconds") expose_header = module.params.get("expose_header") changed = False try: bucket = connection.get_bucket(name) except S3ResponseError, e: module.fail_json(msg=str(get_error_message(e))) # Get the bucket's current CORS rules try: current_lifecycle_obj = bucket.get_cors() error_code = get_error_code(e.args[2]) except S3ResponseError, e: if error_code == "NoSuchLifecycleConfiguration": current_cors_obj = CORSConfiguration() else: module.fail_json(msg=str(get_error_message(e))) # Create CORS rule cors_rule = CORSRule(allowed_method=allowed_methods, allowed_origin=allowed_origin, id=rule_id, allowed_header=allowed_header, max_age_seconds=max_age_seconds, expose_header=expose_header) # Create lifecycle cors_obj = CORSConfiguration() # Check if rule exists
In addition to accessing specific Tree via the create_tree method you can
also get a list of all available trees' name that you have created.
"""
# List all trees and report the count (Python 2 print statements).
rs = conn1.list()
print "you have %s trees in your garden" % str(len(rs))
print "Tree names list : " + str(rs)
"""
Setting/Getting/Deleting CORS Configuration on a Bucket

Cross-origin resource sharing (CORS) defines a way for client web
applications that are loaded in one domain to interact with resources in
a different domain. With CORS support in Amazon S3, you can build rich
client-side web applications with Amazon S3 and selectively allow
cross-origin access to your Amazon S3 resources.
"""
# Build a two-rule CORS configuration (see the explanatory text below).
cors_cfg = CORSConfiguration()
cors_cfg.add_rule(['PUT', 'POST', 'DELETE'],
                  'https://www.example.com',
                  allowed_header='*',
                  max_age_seconds=3000,
                  expose_header='x-amz-server-side-encryption')
cors_cfg.add_rule('GET', '*')
"""
The above code creates a CORS configuration object with two rules. The
first rule allows cross-origin PUT, POST, and DELETE requests from the
https://www.example.com/ origin. The rule also allows all headers in
preflight OPTIONS request through the Access-Control-Request-Headers
header. In response to any preflight OPTIONS request, Amazon S3 will
return any requested headers. The second rule allows cross-origin GET
requests from all origins. To associate this configuration with a bucket:
"""
mytree1 = conn1.lookup('my_tree1')
mytree1.set_cors(cors_cfg)
def site_config(self, site):
    """Apply this deployment's S3 bucket configuration for *site*.

    (Python 2 code.) For each bucket in settings['buckets']: ensure the
    bucket exists, then apply policy, CORS, lifecycle, and static-website
    settings as declared in the bucket's config dict.
    """
    with hook('site config %s' % self.name, self, site):
        setup_aws_access_key(site)
        from boto import connect_s3
        from boto.s3.bucket import Bucket
        from boto.s3.key import Key
        for bucket_config in self.settings['buckets']:
            # Connect and make sure the bucket exists
            print bold(u'Configuring bucket %s...' % bucket_config['name'])
            connection = connect_s3()
            # NOTE(review): bare except -- any failure to fetch the bucket
            # (including auth errors) falls through to create_bucket.
            try:
                bucket = connection.get_bucket(bucket_config['name'])
            except:
                bucket = connection.create_bucket(bucket_config['name'])
            # Set the bucket policy
            if bucket_config.has_key('policy'):
                bucket.set_policy(bucket_config['policy'])
            # Setup CORS, array of rules
            # http://boto.readthedocs.org/en/latest/ref/s3.html#boto.s3.cors.CORSConfiguration
            if bucket_config.has_key('cors') and bucket_config['cors'] is None:
                # If explicity set to None, then remove the cors policy
                bucket.delete_cors()
            else:
                if not bucket_config.has_key('cors'):
                    # If not specified, use the default GET policy
                    bucket_config['cors'] = (DEFAULT_CORS_RULE, )
                from boto.s3.cors import CORSConfiguration
                cors_config = CORSConfiguration()
                # Each rule is a dict of add_rule keyword arguments.
                for rule in bucket_config['cors']:
                    cors_config.add_rule(**rule)
                bucket.set_cors(cors_config)
            # Setup the lifecycle, array of rules
            # http://boto.readthedocs.org/en/latest/ref/s3.html#boto.s3.lifecycle.Lifecycle
            if bucket_config.has_key('lifecycle'):
                from boto.s3.lifecycle import Lifecycle
                lifecycle_config = Lifecycle()
                for rule in bucket_config['lifecycle']:
                    lifecycle_config.add_rule(**rule)
                bucket.configure_lifecycle(lifecycle_config)
            else:
                # No lifecycle block -> remove any existing configuration.
                bucket.delete_lifecycle_configuration()
            # Setup the bucket website hosting {suffix, error_key, routing_rules, redirect_all_requests_to}
            # http://boto.readthedocs.org/en/latest/ref/s3.html
            # https://github.com/boto/boto/blob/develop/boto/s3/website.py
            if bucket_config.has_key('website'):
                # Expand the routing rules, array of {condition, redirect}
                if bucket_config['website'].has_key('routing_rules'):
                    from boto.s3.website import RoutingRules, RoutingRule
                    routing_rules = RoutingRules()
                    for rule in bucket_config['website']['routing_rules']:
                        routing_rules.add_rule(RoutingRule(**rule))
                    bucket_config['website'][
                        'routing_rules'] = routing_rules
                # Expand the redirect, redirect_all_requests_to is {hostname, protocol}
                if bucket_config['website'].has_key('redirect_all_requests_to'):
                    from boto.s3.website import RedirectLocation
                    bucket_config['website'][
                        'redirect_all_requests_to'] = RedirectLocation(
                            **bucket_config['website']
                            ['redirect_all_requests_to'])
                bucket.configure_website(**bucket_config['website'])
            else:
                # No website block -> remove any existing website config.
                bucket.delete_website_configuration()