コード例 #1
0
 def test_cors(self):
     """Round-trip a CORS configuration through a live bucket.

     Sets a single fully-specified rule, reads the configuration back and
     verifies every field, then deletes it and confirms a subsequent GET
     raises S3ResponseError.
     """
     self.cfg = CORSConfiguration()
     self.cfg.add_rule(['PUT', 'POST', 'DELETE'],
                       'http://www.example.com',
                       allowed_header='*',
                       max_age_seconds=3000,
                       expose_header='x-amz-server-side-encryption',
                       id='foobar_rule')
     assert self.bucket.set_cors(self.cfg)
     time.sleep(5)  # CORS changes are eventually consistent on S3
     cfg = self.bucket.get_cors()
     for i, rule in enumerate(cfg):
         self.assertEqual(rule.id, self.cfg[i].id)
         self.assertEqual(rule.max_age_seconds, self.cfg[i].max_age_seconds)
         # BUG FIX: the old zip()-based element-wise loops silently passed
         # when the retrieved and expected lists differed in length.
         # Comparing the full lists catches missing/extra entries too.
         self.assertEqual(list(rule.allowed_method),
                          list(self.cfg[i].allowed_method))
         self.assertEqual(list(rule.allowed_origin),
                          list(self.cfg[i].allowed_origin))
         self.assertEqual(list(rule.allowed_header),
                          list(self.cfg[i].allowed_header))
         self.assertEqual(list(rule.expose_header),
                          list(self.cfg[i].expose_header))
     self.bucket.delete_cors()
     time.sleep(5)
     try:
         self.bucket.get_cors()
         self.fail('CORS configuration should not be there')
     except S3ResponseError:
         pass
コード例 #2
0
ファイル: utils.py プロジェクト: KerryKDiehl/osf.io
def adjust_cors(s3wrapper, clobber=False):
    """Set CORS headers on a bucket, removing pre-existing headers set by the
    OSF. Optionally clear all pre-existing headers.

    :param S3Wrapper s3wrapper: S3 wrapper instance
    :param bool clobber: Remove all pre-existing rules. Note: if this option
        is set to True, remember to warn or prompt the user first!

    """
    existing = s3wrapper.get_cors_rules()

    # Start from a clean slate, or keep every rule not tagged by the OSF.
    if clobber:
        kept = []
    else:
        kept = [each for each in existing if 'osf-s3' not in (each.id or '')]
    rules = CORSConfiguration(kept)

    # Append a freshly-tagged OSF rule with a unique id.
    rules.add_rule(
        ['PUT', 'GET'],
        s3_settings.ALLOWED_ORIGIN,
        allowed_header=['*'],
        id='osf-s3-{0}'.format(ObjectId()),
    )

    # Persist the updated configuration.
    s3wrapper.set_cors_rules(rules)
コード例 #3
0
 def __init__(self):
     """Constructor.

     Builds the default bucket CORS policy (GET from any origin, any
     header allowed) and immediately hands control to main().
     """
     cors = CORSConfiguration()
     cors.add_rule('GET', '*', allowed_header='*')
     self.default_cors = cors
     self.main()
コード例 #4
0
ファイル: tests.py プロジェクト: TsaiJin/ceph
def test_bucket_cors():
    """Apply a CORS rule to each zone bucket and verify the stored XML."""
    buckets, zone_bucket = create_bucket_per_zone_in_realm()
    for _, bucket in zone_bucket:
        config = CORSConfiguration()
        config.add_rule(['DELETE'],
                        'https://www.example.com',
                        allowed_header='*',
                        max_age_seconds=3000)
        bucket.set_cors(config)
        assert bucket.get_cors().to_xml() == config.to_xml()
コード例 #5
0
def test_bucket_cors():
    """Round-trip a CORS configuration on every zone bucket."""
    buckets, zone_bucket = create_bucket_per_zone_in_realm()
    for _, zb in zone_bucket:
        expected = CORSConfiguration()
        expected.add_rule(
            ['DELETE'], 'https://www.example.com',
            allowed_header='*', max_age_seconds=3000)
        zb.set_cors(expected)
        assert zb.get_cors().to_xml() == expected.to_xml()
コード例 #6
0
 def test_one_rule_with_id(self):
     """A single fully-specified rule serializes to CORS_BODY_1."""
     cfg = CORSConfiguration()
     cfg.add_rule(
         ['PUT', 'POST', 'DELETE'], 'http://www.example.com',
         allowed_header='*', max_age_seconds=3000,
         expose_header='x-amz-server-side-encryption', id='foobar_rule')
     self.assertEqual(cfg.to_xml(), CORS_BODY_1)
コード例 #7
0
 def _create_user_s3_bucket_internal(self, real_bucket_name, location=None):
     """Create the user's bucket and allow every method/origin/header via CORS."""
     bucket = self.s3_conn.create_bucket(real_bucket_name, location=location)
     cfg = CORSConfiguration()
     cfg.add_rule(['PUT', 'POST', 'DELETE', 'GET', 'HEAD'],
                  allowed_origin=['*'],
                  allowed_header=['*'])
     bucket.set_cors(cfg)
コード例 #8
0
 def test_one_rule_with_id(self):
     """Serializing one rule with every field set yields CORS_BODY_1."""
     config = CORSConfiguration()
     config.add_rule(['PUT', 'POST', 'DELETE'],
                     'http://www.example.com',
                     id='foobar_rule',
                     allowed_header='*',
                     max_age_seconds=3000,
                     expose_header='x-amz-server-side-encryption')
     self.assertEqual(config.to_xml(), CORS_BODY_1)
コード例 #9
0
    def handle(self, *args, **options):
        """Ensure the storage bucket exists and allow cross-origin GETs."""
        conn = S3Connection(settings.AWS_ACCESS_KEY_ID,
                            settings.AWS_SECRET_ACCESS_KEY)
        bucket = conn.lookup(settings.AWS_STORAGE_BUCKET_NAME)
        if not bucket:
            # lookup() returns None when the bucket does not exist yet.
            bucket = conn.create_bucket(settings.AWS_STORAGE_BUCKET_NAME)

        config = CORSConfiguration()
        config.add_rule('GET', '*')
        bucket.set_cors(config)
コード例 #10
0
    def handle(self, *args, **options):
        """Create the storage bucket if missing, then set a GET-any-origin CORS rule."""
        conn = S3Connection(
            settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
        name = settings.AWS_STORAGE_BUCKET_NAME
        bucket = conn.lookup(name) or conn.create_bucket(name)

        cors = CORSConfiguration()
        cors.add_rule('GET', '*')
        bucket.set_cors(cors)
コード例 #11
0
    def set_cors_config(self, bucket):
        '''
        Set up a CORS config on the given bucket.

        Adds four rules of increasing complexity, applies the config to
        the bucket, and returns the configuration object.
        '''
        bucket_cors_set = CORSConfiguration()

        bucket_rule_id = "Rule 1: Origin example1 can write, with all headers allowed"
        bucket_allowed_origins = ('http://www.example1.com')
        bucket_allowed_methods = ('PUT', 'POST', 'DELETE')
        bucket_allowed_headers = ('*')
        bucket_cors_set.add_rule(bucket_allowed_methods,
                                 bucket_allowed_origins, bucket_rule_id,
                                 bucket_allowed_headers)

        bucket_rule_id = "Rule 2: Origin example2 can GET only"
        bucket_allowed_origins = ('http://www.example2.com')
        bucket_allowed_methods = ('GET')
        bucket_cors_set.add_rule(bucket_allowed_methods,
                                 bucket_allowed_origins, bucket_rule_id)

        bucket_rule_id = "Rule 3: Any origin can HEAD"
        bucket_allowed_origins = ('*')
        bucket_allowed_methods = ('HEAD')
        bucket_cors_set.add_rule(bucket_allowed_methods,
                                 bucket_allowed_origins, bucket_rule_id)

        # BUG FIX: the continuation fragments of this id were previously
        # bare string literals on their own lines (no-op statements), so
        # only the first fragment ended up in the rule id. Parenthesizing
        # makes the adjacent literals concatenate as intended.
        bucket_rule_id = (
            "Rule 4: Either of these wildcarded origins can do any method, "
            "can cache the response for 50 minutes, "
            "can only send request headers that begin x-amz- or Content-, "
            "and can expose the listed ExposeHeaders to clients.")
        bucket_allowed_origins = ('http://www.corstest*.com',
                                  'http://*.sample.com')
        bucket_allowed_methods = ('GET', 'HEAD', 'PUT', 'POST', 'DELETE')
        bucket_allowed_headers = ('x-amz-*', 'Content-*')
        bucket_max_age_seconds = 3000
        bucket_expose_headers = ("x-amz-server-side-encryption",
                                 "x-amz-request-id", "x-amz-id-2")
        bucket_cors_set.add_rule(bucket_allowed_methods,
                                 bucket_allowed_origins, bucket_rule_id,
                                 bucket_allowed_headers,
                                 bucket_max_age_seconds, bucket_expose_headers)

        bucket.set_cors(bucket_cors_set)

        # Uncomment the below to make set-vs-retrieved configs different,
        # to test the comparison test code.
        #         bucket_cors_set.add_rule(bucket_allowed_methods,
        #                                  bucket_allowed_origins,
        #                                  bucket_rule_id,
        #                                  bucket_allowed_headers,
        #                                  bucket_max_age_seconds,
        #                                  bucket_expose_headers)
        return bucket_cors_set
コード例 #12
0
ファイル: create_s3_bucket.py プロジェクト: rubythonode/seed
 def handle(self, *args, **options):
     """Configure the storage bucket: public-read ACL plus permissive CORS."""
     conn = boto.connect_s3(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
     config = CORSConfiguration()
     config.add_rule(['GET', 'POST', 'PUT'], '*', allowed_header='*')
     try:
         bucket = conn.get_bucket(settings.AWS_STORAGE_BUCKET_NAME)
         bucket.set_acl('public-read')
         bucket.set_cors(config)
     except boto.exception.S3ResponseError:
         # Bucket missing (or not accessible): create it and apply the
         # same ACL and CORS settings.
         bucket = conn.create_bucket(settings.AWS_STORAGE_BUCKET_NAME)
         bucket.set_acl('public-read')
         bucket.set_cors(config)
コード例 #13
0
ファイル: sync_static.py プロジェクト: Maalka/seed
    def sync_s3(self):
        """
        Walks the media directory and syncs files to S3
        """
        bucket, key = self.open_s3()
        walk_args = (bucket, key, self.AWS_STORAGE_BUCKET_NAME, self.DIRECTORY)
        os.path.walk(self.DIRECTORY, self.upload_s3, walk_args)
        from boto.s3.cors import CORSConfiguration

        # Allow cross-origin reads and uploads from any origin.
        config = CORSConfiguration()
        config.add_rule(["GET", "POST", "PUT"], "*", allowed_header="*")
        bucket.set_cors(config)
        bucket.set_acl("public-read")
コード例 #14
0
ファイル: sync_static.py プロジェクト: john-h-kastner/seed
 def sync_s3(self):
     """
     Walks the media directory and syncs files to S3
     """
     bucket, key = self.open_s3()
     extra = (bucket, key, self.AWS_STORAGE_BUCKET_NAME, self.DIRECTORY)
     os.path.walk(self.DIRECTORY, self.upload_s3, extra)
     from boto.s3.cors import CORSConfiguration
     # Permit cross-origin GET/POST/PUT with any headers.
     cfg = CORSConfiguration()
     cfg.add_rule(['GET', 'POST', 'PUT'], '*', allowed_header='*')
     bucket.set_cors(cfg)
     bucket.set_acl('public-read')
コード例 #15
0
def get_or_create_bucket(name, public=True, cors=None):
    """Look up bucket *name*, creating and configuring it when absent."""
    import boto
    from boto.s3.cors import CORSConfiguration
    conn = boto.connect_s3()  # read AWS env vars
    bucket = conn.lookup(name)
    if bucket is None:
        print('Creating bucket %s' % name)
        bucket = conn.create_bucket(name)
        if public:
            bucket.set_acl('public-read')
        if cors:
            week = 604800  # max-age in seconds
            cfg = CORSConfiguration()
            # GET/POST from http and https wildcards, plus GET from anywhere.
            for methods, origin in (
                    (['GET', 'POST'], 'http://*'),
                    (['GET', 'POST'], 'https://*'),
                    ('GET', '*')):
                cfg.add_rule(methods, origin,
                             allowed_header='*',
                             max_age_seconds=week)
            bucket.set_cors(cfg)
            bucket.set_policy(get_bucket_policy(name, cors), headers=None)
    return bucket
コード例 #16
0
ファイル: s3_common.py プロジェクト: mozilla/splice
def setup_s3(bucket="bucket"):
    """Fetch the configured S3 bucket, enable GET-from-anywhere CORS, and
    return it along with default object headers.

    :param bucket: key into the S3 section of the environment config.
    """
    from splice.environment import Environment
    from boto.s3.cors import CORSConfiguration

    env = Environment.instance()
    s3_bucket = env.s3.get_bucket(env.config.S3[bucket])
    config = CORSConfiguration()
    config.add_rule("GET", "*", allowed_header="*")
    s3_bucket.set_cors(config)
    default_headers = {
        'Cache-Control': 'public, max-age=31536000',
        'Content-Disposition': 'inline',
    }
    return s3_bucket, default_headers
コード例 #17
0
def sync_slides(workers):
    """Tile openslide-testdata and synchronize into S3.

    NOTE: Python 2 code (print statements, ``except X, e`` syntax).
    Only part of this function is visible here; it continues below.
    """

    # Initialize metadata
    metadata = {
        'openslide':
        openslide.__library_version__,
        'openslide_python':
        openslide.__version__,
        # Short fingerprint of library versions; used to detect when
        # previously generated tiles are stale.
        'stamp':
        sha256('%s %s %s' %
               (openslide.__library_version__, openslide.__version__,
                STAMP_VERSION)).hexdigest()[:8],
        'groups': [],
    }
    print 'OpenSlide %(openslide)s, OpenSlide Python %(openslide_python)s' % metadata

    # Get openslide-testdata index
    r = requests.get(urljoin(DOWNLOAD_BASE_URL, DOWNLOAD_INDEX))
    r.raise_for_status()
    slides = r.json()

    # Connect to S3
    bucket = connect_bucket()

    # Set bucket configuration: allow cross-origin GETs from CORS_ORIGINS
    print "Configuring bucket..."
    cors = CORSConfiguration()
    cors.add_rule(['GET'], CORS_ORIGINS)
    bucket.set_cors(cors)

    # Store static files
    print "Storing static files..."
    for relpath, opts in BUCKET_STATIC.iteritems():
        key = bucket.new_key(relpath)
        key.set_contents_from_string(opts.get('data', ''),
                                     headers=opts.get('headers', {}),
                                     policy='public-read')

    # If the stamp is changing, mark bucket dirty
    try:
        old_stamp = json.loads(
            bucket.new_key(METADATA_NAME).get_contents_as_string()).get(
                'stamp')
    except S3ResponseError, e:
        # 404 just means no previous metadata object exists.
        if e.status == 404:
            old_stamp = None
        else:
            raise
コード例 #18
0
ファイル: tasks.py プロジェクト: jshaw/django-awseb-tasks
def create_bucket(site_name):
    """ Creates a bucket for the project/env.

    NOTE: Python 2 code (print statements). The bucket name is derived
    from the site name and the project name; a GET-from-any-origin CORS
    rule is applied after creation.
    """
    bucket_name = '%s-%s' % (site_name, PROJECT_NAME)
    print 'Trying to create bucket %s' % bucket_name
    try:
        s3 = boto.connect_s3()
        s3.create_bucket(bucket_name)
        from boto.s3.cors import CORSConfiguration
        cors_cfg = CORSConfiguration()
        # Example of a more permissive write rule, kept for reference:
        #cors_cfg.add_rule(['PUT', 'POST', 'DELETE'], 'https://www.example.com', allowed_header='*', max_age_seconds=3000, expose_header='x-amz-server-side-encryption')
        cors_cfg.add_rule('GET', '*')
        bucket = s3.lookup(bucket_name)
        bucket.set_cors(cors_cfg)
    except boto.exception.S3CreateError:
        # S3 returns 409 when the bucket name is already taken.
        print 'AWS returned 409 Conflict. Does the bucket already exist?'
コード例 #19
0
ファイル: mbtiles2s3.py プロジェクト: MinnPost/mbtiles2s3
 def __init__(self):
     """Constructor: prepare the default CORS policy, then run main()."""
     default = CORSConfiguration()
     default.add_rule('GET', '*', allowed_header='*')
     self.default_cors = default
     self.main()
コード例 #20
0
ファイル: tests_s3.py プロジェクト: iraj465/Boto-S3-tests
def test_set_cors():
    """
    PUTs an arbitrary CORS rule, then checks that the GET CORS API call
    succeeds and the returned CORS metadata matches what was set.
    """
    bucket = helpers.get_bucket()
    cors_cfg = CORSConfiguration()
    # Arbitrary rule: allow cross-origin POST requests from
    # https://www.example.com only (note: POST, not GET; one origin,
    # not all origins).
    cors_cfg.add_rule('POST',
                      'https://www.example.com',
                      allowed_header='*',
                      max_age_seconds=3000,
                      expose_header='x-amz-server-side-encryption')
    bucket.set_cors(cors_cfg)
    response = bucket.get_cors()
    assert 'https://www.example.com' in response[0].allowed_origin
コード例 #21
0
ファイル: test_cors.py プロジェクト: JohnnyFang/boto
 def test_cors(self):
     """Round-trip a CORS configuration through a live bucket.

     Sets one rule, reads it back and verifies every field, then deletes
     the configuration and confirms a subsequent GET raises
     S3ResponseError.
     """
     self.cfg = CORSConfiguration()
     self.cfg.add_rule(
         ["PUT", "POST", "DELETE"],
         "http://www.example.com",
         allowed_header="*",
         max_age_seconds=3000,
         expose_header="x-amz-server-side-encryption",
         id="foobar_rule",
     )
     assert self.bucket.set_cors(self.cfg)
     time.sleep(5)  # CORS changes are eventually consistent on S3
     cfg = self.bucket.get_cors()
     for i, rule in enumerate(cfg):
         self.assertEqual(rule.id, self.cfg[i].id)
         self.assertEqual(rule.max_age_seconds, self.cfg[i].max_age_seconds)
         # BUG FIX: the old zip()-based element-wise loops silently passed
         # when the lists differed in length; comparing whole lists does not.
         self.assertEqual(list(rule.allowed_method),
                          list(self.cfg[i].allowed_method))
         self.assertEqual(list(rule.allowed_origin),
                          list(self.cfg[i].allowed_origin))
         self.assertEqual(list(rule.allowed_header),
                          list(self.cfg[i].allowed_header))
         self.assertEqual(list(rule.expose_header),
                          list(self.cfg[i].expose_header))
     self.bucket.delete_cors()
     time.sleep(5)
     try:
         self.bucket.get_cors()
         self.fail("CORS configuration should not be there")
     except S3ResponseError:
         pass
コード例 #22
0
ファイル: cors_tests.py プロジェクト: eucalyptus/nephoria
    def set_cors_config(self, bucket):
        """
        Set up a CORS config on the given bucket.

        Adds four rules of increasing complexity, applies the config to
        the bucket, and returns the configuration object.
        """
        bucket_cors_set = CORSConfiguration()

        bucket_rule_id = "Rule 1: Origin example1 can write, with all headers allowed"
        bucket_allowed_origins = "http://www.example1.com"
        bucket_allowed_methods = ("PUT", "POST", "DELETE")
        bucket_allowed_headers = "*"
        bucket_cors_set.add_rule(bucket_allowed_methods, bucket_allowed_origins, bucket_rule_id, bucket_allowed_headers)

        bucket_rule_id = "Rule 2: Origin example2 can GET only"
        bucket_allowed_origins = "http://www.example2.com"
        bucket_allowed_methods = "GET"
        bucket_cors_set.add_rule(bucket_allowed_methods, bucket_allowed_origins, bucket_rule_id)

        bucket_rule_id = "Rule 3: Any origin can HEAD"
        bucket_allowed_origins = "*"
        bucket_allowed_methods = "HEAD"
        bucket_cors_set.add_rule(bucket_allowed_methods, bucket_allowed_origins, bucket_rule_id)

        # BUG FIX: the continuation fragments of this id were previously
        # bare string literals on their own lines (no-op statements), so
        # only the first fragment ended up in the rule id. Parenthesizing
        # makes the adjacent literals concatenate as intended.
        bucket_rule_id = (
            "Rule 4: Either of these wildcarded origins can do any method, "
            "can cache the response for 50 minutes, "
            "can only send request headers that begin x-amz- or Content-, "
            "and can expose the listed ExposeHeaders to clients.")
        bucket_allowed_origins = ("http://www.corstest*.com", "http://*.sample.com")
        bucket_allowed_methods = ("GET", "HEAD", "PUT", "POST", "DELETE")
        bucket_allowed_headers = ("x-amz-*", "Content-*")
        bucket_max_age_seconds = 3000
        bucket_expose_headers = ("x-amz-server-side-encryption", "x-amz-request-id", "x-amz-id-2")
        bucket_cors_set.add_rule(
            bucket_allowed_methods,
            bucket_allowed_origins,
            bucket_rule_id,
            bucket_allowed_headers,
            bucket_max_age_seconds,
            bucket_expose_headers,
        )

        bucket.set_cors(bucket_cors_set)

        # Uncomment the below to make set-vs-retrieved configs different,
        # to test the comparison test code.
        #         bucket_cors_set.add_rule(bucket_allowed_methods,
        #                                  bucket_allowed_origins,
        #                                  bucket_rule_id,
        #                                  bucket_allowed_headers,
        #                                  bucket_max_age_seconds,
        #                                  bucket_expose_headers)
        return bucket_cors_set
コード例 #23
0
ファイル: amazon.py プロジェクト: digitaldreamer/electricity
    def create_bucket(cls, cdn, name):
        """
        Create the bucket in the CDN.

        Returns the configured bucket, or None when creation fails.
        """
        try:
            new_bucket = cdn.create_bucket(name)
        except S3ResponseError:
            return None

        # Apply permissions: GET from any origin, world-readable objects.
        config = CORSConfiguration()
        config.add_rule('GET', '*')

        new_bucket.set_cors(config)
        new_bucket.set_acl('public-read')

        return new_bucket
コード例 #24
0
def send_to_s3(data, aws_access_key, aws_secret_key, s3_bucket,
               file_name='data.json'):
    """Sends the reporter data to S3

    :param data: list of dicts
    :param aws_access_key:
    :param aws_secret_key:
    :param s3_bucket:
    :param file_name:
    """
    bucket = S3Connection(aws_access_key, aws_secret_key).get_bucket(s3_bucket)

    # Allow any origin to GET objects from this bucket.
    cors = CORSConfiguration()
    cors.add_rule('GET', '*')
    bucket.set_cors(cors)

    # Upload the payload as publicly readable JSON.
    key = Key(bucket)
    key.key = file_name
    key.set_contents_from_string(json.dumps(data))
    key.set_acl('public-read')
コード例 #25
0
def sync_slides(workers):
    """Tile openslide-testdata and synchronize into S3.

    NOTE: Python 2 code (print statements, ``except X, e`` syntax).
    Only part of this function is visible here; it continues below.
    """

    # Initialize metadata
    metadata = {
        'openslide': openslide.__library_version__,
        'openslide_python': openslide.__version__,
        # Short fingerprint of library versions; used to detect when
        # previously generated tiles are stale.
        'stamp': sha256('%s %s %s' % (openslide.__library_version__,
                openslide.__version__, STAMP_VERSION)).hexdigest()[:8],
        'groups': [],
    }
    print 'OpenSlide %(openslide)s, OpenSlide Python %(openslide_python)s' % metadata

    # Get openslide-testdata index
    r = requests.get(urljoin(DOWNLOAD_BASE_URL, DOWNLOAD_INDEX))
    r.raise_for_status()
    slides = r.json()

    # Connect to S3
    bucket = connect_bucket()

    # Set bucket configuration: allow cross-origin GETs from CORS_ORIGINS
    print "Configuring bucket..."
    cors = CORSConfiguration()
    cors.add_rule(['GET'], CORS_ORIGINS)
    bucket.set_cors(cors)

    # Store static files
    print "Storing static files..."
    for relpath, opts in BUCKET_STATIC.iteritems():
        key = bucket.new_key(relpath)
        key.set_contents_from_string(opts.get('data', ''),
                headers=opts.get('headers', {}), policy='public-read')

    # If the stamp is changing, mark bucket dirty
    try:
        old_stamp = json.loads(bucket.new_key(METADATA_NAME).
                get_contents_as_string()).get('stamp')
    except S3ResponseError, e:
        # 404 just means no previous metadata object exists.
        if e.status == 404:
            old_stamp = None
        else:
            raise
コード例 #26
0
 def test_two_rules(self):
     """Two rules serialize in insertion order to CORS_BODY_2."""
     config = CORSConfiguration()
     config.add_rule(
         ['PUT', 'POST', 'DELETE'], 'http://www.example.com',
         allowed_header='*', max_age_seconds=3000,
         expose_header='x-amz-server-side-encryption')
     config.add_rule('GET', '*', allowed_header='*', max_age_seconds=3000)
     self.assertEqual(config.to_xml(), CORS_BODY_2)
コード例 #27
0
ファイル: utils.py プロジェクト: sankroh/s3fileup
def generate_signed_url(obj_name, obj_type, method='GET'):
    """
    This method generates a temporary URL for an object in S3
    """
    access_key = os.environ.get('AWS_ACCESS_KEY_ID')
    secret_key = os.environ.get('AWS_SECRET_ACCESS_KEY')
    if access_key is None or secret_key is None:
        raise ImproperlyConfigured('You need to specify AWS access keys!')

    conn = S3Connection(
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
        calling_format=OrdinaryCallingFormat(),
    )

    bucket_name = generate_bucket_name()
    try:
        bucket = conn.create_bucket(bucket_name)
        is_new = True
    except S3CreateError:
        # Name already taken: reuse the existing bucket.
        bucket = conn.lookup(bucket_name)
        is_new = False

    # Freshly created buckets that will receive uploads need a CORS policy.
    if is_new and method in ('PUT', 'POST'):
        cors_cfg = CORSConfiguration()
        cors_cfg.add_rule(
            ['PUT', 'POST', 'DELETE'], DOMAIN,
            allowed_header='*', max_age_seconds=3000,
            expose_header='x-amz-server-side-encryption')
        cors_cfg.add_rule('GET', '*')
        bucket.set_cors(cors_cfg)

    headers = {
        'Content-Type': obj_type,
        'x-amz-acl': 'public-read',
    }

    return conn.generate_url(MAX_AGE, method, bucket=bucket_name,
                             key=obj_name, headers=headers)
コード例 #28
0
def adjust_cors(s3wrapper, clobber=False):
    """Set CORS headers on a bucket, removing pre-existing headers set by the
    OSF. Optionally clear all pre-existing headers.

    :param S3Wrapper s3wrapper: S3 wrapper instance
    :param bool clobber: Remove all pre-existing rules. Note: if this option
        is set to True, remember to warn or prompt the user first!

    """
    current = s3wrapper.get_cors_rules()

    # Drop everything when clobbering; otherwise drop only OSF-tagged rules.
    if clobber:
        retained = CORSConfiguration([])
    else:
        non_osf = [r for r in current if 'osf-s3' not in (r.id or '')]
        retained = CORSConfiguration(non_osf)

    # Append the OSF rule with a unique id.
    retained.add_rule(
        ['PUT', 'GET'],
        s3_settings.ALLOWED_ORIGIN,
        allowed_header=['*'],
        id='osf-s3-{0}'.format(ObjectId()),
    )

    # Save changes
    s3wrapper.set_cors_rules(retained)
コード例 #29
0
ファイル: upload.py プロジェクト: CivicVision/datahub
def enable_bucket_cors(bucket):
    """ For direct upload to work, the bucket needs to enable
    cross-origin request scripting. """
    try:
        config = bucket.get_cors()
    except S3ResponseError:
        # No CORS configuration yet: start from an empty one.
        config = CORSConfiguration()

    existing_ids = [rule.id for rule in config]
    dirty = False
    if 'spendb_put' not in existing_ids:
        config.add_rule(['PUT', 'POST'], '*',
                        allowed_header='*',
                        id='spendb_put',
                        max_age_seconds=3000,
                        expose_header='x-amz-server-side-encryption')
        dirty = True
    if 'spendb_get' not in existing_ids:
        config.add_rule('GET', '*', id='spendb_get')
        dirty = True

    # Only round-trip to S3 when something actually changed.
    if dirty:
        bucket.set_cors(config)
コード例 #30
0
ファイル: ceph.py プロジェクト: JackSlateur/raven
 def __create_bucket(self):
     # Create (or fetch) the configured bucket and attach a CORS policy
     # allowing cross-origin GETs from any origin with any header.
     self.bucket = self.conn.create_bucket(config.bucket)
     from boto.s3.cors import CORSConfiguration
     cors_cfg = CORSConfiguration()
     cors_cfg.add_rule([
         'GET',
     ], '*', allowed_header='*')
     # NOTE(review): this second rule also allows GET from any origin but
     # without allowed_header — it looks redundant with the rule above;
     # confirm whether both are intentional.
     cors_cfg.add_rule('GET', '*')
     self.bucket.set_cors(cors_cfg)
コード例 #31
0
 def test_two_rules(self):
     """A write rule plus a read rule serialize to CORS_BODY_2."""
     cfg = CORSConfiguration()
     cfg.add_rule(['PUT', 'POST', 'DELETE'],
                  'http://www.example.com',
                  expose_header='x-amz-server-side-encryption',
                  allowed_header='*',
                  max_age_seconds=3000)
     cfg.add_rule('GET', '*', max_age_seconds=3000, allowed_header='*')
     self.assertEqual(cfg.to_xml(), CORS_BODY_2)
コード例 #32
0
def get_or_create_bucket(name, public=True, cors=None):
    """Return bucket *name*; create, publicize and CORS-enable it if new."""
    import boto
    from boto.s3.cors import CORSConfiguration
    conn = boto.connect_s3()  # read AWS env vars
    existing = conn.lookup(name)
    if existing is not None:
        return existing

    print('Creating bucket %s' % name)
    bucket = conn.create_bucket(name)
    if public:
        bucket.set_acl('public-read')
    if cors:
        cfg = CORSConfiguration()
        # One-week max-age on all three rules.
        cfg.add_rule(['GET', 'POST'], 'http://*',
                     allowed_header='*', max_age_seconds=604800)
        cfg.add_rule(['GET', 'POST'], 'https://*',
                     allowed_header='*', max_age_seconds=604800)
        cfg.add_rule('GET', '*',
                     allowed_header='*', max_age_seconds=604800)
        bucket.set_cors(cfg)
        bucket.set_policy(get_bucket_policy(name, cors), headers=None)
    return bucket
コード例 #33
0
ファイル: fabfile.py プロジェクト: paulocheque/cdn
def get_or_create_bucket(name, public=True, cors=None):
    """Within the app venv on the remote host, fetch or create bucket *name*."""
    with cd(env.app_path), prefix(venv()):
        import boto
        from boto.s3.cors import CORSConfiguration
        conn = boto.connect_s3()  # read AWS env vars
        bucket = conn.lookup(name)
        if bucket is None:
            print('Creating bucket %s' % name)
            bucket = conn.create_bucket(name)
            if public:
                bucket.set_acl('public-read')
            if cors:
                cfg = CORSConfiguration()
                # Writes restricted to the given origin; reads open to all.
                cfg.add_rule(
                    ['PUT', 'POST', 'DELETE'], cors,
                    allowed_header='*', max_age_seconds=3000,
                    expose_header='x-amz-server-side-encryption')
                cfg.add_rule('GET', '*')
                bucket.set_cors(cfg)
        return bucket
コード例 #34
0
ファイル: upload.py プロジェクト: leowmjw/spendb
def enable_bucket_cors(bucket):
    """ For direct upload to work, the bucket needs to enable
    cross-origin request scripting. """
    try:
        cors_cfg = bucket.get_cors()
    except S3ResponseError:
        # Bucket has no CORS configuration yet.
        cors_cfg = CORSConfiguration()

    present = [rule.id for rule in cors_cfg]
    needs_save = False
    if 'spendb_put' not in present:
        cors_cfg.add_rule(['PUT', 'POST'],
                          '*',
                          expose_header='x-amz-server-side-encryption',
                          allowed_header='*',
                          max_age_seconds=3000,
                          id='spendb_put')
        needs_save = True
    if 'spendb_get' not in present:
        cors_cfg.add_rule('GET', '*', id='spendb_get')
        needs_save = True

    # Avoid a write to S3 when both rules were already present.
    if needs_save:
        bucket.set_cors(cors_cfg)
コード例 #35
0
ファイル: connection.py プロジェクト: pooingx2/Dripbox
 def _create_user_s3_bucket_internal(self, real_bucket_name, location=None):
     """Create the user's bucket, allowing every method/origin/header via CORS."""
     new_bucket = self.s3_conn.create_bucket(real_bucket_name,
                                             location=location)
     config = CORSConfiguration()
     config.add_rule(['PUT', 'POST', 'DELETE', 'GET', 'HEAD'],
                     allowed_origin=['*'],
                     allowed_header=['*'])
     new_bucket.set_cors(config)
コード例 #36
0
class MBTiles2S3():
    """
    Export the contents of an MBTiles file (image tiles, UTFGrid tiles
    and metadata) to an S3 bucket.

    Built as a command line tool and not meant for library inclusion:
    instantiating the class immediately parses sys.argv and performs
    the export.
    """

    description = """
examples:

  Export an mbtiles file to an S3 bucket:
  $ mbtiles2s3 world.mbtiles bucket.example

  Export an mbtiles file to an S3 bucket and path:
  $ mbtiles2s3 world.mbtiles bucket.example -p path/to/tiles

  Use a Mapbox box directly to an S3 bucket and path:
  $ mbtiles2s3 -m mapbox_user.map_id bucket.example -p path/to/tiles


requirements:

  It is expected to have AWS credentials set as AWS_ACCESS_KEY_ID and
  AWS_SECRET_ACCESS_KEY.  These can be set on the command line like:

    export AWS_ACCESS_KEY_ID="xxxxx";
    export AWS_SECRET_ACCESS_KEY="xxxx";
  """

    # Every uploaded key is made world-readable.
    default_acl = 'public-read'
    # MIME types for the artifacts pushed to S3.
    mime_png = 'image/png'
    mime_json = 'application/json'
    mime_jsonp = 'text/javascript'
    mime_mbtiles = 'application/octet-stream'

    def __init__(self):
        """
        Build a read-only CORS policy (GET from any origin) and run the
        command line entry point immediately.
        """
        self.default_cors = CORSConfiguration()
        self.default_cors.add_rule('GET', '*', allowed_header='*')
        self.main()

    def out(self, message):
        """
        Wrapper around stdout.
        """
        sys.stdout.write(message)

    def error(self, message):
        """
        Wrapper around stderr.
        """
        sys.stderr.write(message)

    def connect_s3(self):
        """
        Makes connection to S3 (credentials come from the environment)
        and gets the bucket to work in, applying the default ACL and
        CORS policy to the bucket.
        """
        self.out('- Connecting to S3 and making bucket.\n')
        self.s3 = boto.connect_s3()
        # create_bucket ensures the bucket exists; it is then re-fetched.
        # NOTE(review): create_bucket raises if the name is owned by
        # another account -- confirm that is acceptable here.
        self.bucket = self.s3.create_bucket(self.bucket_name)
        self.bucket = self.s3.get_bucket(self.bucket_name)
        self.bucket.set_acl(self.default_acl)
        self.bucket.set_cors(self.default_cors)

    def connect_mbtiles(self):
        """
        Connect to the MBTiles file which is just an sqlite file;
        exits the process with status 1 on failure.
        """
        try:
            self.out('- Connecting to MBTiles.\n')
            self.mbtiles = sqlite3.connect(self.source)
        except Exception as e:
            self.error('Could not connect to MBTiles.\n')
            sys.exit(1)

    def send_file(self,
                  path,
                  content=None,
                  mime_type=None,
                  file=None,
                  cb=None):
        """
        Send a file to S3 at *path* (prefixed with self.path if set).

        Exactly one of *content* (in-memory string) or *file* (local
        filename) supplies the data; *mime_type* defaults to PNG and
        *cb* is an optional boto progress callback used with *file*.
        """
        mime_type = self.mime_png if mime_type is None else mime_type
        path = self.path + '/' + path if self.path else path

        # TODO: CORS headers don't seem to be set on new resource

        # Create resource at path
        key = self.bucket.new_key(path)
        key.content_type = mime_type

        # Set content
        if file is not None:
            key.set_contents_from_filename(file,
                                           replace=True,
                                           cb=cb,
                                           num_cb=100)
        else:
            key.set_contents_from_string(content, replace=True)

        # Set access
        self.bucket.set_acl(self.default_acl, key)

    def jsonp(self, content):
        """
        Serialize *content* to JSON; when a grid callback was given on
        the command line, wrap it as JSONP.  Returns a (string,
        mime_type) tuple.
        """
        json_string = json.dumps(content, sort_keys=True)
        mime_type = self.mime_json

        if self.args.callback is not None and self.args.callback != '':
            json_string = '%s(%s);' % (self.args.callback, json_string)
            mime_type = self.mime_jsonp

        return (json_string, mime_type)

    def mbtiles_metadata(self):
        """
        Read the metadata table and upload it as JSON, both as
        <tileset>.json and <tileset>/metadata.json.
        """
        self.metadata = dict(
            self.mbtiles.execute(
                'select name, value from metadata;').fetchall())
        (metadata, mime_type) = self.jsonp(self.metadata)
        self.send_file(self.tileset + '.json', metadata, mime_type)
        self.send_file(self.tileset + '/metadata.json', metadata, mime_type)
        self.out('- Uploading metadata.\n')

    def mbtiles_image_tiles(self):
        """
        Stream image tiles out of the tiles table and upload them
        concurrently (bounded by --concurrency) as PNGs.
        """
        tile_count = self.mbtiles.execute(
            'select count(zoom_level) from tiles;').fetchone()[0]

        # Progress bar
        widgets = [
            '- Uploading %s image tiles: ' % (tile_count),
            progressbar.Percentage(), ' ',
            progressbar.Bar(), ' ',
            progressbar.ETA()
        ]
        progress = progressbar.ProgressBar(widgets=widgets,
                                           maxval=tile_count).start()
        completed = 0

        # Create eventlet pile
        pile = eventlet.GreenPile(self.args.concurrency)

        # Get tiles
        tiles = self.mbtiles.execute(
            'select zoom_level, tile_column, tile_row, tile_data from tiles;')
        t = tiles.fetchone()
        while t:
            key = '%s/%s/%s/%s.png' % (self.tileset, t[0], t[1], t[2])
            pile.spawn(self.send_file, key, t[3])

            # Get next and update
            t = tiles.fetchone()
            completed = completed + 1
            progress.update(completed)

        # Wait for pile and stop progress bar
        list(pile)
        progress.finish()

    def mbtiles_grid_tiles(self):
        """
        Upload UTFGrid tiles: each grid is decompressed, joined with
        its grid_data rows, JSON(P)-encoded and uploaded concurrently.
        Returns False immediately when the mbtiles has no grids.
        """
        tile_count = self.mbtiles.execute(
            'select count(zoom_level) from grids;').fetchone()[0]
        if not tile_count > 0:
            return False

        # Progress bar
        widgets = [
            '- Uploading %s grid tiles: ' % (tile_count),
            progressbar.Percentage(), ' ',
            progressbar.Bar(), ' ',
            progressbar.ETA()
        ]
        progress = progressbar.ProgressBar(widgets=widgets,
                                           maxval=tile_count).start()
        completed = 0

        # Create eventlet pile
        pile = eventlet.GreenPile(self.args.concurrency)

        # Get tiles
        tiles = self.mbtiles.execute(
            'select zoom_level, tile_column, tile_row, grid from grids;')
        t = tiles.fetchone()
        while t:
            key = '%s/%s/%s/%s.grid.json' % (self.tileset, t[0], t[1], t[2])

            # Get actual json data
            grid_data = self.mbtiles.execute(
                'select key_name, key_json FROM grid_data WHERE zoom_level = %s and tile_column = %s and tile_row = %s;'
                % (t[0], t[1], t[2])).fetchall()
            grid_data_parse = {}
            for d in grid_data:
                grid_data_parse[d[0]] = json.loads(d[1])

            # Put together
            grid = json.loads(zlib.decompress(t[3]).decode('utf-8'))
            grid['data'] = grid_data_parse

            # Upload data
            (grid, mime_type) = self.jsonp(grid)
            pile.spawn(self.send_file, key, grid, mime_type=mime_type)

            # Get next and update
            t = tiles.fetchone()
            completed = completed + 1
            progress.update(completed)

        # Wait for pile and stop progress bar
        list(pile)
        progress.finish()

    def mbtiles_mbtiles(self):
        """
        Upload the original mbtiles file itself (for archival / reuse).
        """
        key = '%s.mbtiles' % (self.tileset)

        widgets = [
            '- Uploading MBTile file: ',
            progressbar.Percentage(), ' ',
            progressbar.Bar(), ' ',
            progressbar.ETA()
        ]
        progress = progressbar.ProgressBar(widgets=widgets, maxval=1).start()

        # Progress callback
        def report_progress(complete, total):
            progress.update(float(complete) / float(total))

        self.send_file(key,
                       file=self.source,
                       cb=report_progress,
                       mime_type=self.mime_mbtiles)
        progress.finish()

    def remove_export(self):
        """
        Removes a previous export of the same tileset name: the tile
        keys, the metadata JSON and the uploaded mbtiles file.
        """
        prefix = self.path + '/' if self.path else ''
        tiles_path = '%s%s' % (prefix, self.tileset)
        metadata_path = '%s%s.json' % (prefix, self.tileset)
        mbtiles_path = '%s%s.mbtiles' % (prefix, self.tileset)

        # Get list for tiles
        tiles_path_set = self.bucket.list(prefix=tiles_path)

        # Progress
        widgets = [
            '- Removing old export, %s: ' % (self.tileset),
            progressbar.Percentage()
        ]
        progress = progressbar.ProgressBar(widgets=widgets, maxval=1).start()

        # Remove parts
        self.bucket.delete_keys([key.name for key in tiles_path_set])
        self.bucket.delete_key(tiles_path)
        progress.update(.5)
        self.bucket.delete_key(metadata_path)
        # NOTE(review): ProgressBar.update() takes an absolute value, so
        # stepping .5 -> .25 -> .25 moves the bar backwards -- confirm
        # whether .75 / 1.0 were intended.
        progress.update(.25)
        self.bucket.delete_key(mbtiles_path)
        progress.update(.25)
        progress.finish()

    def get_mapbox_mbtiles(self):
        """
        Download the mbtiles file for a Mapbox map id, reusing an
        existing local copy when present, and point self.source at it.
        """
        mapbox_mbtiles = 'http://a.tiles.mapbox.com/v3/%s.mbtiles'
        local_mbtiles = '%s.mbtiles'
        remote_file = mapbox_mbtiles % (self.source)
        local_file = local_mbtiles % (self.source)

        # Check if file exists already
        if os.path.exists(local_file) and os.path.isfile(local_file):
            self.out('- Local file, %s, already exists; using this file.\n' %
                     (local_file))
        else:
            self.out('- Downloading file from Mapbox ...\n')
            # urllib.urlretrieve is the Python 2 API -- presumably this
            # module runs under Python 2; confirm before porting.
            urllib.urlretrieve(remote_file, local_file)

        self.source = local_file

    def main(self):
        """
        Parse CLI arguments, validate inputs and environment, then run
        the export pipeline: connect to S3 and the mbtiles file,
        optionally remove a previous export, and upload metadata,
        tiles, grids and the original mbtiles.
        """
        parser = argparse.ArgumentParser(
            description=self.description,
            formatter_class=argparse.RawDescriptionHelpFormatter,
        )

        # Source
        parser.add_argument(
            'source',
            help=
            'The .mbtiles file source.  If used with the --mapbox-source flag, then this should be a Mapbox map identifier.'
        )

        # Bucket
        parser.add_argument('bucket', help='The S3 bucket to send to.')

        # Bucket path
        parser.add_argument('-p',
                            '--path',
                            dest='path',
                            help='Path in bucket to send to.',
                            default='')

        # Option to add jsonp wrapper
        parser.add_argument(
            '-g',
            '--grid-callback',
            dest='callback',
            help=
            'Control JSONP callback for UTFGrid tiles.  Defaults to `grid`, use blank to remove JSONP',
            default='grid')

        # Tileset name
        parser.add_argument(
            '-t',
            '--tileset-name',
            dest='tileset_name',
            help=
            'The name of the tileset to use.  By default, this will be the file name of the source.',
            default='')

        # Concurency
        parser.add_argument(
            '-c',
            '--concurrency',
            dest='concurrency',
            help='Number of concurrent uploads.  Default is 32',
            type=int,
            default=32)

        # Option to use Mapbox file
        parser.add_argument(
            '-m',
            '--mapbox-source',
            action='store_true',
            help=
            'Interpret the source as a Mapbox map, usually in the format of `user.map_id`.'
        )

        # Remove old parts
        parser.add_argument(
            '-r',
            '--remove-first',
            action='store_true',
            help=
            'Remove old files first.  This is good if for some reason the map boundary has changed.'
        )

        # Do not upload mbtiles
        parser.add_argument(
            '--dont-upload-mbtiles',
            action='store_true',
            help=
            'Do not upload the original mbtiles file.  This is desierable for archivable purposes.'
        )

        # Do not upload image tiles
        parser.add_argument('--dont-upload-image-tiles',
                            action='store_true',
                            help='Do not upload the image tiles.')

        # Do not upload grid tiles
        parser.add_argument('--dont-upload-grid-tiles',
                            action='store_true',
                            help='Do not upload the grid tiles.')

        # Turn on debugging
        parser.add_argument('-d',
                            '--debug',
                            action='store_true',
                            help='Turn on debugging.')

        # Parse options
        args = parser.parse_args()

        # Set some properties
        self.args = args
        self.source = args.source
        self.bucket_name = args.bucket
        self.path = args.path

        # Debugging
        if self.args.debug:
            logging.basicConfig(level=logging.DEBUG)

        # If mapbox option, handle that
        if self.args.mapbox_source:
            self.get_mapbox_mbtiles()

        # Ensure that the file exists
        if not os.path.exists(self.source) or not os.path.isfile(self.source):
            self.error('The source file is not a file or does not exist.\n')
            sys.exit(1)

        # Ensure that we have AWS credentials set up
        if 'AWS_ACCESS_KEY_ID' not in os.environ or 'AWS_SECRET_ACCESS_KEY' not in os.environ:
            self.error(
                'AWS_ACCESS_KEY_ID or AWS_SECRET_ACCESS_KEY not found in the environment.\n'
            )
            sys.exit(1)

        # Determine tileset name
        self.tileset = self.args.tileset_name
        if self.tileset is None or self.tileset == '':
            self.tileset = os.path.splitext(os.path.basename(self.source))[0]

        # Normalize the path
        self.path = os.path.normcase(self.path.strip('/'))

        # Make initial connection to S3
        self.connect_s3()

        # Make initial connection to mbtiles
        self.connect_mbtiles()

        # Remove first
        if self.args.remove_first:
            self.remove_export()

        # Upload metadata
        self.mbtiles_metadata()

        # Upload tiles
        if not self.args.dont_upload_image_tiles:
            self.mbtiles_image_tiles()
        if not self.args.dont_upload_grid_tiles:
            self.mbtiles_grid_tiles()

        # Upload mbtiles
        if not self.args.dont_upload_mbtiles:
            self.mbtiles_mbtiles()

        # Done
        self.out('- Done.\n')
コード例 #37
0
ファイル: deploy.py プロジェクト: thrashr888/s3staticuploader
    print "Using bucket `%s`" % (bucket_name)

except S3ResponseError:
    bucket = False
    print "Bucket doesn't exist `%s`" % (bucket_name)

if not bucket:

    print "Creating new bucket `%s`..." % (bucket_name)

    # The bucket doubles as a static website rooted at index.html.
    bucket = conn.create_bucket(bucket_name)
    bucket.configure_website('index.html')

    print "Adding CORS settings..."

    # Allow uploads (PUT/POST/DELETE) from the bucket's website origin
    # and from localhost during development; reads (GET) from anywhere.
    cors_cfg = CORSConfiguration()
    cors_cfg.add_rule(['PUT', 'POST', 'DELETE'], 'http://' + bucket_name + '.' + region, allowed_header='*', max_age_seconds=3000, expose_header='x-amz-server-side-encryption')
    cors_cfg.add_rule(['PUT', 'POST', 'DELETE'], 'http://localhost', allowed_header='*', max_age_seconds=3000, expose_header='x-amz-server-side-encryption')
    cors_cfg.add_rule('GET', '*')
    bucket.set_cors(cors_cfg)

    print "Adding Lifecycle settings..."

    # Lifecycle rules for uploads under u/dN/ -- presumably expire
    # after N days (boto Lifecycle.add_rule(id, prefix, status,
    # expiration)); confirm against the uploader's path scheme.
    lifecycle_cfg = Lifecycle()
    lifecycle_cfg.add_rule('d1', 'u/d1/', 'Enabled', 1)
    lifecycle_cfg.add_rule('d2', 'u/d2/', 'Enabled', 2)
    lifecycle_cfg.add_rule('d3', 'u/d3/', 'Enabled', 3)
    lifecycle_cfg.add_rule('d4', 'u/d4/', 'Enabled', 4)
    lifecycle_cfg.add_rule('d5', 'u/d5/', 'Enabled', 5)
    lifecycle_cfg.add_rule('d6', 'u/d6/', 'Enabled', 6)
    lifecycle_cfg.add_rule('d7', 'u/d7/', 'Enabled', 7)
コード例 #38
0
ファイル: mbtiles2s3.py プロジェクト: MinnPost/mbtiles2s3
class MBTiles2S3():
  """
  Export the contents of an MBTiles file (image tiles, UTFGrid tiles
  and metadata) to an S3 bucket.  Built as a command line tool (the
  constructor immediately parses sys.argv and performs the export)
  and not meant for library inclusion.
  """

  description = """
examples:

  Export an mbtiles file to an S3 bucket:
  $ mbtiles2s3 world.mbtiles bucket.example

  Export an mbtiles file to an S3 bucket and path:
  $ mbtiles2s3 world.mbtiles bucket.example -p path/to/tiles

  Use a Mapbox box directly to an S3 bucket and path:
  $ mbtiles2s3 -m mapbox_user.map_id bucket.example -p path/to/tiles


requirements:

  It is expected to have AWS credentials set as AWS_ACCESS_KEY_ID and
  AWS_SECRET_ACCESS_KEY.  These can be set on the command line like:

    export AWS_ACCESS_KEY_ID="xxxxx";
    export AWS_SECRET_ACCESS_KEY="xxxx";
  """

  # Every uploaded key is made world-readable.
  default_acl = 'public-read'
  # MIME types for the artifacts pushed to S3.
  mime_png = 'image/png'
  mime_json = 'application/json'
  mime_jsonp = 'text/javascript'
  mime_mbtiles = 'application/octet-stream'


  def __init__(self):
    """
    Build a read-only CORS policy (GET from any origin) and run the
    command line entry point immediately.
    """
    self.default_cors = CORSConfiguration()
    self.default_cors.add_rule('GET', '*', allowed_header = '*')
    self.main()


  def out(self, message):
    """
    Wrapper around stdout.
    """
    sys.stdout.write(message)


  def error(self, message):
    """
    Wrapper around stderr.
    """
    sys.stderr.write(message)


  def connect_s3(self):
    """
    Makes connection to S3 (credentials come from the environment) and
    gets the bucket to work in, applying the default ACL and CORS
    policy to the bucket.
    """
    self.out('- Connecting to S3 and making bucket.\n')
    self.s3 = boto.connect_s3()
    # create_bucket ensures the bucket exists; it is then re-fetched.
    # NOTE(review): create_bucket raises if the name is owned by
    # another account -- confirm that is acceptable here.
    self.bucket = self.s3.create_bucket(self.bucket_name)
    self.bucket = self.s3.get_bucket(self.bucket_name)
    self.bucket.set_acl(self.default_acl)
    self.bucket.set_cors(self.default_cors)


  def connect_mbtiles(self):
    """
    Connect to the MBTiles file which is just an sqlite file; exits
    the process with status 1 on failure.
    """
    try:
      self.out('- Connecting to MBTiles.\n')
      self.mbtiles = sqlite3.connect(self.source)
    except Exception as e:
      self.error('Could not connect to MBTiles.\n')
      sys.exit(1)


  def send_file(self, path, content = None, mime_type = None, file = None, cb = None):
    """
    Send a file to S3 at *path* (prefixed with self.path if set).
    Exactly one of *content* (in-memory string) or *file* (local
    filename) supplies the data; *mime_type* defaults to PNG and *cb*
    is an optional boto progress callback used with *file*.
    """
    mime_type = self.mime_png if mime_type is None else mime_type
    path = self.path + '/' + path if self.path else path

    # TODO: CORS headers don't seem to be set on new resource

    # Create resource at path
    key = self.bucket.new_key(path)
    key.content_type = mime_type

    # Set content
    if file is not None:
      key.set_contents_from_filename(file, replace = True, cb = cb, num_cb = 100)
    else:
      key.set_contents_from_string(content, replace = True)

    # Set access
    self.bucket.set_acl(self.default_acl, key)



  def jsonp(self, content):
    """
    Serialize *content* to JSON; when a grid callback was given on the
    command line, wrap it as JSONP.  Returns (string, mime_type).
    """
    json_string = json.dumps(content, sort_keys = True)
    mime_type = self.mime_json

    if self.args.callback is not None and self.args.callback != '':
      json_string = '%s(%s);' % (self.args.callback, json_string)
      mime_type = self.mime_jsonp

    return (json_string, mime_type)


  def mbtiles_metadata(self):
    """
    Read the metadata table and upload it as JSON, both as
    <tileset>.json and <tileset>/metadata.json.
    """
    self.metadata = dict(self.mbtiles.execute('select name, value from metadata;').fetchall())
    (metadata, mime_type) = self.jsonp(self.metadata)
    self.send_file(self.tileset + '.json', metadata, mime_type)
    self.send_file(self.tileset + '/metadata.json', metadata, mime_type)
    self.out('- Uploading metadata.\n')


  def mbtiles_image_tiles(self):
    """
    Stream image tiles out of the tiles table and upload them
    concurrently (bounded by --concurrency) as PNGs.
    """
    tile_count = self.mbtiles.execute('select count(zoom_level) from tiles;').fetchone()[0]

    # Progress bar
    widgets = ['- Uploading %s image tiles: ' % (tile_count), progressbar.Percentage(), ' ', progressbar.Bar(), ' ', progressbar.ETA()]
    progress = progressbar.ProgressBar(widgets = widgets, maxval = tile_count).start()
    completed = 0

    # Create eventlet pile
    pile = eventlet.GreenPile(self.args.concurrency)

    # Get tiles
    tiles = self.mbtiles.execute('select zoom_level, tile_column, tile_row, tile_data from tiles;')
    t = tiles.fetchone()
    while t:
      key = '%s/%s/%s/%s.png' % (self.tileset, t[0], t[1], t[2])
      pile.spawn(self.send_file, key, t[3])

      # Get next and update
      t = tiles.fetchone()
      completed = completed + 1
      progress.update(completed)

    # Wait for pile and stop progress bar
    list(pile)
    progress.finish()


  def mbtiles_grid_tiles(self):
    """
    Upload UTFGrid tiles: each grid is decompressed, joined with its
    grid_data rows, JSON(P)-encoded and uploaded concurrently.
    Returns False immediately when the mbtiles has no grids.
    """
    tile_count = self.mbtiles.execute('select count(zoom_level) from grids;').fetchone()[0]
    if not tile_count > 0:
      return False

    # Progress bar
    widgets = ['- Uploading %s grid tiles: ' % (tile_count), progressbar.Percentage(), ' ', progressbar.Bar(), ' ', progressbar.ETA()]
    progress = progressbar.ProgressBar(widgets = widgets, maxval = tile_count).start()
    completed = 0

    # Create eventlet pile
    pile = eventlet.GreenPile(self.args.concurrency)

    # Get tiles
    tiles = self.mbtiles.execute('select zoom_level, tile_column, tile_row, grid from grids;')
    t = tiles.fetchone()
    while t:
      key = '%s/%s/%s/%s.grid.json' % (self.tileset, t[0], t[1], t[2])

      # Get actual json data
      grid_data = self.mbtiles.execute('select key_name, key_json FROM grid_data WHERE zoom_level = %s and tile_column = %s and tile_row = %s;' % (t[0], t[1], t[2])).fetchall()
      grid_data_parse = {}
      for d in grid_data:
        grid_data_parse[d[0]] = json.loads(d[1])

      # Put together
      grid = json.loads(zlib.decompress(t[3]).decode('utf-8'))
      grid['data'] = grid_data_parse

      # Upload data
      (grid, mime_type) = self.jsonp(grid)
      pile.spawn(self.send_file, key, grid, mime_type = mime_type)

      # Get next and update
      t = tiles.fetchone()
      completed = completed + 1
      progress.update(completed)

    # Wait for pile and stop progress bar
    list(pile)
    progress.finish()


  def mbtiles_mbtiles(self):
    """
    Upload the original mbtiles file itself (for archival / reuse).
    """
    key = '%s.mbtiles' % (self.tileset)

    widgets = ['- Uploading MBTile file: ', progressbar.Percentage(), ' ', progressbar.Bar(), ' ', progressbar.ETA()]
    progress = progressbar.ProgressBar(widgets = widgets, maxval = 1).start()

    # Progress callback
    def report_progress(complete, total):
      progress.update(float(complete) / float(total))

    self.send_file(key, file = self.source, cb = report_progress, mime_type = self.mime_mbtiles)
    progress.finish()


  def remove_export(self):
    """
    Removes a previous export of the same tileset name: the tile keys,
    the metadata JSON and the uploaded mbtiles file.
    """
    prefix = self.path + '/' if self.path else ''
    tiles_path = '%s%s' % (prefix, self.tileset)
    metadata_path = '%s%s.json' % (prefix, self.tileset)
    mbtiles_path = '%s%s.mbtiles' % (prefix, self.tileset)

    # Get list for tiles
    tiles_path_set = self.bucket.list(prefix = tiles_path)

    # Progress
    widgets = ['- Removing old export, %s: ' % (self.tileset), progressbar.Percentage()]
    progress = progressbar.ProgressBar(widgets = widgets, maxval = 1).start()

    # Remove parts
    self.bucket.delete_keys([key.name for key in tiles_path_set])
    self.bucket.delete_key(tiles_path)
    progress.update(.5)
    self.bucket.delete_key(metadata_path)
    # NOTE(review): ProgressBar.update() takes an absolute value, so
    # stepping .5 -> .25 -> .25 moves the bar backwards -- confirm
    # whether .75 / 1.0 were intended.
    progress.update(.25)
    self.bucket.delete_key(mbtiles_path)
    progress.update(.25)
    progress.finish()



  def get_mapbox_mbtiles(self):
    """
    Download the mbtiles file for a Mapbox map id, reusing an existing
    local copy when present, and point self.source at it.
    """
    mapbox_mbtiles = 'http://a.tiles.mapbox.com/v3/%s.mbtiles'
    local_mbtiles = '%s.mbtiles'
    remote_file = mapbox_mbtiles % (self.source)
    local_file = local_mbtiles % (self.source)

    # Check if file exists already
    if os.path.exists(local_file) and os.path.isfile(local_file):
      self.out('- Local file, %s, already exists; using this file.\n' % (local_file))
    else:
      self.out('- Downloading file from Mapbox ...\n')
      # urllib.urlretrieve is the Python 2 API -- presumably this file
      # runs under Python 2; confirm before porting.
      urllib.urlretrieve (remote_file, local_file)

    self.source = local_file


  def main(self):
    """
    Parse CLI arguments, validate inputs and environment, then run the
    export pipeline: connect to S3 and the mbtiles file, optionally
    remove a previous export, and upload metadata, tiles, grids and
    the original mbtiles.
    """
    parser = argparse.ArgumentParser(description = self.description, formatter_class = argparse.RawDescriptionHelpFormatter,)

    # Source
    parser.add_argument(
      'source',
      help = 'The .mbtiles file source.  If used with the --mapbox-source flag, then this should be a Mapbox map identifier.'
    )

    # Bucket
    parser.add_argument(
      'bucket',
      help = 'The S3 bucket to send to.'
    )

    # Bucket path
    parser.add_argument(
      '-p', '--path',
      dest = 'path',
      help = 'Path in bucket to send to.',
      default = ''
    )

    # Option to add jsonp wrapper
    parser.add_argument(
      '-g', '--grid-callback',
      dest = 'callback',
      help = 'Control JSONP callback for UTFGrid tiles.  Defaults to `grid`, use blank to remove JSONP',
      default = 'grid'
    )

    # Tileset name
    parser.add_argument(
      '-t', '--tileset-name',
      dest = 'tileset_name',
      help = 'The name of the tileset to use.  By default, this will be the file name of the source.',
      default = ''
    )

    # Concurency
    parser.add_argument(
      '-c', '--concurrency',
      dest = 'concurrency',
      help = 'Number of concurrent uploads.  Default is 32',
      type = int,
      default = 32
    )

    # Option to use Mapbox file
    parser.add_argument(
      '-m', '--mapbox-source',
      action = 'store_true',
      help = 'Interpret the source as a Mapbox map, usually in the format of `user.map_id`.'
    )

    # Remove old parts
    parser.add_argument(
      '-r', '--remove-first',
      action = 'store_true',
      help = 'Remove old files first.  This is good if for some reason the map boundary has changed.'
    )

    # Do not upload mbtiles
    parser.add_argument(
      '--dont-upload-mbtiles',
      action = 'store_true',
      help = 'Do not upload the original mbtiles file.  This is desierable for archivable purposes.'
    )

    # Do not upload image tiles
    parser.add_argument(
      '--dont-upload-image-tiles',
      action = 'store_true',
      help = 'Do not upload the image tiles.'
    )

    # Do not upload grid tiles
    parser.add_argument(
      '--dont-upload-grid-tiles',
      action = 'store_true',
      help = 'Do not upload the grid tiles.'
    )

    # Turn on debugging
    parser.add_argument(
      '-d', '--debug',
      action = 'store_true',
      help = 'Turn on debugging.'
    )

    # Parse options
    args = parser.parse_args()

    # Set some properties
    self.args = args
    self.source = args.source
    self.bucket_name = args.bucket
    self.path = args.path

    # Debugging
    if self.args.debug:
      logging.basicConfig(level = logging.DEBUG)

    # If mapbox option, handle that
    if self.args.mapbox_source:
      self.get_mapbox_mbtiles()

    # Ensure that the file exists
    if not os.path.exists(self.source) or not os.path.isfile(self.source):
      self.error('The source file is not a file or does not exist.\n')
      sys.exit(1)

    # Ensure that we have AWS credentials set up
    if 'AWS_ACCESS_KEY_ID' not in os.environ or 'AWS_SECRET_ACCESS_KEY' not in os.environ:
      self.error('AWS_ACCESS_KEY_ID or AWS_SECRET_ACCESS_KEY not found in the environment.\n')
      sys.exit(1)

    # Determine tileset name
    self.tileset = self.args.tileset_name
    if self.tileset is None or self.tileset == '':
      self.tileset = os.path.splitext(os.path.basename(self.source))[0]

    # Normalize the path
    self.path = os.path.normcase(self.path.strip('/'))

    # Make initial connection to S3
    self.connect_s3()

    # Make initial connection to mbtiles
    self.connect_mbtiles()

    # Remove first
    if self.args.remove_first:
      self.remove_export()

    # Upload metadata
    self.mbtiles_metadata()

    # Upload tiles
    if not self.args.dont_upload_image_tiles:
      self.mbtiles_image_tiles()
    if not self.args.dont_upload_grid_tiles:
      self.mbtiles_grid_tiles()

    # Upload mbtiles
    if not self.args.dont_upload_mbtiles:
      self.mbtiles_mbtiles()

    # Done
    self.out('- Done.\n')
コード例 #39
0
ファイル: deploy.py プロジェクト: thrashr888/s3staticuploader
    print("Using bucket `%s`" % (bucket_name))

except S3ResponseError:
    bucket = False
    print("Bucket doesn't exist `%s`" % (bucket_name))

if not bucket:

    print("Creating new bucket `%s`..." % (bucket_name))

    # The bucket doubles as a static website rooted at index.html.
    bucket = conn.create_bucket(bucket_name)
    bucket.configure_website('index.html')

    print("Adding CORS settings...")

    # Allow uploads (PUT/POST/DELETE) from the bucket's website origin
    # and from localhost during development; reads (GET) from anywhere.
    cors_cfg = CORSConfiguration()
    cors_cfg.add_rule(['PUT', 'POST', 'DELETE'],
                      'http://' + bucket_name + '.' + region,
                      allowed_header='*',
                      max_age_seconds=3000,
                      expose_header='x-amz-server-side-encryption')
    cors_cfg.add_rule(['PUT', 'POST', 'DELETE'],
                      'http://localhost',
                      allowed_header='*',
                      max_age_seconds=3000,
                      expose_header='x-amz-server-side-encryption')
    cors_cfg.add_rule('GET', '*')
    bucket.set_cors(cors_cfg)

    print("Adding Lifecycle settings...")
コード例 #40
0
 def test_minimal(self):
     """A single GET/any-origin rule must serialize to the expected
     XML body (CORS_BODY_3 fixture)."""
     cfg = CORSConfiguration()
     cfg.add_rule('GET', '*')
     self.assertEqual(cfg.to_xml(), CORS_BODY_3)
コード例 #41
0
def set_bucket_policy(bucket):
    """Attach a permissive CORS policy to *bucket*: PUT/POST/GET from
    any origin, any request header, exposing the ETag response header
    to browsers."""
    from boto.s3.cors import CORSConfiguration

    config = CORSConfiguration()
    config.add_rule(['PUT', 'POST', 'GET'],
                    '*',
                    allowed_header='*',
                    max_age_seconds=3000,
                    expose_header='ETag')
    bucket.set_cors(config)
コード例 #42
0
    max_age_seconds = module.params.get("max_age_seconds")
    expose_header = module.params.get("expose_header")
    changed = False

    try:
        bucket = connection.get_bucket(name)
    except S3ResponseError, e:
        module.fail_json(msg=str(get_error_message(e)))

    # Get the bucket's current CORS rules
    try:
        current_lifecycle_obj = bucket.get_cors()
        error_code = get_error_code(e.args[2])
    except S3ResponseError, e:
        if error_code == "NoSuchLifecycleConfiguration":
            current_cors_obj = CORSConfiguration()
        else:
            module.fail_json(msg=str(get_error_message(e)))

    # Create CORS rule
    cors_rule = CORSRule(allowed_method=allowed_methods,
                         allowed_origin=allowed_origin,
                         id=rule_id,
                         allowed_header=allowed_header,
                         max_age_seconds=max_age_seconds,
                         expose_header=expose_header)

    # Create lifecycle
    cors_obj = CORSConfiguration()

    # Check if rule exists
コード例 #43
0
ファイル: cors_tests.py プロジェクト: euca-nightfury/eutester
    def test_cors_config_mgmt(self):
        '''
        Method: Tests setting, getting, and deleting the CORS config on a bucket.
        Walks the full lifecycle: get with no config (expect 404
        NoSuchCORSConfiguration), set a config, get it back and compare
        the XML, delete it, then get again (expect 404 once more).
        '''
        test_bucket=self.bucket_prefix + "-simple-test-bucket"
        self.buckets_used.add(test_bucket)
        self.tester.debug("Starting CORS config management tests, using bucket name: " + test_bucket)

        try :
            bucket = self.tester.s3.create_bucket(test_bucket)
            if bucket == None:
                self.tester.s3.delete_bucket(test_bucket)
                self.fail(test_bucket + " was not created correctly")
        except (S3ResponseError, S3CreateError) as e:
            self.fail(test_bucket + " create caused exception: " + str(e))

        # Get the CORS config (none yet).
        # Should get 404 Not Found, with "NoSuchCORSConfiguration" in the body.
        try :
            self.tester.debug("Getting (empty) CORS config")
            bucket.get_cors()
            #self.tester.s3.delete_bucket(test_bucket)
            #LPT self.fail("Did not get an S3ResponseError getting CORS config when none exists yet.")
        except S3ResponseError as e:
            if (e.status == 404 and e.reason == "Not Found" and e.code == "NoSuchCORSConfiguration"):
                self.tester.debug("Caught S3ResponseError with expected contents, " +
                                  "getting CORS config when none exists yet.")
            else:
                self.tester.s3.delete_bucket(test_bucket)
                self.fail("Caught S3ResponseError getting CORS config when none exists yet," +
                          "but exception contents were unexpected: " + str(e))

        # Set a simple CORS config.
        try :
            self.tester.debug("Setting a CORS config")
            bucket_cors_set = CORSConfiguration()
            bucket_rule_id = "ManuallyAssignedId1"
            bucket_allowed_methods = ['GET', 'PUT']
            bucket_allowed_origins = ['*']
            bucket_allowed_headers = ['*']
            bucket_max_age_seconds = 3000
            #bucket_expose_headers = []
            # add_rule positional order: methods, origins, id, headers,
            # max_age_seconds.
            bucket_cors_set.add_rule(bucket_allowed_methods,
                                     bucket_allowed_origins,
                                     bucket_rule_id,
                                     bucket_allowed_headers,
                                     bucket_max_age_seconds)
            bucket.set_cors(bucket_cors_set)
        except S3ResponseError as e:
            self.tester.s3.delete_bucket(test_bucket)
            self.fail("Caught S3ResponseError setting CORS config: " + str(e))

        # Get the CORS config. Should get the config we just set.
        try :
            self.tester.debug("Getting the CORS config we just set")
            bucket_cors_retrieved = bucket.get_cors()
            # XML round-trip comparison: what came back must serialize
            # identically to what was sent.
            assert (bucket_cors_retrieved.to_xml() == bucket_cors_set.to_xml()), 'Bucket CORS config: Expected ' + bucket_cors_set.to_xml() + ', Retrieved ' + bucket_cors_retrieved.to_xml()

        except S3ResponseError as e:
            self.tester.s3.delete_bucket(test_bucket)
            self.fail("Caught S3ResponseError getting CORS config, after setting it successfully: " + str(e))

        # Delete the CORS config.
        try :
            self.tester.debug("Deleting the CORS config")
            bucket.delete_cors()
        except S3ResponseError as e:
            self.tester.s3.delete_bucket(test_bucket)
            self.fail("Caught S3ResponseError deleting CORS config, after setting and validating it successfully: " + str(e))

        # Get the CORS config (none anymore).
        # Should get 404 Not Found, with "NoSuchCORSConfiguration" in the body.
        try :
            self.tester.debug("Getting (empty again) CORS config")
            bucket.get_cors()
            self.tester.s3.delete_bucket(test_bucket)
            self.fail("Did not get an S3ResponseError getting CORS config after being deleted.")
        except S3ResponseError as e:
            self.tester.s3.delete_bucket(test_bucket)
            if (e.status == 404 and e.reason == "Not Found" and e.code == "NoSuchCORSConfiguration"):
                self.tester.debug("Caught S3ResponseError with expected contents, " +
                                  "getting CORS config after being deleted.")
            else:
                self.fail("Caught S3ResponseError getting CORS config after being deleted," +
                          "but exception contents were unexpected: " + str(e))
コード例 #44
0
ファイル: ingest.py プロジェクト: imclab/splice
def distribute(data, channel_id, deploy, scheduled_dt=None):
    """Upload tile data to S3.

    :param data: tile data
    :param channel_id: channel id for which to distribute tile data
    :param deploy: whether to deploy tiles to firefox immediately
    :param scheduled_dt: an optional scheduled date in the future for deploy.
        overrides deploy; mutually exclusive with ``deploy``
    :returns: list of ``[url, uploaded]`` pairs, one per artifact
    :raises ScheduleError: if ``scheduled_dt`` is in the past, or both
        ``deploy`` and ``scheduled_dt`` were given
    """
    command_logger.info("Generating Data")

    from splice.models import Channel
    from splice.environment import Environment

    env = Environment.instance()

    if scheduled_dt:
        now = datetime.utcnow()
        if now > scheduled_dt:
            raise ScheduleError("scheduled date needs to be in the future")
        elif deploy:
            raise ScheduleError("cannot specify deploy and schedule at the same time")

    channel = (
        env.db.session
        .query(Channel)
        .filter(Channel.id == channel_id)
        .one())

    artifacts = generate_artifacts(data, channel.name, deploy)

    command_logger.info("Uploading to S3 for channel {0}".format(channel.name))

    bucket = Environment.instance().s3.get_bucket(Environment.instance().config.S3["bucket"])
    # Tiles are fetched cross-origin by Firefox: allow GET from any origin.
    cors = CORSConfiguration()
    cors.add_rule("GET", "*", allowed_header="*")
    bucket.set_cors(cors)

    distributed = []

    headers = {
        'Cache-Control': 'public, max-age=31536000',
        'Content-Disposition': 'inline',
    }

    # upload individual files
    # (renamed loop variable: ``file`` shadowed the builtin)
    for artifact in artifacts:
        # default to JSON for artifacts without an explicit mime type
        headers['Content-Type'] = artifact.get("mime", "application/json")

        key = bucket.get_key(artifact["key"])
        uploaded = False

        # Only upload when the key is new, unless the artifact forces it.
        if key is None or artifact.get("force_upload"):
            key = Key(bucket)
            key.name = artifact["key"]
            key.set_contents_from_string(artifact["data"], headers=headers)
            key.set_acl("public-read")
            uploaded = True

        url = key.generate_url(expires_in=0, query_auth=False)

        # remove x-amz-security-token, which is inserted even if query_auth=False
        # ref: https://github.com/boto/boto/issues/1477
        # pop with a default instead of try/bare-except: only a missing
        # key is expected here, and a bare except also hid real errors.
        uri = furl(url)
        uri.args.pop('x-amz-security-token', None)
        url = uri.url

        if uploaded:
            command_logger.info("UPLOADED {0}".format(url))
        else:
            command_logger.info("SKIPPED {0}".format(url))
        distributed.append([url, uploaded])

        if artifact.get("dist", False):
            insert_distribution(url, channel_id, deploy, scheduled_dt)

    return distributed
コード例 #45
0
ファイル: fc_2014_10_01.py プロジェクト: mfwarren/FreeCoding
def load_CORS(bucket, host):
    """Attach a CORS policy to *bucket* allowing PUT/POST/GET from *host*.

    All request headers are permitted and preflight responses may be
    cached for 3000 seconds.
    """
    config = CORSConfiguration()
    config.add_rule(
        ['PUT', 'POST', 'GET'],
        host,
        allowed_header='*',
        max_age_seconds=3000,
    )
    bucket.set_cors(config)
コード例 #46
0
ファイル: cors_tests.py プロジェクト: tbeckham/eutester
    def test_cors_config_mgmt(self):
        '''
        Method: Tests setting, getting, and deleting the CORS config on a bucket

        Exercises the full CORS-config lifecycle on a fresh bucket:
        1. GET with no config (expect 404 "NoSuchCORSConfiguration"),
        2. PUT a simple config,
        3. GET it back and compare the XML round-trip,
        4. DELETE the config,
        5. GET again (expect 404 "NoSuchCORSConfiguration").
        The test bucket is cleaned up on every failure path.
        '''
        test_bucket = self.bucket_prefix + "-simple-test-bucket"
        self.buckets_used.add(test_bucket)
        self.tester.debug(
            "Starting CORS config management tests, using bucket name: " +
            test_bucket)

        # Create the bucket the whole test runs against.
        try:
            bucket = self.tester.s3.create_bucket(test_bucket)
            if bucket == None:
                self.tester.s3.delete_bucket(test_bucket)
                self.fail(test_bucket + " was not created correctly")
        except (S3ResponseError, S3CreateError) as e:
            self.fail(test_bucket + " create caused exception: " + str(e))

        # Get the CORS config (none yet).
        # Should get 404 Not Found, with "NoSuchCORSConfiguration" in the body.
        try:
            self.tester.debug("Getting (empty) CORS config")
            bucket.get_cors()
            # NOTE(review): a successful get here is deliberately tolerated
            # (see the commented-out fail below); only an error with
            # unexpected contents fails the test.
            #self.tester.s3.delete_bucket(test_bucket)
            #LPT self.fail("Did not get an S3ResponseError getting CORS config when none exists yet.")
        except S3ResponseError as e:
            if (e.status == 404 and e.reason == "Not Found"
                    and e.code == "NoSuchCORSConfiguration"):
                self.tester.debug(
                    "Caught S3ResponseError with expected contents, " +
                    "getting CORS config when none exists yet.")
            else:
                self.tester.s3.delete_bucket(test_bucket)
                self.fail(
                    "Caught S3ResponseError getting CORS config when none exists yet,"
                    + "but exception contents were unexpected: " + str(e))

        # Set a simple CORS config.
        try:
            self.tester.debug("Setting a CORS config")
            bucket_cors_set = CORSConfiguration()
            bucket_rule_id = "ManuallyAssignedId1"
            bucket_allowed_methods = ['GET', 'PUT']
            bucket_allowed_origins = ['*']
            bucket_allowed_headers = ['*']
            bucket_max_age_seconds = 3000
            #bucket_expose_headers = []
            # Positional args follow boto's add_rule signature:
            # (allowed_method, allowed_origin, id, allowed_header, max_age_seconds)
            bucket_cors_set.add_rule(bucket_allowed_methods,
                                     bucket_allowed_origins, bucket_rule_id,
                                     bucket_allowed_headers,
                                     bucket_max_age_seconds)
            bucket.set_cors(bucket_cors_set)
        except S3ResponseError as e:
            self.tester.s3.delete_bucket(test_bucket)
            self.fail("Caught S3ResponseError setting CORS config: " + str(e))

        # Get the CORS config. Should get the config we just set.
        # Comparison is on the XML serialization of both configs.
        try:
            self.tester.debug("Getting the CORS config we just set")
            bucket_cors_retrieved = bucket.get_cors()
            assert (bucket_cors_retrieved.to_xml() == bucket_cors_set.to_xml(
            )), 'Bucket CORS config: Expected ' + bucket_cors_set.to_xml(
            ) + ', Retrieved ' + bucket_cors_retrieved.to_xml()

        except S3ResponseError as e:
            self.tester.s3.delete_bucket(test_bucket)
            self.fail(
                "Caught S3ResponseError getting CORS config, after setting it successfully: "
                + str(e))

        # Delete the CORS config.
        try:
            self.tester.debug("Deleting the CORS config")
            bucket.delete_cors()
        except S3ResponseError as e:
            self.tester.s3.delete_bucket(test_bucket)
            self.fail(
                "Caught S3ResponseError deleting CORS config, after setting and validating it successfully: "
                + str(e))

        # Get the CORS config (none anymore).
        # Should get 404 Not Found, with "NoSuchCORSConfiguration" in the body.
        try:
            self.tester.debug("Getting (empty again) CORS config")
            bucket.get_cors()
            self.tester.s3.delete_bucket(test_bucket)
            self.fail(
                "Did not get an S3ResponseError getting CORS config after being deleted."
            )
        except S3ResponseError as e:
            self.tester.s3.delete_bucket(test_bucket)
            if (e.status == 404 and e.reason == "Not Found"
                    and e.code == "NoSuchCORSConfiguration"):
                self.tester.debug(
                    "Caught S3ResponseError with expected contents, " +
                    "getting CORS config after being deleted.")
            else:
                self.fail(
                    "Caught S3ResponseError getting CORS config after being deleted,"
                    + "but exception contents were unexpected: " + str(e))
コード例 #47
0
 def get_cors_rules(self):
     """Return the bucket's CORS configuration.

     Falls back to an empty ``CORSConfiguration`` when the request fails
     (e.g. the bucket has no CORS rules and S3 answers 404
     "NoSuchCORSConfiguration").
     """
     try:
         return self.bucket.get_cors()
     except Exception:
         # ``except Exception`` instead of a bare ``except:`` so that
         # SystemExit/KeyboardInterrupt still propagate; the best-effort
         # fallback behavior is otherwise unchanged.
         return CORSConfiguration()
コード例 #48
0
ファイル: storage.py プロジェクト: cmusatyalab/django-s3
 def _get_cors_config(self):
     """Build the default CORS policy: allow GET requests from any origin."""
     config = CORSConfiguration()
     config.add_rule(['GET'], ['*'])
     return config
コード例 #49
0
ファイル: 2.py プロジェクト: maxthwell/shells
    # Authenticate against Swift.
    # NOTE(review): swift_api_auth appears to return (storage_url, auth_token)
    # given the later use of XSU as a URL and XST as X-Auth-Token — confirm
    # against its definition.
    XSU, XST = swift_api_auth(user, key)
    headers = {"X-Auth-Token": XST, "limit": 10, "offset": 0, "format": "json"}
    # Disabled block (string literal used as a comment): sample PUTs that
    # create containers/objects directly through the Swift API.
    '''
	out_storage_info(XSU,headers)
	requests.put(XSU+"/abc",headers=headers)	
	requests.put(XSU+"/abc/def",headers=headers)	
	requests.put(XSU+"/abc/def/a",headers=headers,data="wo")	
	requests.put(XSU+"/abc/def/b",headers=headers,data="go")	
	requests.put(XSU+"/abc/def/c",headers=headers,data="op")	
	requests.put(XSU+"/abc/def/d",headers=headers,data="no")	
	out_storage_info(XSU,headers)
	'''
    # Connect to the S3-compatible endpoint using the first account's keys.
    # NOTE(review): info_s3 is defined outside this fragment — presumably a
    # list of credential dicts; verify against the caller.
    access_key = info_s3[0]["access_key"]
    secret_key = info_s3[0]["secret_key"]
    s3_conn = s3_connect(access_key, secret_key)
    #import pdb;pdb.set_trace()
    # Create/fetch the "eisoo" bucket and attach a minimal CORS policy
    # allowing POST from any origin, then dump storage info via Swift.
    s3_conn.create_bucket("eisoo")
    bkt = s3_conn.get_bucket('eisoo')
    #cors=bkt.get_cors()
    config = CORSConfiguration()
    config.add_rule('POST', '*')
    bkt.set_cors(config)
    out_storage_info(XSU, headers)
    # Disabled block (string literal used as a comment): upload /etc/hosts
    # into every bucket.
    '''
	for bucket in s3_conn.get_all_buckets():
		key=bucket.get_key("def")
		key.set_contents_from_filename("/etc/hosts")
		#print "{name}\t{created}".format(name=bucket.name,created=bucket.creation_date)
	out_storage_info(XSU,headers)
	'''
コード例 #50
0
 def test_minimal(self):
     """A single GET/* rule should serialize to the canonical minimal XML."""
     config = CORSConfiguration()
     config.add_rule('GET', '*')
     self.assertEqual(config.to_xml(), CORS_BODY_3)
コード例 #51
0
ファイル: s3.py プロジェクト: mvx24/fabric-shuttle
    def site_config(self, site):
        with hook('site config %s' % self.name, self, site):
            setup_aws_access_key(site)

            from boto import connect_s3
            from boto.s3.bucket import Bucket
            from boto.s3.key import Key

            for bucket_config in self.settings['buckets']:
                # Connect and make sure the bucket exists
                print bold(u'Configuring bucket %s...' % bucket_config['name'])
                connection = connect_s3()
                try:
                    bucket = connection.get_bucket(bucket_config['name'])
                except:
                    bucket = connection.create_bucket(bucket_config['name'])
                # Set the bucket policy
                if bucket_config.has_key('policy'):
                    bucket.set_policy(bucket_config['policy'])
                # Setup CORS, array of rules
                # http://boto.readthedocs.org/en/latest/ref/s3.html#boto.s3.cors.CORSConfiguration
                if bucket_config.has_key('cors') and bucket_config['cors'] is None:
                    # If explicity set to None, then remove the cors policy
                    bucket.delete_cors()
                else:
                    if not bucket_config.has_key('cors'):
                        # If not specified, use the default GET policy
                        bucket_config['cors'] = (DEFAULT_CORS_RULE,)
                    from boto.s3.cors import CORSConfiguration
                    cors_config = CORSConfiguration()
                    for rule in bucket_config['cors']:
                        cors_config.add_rule(**rule)
                    bucket.set_cors(cors_config)
                # Setup the lifecycle, array of rules
                # http://boto.readthedocs.org/en/latest/ref/s3.html#boto.s3.lifecycle.Lifecycle
                if bucket_config.has_key('lifecycle'):
                    from boto.s3.lifecycle import Lifecycle
                    lifecycle_config = Lifecycle()
                    for rule in bucket_config['lifecycle']:
                        lifecycle_config.add_rule(**rule)
                    bucket.configure_lifecycle(lifecycle_config)
                else:
                    bucket.delete_lifecycle_configuration()
                # Setup the bucket website hosting {suffix, error_key, routing_rules, redirect_all_requests_to}
                # http://boto.readthedocs.org/en/latest/ref/s3.html
                # https://github.com/boto/boto/blob/develop/boto/s3/website.py
                if bucket_config.has_key('website'):
                    # Expand the routing rules, array of {condition, redirect}
                    if bucket_config['website'].has_key('routing_rules'):
                        from boto.s3.website import RoutingRules, RoutingRule
                        routing_rules = RoutingRules()
                        for rule in bucket_config['website']['routing_rules']:
                            routing_rules.add_rule(RoutingRule(**rule))
                        bucket_config['website']['routing_rules'] = routing_rules
                    # Expand the redirect, redirect_all_requests_to is {hostname, protocol}
                    if bucket_config['website'].has_key('redirect_all_requests_to'):
                        from boto.s3.website import RedirectLocation
                        bucket_config['website']['redirect_all_requests_to'] = RedirectLocation(**bucket_config['website']['redirect_all_requests_to'])
                    bucket.configure_website(**bucket_config['website'])
                else:
                    bucket.delete_website_configuration()
コード例 #52
0
In addition to accessing specific Tree via the create_tree method you can also get a list of
all available trees' name that you have created.
"""
# List all existing "trees" (buckets) on the connection (Python 2 prints).
rs = conn1.list()
print "you have %s trees in your garden" % str(len(rs))
print "Tree names list : " + str(rs)
"""
Setting/Getting/Deleting CORS Configuration on a Bucket

Cross-origin resource sharing (CORS) defines a way for client web applications
 that are loaded in one domain to interact with resources in a different domain.
With CORS support in Amazon S3, you can build rich client-side web applications
with Amazon S3 and selectively allow cross-origin access to your Amazon S3 resources.
"""
# Build a CORS configuration with two rules (explained in the string below).
cors_cfg = CORSConfiguration()
cors_cfg.add_rule(['PUT', 'POST', 'DELETE'],
                  'https://www.example.com',
                  allowed_header='*',
                  max_age_seconds=3000,
                  expose_header='x-amz-server-side-encryption')
cors_cfg.add_rule('GET', '*')
"""
The above code creates a CORS configuration object with two rules.

The first rule allows cross-origin PUT, POST, and DELETE requests from the https://www.example.com/ origin. The rule also allows all headers in preflight OPTIONS request through the Access-Control-Request-Headers header. In response to any preflight OPTIONS request, Amazon S3 will return any requested headers.
The second rule allows cross-origin GET requests from all origins.
To associate this configuration with a bucket:
"""
# Apply the configuration to the bucket named 'my_tree1'.
mytree1 = conn1.lookup('my_tree1')
mytree1.set_cors(cors_cfg)
コード例 #53
0
    try:
        current_cors_config = bucket.get_cors()
        current_cors_xml = current_cors_config.to_xml()
    except S3ResponseError, e:
        if e.error_code == "NoSuchCORSConfiguration":
            current_cors_xml = None
        else:
            module.fail_json(msg=e.message)

    if cors_xml is not None:
        cors_rule_change = False
        if current_cors_xml is None:
            cors_rule_change = True  # Create
        else:
            # Convert cors_xml to a Boto CorsConfiguration object for comparison
            cors_config = CORSConfiguration()
            h = XmlHandler(cors_config, bucket)
            xml.sax.parseString(cors_xml, h)
            if cors_config.to_xml() != current_cors_config.to_xml():
                cors_rule_change = True  # Update

        if cors_rule_change:
            try:
                bucket.set_cors_xml(cors_xml)
                changed = True
                current_cors_xml = bucket.get_cors().to_xml()
            except S3ResponseError, e:
                module.fail_json(msg=e.message)
    elif current_cors_xml is not None:
        try:
            bucket.delete_cors()