def upload_large(self, source_path):
        """Multipart-upload a local file to S3 under a random ``.mp4`` key.

        The object name is md5(uuid4) + ".mp4" (extension is hard-coded).
        The file is uploaded in 50 MiB parts, then tagged with remote
        metadata: original basename ('iname'), byte size ('iscale') and a
        local-time upload timestamp ('itime').

        NOTE(review): Python 2 code (print statements; ``md5()`` called on a
        str) using boto's classic S3 API and FileChunkIO.
        """
        # Total size determines the number of multipart chunks below.
        source_size = os.stat(source_path).st_size
        source_key = md5(str(uuid.uuid4())).hexdigest() + ".mp4"
        print "source path %s" % source_path
        print "source key %s" % source_key

        mp = self.bucket.initiate_multipart_upload(source_key)
        chunk_size = 52428800  # 50 MiB per part
        # ceil() so the final, possibly short, remainder still gets a part.
        chunk_count = int(math.ceil(source_size / float(chunk_size)))
        for i in range(chunk_count):
            offset = chunk_size * i
            # Last chunk may be smaller than chunk_size.
            bytes = min(chunk_size, source_size - offset)
            with FileChunkIO(source_path, 'r', offset=offset,
                             bytes=bytes) as fp:
                # S3 multipart part numbers are 1-based.
                mp.upload_part_from_file(fp, part_num=i + 1)
        mp.complete_upload()

        # Attach descriptive metadata after the upload completes
        # (final True = replace existing metadata).
        k = Key(self.bucket)
        k.key = source_key
        k.set_remote_metadata(
            {
                'iname':
                os.path.basename(source_path),
                "iscale":
                str(source_size),
                "itime":
                time.strftime("%Y-%m-%d %H:%M:%S",
                              time.localtime(int(time.time())))
            }, {}, True)
        # Re-fetch the key to confirm the metadata round-trip.
        remote_key = self.bucket.get_key(source_key)
        print "Key name", len(remote_key.metadata)
# Example #2 (score 0) — file: s3_manager.py, project: herveza/1
    def upload(self, filename, filepath):
        """Upload *filepath* to the fusion bucket and return its public URL.

        The object is stored publicly readable under the key
        ``<folder>/<current date>/<filename>``, then tagged with the
        configured Cache-Control header and a guessed Content-Type.
        """
        k = Key(self.fusion_bucket)
        k.key = "%s/%s/%s" % (self.folder, get_current_date(), filename)
        k.set_contents_from_filename(filepath, policy='public-read')

        # guess_type() returns (None, ...) for unknown extensions; fall back
        # to a generic binary type instead of writing a None header value.
        content_type = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
        k.set_remote_metadata({'Cache-Control': self.cache_header, 'Content-Type': content_type}, {}, True)

        # Non-expiring, unsigned URL (object is public-read).
        return k.generate_url(expires_in=0, query_auth=False)
# Example #3 (score 0)
    def upload(self, filename, filepath):
        """Upload *filepath* to the fusion bucket and return its public URL.

        The object is stored publicly readable under the key
        ``<folder>/<current date>/<filename>``, then tagged with the
        configured Cache-Control header and a guessed Content-Type.
        """
        k = Key(self.fusion_bucket)
        k.key = "%s/%s/%s" % (self.folder, get_current_date(), filename)
        k.set_contents_from_filename(filepath, policy='public-read')

        # guess_type() returns (None, ...) for unknown extensions; fall back
        # to a generic binary type instead of writing a None header value.
        content_type = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
        k.set_remote_metadata(
            {
                'Cache-Control': self.cache_header,
                'Content-Type': content_type
            }, {}, True)

        # Non-expiring, unsigned URL (object is public-read).
        return k.generate_url(expires_in=0, query_auth=False)
    def store(self,
              in_bucket=None,
              key_name=None,
              metadata=None,
              quality=95,
              public=True):
        """Store the loaded image into the given bucket with the given key name.

        The image is re-encoded as JPEG at *quality*, uploaded, tagged with
        *metadata* (plus a forced 'Content-Type: image/jpeg'), and optionally
        made public. Returns the object's unsigned, non-expiring URL.

        Raises InvalidParameterException if *in_bucket* or *key_name* is
        missing, and RTFMException if no image was loaded via fetch() or
        *metadata* is not a dict.
        """
        if not in_bucket:
            raise InvalidParameterException("No in_bucket specified")
        if not key_name:
            raise InvalidParameterException("No key_name specified")
        if not self.image:
            raise RTFMException(
                "No image loaded! You must call fetch() before store()")

        if metadata:
            # isinstance (not type(...) is dict) so dict subclasses pass.
            if not isinstance(metadata, dict):
                raise RTFMException("metadata must be a dict")
            # Copy so we never mutate the caller's dict below.
            metadata = dict(metadata)
        else:
            metadata = {}

        metadata['Content-Type'] = 'image/jpeg'

        log.info("Storing image into bucket %s/%s" % (in_bucket, key_name))

        # Export image to a string
        sio = BytesIO()
        self.image.save(sio, 'JPEG', quality=quality, optimize=True)
        contents = sio.getvalue()
        sio.close()

        # Get the bucket
        bucket = self.s3_conn.get_bucket(in_bucket)

        # Create a key containing the image. Make it public
        k = Key(bucket)
        k.key = key_name
        k.set_contents_from_string(contents)
        k.set_remote_metadata(metadata, {}, True)

        if public:
            k.set_acl('public-read')

        # Return the key's url
        return k.generate_url(method='GET',
                              expires_in=0,
                              query_auth=False,
                              force_http=False)
# Example #5 (score 0)
def upload_asset_to_s3(asset, access_key, secret_key, bucket):
    """Upload an app asset file to S3, make it public, and record its URL.

    The object key is ``<product>-<tag1-tag2-...>-<version><ext>``; slashes
    in tag names would create pseudo-directories in S3, so they are replaced
    with dashes. The key is tagged with version/product metadata and the
    resulting unsigned URL is saved back onto *asset*.
    """
    connection = S3Connection(access_key, secret_key)

    asset_bucket = connection.get_bucket(bucket)
    asset_path = asset.asset_file.path

    tag_string = "-".join([tag.name.replace("/", "-") for tag in asset.app.tags.all()])
    product = asset.app.product.name
    file_extension = os.path.splitext(asset_path)[-1]
    version = asset.app.version

    key_name = "%s-%s-%s%s" % (product, tag_string, version, file_extension)

    app_key = Key(asset_bucket, key_name)
    # Open in binary mode and close deterministically — the original leaked
    # two file handles here (one of them never used at all).
    with open(asset_path, 'rb') as asset_file:
        app_key.set_contents_from_file(asset_file)
    app_key.make_public()
    metadata = {'version': version, 'product': product}
    app_key.set_remote_metadata(metadata, {}, preserve_acl=True)
    asset.external_url = app_key.generate_url(0, query_auth=False)
    asset.save()
# Example #6 (score 0)
def upload_asset_to_s3(asset, access_key, secret_key, bucket):
    """Upload an app asset file to S3, make it public, and record its URL.

    The object key is ``<product>-<tag1-tag2-...>-<version><ext>``; slashes
    in tag names would create pseudo-directories in S3, so they are replaced
    with dashes. The key is tagged with version/product metadata and the
    resulting unsigned URL is saved back onto *asset*.
    """
    connection = S3Connection(access_key, secret_key)

    asset_bucket = connection.get_bucket(bucket)
    asset_path = asset.asset_file.path

    tag_string = "-".join(
        [tag.name.replace("/", "-") for tag in asset.app.tags.all()])
    product = asset.app.product.name
    file_extension = os.path.splitext(asset_path)[-1]
    version = asset.app.version

    key_name = "%s-%s-%s%s" % (product, tag_string, version, file_extension)

    app_key = Key(asset_bucket, key_name)
    # Open in binary mode and close deterministically — the original leaked
    # two file handles here (one of them never used at all).
    with open(asset_path, 'rb') as asset_file:
        app_key.set_contents_from_file(asset_file)
    app_key.make_public()
    metadata = {'version': version, 'product': product}
    app_key.set_remote_metadata(metadata, {}, preserve_acl=True)
    asset.external_url = app_key.generate_url(0, query_auth=False)
    asset.save()
# Example #7 (score 0)
# Demo script (Python 2): list buckets, round-trip a key with custom
# metadata, then bulk-delete a bucket's contents. Assumes `conn` (an S3
# connection) and `bucket` were created earlier in the file.
print conn.get_all_buckets()

for b in conn.get_all_buckets():
   print b

#bucket.set_acl('public-read')

###

from boto.s3.key import Key
k = Key(bucket)
k.key = 'test1.dat'
k.set_contents_from_string('hello123')
#k.set_acl('public-read')
# NOTE(review): the metadata *value* here is a nested dict, not a string —
# presumably testing how the server serializes it; confirm this is intended.
k.set_remote_metadata( {'x-scal-test': { 'key1':'value1' }}, [], True ) 

# Read the same object back through a fresh Key to verify contents,
# ACL and metadata survived the round-trip.
c = Key(bucket)
c.key = 'test1.dat'
print c.get_contents_as_string()
print c.get_acl()
print c.metadata

###

###

full_bucket = conn.get_bucket('test-bucket')
# It's full of keys. Delete them all.
for key in full_bucket.list():
    key.delete()