Esempio n. 1
0
    def bucket_create(self, key, val, metadata_dict=None):
        '''
        Create an object in the bucket, but only if not yet present (save traffic).

        Parameters
        ----------
        key : str
            Object key to create in the bucket.
        val : file-like object
            Data to upload.
        metadata_dict : dict, optional
            Metadata to attach to the key before upload; defaults to none.

        Returns
        -------
        Key
            The boto Key that was created.
        '''
        # Use None as default instead of a mutable {}: a literal dict default
        # is shared across calls and any mutation would leak between callers.
        if metadata_dict is None:
            metadata_dict = {}

        s3_key = Key(self.apk_bucket)

        s3_key.key = key
        # important: set metadata before actual upload
        s3_key.metadata = metadata_dict
        s3_key.content_type = 'application/vnd.android.package-archive'
        # upload; replace=False leaves an already-present object untouched
        log.debug("uploading %s", s3_key.key)
        s3_key.set_contents_from_file(val, replace=False)

        return s3_key
Esempio n. 2
0
def main(stream_url: str, stream_name: str, bucket_name: str, duration: str):
    """Rip an audio stream with ffmpeg, upload it to S3, and rebuild feed.xml."""
    temp_file = 'temp.m4a'

    print('beginning rip')

    # Copy the stream's audio track verbatim (no re-encode) for `duration`.
    ffmpeg_cmd = [
        'ffmpeg',
        '-i', stream_url,
        '-t', duration,
        '-acodec', 'copy',
        '-absf', 'aac_adtstoasc',
        temp_file,
    ]
    code = subprocess.call(ffmpeg_cmd)

    assert code == 0, 'stream rip failed with code ' + str(code)

    print('connecting to s3')
    conn = S3Connection(is_secure=False)  # AWS uses invalid certs
    bucket = conn.get_bucket(bucket_name)

    print('writing recorded file to s3')
    recording = Key(bucket)
    recording.name = datetime.datetime.utcnow().strftime(
        stream_name + '--%Y-%m-%d.m4a')
    recording.content_type = MIME_TYPE
    recording.metadata = {'Content-Type': MIME_TYPE}
    recording.storage_class = 'STANDARD_IA'
    recording.set_contents_from_filename(temp_file)
    recording.close()

    print('generating new feed.xml from s3 bucket list')
    feed = Key(bucket)
    feed.name = 'feed.xml'
    feed.content_type = 'application/rss+xml'
    feed.set_contents_from_string(
        rss_xml(stream_name, bucket_name, bucket.list()))
    feed.close()
Esempio n. 3
0
 def bucket_create(self, key, val, metadata_dict=None):
     '''
     Create an object in the bucket, but only if not yet present (save traffic).

     Parameters
     ----------
     key : str
         Object key to create in the bucket.
     val : file-like object
         Data to upload.
     metadata_dict : dict, optional
         Metadata to attach to the key before upload; defaults to none.

     Returns
     -------
     Key
         The boto Key that was created.
     '''
     # Use None as default instead of a mutable {}: a literal dict default
     # is shared across calls and any mutation would leak between callers.
     if metadata_dict is None:
         metadata_dict = {}

     s3_key = Key(self.apk_bucket)

     s3_key.key = key
     # important: set metadata before actual upload
     s3_key.metadata = metadata_dict
     s3_key.content_type = 'application/vnd.android.package-archive'
     # upload; replace=False leaves an already-present object untouched
     log.debug("uploading %s", s3_key.key)
     s3_key.set_contents_from_file(val, replace=False)

     return s3_key
    def upload(ext):
        # Upload one photo variant (selected by file extension) from the
        # local photo-buckets directory to S3 under the new storage id.
        # NOTE(review): relies on enclosing-scope names not visible in this
        # chunk (settings, old_photo_bucket, photo, s3_bucket, new_storage_id).
        filename = os.path.join(settings.LOCAL_PHOTO_BUCKETS_BASE_PATH, old_photo_bucket, photo.photo_id + ext)
        print "Uploading: " + filename

        # assumes every variant is a JPEG — TODO confirm for non-.jpg extensions
        key = Key(s3_bucket, new_storage_id + ext)
        key.metadata = {"Content-Type": "image/jpeg"}
        key.set_contents_from_filename(filename)
        key.close()
Esempio n. 5
0
 def backup_data_file(self, file_path, remote_key_name, remote_bucket=None):
     '''
     Back up a local file to S3, recording the file owner's uid/gid in the
     key metadata.

     Parameters
     ----------
     file_path : str
         Path of the local file to back up.
     remote_key_name : str
         Name of the destination S3 key.
     remote_bucket : optional
         Destination bucket; passed straight to Key().
     '''
     stat = os.stat(file_path)
     # open() replaces the Python-2-only file() builtin; 'rb' uploads the
     # bytes verbatim.  The with-block guarantees the handle is closed even
     # when the upload raises (the original leaked it on error).
     with open(file_path, 'rb') as fp:
         key = Key(remote_bucket, remote_key_name)
         key.metadata = {'uid': stat.st_uid, 'gid': stat.st_gid}
         self.set_contents_from_file(key, fp)
         key.close()
Esempio n. 6
0
def save_s3(bucket,
            filename,
            contents,
            systemfile,
            content_type=None,
            acl='public-read',
            meta=None,
            encode=None):
    """Write data to S3 key *filename* in *bucket* and return the Key.

    The body comes from ``contents`` (a string) or, when that is None, from
    the local file ``systemfile``; ``contents`` takes priority in both the
    gzip and plain paths.  ``meta`` may be a dict or a boto DynamoDB Item
    whose cleaned-up entries become the key's metadata.  ``encode='gzip'``
    gzips the body in memory and sets Content-Encoding.  ``acl`` (if not
    None) is applied after the upload.
    """
    from boto.dynamodb2.table import Item
    key = Key(bucket, filename)
    print 'new s3 key:', 'http://s3.amazonaws.com/' + bucket.name + (
        key.name if key.name.startswith('/') else '/' + key.name)
    # Accept a raw DynamoDB Item by unwrapping it to its backing dict.
    if isinstance(meta, Item):
        meta = meta._data
    if isinstance(meta, dict):
        # presumably trims the dict to S3 metadata size limits — verify
        # against fixed.check_entity_size; then drop falsy entries and
        # round-trip through JSON so sets are flattened via fixed.SetEncoder.
        trim_meta = fixed.check_entity_size(meta)
        trim_meta = dict([(k, value) for (k, value) in trim_meta.items()
                          if value is not None and value])
        trim_meta = json.loads(json.dumps(trim_meta, cls=fixed.SetEncoder))
        print 'meta key length:', len(trim_meta.keys())
        key.metadata = trim_meta
    # Content-Type resolution: explicit argument > '.js.map' special case
    # (served as JSON) > guess from the local filename.
    if content_type is not None:
        print 'set content type:', content_type
        key.content_type = content_type
    elif systemfile and systemfile.endswith('js.map'):
        print 'js map!'
        key.content_type = 'application/json'
    elif systemfile:
        gt = mimetypes.guess_type(systemfile)
        key.set_metadata('Content-Type', gt[0])
    if encode is not None and encode == 'gzip':
        # Gzip the body into an in-memory buffer, sourced from `contents`
        # or, failing that, the file on disk.
        key.set_metadata('Content-Encoding', 'gzip')
        gzmem = StringIO.StringIO()
        gzip_file = gzip.GzipFile(fileobj=gzmem, mode='w')
        if contents is not None:
            gzip_file.write(contents)
        elif systemfile is not None:
            with open(systemfile, 'rb') as outfile:
                gzip_file.write(outfile.read())
        gzip_file.close()
        key.set_contents_from_string(gzmem.getvalue())
        print 'gzip!'
    elif contents is not None:
        print 'from string'
        key.set_contents_from_string(contents)
    elif systemfile is not None:
        io = StringIO.StringIO(open(systemfile, 'r').read()).getvalue()
        print 'from disk:', systemfile, 'io:', len(io)
        key.set_contents_from_string(io)
    if acl is not None:
        print 'save acl:', acl
        key.set_acl(acl)
    print 'save complete:', key.name
    return key
Esempio n. 7
0
def upload_file(file_path):
    """Upload the file at file_path to the configured bucket and make it public."""
    bucket = _get_bucket()
    name = os.path.basename(file_path)
    mime = _get_content_type(name)

    target = Key(bucket, _path(name))
    target.metadata = {'Content-Type': mime}
    target.set_contents_from_filename(file_path)
    target.make_public()
Esempio n. 8
0
    def single_file(self):
        """Upload self.get_file_path as a single key in the connection's bucket.

        On any failure the tracking entry is deleted via delete_entry() and a
        generic Exception is raised, chained to the original error.
        """
        try:
            k = Key(self.s3_conn.bucket)

            k.metadata = self.get_headers
            k.key = self.filename

            # headers go out on the PUT as well; cb=progress reports progress
            k.set_contents_from_filename(self.get_file_path,
                                         headers=self.get_headers,
                                         cb=progress)

        except Exception as e:
            # best-effort cleanup of the tracking entry before surfacing the error
            self.delete_entry()
            # chain the original exception so the real cause is not discarded
            raise Exception("There was some problem in uploading") from e
Esempio n. 9
0
    def process_upload_request(self, request, uploaded_chunks):
        """Uploads user avatar to the randomly picked location
        where we host user avatars and returns Response"""

        avatar_image_filename = settings.AVATAR_FILENAME_FORMAT_STRING.format(
            user_id=request.user.id,
            timestamp=timezone.now().strftime("%s")
        )
        avatar_location_format_str = random.choice(settings.AVATAR_BUCKETS)
        storage, bucket_name, filename = avatar_location_format_str.split(":")

        # Write uploaded data to temporary file.
        # The finally block guarantees the handle is closed (and the file
        # deleted) on every path, replacing the original bare
        # ``except: close(); raise`` cleanup.
        temp_file = tempfile.TemporaryFile()
        try:
            for chunk in uploaded_chunks:
                temp_file.write(chunk)
            temp_file.seek(0)

            if storage == "s3":
                # Upload to S3
                conn = S3Connection(settings.AWS_ACCESS_KEY,
                                    settings.AWS_SECRET_ACCESS_KEY)
                bucket = conn.get_bucket(bucket_name)
                key = Key(bucket, avatar_image_filename)
                key.metadata = {'Content-Type': 'image/jpeg'}
                key.set_contents_from_file(temp_file)
                # Otherwise it's not accessible
                key.make_public()
                key.close(fast=True)
            else:
                raise ValueError("Failed to upload avatar. "
                                 "Unknown storage '{0}'.".format(storage))
        finally:
            temp_file.close()

        request.user.avatar_file = avatar_location_format_str.format(
            filename=avatar_image_filename)
        request.user.save()

        user_avatar_changed.send(sender=self, user=request.user)

        return Response()
Esempio n. 10
0
def _upload_file(file_path):
    """Upload the file to S3 under S3_PREFIX and return its public http URL."""
    connection = S3Connection(os.environ['AWS_ACCESS_KEY_ID'],
                              os.environ['AWS_SECRET_ACCESS_KEY'])
    bucket = connection.get_bucket(os.environ['S3_BUCKET'])
    name = os.path.basename(file_path)

    mime = _get_content_type(name)

    upload_key = Key(bucket, '{}/{}'.format(os.environ['S3_PREFIX'], name))
    upload_key.metadata = {'Content-Type': mime}
    upload_key.set_contents_from_filename(file_path)
    upload_key.make_public()

    return "http://{0}.s3.amazonaws.com/{1}/{2}".format(
        os.environ['S3_BUCKET'], os.environ['S3_PREFIX'], name)
Esempio n. 11
0
def _upload_file(file_path):
    """Push file_path into the S3 bucket under S3_PREFIX, make it public,
    and return the resulting http URL."""
    s3 = S3Connection(os.environ['AWS_ACCESS_KEY_ID'], os.environ['AWS_SECRET_ACCESS_KEY'])
    target_bucket = s3.get_bucket(os.environ['S3_BUCKET'])
    short_name = os.path.basename(file_path)

    mime_type = _get_content_type(short_name)

    remote = Key(target_bucket,
                 '{}/{}'.format(os.environ['S3_PREFIX'], short_name))
    remote.metadata = {'Content-Type': mime_type}
    remote.set_contents_from_filename(file_path)
    remote.make_public()

    return "http://{0}.s3.amazonaws.com/{1}/{2}".format(
        os.environ['S3_BUCKET'],
        os.environ['S3_PREFIX'],
        short_name)
Esempio n. 12
0
def main():
    """Mirror every file under ROOT into the S3 bucket, adding a generated
    index.html listing for each directory."""
    # NOTE(review): the original computed
    #   bucket_name = AWS_ACCESS_KEY_ID.lower() + "-" + BUCKET_NAME
    # but never used it -- create_bucket() is called with BUCKET_NAME.
    # The dead assignment is dropped here; confirm which name was intended.
    conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    bucket = conn.create_bucket(BUCKET_NAME)

    for dirpath, dirnames, filenames in os.walk(ROOT):
        for filename in filenames:
            fn = os.path.join(dirpath, filename)
            print("Uploading", fn, "...")
            k = Key(bucket)
            # store under the path relative to ROOT so the bucket mirrors it
            k.key = os.path.relpath(fn, ROOT)
            k.set_contents_from_filename(fn)

        # Put up an index page for this directory
        k = Key(bucket)
        k.key = os.path.relpath(os.path.join(dirpath, "index.html"), ROOT)
        k.metadata = {"Content-Type": "text/html"}
        k.set_contents_from_string(index_page(dirpath, filenames))
Esempio n. 13
0
def main():
    """Mirror every file under ROOT into the S3 bucket, adding a generated
    index.html listing for each directory."""
    # NOTE(review): the original computed
    #   bucket_name = AWS_ACCESS_KEY_ID.lower() + '-' + BUCKET_NAME
    # but never used it -- create_bucket() is called with BUCKET_NAME.
    # The dead assignment is dropped here; confirm which name was intended.
    conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    bucket = conn.create_bucket(BUCKET_NAME)

    for dirpath, dirnames, filenames in os.walk(ROOT):
        for filename in filenames:
            fn = os.path.join(dirpath, filename)
            print('Uploading', fn, '...')
            k = Key(bucket)
            # store under the path relative to ROOT so the bucket mirrors it
            k.key = os.path.relpath(fn, ROOT)
            k.set_contents_from_filename(fn)

        # Put up an index page for this directory
        k = Key(bucket)
        k.key = os.path.relpath(os.path.join(dirpath, 'index.html'), ROOT)
        k.metadata = {'Content-Type': 'text/html'}
        k.set_contents_from_string(index_page(dirpath, filenames))
Esempio n. 14
0
def upload_file(file_path):
    """Upload a .jar or .py file to S3 under S3_PREFIX, make it public,
    and return its http URL.

    Raises
    ------
    ValueError
        If the file is neither a .jar nor a .py file.
    """
    connection = S3Connection(os.environ['AWS_ACCESS_KEY_ID'],
                              os.environ['AWS_SECRET_ACCESS_KEY'])
    bucket = connection.get_bucket(os.environ['S3_BUCKET'])
    name = os.path.basename(file_path)

    if name.endswith('.jar'):
        mime = 'application/java-archive'
    elif name.endswith('.py'):
        mime = 'application/x-python'
    else:
        raise ValueError(
            "Unexpected file type: {}. Expected .jar or .py file.".format(
                name))

    remote = Key(bucket, '{}/{}'.format(os.environ['S3_PREFIX'], name))
    remote.metadata = {'Content-Type': mime}
    remote.set_contents_from_filename(file_path)
    remote.make_public()

    return "http://{0}.s3.amazonaws.com/{1}/{2}".format(
        os.environ['S3_BUCKET'], os.environ['S3_PREFIX'], name)
Esempio n. 15
0
    keyname = "{0}/{1}/{2}_{3}_{4}.ei.spb".format(flavor,
                                                  info['taskID'].split('.')[0],
                                                  info['taskID'],
                                                  info['jobID'], objID)

    # convert metada values to strings (numbers make generate_url fail)
    # do not include guids
    info2 = {}
    for k, v in info.iteritems():
        if k != 'guids':
            info2[k] = v.__str__()

    try:
        kb = Key(bucket)
        kb.key = keyname
        kb.metadata = info2
        kb.set_contents_from_filename(fname)
        kb.set_acl('public-read')
        if opt.http:
            url = kb.generate_url(expires_in=0,
                                  query_auth=False,
                                  force_http=True)
    except Exception, e:
        log.info("Unable to store object " + str(e))
        raise Exception(e)

    if opt.http:
        u = urlparse(url)
        if u.port is not None:
            urlx = "{}://{}:{}{}".format(u.scheme, u.hostname, u.port, u.path)
        else: