Example #1
def update_encode_state(sender, **kwargs):
    message = kwargs['message']
    job_id = message['jobId']
    state = message['state']
    job = EncodeJob.objects.get(pk=job_id)
    obj = job.content_object
    from bumerang.apps.video.models import Video

    if state == 'PROGRESSING':
        job.message = 'Progressing'
        job.state = EncodeJob.PROGRESSING
        obj.status = Video.CONVERTING
    elif state == 'WARNING':
        job.message = 'Warning'
        job.state = EncodeJob.WARNING
    elif state == 'COMPLETED':
        job.message = 'Success'
        job.state = EncodeJob.COMPLETE
        obj.original_file = message['input']['key']
        obj.duration = message['outputs'][0]['duration'] * 1000
        obj.hq_file = message['outputs'][0]['key']
        if isinstance(media_storage, S3BotoStorage):
            key = Key(media_storage.bucket, obj.hq_file.name)
            key.set_acl('public-read')
        obj.save()
        MakeScreenShots.delay(obj.pk)
    elif state == 'ERROR':
        job.message = message['messageDetails']
        job.state = EncodeJob.ERROR
        obj.status = Video.ERROR
    obj.save()
    job.save()
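All of the examples on this page follow the same boto 2 pattern: build a Key on a bucket, upload the contents, then call set_acl with a canned ACL string. A minimal, self-contained sketch of that pattern (the credentials and bucket name below are placeholders):

from boto.s3.connection import S3Connection
from boto.s3.key import Key

conn = S3Connection('ACCESS_KEY', 'SECRET_KEY')  # placeholder credentials
bucket = conn.get_bucket('my-bucket')            # placeholder bucket name

k = Key(bucket)
k.key = 'path/to/object.txt'
k.set_contents_from_string('hello')  # upload the body
k.set_acl('public-read')             # then apply a canned ACL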
Example #2
def upload_to_s3(fp, key_name, extension, reduced_redundancy=False):
    """
		Upload the contents of file handle 'fp' to the S3 bucket specified by AWS_STORAGE_BUCKET_NAME,
		under the given filename. Return the public URL.
	"""

    # connect to S3 and send the file contents
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID,
                        settings.AWS_SECRET_ACCESS_KEY)
    bucket = conn.get_bucket(settings.AWS_STORAGE_BUCKET_NAME)
    k = Key(bucket)
    k.key = key_name
    k.content_type = MIME_TYPE_BY_EXTENSION.get(extension,
                                                'application/octet-stream')
    # print "uploading: %s" % key_name
    k.set_contents_from_file(fp,
                             reduced_redundancy=reduced_redundancy,
                             rewind=True)
    k.set_acl('public-read')

    # construct the resulting URL, which depends on whether we're using a CNAME alias
    # on our bucket
    if settings.AWS_BOTO_CALLING_FORMAT == 'VHostCallingFormat':
        return "http://%s/%s" % (settings.AWS_STORAGE_BUCKET_NAME, key_name)
    else:
        return "http://%s.s3.amazonaws.com/%s" % (
            settings.AWS_STORAGE_BUCKET_NAME, key_name)
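A hypothetical call site for the helper above, assuming the AWS_* Django settings it reads are configured and that 'pdf' is a key in MIME_TYPE_BY_EXTENSION:

with open('report.pdf', 'rb') as fp:
    url = upload_to_s3(fp, 'reports/report.pdf', 'pdf')
print url  # e.g. http://<bucket>.s3.amazonaws.com/reports/report.pdf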
Example #3
def upload_to_s3(content,
                 filename,
                 bucket=settings.DEFAULT_BUCKET,
                 content_type=None,
                 set_public=True):
    '''
    Helper function that uploads the file to S3 and returns the key name.
    '''
    conn = S3Connection(settings.AWS_ID, settings.AWS_KEY)
    try:
        b = conn.get_bucket(bucket)
    except:
        b = conn.create_bucket(bucket)
        b.set_acl('public-read')
    k = Key(b)

    k.key = filename
    if content_type:
        k.set_metadata("Content-Type", content_type)
    k.set_contents_from_string(content)
    if set_public:
        k.set_acl('public-read')

    url = filename
    return url
Example #4
def upload_stache():
    import base64
    import tempfile
    import boto
    from boto.s3.key import Key
    import re

    dataUrlPattern = re.compile('data:image/(png|jpeg);base64,(.*)$')

    conn = boto.connect_s3(app.config['AWS_KEY'], app.config['AWS_SECRET'])

    song_id = request.values.get('song_id')
    imgb64 = dataUrlPattern.match(request.values.get('stache')).group(2)
    data = base64.b64decode(imgb64)

    fp = tempfile.NamedTemporaryFile()
    # fp = open(song_id, 'w')
    fp.write(data)
    fp.seek(0)  # rewind so boto uploads the file contents, not an empty body

    bucket = conn.get_bucket('staches')
    headers = {'Content-Type': 'image/png'}

    k = Key(bucket)
    k.key = "%s.png" % (song_id)
    k.set_contents_from_file(fp, headers=headers)
    k.set_acl('public-read')
    fp.close()

    r = get_redis()
    key = 'cache:data:%s' % (song_id)
    song = json.loads(r.get(key))
    song['s3_url'] = "http://staches.s3.amazonaws.com/%s" % k.key
    song['stache_version'] = '0.1'
    r.set(key, json.dumps(song))
    return song['s3_url']
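Note: boto's set_contents_from_file reads from the handle's current position, so the seek(0) added after fp.write above (or passing rewind=True, as Example 2 does) is required; without it an empty object is uploaded.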
Example #5
def __upload(fname):
    k = Key(bucket)
    k.key = fname[10:]  #strip off the site_root/
    print fname
    k.set_contents_from_filename(fname)
    k.set_acl('public-read')
    return k
Example #6
def upload():
    """ Upload controller """

    print 'Starting upload...'
    if request.method == 'POST':
        image = request.files['file']
        print 'File obtained...'

        if allowed_file(image.filename):
            print 'Image allowed.'
            filename = secure_filename(image.filename)
            image.save(os.path.join(UPLOAD_FOLDER, filename))

            print 'Uploading to s3...'
            conn = boto.connect_s3(AWS_KEY, AWS_SECRET)
            b = conn.get_bucket(AWS_BUCKET)
            k = Key(b)

            print 'Setting key...'
            k.key = '%s_%s' % (uuid.uuid4(), filename)
            k.set_contents_from_filename(os.path.join(UPLOAD_FOLDER, filename))

            print 'Making public...'
            k.set_acl('public-read')

            print 'Responding to request...'
            return jsonify(status='Success.')
        else:
            print 'File not allowed.'
            return jsonify(status='File not allowed.')
    else:
        print 'Upload failed.'
        return jsonify(status='fail')
Example #7
def index(pin):

    s3_conn = S3Connection(AWS_KEY, AWS_SECRET)
    bucket = s3_conn.get_bucket('property-image-cache')
    s3_key = Key(bucket)
    s3_key.key = '{0}.jpg'.format(pin)

    if s3_key.exists():
        output = BytesIO()
        s3_key.get_contents_to_file(output)

    else:
        image_viewer = 'http://www.cookcountyassessor.com/PropertyImage.aspx?pin={0}'
        image_url = image_viewer.format(pin)
        image = requests.get(image_url)

        print(image.headers)

        if 'image/jpeg' in image.headers['Content-Type']:
            output = BytesIO(image.content)
            s3_key.set_metadata('Content-Type', 'image/jpg')
            s3_key.set_contents_from_file(output)
            s3_key.set_acl('public-read')
        else:
            sentry.captureMessage('Could not find image for PIN %s' % pin)
            abort(404)

    output.seek(0)
    response = make_response(output.read())
    response.headers['Content-Type'] = 'image/jpg'
    return response
Example #8
def upload_images_to_s3(markup, archive, question_files_bucket):
    """Uploads all the images referenced in the markup to S3. Exracts the
    images from the '***_files' directory in the zip archive.

    Keyword Arguments:
    markup -- the string markup whose images need to be uploaded to s3.
    archive -- the archive object as returned by zipfile.
    """

    ## Create a BS object from the markup
    soup = BeautifulSoup(markup, 'html.parser')

    ## Find all the image objects
    imgs = soup.find_all('img')

    ## Iterate over all the images, get the file path, upload the file to S3 and change the attribute to point to the S3 hosted image
    bucket = question_files_bucket
    for img in imgs:
        path = img.attrs['src']
        img_file = archive.open(path)
        img_s3 = Key(bucket)
        img_s3.key = ''.join([str(uuid4()), '_', os.path.basename(path)])
        img_s3.content_type = mimetypes.guess_type(path)[0]
        img_s3.set_contents_from_string(img_file.read())
        img_s3.set_acl('public-read')
        img_url = ''.join(['https://', app.config['S3_QUESTION_FILES_TEMP_BUCKET'], '.s3.amazonaws.com/', img_s3.key])
        img.attrs['src'] = img_url

    return str(soup)
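A hypothetical call site, assuming an open boto connection conn, the Flask app object referenced inside the helper, and a zip archive containing the exported markup:

import zipfile

archive = zipfile.ZipFile('question_export.zip')
markup = archive.read('question.html')
bucket = conn.get_bucket(app.config['S3_QUESTION_FILES_TEMP_BUCKET'])
html = upload_images_to_s3(markup, archive, bucket)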
Example #9
    def upload_file(self, filename):
        try:
            lifecycle = Lifecycle()
            lifecycle.add_rule('rulename',
                               prefix='logs/',
                               status='Enabled',
                               expiration=Expiration(days=10))
            conn = boto.connect_s3()

            if conn.lookup(self.bucket_name):  # bucket exists
                bucket = conn.get_bucket(self.bucket_name)
            else:
                # create a bucket
                bucket = conn.create_bucket(
                    self.bucket_name,
                    location=boto.s3.connection.Location.DEFAULT)

            bucket.configure_lifecycle(lifecycle)
            from boto.s3.key import Key

            k = Key(bucket)
            k.key = filename
            k.set_contents_from_filename(filename,
                                         cb=self.percent_cb,
                                         num_cb=10)
            k.set_acl('public-read-write')

        except Exception, e:
            sys.stdout.write(
                "AmazonS3Agent failed with exception:\n{0}".format(str(e)))
            sys.stdout.flush()
            raise e
Example #10
def upload(abbr, filename, type, s3_prefix='downloads/', use_cname=True):
    today = datetime.date.today()

    # build URL
    s3_bucket = settings.AWS_BUCKET
    s3_path = '%s%s-%02d-%02d-%s-%s.zip' % (s3_prefix, today.year, today.month,
                                            today.day, abbr, type)
    if use_cname:
        s3_url = 'http://%s/%s' % (s3_bucket, s3_path)
    else:
        s3_url = 'http://%s.s3.amazonaws.com/%s' % (s3_bucket, s3_path)

    # S3 upload
    s3conn = boto.connect_s3(settings.AWS_KEY,
                             settings.AWS_SECRET,
                             calling_format=OrdinaryCallingFormat())
    bucket = s3conn.create_bucket(s3_bucket)
    k = Key(bucket)
    k.key = s3_path
    logging.info('beginning upload to %s' % s3_url)
    k.set_contents_from_filename(filename)
    k.set_acl('public-read')

    meta = metadata(abbr)
    meta['latest_%s_url' % type] = s3_url
    meta['latest_%s_date' % type] = datetime.datetime.utcnow()
    db.metadata.save(meta, safe=True)

    logging.info('uploaded to %s' % s3_url)
Example #11
    def upload_file(self, filepath):

        if self.api_type == 'aws':

            from boto.s3.key import Key

            filename = filepath.split('/')[-1]

            k = Key(self.bucket)
            k.key = filename

            if '.png' in k.key:
                k.set_metadata('Content-Type', 'image/png')  # metadata must be set before upload
            k.set_contents_from_filename(filepath)
            k.set_acl('public-read')

        elif self.api_type == 'dropbox':

            f = open(filepath, 'r')
            filename = filepath.split('/')[-1]

            newfile = '%s/%s' % (self.thisfolder, filename)

            # if filename already exists, delete and replace
            #filecheck = self.client.search(self.thisfolder, filename)
            #if filecheck: del_res = self.client.file_delete(newfile)

            res = self.client.put_file(newfile, f, overwrite=True)

            return res
Example #12
def publicUrlTest():
    result = 0
    obj = dsslib.getConnection(CALLER)
    b1 = obj.create_bucket('urlbucket1')
    k = Key(b1)
    k.key = 'obj1'
    k.set_contents_from_string('Data of URL object')
    print "Setting ACL on obj"
    k.set_acl('public-read')
    print "Setting ACL on bucket"
    b1.set_acl('public-read')

    m = Key(b1)
    m.key = 'obj1'
    urlname = m.generate_url(1000)
    print "\nThe obj URL is: " + str(urlname)
    urlname = b1.generate_url(1000)
    print "\nThe bucket URL is: " + str(urlname)

    for i in range(1, 21):
        time.sleep(1)
        if i % 5 == 0:
            print str(20 - i) + " Seconds left before Obj deletion"

    m.delete()
    print "Object deleted\n"

    for i in range(1, 21):
        time.sleep(1)
        if i % 5 == 0:
            print str(20 - i) + " Seconds left before bucket deletion"

    obj.delete_bucket('urlbucket1')
    print "Bucket deleted\n"
    return result
Example #13
 def save_file(self, remote_file, local_file_path, content_type=None):
     k = Key(self.bucket)
     k.name = self.target_path+'/'+remote_file
     if content_type:
          k.content_type = content_type
     k.set_contents_from_filename(local_file_path)
     k.set_acl('public-read')
Example #14
def upload_files(bucketname, srcdir):
    print bucketname, srcdir
    conn = S3Connection()
    bucket = conn.get_bucket(bucketname)

    for path, dir, files in os.walk(srcdir):
        for file in files:

            filekey = os.path.relpath(os.path.join(path, file),
                                      srcdir).replace('\\', '/')
            filepath = os.path.normpath(os.path.join(path, file))

            #print "filekey: ", filekey
            #print "filepath: ", filepath

            key = bucket.lookup(filekey)
            if key:
                fingerprint = key.etag.replace('"', '')
            else:
                fingerprint = None
                key = Key(bucket, filekey)

            fp = str(key.compute_md5(open(filepath, "rb"))[0])
            fs = os.path.getsize(filepath)

            if fingerprint != fp:
                print "Uploading file %s (%d bytes, %s MD5) .." % (filekey, fs,
                                                                   fp)
                key.set_contents_from_filename(filepath,
                                               cb=percent_cb,
                                               num_cb=100)
                key.set_acl('public-read')
            else:
                print "File %s already on S3 and unchanged." % filekey
Example #15
    def set_profile_image(self, file_path, file_name, content_type):
        """
        Takes a local path, name and content-type, which are parameters passed in by
        nginx upload module.  Converts to RGB, resizes to thumbnail and uploads to S3.
        Returns False if some conditions aren't met, such as error making thumbnail
        or content type is one we don't support.
        """
        valid_content_types = (
            'image/gif',
            'image/jpeg',
            'image/jpg',
            'image/png',
        )
        if content_type not in valid_content_types:
            return False

        destination = cStringIO.StringIO()
        if not transform_to_square_thumbnail(file_path, 100 * 2, destination):
            return False

        bucket = S3Bucket()
        k = Key(bucket)
        k.key = "account/%s/profile.jpg" % (self.id)
        k.set_metadata('Content-Type', 'image/jpeg')
        k.set_metadata('Cache-Control', 'max-age=86400')
        k.set_contents_from_string(destination.getvalue())
        k.set_acl('public-read')
        self.profile_image = 1
        self.save()
        return True
Example #16
def upload_video_to_s3(video_id, **kwargs):
    """docstring for upload_photo_to_s3"""
    from gelder.models import Video
    logger = upload_video_to_s3.get_logger(**kwargs)
    video = Video.objects.filter(id=video_id).first()
    if not video:
        logger.info(
            '(error) in upload_video_to_s3. cannot find video with id: %s',
            video_id)
        return False

    try:
        conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
        bucket = conn.create_bucket(settings.GELDER_S3_VIDEO_BUCKET)
        k = Key(bucket)
        k.key = video.basename
        k.set_metadata('checksum', video.checksum)
        k.set_contents_from_filename(video.video.path)
        k.set_acl("public-read")
        logger.info('(success) Boto s3 uploaded %s with id: %s',
                    type(video).__name__, video_id)
        video.uploaded = True
        #import datetime
        #video.uploaddate = datetime.datetime.now
        video.save()
        ## include the upload for the albumart

    except (AWSConnectionError, S3ResponseError, S3PermissionsError,
            S3CreateError), exc:
        logger.info('(error) s3 failed uploading %s with id: %s',
                    type(video).__name__, video_id)
        upload_video_to_s3.retry(exc=exc, countdown=30)
Example #17
 def uploadPublicImageFromString(self, s3KeyName, imageString):
     key = Key(self._bucket)
     key.key = s3KeyName
     # todo: check content-type
     key.set_metadata("Content-Type", "image/jpeg")
     key.set_contents_from_string(imageString)
     key.set_acl("public-read")
Example #18
	def push(self, source_dir, dest_path):
		if os.path.isdir(source_dir):
			inputs = sorted(os.listdir(source_dir))
		elif os.path.isfile(source_dir):
			inputs = []
			inputs.append(source_dir)
		else:
			print "ERROR: Input is neither file or directory"
			sys.exit(3)

		for file_name in inputs:
			local_file = os.path.join(source_dir, file_name)
			if os.path.isdir(local_file):
				print "Source directory has sub directory, {0}, which is being skipped".format(local_file)
				continue
			keyName = os.path.join(dest_path, os.path.basename(file_name))
			print "{0}: pushing file {1} to s3 key name {2}".format(format(datetime.datetime.now()), file_name, keyName)
			# If the key already exists then delete if the delete option is set, otherwise skip
			if self.s3_bkt.get_key(keyName):
				print "Key already exists!"
				if options.get("delete"):
					oldKey = self.s3_bkt.get_key(keyName)
					oldKey.delete()
				else:
					continue
			newKey = Key(self.s3_bkt)
			newKey.key = keyName
			self.masterLogger.debug("Pushing local file= {0} to s3_key {1}".format(local_file, newKey.key))
			newKey.set_contents_from_filename(local_file)
			if options.get("acl"):
				print "Setting acl of key {0} to {1}".format(newKey.key, options.get("acl"))
				newKey.set_acl(options.get("acl")) 
Example #19
 def create_version(self, username, packagename, versionnumber, info,
                    filepath):
     version = dict(
         {
             u'versionnumber': versionnumber,
             u'status': u'pre-upload'
         }, **info)
     item = u'packages.%s' % packagename
     item_v = u'%s.versionnumber' % item
     ret = self.users.update({u'_id': username, \
                                  item: {u'$exists': True}, \
                                  item_v: {u'$ne': versionnumber}}, \
                                 {u'$push': {item: version}})
     if ret[u'updatedExisting'] != True or ret[u'err'] != None or ret[
             u'n'] != 1:
         return json.dumps(ret)
     k = Key(self.bucket)
     k.key = u'%s-%s.zip' % (packagename, versionnumber)
     k.set_contents_from_filename(filepath)
     k.set_acl(u'public-read')
     item_s = u'%s.$.status' % item
     ret = self.users.update({u'_id': username, \
                                  item: {u'$exists': True}, \
                                  item_v: versionnumber}, \
                                 {u'$set': {item_s: u'update'}})
     if ret[u'updatedExisting'] != True or ret[u'err'] != None or ret[
             u'n'] != 1:
         return json.dumps(ret)
     return None
Example #20
 def upload_text():
     headers = get_s3_headers()
     headers["Content-Type"] = "text/html"
     key = Key(bucket)
     key.name = "dist/latest.html"
     key.set_contents_from_filename(file_path, headers=headers)
     key.set_acl("public-read")
Example #21
def push_to_s3(filename=None, contents=None):
    """
    Save a file to the configured bucket with the name and contents
    specified in the call.

    It compresses the data.

    This sets the contents to be publicly readable, cacheable by
    intermediaries with an expiration date a specified number
    of hours from when this job is run. (See above.)
    """

    out = StringIO.StringIO()
    with gzip.GzipFile(fileobj=out, mode="w") as f:
        f.write(contents)

    conn = S3Connection(calling_format=OrdinaryCallingFormat())
    bucket = conn.get_bucket(BUCKET)
    k = Key(bucket)
    k.key = filename
    expires = datetime.utcnow() + timedelta(hours=HOURS_TO_EXPIRE)
    expires = expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
    k.set_contents_from_string(out.getvalue(),
            policy='public-read',
            headers={
                'Cache-Control': 'public',
                'Content-Type': 'application/json',
                'Content-Encoding': 'gzip',
                'Expires': '{}'.format(expires)
                })
    k.set_acl('public-read')
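Note that the final set_acl call here is redundant: passing policy='public-read' to set_contents_from_string already applies the canned ACL at upload time. Either mechanism alone is sufficient.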
Example #22
def upload_files(bucketname, srcdir):
    print bucketname, srcdir
    conn = S3Connection()
    bucket = conn.get_bucket(bucketname)

    for path, dir, files in os.walk(srcdir):
        for file in files:

            filekey = os.path.relpath(os.path.join(path, file), srcdir).replace('\\', '/')
            filepath = os.path.normpath(os.path.join(path, file))

            #print "filekey: ", filekey
            #print "filepath: ", filepath

            key = bucket.lookup(filekey)
            if key:
                fingerprint = key.etag.replace('"', '')
            else:
                fingerprint = None
                key = Key(bucket, filekey)

            fp = str(key.compute_md5(open(filepath, "rb"))[0])
            fs = os.path.getsize(filepath)

            if fingerprint != fp:
                print "Uploading file %s (%d bytes, %s MD5) .." % (filekey, fs, fp)
                key.set_contents_from_filename(filepath, cb=percent_cb, num_cb=100)
                key.set_acl('public-read')
            else:
                print "File %s already on S3 and unchanged." % filekey
Example #23
 def onGetContactPictureResult(self, resultGetPictureIqProtocolEntity,
                               getPictureIqProtocolEntity):
     # write to file example:
     #print dir(resultGetPictureIqProtocolEntity)
     #print dir(getPictureIqProtocolEntity)
     #resultGetPictureIqProtocolEntity.writeToFile("/tmp/yowpics/%s_%s.jpg" % (getPictureIqProtocolEntity.getTo(), "preview" if resultGetPictureIqProtocolEntity.isPreview() else "full"))
     #filename = "%s/%s-fullprofile.jpg"%(tempfile.gettempdir(),resultGetPictureIqProtocolEntity.getPictureId())
     #print filename
     #with open(filename, 'wb') as f:
     #    f.write(resultGetPictureIqProtocolEntity.getPictureData())
     id = re.sub(r"@.*", "", getPictureIqProtocolEntity.getTo())
     filename = "%s-profile.jpg" % (id)
     print("checking %s", filename)
     k = self.b.get_key(filename)
     if k:
         url = k.generate_url(expires_in=0, query_auth=False)
         print("%s exists: %s" % (filename, url))
     else:
         k = Key(self.b)
         k.key = filename
         k.set_metadata('Content-Type', 'image/jpeg')  # metadata must be set before upload
         k.set_contents_from_string(
             str(resultGetPictureIqProtocolEntity.getPictureData()))
         k.set_acl('public-read')
         url = k.generate_url(expires_in=0, query_auth=False)
         print("%s doesn't exist, created: %s" % (k, url))
Example #24
    def sync(self, source, target_name):
        upload_path = self.compress_data(source, target_name)
        if self.gpg_binary and self.encrypt_command:
            upload_path = self.encrypt_file(upload_path)

        print upload_path
        now = self.now()
        now_str = now.strftime('%Y-%m-%dT%H:%M:%S')
        name_parts = target_name.split('.')
        if len(name_parts) > 1:
            new_name = name_parts[:-1]
            new_name.append(now_str)
            new_name.append(name_parts[-1])
            if self.compress:
                new_name.append('bz2')
        else:
            new_name = name_parts
            new_name.append(now_str)
            if self.compress:
                new_name.append('bz2')

        target_name = u'.'.join(new_name)
        bucket = self.get_bucket()
        key = Key(bucket)
        key.key = os.path.join(self.path, target_name)
        logger.info('Uploading to %s' % key.key)
        key.set_metadata('created', now_str)
        key.set_contents_from_filename(upload_path)
        key.set_acl('private')
Example #25
File: dump.py Project: hpetru/billy
def upload(abbr, filename, type, s3_prefix='downloads/', use_cname=True):
    today = datetime.date.today()

    # build URL
    s3_bucket = settings.AWS_BUCKET
    s3_path = '%s%s-%02d-%02d-%s-%s.zip' % (s3_prefix, today.year, today.month,
                                            today.day, abbr, type)
    if use_cname:
        s3_url = 'http://%s/%s' % (s3_bucket, s3_path)
    else:
        s3_url = 'http://%s.s3.amazonaws.com/%s' % (s3_bucket, s3_path)

    # S3 upload
    s3conn = boto.connect_s3(settings.AWS_KEY, settings.AWS_SECRET)
    bucket = s3conn.create_bucket(s3_bucket)
    k = Key(bucket)
    k.key = s3_path
    logging.info('beginning upload to %s' % s3_url)
    k.set_contents_from_filename(filename)
    k.set_acl('public-read')

    meta = metadata(abbr)
    meta['latest_%s_url' % type] = s3_url
    meta['latest_%s_date' % type] = datetime.datetime.utcnow()
    db.metadata.save(meta, safe=True)

    logging.info('uploaded to %s' % s3_url)
Example #26
def add():
    image = request.POST.get("photo")
    name = request.POST.get("name")
    common_name = request.POST.get("common_name")
    plant = Plant.retrieve(request.db, name)
    if plant:
        return "This species is already in the DB"
    if image is not None:
        mime = mimetypes.guess_type(image.filename)[0]
        conn = S3Connection("AKIAIMXIHJX3TFDQFVCA", "Lf7xWpeOB9mnY1zfFzl7WNtxtNhmCZ4ZXOI8Kvrr")
        bucket = conn.get_bucket("db_leaves")
        key = Key(bucket)
        key.key = name
        key.set_metadata("Content-Type", mime)
        key.set_contents_from_string(image.value)
        key.set_acl("public-read")

        descriptors, ax, bx, ay, by = EFD(Threshold(image.file).process(), 50, 100).fourier_coefficients()
        return Plant(
            {
                "name": name,
                "common_name": common_name,
                "wiki": request.POST.get("wiki"),
                "photo": "https://s3.amazonaws.com/db_leaves/%s" % quote(name),
                "descriptors": descriptors,
            }
        ).save(request.db)
    return []
Example #27
def upload_to_s3(file_path, path, name):
    """
    Upload file to S3 using provided keyname.

    Returns:
        public_url: URL to access uploaded file
    """
    try:
        conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
        bucketname = settings.S3_BUCKETNAME
        try:
            bucket = conn.create_bucket(bucketname.lower())
        except Exception:
            bucket = conn.get_bucket(bucketname.lower())
        prefix = getattr(settings, "S3_PATH_PREFIX")
        path = u"{0}/{1}".format(prefix, path)
        key = u"{path}/{name}".format(path=removeNonAscii(path), name=removeNonAscii(name))
        k = Key(bucket)
        k.key = key
        k.set_metadata("filename", removeNonAscii(name))  # metadata must be set before upload
        k.set_contents_from_filename(file_path)
        k.set_acl("public-read")
        public_url = k.generate_url(60 * 60 * 24 * 365)  # URL timeout in seconds.

        return True, public_url
    except Exception:
        error = "Could not connect to S3."
        log.exception(error)
        return False, error
Example #28
def save_file_to_s3(file_path, dirpath=None, public=False, dest_path=None):
    """
    Save the file to S3.
    """
    if settings.USE_S3_STORAGE:
        conn = boto.connect_s3(settings.AWS_ACCESS_KEY_ID,
                               settings.AWS_SECRET_ACCESS_KEY)
        bucket = conn.get_bucket(settings.AWS_STORAGE_BUCKET_NAME)
        k = Key(bucket)

        filename = os.path.split(file_path)[1]

        if not dirpath:
            dirpath = settings.ORIGINAL_THEMES_DIR

        if not dest_path:
            dest_path = file_path.replace(os.path.dirname(dirpath), '')

        key = '%s%s' % (settings.AWS_LOCATION, dest_path)
        k.key = key
        if os.path.splitext(filename)[1] == '.less':
            content_type = 'text/css'
        else:
            content_type = mimetypes.guess_type(
                filename)[0] or k.DefaultContentType
        k.set_metadata('Content-Type', content_type)
        k.set_contents_from_filename(file_path, replace=True)

        if public:
            k.set_acl('public-read')
Example #29
def move_to_s3(db_name: str, archive_name: str):
    today = datetime.datetime.today()
    path = os.getcwd()
    keys = utils.load_keys()[AWS]

    aws_access_key = keys[AWS_ACCESS_KEY]
    aws_secret_key = keys[AWS_SECRET_KEY]
    aws_bucket = keys[AWS_BUCKET]
    aws_folder = keys[AWS_FOLDER]

    # configuring filepath and tar file name
    archive_path = os.path.join(path, archive_name)
    print(f'[FILE] Creating archive for {db_name}')

    shutil.make_archive(archive_path, 'gztar', path)
    print('Completed archiving database')

    full_archive_path = archive_path + '.tar.gz'
    full_archive_name = archive_name + '.tar.gz'

    # Establish S3 Connection
    s3 = S3Connection(aws_access_key, aws_secret_key, AWS_HOST)
    bucket = s3.get_bucket(aws_bucket, validate=False)

    # Send files to S3
    print(f'[S3] Uploading file archive {full_archive_name}')
    k = Key(bucket)
    k.key = today.strftime('%Y-%m-%d') + '/' + full_archive_name
    print(k.key)
    k.set_contents_from_filename(full_archive_path)
    k.set_acl("public-read")
    print(f'[S3] Success uploaded file archive {full_archive_name}')
Example #30
 def run(self):
     # Get Access keys from command line
     accessKey = self.accessKey
     secretAccessKey = self.secretAccessKey
     try:
         # Creating an S3 connection using the access and secret access keys
         print('Starting S3 Connection')
         conn = S3Connection(accessKey, secretAccessKey)
         print('Connection Successful')
         # Connecting to specified bucket
         print('connecting to bucket')
         b = conn.get_bucket('finalprojectads')
         print('connection successful')
         #Initializing Key
         k = Key(b)
         #Uploading pickle and model performance files to S3 Bucket
         print('Starting to upload')
         onlyfiles = pd.read_csv(train_model().output().path)['0'].tolist()
         for i in onlyfiles:
             k.key = i
             k.set_contents_from_filename(i)
             k.set_acl('public-read')
         print('Upload Completed')
     except:
         print("Amazon credentials or location is invalid")
Example #31
def process_photo(photo, record):
    if record.source in ('android', 'iphone'):
        image = Image.open(photo)
        if image.size[0] > image.size[1]:
            temp = image.rotate(-90, Image.BILINEAR, expand=True)
            image = cStringIO.StringIO()
            temp.save(image, 'jpeg')
            image.seek(0)  # rewind so the upload reads the full JPEG
    else:
        image = photo
    headers = {'Content-Type': 'image/jpeg',
        'Expires': '%s GMT' % (email.Utils.formatdate(time.mktime(
                                (datetime.datetime.now() + datetime.timedelta(days=365*2)).timetuple()))),
        'Cache-Control': 'public, max-age=%d' % (3600 * 24 * 365 * 2)}
    conn = S3Connection(settings.S3_CREDENTIALS['access_key'], settings.S3_CREDENTIALS['secret_key'])
    bucket = conn.get_bucket(settings.S3_BUCKET)
    photo_filename = '%s/photo.jpg' % record._id
    key = Key(bucket=bucket, name=photo_filename)
    key.set_contents_from_file(image, headers=headers)
    key.set_acl('public-read')
    image.seek(0)  # rewind again before generating the thumbnail
    thumbnail_filename = '%s/thumbnail.jpg' % record._id
    key = Key(bucket=bucket, name=thumbnail_filename)
    key.set_contents_from_file(create_thumbnail(image), headers=headers)
    key.set_acl('public-read')
    record.photo_url = 'http://%s/%s' % (settings.S3_BUCKET, photo_filename)
    record.thumbnail_url = 'http://%s/%s' % (settings.S3_BUCKET, thumbnail_filename)
    record.save()
Example #32
def upload_to_aws(bucket_name, aws_key, aws_secret, file, key, acl='public-read'):
    """
    Uploads to AWS at key
    http://{bucket}.s3.amazonaws.com/{key}
    """
    conn = S3Connection(aws_key, aws_secret)
    bucket = conn.get_bucket(bucket_name)
    k = Key(bucket)
    # generate key using key + extension
    __, ext = os.path.splitext(file.filename)  # includes dot
    k.key = key
    key_name = key.rsplit('/')[-1]
    # set object settings
    file_data = file.read()
    size = len(file_data)
    sent = k.set_contents_from_string(
        file_data,
        headers={
            'Content-Disposition': 'attachment; filename=%s%s' % (key_name, ext)
        }
    )
    k.set_acl(acl)
    s3_url = 'https://%s.s3.amazonaws.com/' % (bucket_name)
    if sent == size:
        return s3_url + k.key
    return False
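A hypothetical Flask view using the helper above; the route, AWS_KEY/AWS_SECRET names, and bucket name are placeholders:

from flask import Flask, request

app = Flask(__name__)

@app.route('/upload', methods=['POST'])
def upload():
    f = request.files['file']  # werkzeug FileStorage: has .read() and .filename
    url = upload_to_aws('my-bucket', AWS_KEY, AWS_SECRET, f,
                        'uploads/%s' % f.filename)
    return url or ('upload failed', 500)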
Example #33
def s3_upload_file(from_file, to_key, acl="private"):
    k = Key(bucket())
    k.key = to_key
    k.set_contents_from_filename(from_file)
    k.set_acl(acl)
    # s3_key_expires(k)
    return True
Example #34
 def db_thread(self, threadName, path, name):
     print "%s: %s %s" % (threadName, path, name)
     
     k = Key(self.bucket)
     k.key = name+'.jpg'
     k.set_contents_from_filename(path)
     k.set_acl('public-read')
Example #35
def upload(state, filename):
    today = datetime.date.today()

    # build URL
    s3_bucket = 'data.openstates.sunlightlabs.com'
    n = 1
    s3_path = '%s-%02d-%s-r%d.zip' % (today.year, today.month, state, n)
    s3_url = 'http://%s.s3.amazonaws.com/%s' % (s3_bucket, s3_path)

    metadata = db.metadata.find_one({'_id':state})
    old_url = metadata.get('latest_dump_url')

    if s3_url == old_url:
        old_num = re.match('.*?-r(\d*).zip', old_url).groups()[0]
        n = int(old_num)+1
        s3_path = '%s-%02d-%s-r%d.zip' % (today.year, today.month, state, n)
        s3_url = 'http://%s.s3.amazonaws.com/%s' % (s3_bucket, s3_path)

    # S3 upload
    s3conn = boto.connect_s3(settings.AWS_KEY, settings.AWS_SECRET)
    bucket = s3conn.create_bucket(s3_bucket)
    k = Key(bucket)
    k.key = s3_path
    k.set_contents_from_filename(filename)
    k.set_acl('public-read')

    metadata['latest_dump_url'] = s3_url
    metadata['latest_dump_date'] = datetime.datetime.utcnow()
    db.metadata.save(metadata, safe=True)

    print 'uploaded to %s' % s3_url
Example #36
def add_s3_file(filename, public=True, bucket='averrin'):
    b = get_bucket(bucket)
    k = Key(b)
    k.key = os.path.basename(filename)
    k.set_contents_from_file(file(filename, 'r'))
    if eval(str(public)):
        k.set_acl('public-read')
Example #37
def upload(abbr, filename):
    today = datetime.date.today()

    # build URL
    s3_bucket = settings.AWS_BUCKET
    s3_path = '%s-%02d-%02d-%s.zip' % (today.year, today.month, today.day,
                                       abbr)
    s3_url = 'http://%s.s3.amazonaws.com/%s' % (s3_bucket, s3_path)

    meta = metadata(abbr)

    # S3 upload
    s3conn = boto.connect_s3(settings.AWS_KEY, settings.AWS_SECRET)
    bucket = s3conn.create_bucket(s3_bucket)
    k = Key(bucket)
    k.key = s3_path
    logging.info('beginning upload to %s' % s3_url)
    k.set_contents_from_filename(filename)
    k.set_acl('public-read')

    meta['latest_dump_url'] = s3_url
    meta['latest_dump_date'] = datetime.datetime.utcnow()
    db.metadata.save(meta, safe=True)

    logging.info('upload complete')
Example #38
    def upload(self, path, content, invalidate=None):
        """
        Set the content, mime type and ACL for a key on S3. Before setting
        them, check whether the object is new or changed.

        Arguments:
            path: path for key
            content: content to set
            invalidate: CloudFront path to add to invalidation list. * will be
                        added to the end to make sure we invalidate the URL
                        path with a trailing slash and the html itself.
                        If None the path will be used.
        """
        changed = self.file_changed(path, content)

        if not changed:
            return

        key = Key(self.bucket)
        key.content_type = guess_mime_type(path)
        key.key = path
        key.set_contents_from_string(content)
        key.set_acl("public-read")

        print("uploaded: {0}".format(path))

        if invalidate is None:
            invalidate = path

        self.to_invalidate.append(invalidate + "*")
Example #39
    def save_page(self, html):
        if html and len(html) > 100:
            if settings.BACKED_BY_AWS.get('pages_on_s3'):
                k = Key(settings.S3_PAGES_BUCKET)
                k.key = self.feed.s3_pages_key
                k.set_metadata('Content-Encoding', 'gzip')
                k.set_metadata('Content-Type', 'text/html')
                k.set_metadata('Access-Control-Allow-Origin', '*')
                out = StringIO.StringIO()
                f = gzip.GzipFile(fileobj=out, mode='w')
                f.write(html)
                f.close()
                compressed_html = out.getvalue()
                k.set_contents_from_string(compressed_html)
                k.set_acl('public-read')
                
                try:
                    feed_page = MFeedPage.objects.get(feed_id=self.feed.pk)
                    feed_page.delete()
                    logging.debug('   --->> [%-30s] ~FYTransfering page data to S3...' % (self.feed))
                except MFeedPage.DoesNotExist:
                    pass

                self.feed.s3_page = True
                self.feed.save()
            else:
                try:
                    feed_page = MFeedPage.objects.get(feed_id=self.feed.pk)
                    feed_page.page_data = html
                    feed_page.save()
                except MFeedPage.DoesNotExist:
                    feed_page = MFeedPage.objects.create(feed_id=self.feed.pk, page_data=html)
                return feed_page
Example #40
def backup(filename, **kwargs):
    log = kwargs.get("logger", app_logger)
    conf = kwargs.get("conf", None)
    bucket= get_bucket(conf)
    if not bucket:
        return

    log.info("Backing up " + filename)
    arcname = filename.split("/")[-1]
    
    out = StringIO()
    with tarfile.open(fileobj=out, mode="w:gz") as tar:
        tar.add(filename, arcname=arcname)

    password = kwargs.get("password")
    if not password:
        password = getpass()

    encrypted_out = StringIO()
    encrypt(out, encrypted_out, password)
    encrypted_out.seek(0)

    k = Key(bucket)
    k.key = arcname + datetime.now().strftime("%Y%m%d") + ".tgz.enc"
    k.set_contents_from_file(encrypted_out)
    k.set_acl("private")
Example #41
 def _BOTO_SINGLEPART(self):
     """
     Upload single part (under threshold in node_config)
     node_config MULTI_UPLOAD_BARRIER
     """
     try:
         conn = boto.connect_s3()
     except S3ResponseError:
         LOGGER.error('[DELIVERY] s3 Connection Error')
         return False
     delv_bucket = conn.get_bucket(
         self.auth_dict['edx_s3_endpoint_bucket']
     )
     upload_key = Key(delv_bucket)
     upload_key.key = os.path.basename(os.path.join(
         self.node_work_directory,
         self.encoded_file
     ))
     headers = {"Content-Disposition": "attachment"}
     upload_key.set_contents_from_filename(
         os.path.join(
             self.node_work_directory,
             self.encoded_file
         ),
         headers=headers,
         replace=True
     )
     upload_key.set_acl('public-read')
     return True
Example #42
def add_s3_file(filename, public=True, bucket='averrin'):
    b = get_bucket(bucket)
    k = Key(b)
    k.key = os.path.basename(filename)
    k.set_contents_from_file(file(filename, 'r'))
    if eval(str(public)):
        k.set_acl('public-read')
Example #43
    def upload_file(self, filename):
        try:
            lifecycle = Lifecycle()
            lifecycle.add_rule('rulename', prefix='logs/', status='Enabled',
                               expiration=Expiration(days=10))
            conn = boto.connect_s3()

            if conn.lookup(self.bucket_name):  # bucket exists
                bucket = conn.get_bucket(self.bucket_name)
            else:
                # create a bucket
                bucket = conn.create_bucket(self.bucket_name, location=boto.s3.connection.Location.DEFAULT)

            bucket.configure_lifecycle(lifecycle)
            from boto.s3.key import Key

            k = Key(bucket)
            k.key = filename
            k.set_contents_from_filename(filename, cb=self.percent_cb, num_cb=10)
            k.set_acl('public-read-write')

        except Exception, e:
            sys.stdout.write("AmazonS3Agent failed with exception:\n{0}".format(str(e)))
            sys.stdout.flush()
            raise e
Example #44
def upload_to_s3(file, key_name):
    conn = get_connection()
    b = conn.get_bucket(S3_BUCKET)
    key = Key(b)
    key.key = key_name
    key.set_contents_from_filename(file)
    key.set_acl('public-read')
Example #45
def make_zip(directory):
    if None in [settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY]:
        raise ImproperlyConfigured("AWS configuration not set.")

    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    bucket = conn.create_bucket(settings.AWS_BUCKET)

    filename = os.path.basename(directory) + ".zip"
    zip_file = zipfile.ZipFile(filename, "w")

    for root, dirs, files in os.walk(directory):
        for file in files:
            path = os.path.join(root, file)

            arcname = path.replace(directory, "")

            zip_file.write(path, arcname)

    zip_file.close()

    k = Key(bucket)
    k.key = filename
    k.set_contents_from_filename(filename)
    k.set_acl("public-read")

    os.remove(filename)

    return k.generate_url(24 * 60 * 60)
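Since the key was just made public-read, the signed URL from generate_url(24 * 60 * 60) is only one option; generate_url(0, query_auth=False), as several other examples on this page do, would return a permanent unsigned URL instead.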
Example #46
def store_in_s3(filename, content, context):
    s3 = boto.connect_s3(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    bucket = s3.get_bucket('xz820s')
    k = Key(bucket)
    k.key = filename
    k.set_contents_from_string(content)
    k.set_acl("public-read")
Example #47
def upload_to_gs(bucket_name, client_id, client_secret, file, key, acl='public-read'):
    conn = GSConnection(client_id, client_secret, calling_format=OrdinaryCallingFormat())
    bucket = conn.get_bucket(bucket_name)
    k = Key(bucket)
    # generate key
    filename = secure_filename(file.filename)
    key_dir = key + '/' + generate_hash(key) + '/'
    k.key = key_dir + filename
    # delete old data
    for item in bucket.list(prefix='/' + key_dir):
        item.delete()
    # set object settings

    file_data = file.read()
    file_mime = magic.from_buffer(file_data, mime=True)
    size = len(file_data)
    sent = k.set_contents_from_string(
        file_data,
        headers={
            'Content-Disposition': 'attachment; filename=%s' % filename,
            'Content-Type': '%s' % file_mime
        }
    )
    k.set_acl(acl)
    gs_url = 'https://storage.googleapis.com/%s/' % bucket_name
    if sent == size:
        return gs_url + k.key
    return False
Example #48
def upload_file(filename, bucket, prefix=None):

    global pbar

    key = Key(bucket)
    if prefix:
        key.key = '%s/%s' % (prefix, filename)
    else:
        key.key = '%s' % (filename)

    size = os.stat(filename).st_size
    if size == 0:
        print 'Bad filesize for "%s"' % (filename)
        return 0

    widgets = [
        unicode(filename, errors='ignore').encode('utf-8'), ' ',
        progressbar.FileTransferSpeed(), ' <<<',
        progressbar.Bar(), '>>> ',
        progressbar.Percentage(), ' ',
        progressbar.ETA()
    ]
    pbar = progressbar.ProgressBar(widgets=widgets, maxval=size)
    pbar.start()

    try:
        key.set_contents_from_filename(
            filename,
            cb=progress_callback,
            num_cb=100,
        )
        key.set_acl('public-read')
    except IOError, e:
        print e
        return 0
Example #49
def put(source_url,
        bucket,
        dest_key,
        mime_type,
        acl,
        compress,
        jsonp,
        overwrite=False):
    k = Key(bucket)
    k.key = dest_key
    headers = {"Content-Type": mime_type}
    if k.exists() and not overwrite:
        logging.info("Skipping %s - already exists" % dest_key)
        return False
    try:
        logging.info("Downloading from %s" % source_url)
        stream = urllib.urlopen(source_url)
        contents = stream.read()
        logging.info("Uploading to %s" % dest_key)
        string_to_store = "%s(%s);" % (prefix, contents) if jsonp else contents
        if compress:
            headers["Content-Encoding"] = "gzip"
            string_to_store = compress_string(string_to_store)
        k.set_contents_from_string(string_to_store,
                                   headers=headers,
                                   cb=s3_progress,
                                   num_cb=1000)
        k.set_acl(acl)
    except:
        logging.info("There was an error uploading to %s" % dest_key)
        return False
    logging.info("Finished uploading to %s" % dest_key)
Example #50
    def dump(self, key, value, public=False):
        """
        Dump file to S3.

        Optionally make public
        """

        assert isinstance(key, basestring), u'Key must be a string'

        k = Key(self.bucket)
        k.key = key

        try:
            k.set_metadata(u'Content-Type', u'application/json')
            k.set_contents_from_string(json.dumps(value, sort_keys=True, indent=4, separators=(u',', u': ')))

            # set file permissions
            if public:
                k.set_acl(u'public-read')

        except Exception as e:
            print e
            return False

        else:
            # now update the cache
            if self._keys is not None:
                self._keys.add(key)
            return True
Example #51
def upload_file(file, filename):
    import boto
    import boto.s3
    import sys
    from boto.s3.key import Key

    REGION_HOST = config('REGION_HOST')
    AWS_ACCESS_KEY_ID = config('AWS_ACCESS_KEY_ID')
    AWS_SECRET_ACCESS_KEY = config('AWS_SECRET_ACCESS_KEY')

    bucket_name = 'jbti-kickstarter-success'

    s3_connection = boto.connect_s3(AWS_ACCESS_KEY_ID,
                                    AWS_SECRET_ACCESS_KEY,
                                    host=REGION_HOST)
    bucket = s3_connection.get_bucket(bucket_name)
    key = Key(bucket)
    key.key = 'visualizations/' + filename
    key.set_contents_from_filename(file)

    bucket = s3_connection.get_bucket(bucket_name)
    key = bucket.lookup('visualizations/' + filename)
    key.set_acl('public-read')

    return 'https://' + bucket_name + '.' + REGION_HOST + '/visualizations/' + filename
Example #52
def testPicture(decodedData,connection,cursor):
	#print decodedData['data']
	#Make the file locally so that it can be uploaded to S3
	fileName = str(uuid.uuid1()) + ".jpg"
	fh = open("images/" + fileName, "wb")
	fh.write(decodedData['data'].decode('base64'))
	fh.close()
	
	#upload the file to S3
	conn = S3Connection(apikeys.AWSAccessKeyId, apikeys.AWSSecretKey)
	bucket = conn.get_bucket("devcontest", False, None)
	k = Key(bucket)
	k.key = fileName
	#uploads file
	k.set_contents_from_filename("images/" + fileName, None, None)
	#sets to public
	k.set_acl('public-read')
	#gets a url back
	url = k.generate_url(expires_in=0,query_auth=False)
	conn.close()

	#putting urls into dynamodb
	conn2 = boto.dynamodb.connect_to_region(
        'us-east-1',
        aws_access_key_id=apikeys.AWSAccessKeyId,
        aws_secret_access_key=apikeys.AWSSecretKey)
	table = conn2.get_table('Picture')
	#nosql db uses key, value pair. key is location id and value is url
	item = table.new_item(hash_key=decodedData['location_id'], range_key=url)
	item.put()

	return url
Example #53
def upload_to_gs(bucket_name,
                 client_id,
                 client_secret,
                 file,
                 key,
                 acl='public-read'):
    conn = GSConnection(client_id,
                        client_secret,
                        calling_format=OrdinaryCallingFormat())
    bucket = conn.get_bucket(bucket_name)
    k = Key(bucket)
    # generate key
    filename = secure_filename(file.filename)
    key_dir = key + '/' + generate_hash(key) + '/'
    k.key = key_dir + filename
    # delete old data
    for item in bucket.list(prefix='/' + key_dir):
        item.delete()
    # set object settings

    file_data = file.read()
    file_mime = magic.from_buffer(file_data, mime=True)
    size = len(file_data)
    sent = k.set_contents_from_string(file_data,
                                      headers={
                                          'Content-Disposition':
                                          'attachment; filename=%s' % filename,
                                          'Content-Type':
                                          '%s' % file_mime
                                      })
    k.set_acl(acl)
    gs_url = 'https://storage.googleapis.com/%s/' % bucket_name
    if sent == size:
        return gs_url + k.key
    return False
Example #54
def restful_uploader():
    AWS_ACCESS_KEY_ID = 'AKIAIHNJNV3BX634MAZA'
    AWS_SECRET_ACCESS_KEY = 'ZHRgY6oPTk+hWrrxJSO6Vf2d+UGmJWx1dVimwkCm'

    bucket_name = AWS_ACCESS_KEY_ID.lower() + 'data-center'
    conn = boto.connect_s3(AWS_ACCESS_KEY_ID,
            AWS_SECRET_ACCESS_KEY)

    bucket = conn.create_bucket(bucket_name,
        location=boto.s3.connection.Location.DEFAULT)

    testfile = "valid_json.png"
    print 'Uploading %s to Amazon S3 bucket %s' % \
       (testfile, bucket_name)

    def percent_cb(complete, total):
        sys.stdout.write('.')
        sys.stdout.flush()

    k = Key(bucket)
    k.key = str(uuid.uuid1())+ "." +testfile.split('.')[1]
    k.set_contents_from_filename(testfile,
        cb=percent_cb, num_cb=10)
    k.set_acl('public-read')
    url = k.generate_url(expires_in=0, query_auth=False, force_http=True)
    print url

    payload = {'EventTag': 'AngelHack', 'gifImageUrl': url, 'note': 'Time is calling'}
    post_url = 'http://localhost:3000/events/gifs/558f71d4387f0e3740ab7a0f'
    r = requests.post(post_url, data=payload)
    print r.text
Example #55
 def upload_text():
     headers = get_s3_headers()
     headers["Content-Type"] = "text/html"
     key = Key(bucket)
     key.name = "dist/latest.html"
     key.set_contents_from_filename(file_path, headers=headers)
     key.set_acl("public-read")
Example #56
def upload_maybe(fname):

    keyname = fname[len(INPUT_DIR)+1:]
    key = bucket.get_key(keyname)
    uploaded = False

    fname_md5 = hashlib.md5()
    with open(fname, 'rb') as f:
        fname_md5.update(f.read())

    hsh = fname_md5.hexdigest()

    if key is None or key.md5 != hsh:
        h = headers
        if keyname.endswith('sw.js'):
            h = copy.deepcopy(headers)
            h['Service-Worker-Allowed'] = '/'
        key = Key(bucket)
        key.name = keyname
        key.set_contents_from_filename(fname, headers=h)
        key.set_acl("public-read")
        uploaded = True

    url = key.generate_url(expires_in=0, query_auth=False)

    uri = furl(url)
    try:
        uri.args.pop('x-amz-security-token')
    except:
        pass
    url = uri.url
    return (url, uploaded)
Example #57
    def upload(self, data, key, content_type, headers=None, public=True):
        '''Uploads a file to S3 as the given key.

        :param data: the file data
        :type data: a file-like object or a :class:`str`
        :param str key: the name associated with the file (usually looks like a
                        path).
        :param str content_type: The MIME type of the data.
        :param headers: Any extra headers associated with the file that will be
                        sent any time the file is accessed.
        :type headers: :class:`dict` or :const:`None`
        :returns: the protocol-agnostic URL of the new file on S3.
        :rtype: :class:`str`
        '''
        if not headers:
            headers = {}
        headers.update({
            'Content-Type': content_type,
        })
        key = Key(self.bucket, key)
        if hasattr(data, 'read'):
            key.set_contents_from_file(data, headers=headers)
        else:
            key.set_contents_from_string(data, headers=headers)
        if public:
            key.set_acl('public-read')
        return '//%s.s3.amazonaws.com/%s' % (self.bucket_name, key.name)
Example #58
    def create(self, bucket_name, key, data, headers=None, acl=None):
        """
        Creates a file on Amazon S3. 

        :param bucket_name: Name of bucket to use
        :param key:         Key to use
        :param data:        File contents
        :param headers:     File headers
        :param acl:         File permissions.

        :type bucket_name:  string
        :type key:          string
        :type data:         anything
        :type headers:      dict
        :type acl:          string. Any string of: ('private', 'public-read', 'public-read-write', 'authenticated-read', 'bucket-owner-read', 'bucket-owner-full-control', 'log-delivery-write')

        :raises:            BotoClientError
        """
        # Returns bucket connection
        bucket = self._connect_and_get_bucket(bucket_name)

        # Create a Key instance for this bucket
        k = Key(bucket)
        k.key = key

        # Create the file in S3
        try:
            k.set_contents_from_string(
                data,
                headers=headers,
            )
            if acl:
                k.set_acl(acl)
        except:
            raise BotoClientError("Error uploading file to Amazon S3")
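A hypothetical call against the wrapper above (construction of the client instance is not shown in this excerpt):

client.create('my-bucket', 'docs/readme.txt', 'hello world',
              headers={'Content-Type': 'text/plain'},
              acl='public-read')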