Example #1
    def save_job_results(geocoder, job_id):
        """
        Download and save to S3 results for completed jobs.
        """
        logging.info('Saving results for %s to S3' % job_id)
        finished_folder = 'geocode_finished_jobs'
        pending_folder = 'geocode_pending_jobs'

        connection = boto.connect_s3()
        bucket = connection.get_bucket(GEO_BUCKET)
        old_key = bucket.get_key('%s/%s' % (pending_folder, job_id))

        new_name = old_key.get_contents_as_string()
        new_key = Key(bucket)
        new_key.key = '%s/%s' % (finished_folder, new_name)

        results = geocoder.get_job_results(job_id)
        result_string = StringIO.StringIO()
        writer = DictWriter(result_string, fieldnames=results[0].keys())
        writer.writeheader()
        writer.writerows(results)
        result_string.seek(0)

        email_address = old_key.get_metadata('email')
        if email_address:
            new_key.set_metadata('email', email_address)
            send_email_notification(
                email_address, geocoder.get_job_statuses(job_id=job_id), new_name, 'finished')

        new_key.set_contents_from_string(result_string.getvalue())
        new_key.make_public()
        old_key.delete()
Example #2
def upload_to_s3(file_obj, filename, path, content_type=None):
    """
    Method to upload a file to s3 and return a link.
    This method automatically tries to guess the content type of the file from the filename if content_type is not passed.

    :param file_obj: File object (this must not be closed)
    :param filename: Name to store the file under; also used to guess the content type
    :param path: Path where file is to be uploaded (this does not include filename)
    :param content_type: Content-type of the file.
    :return: AWS file url
    """
    # Connect to the bucket
    conn = boto.connect_s3(settings.AWS_ACCESS_KEY_ID,
                           settings.AWS_SECRET_ACCESS_KEY)
    bucket = conn.get_bucket(settings.AWS_STORAGE_BUCKET_NAME)

    key_path = "{}/{}".format(path, filename)

    k = Key(bucket)
    k.key = key_path

    if content_type is None:
        # content_type not passed; guess it from the filename
        content_type = mimetypes.guess_type(filename)[0]

    # Now set the type, but only if it is not None
    if content_type:
        k.content_type = content_type

    file_obj.seek(0)
    k.set_contents_from_string(file_obj.read())
    k.make_public()

    return "https://{}/{}".format(settings.AWS_S3_CUSTOM_DOMAIN, key_path)
Example #3
def upload_packages(packages, bucket_name=None, nightly=False):
    if debug:
        print "[DEBUG] upload_packages: {}".format(packages)
    try:
        import boto
        from boto.s3.key import Key
    except ImportError:
        print "!! Cannot upload packages without the 'boto' Python library."
        return 1
    print "Connecting to S3...".format(bucket_name)
    c = boto.connect_s3()
    if bucket_name is None:
        bucket_name = 'influxdb-nightly'
    bucket = c.get_bucket(bucket_name)
    print "\t - Using bucket: {}".format(bucket_name)
    for p in packages:
        name = os.path.basename(p)
        if bucket.get_key(name) is None or nightly:
            print "\t - Uploading {}...".format(name)
            sys.stdout.flush()
            k = Key(bucket)
            k.key = name
            k.set_contents_from_filename(p, replace=nightly)
            k.make_public()
        else:
            print "\t - Not uploading package {}, as it already exists.".format(
                p)
    print ""
    return 0
Example #4
    def getVideo(self, url, tempfilename, filename):
        """
        Do the actual downloading and upload the file to AWS S3
        """
        BUCKET = get_config('BUCKET', '')
        GOOGLE_STORAGE = get_config('GOOGLE_STORAGE', '')
        GS_KEY = get_config('GS_KEY', '')
        GS_SECRET = get_config('GS_SECRET', '')

        tempfilename = str(random.randrange(1, 60000)) + '.mp4'

#        command = 'ffmpeg -i \"%s\" -acodec copy -vcodec copy -absf aac_adtstoasc -y "%s.mp4"' % (url, filename)   # Old command, save for future reference
        command = 'ffmpeg -i \"%s\" -y "%s"' % (url, tempfilename)
        print command
        os.system(command)

        filename_gs = filename.split('/')[-1] + '.mp4'
        filename += '.mp4'
        conn = S3Connection(GS_KEY, GS_SECRET)
        bucket = conn.get_bucket('wlps-vhs')
        k = Key(bucket)
        k.key = filename_gs
        k.set_contents_from_filename(tempfilename)
        k.make_public()

        os.remove(tempfilename)
Example #5
    def push_via_file_object(cls,
                             file_obj,
                             filename,
                             s3_dir,
                             mode='private',
                             **kwargs):
        """
        push file object to s3 directory
        :param file_obj: the StringIO like file object to be pushed to s3
        :param filename: the name to store the object with
        :param s3_dir: the s3 directory to push the object to
        :param mode: private or public url to be generated
        :return: the s3 key and the url generated for the file
        """
        try:
            # point to the beginning of the file
            file_obj.seek(0)

            bucket = cls.get_s3_bucket(**kwargs)

            key_obj = Key(bucket)
            key_obj.key = "{}/{}".format(s3_dir, filename)
            key_obj.set_contents_from_file(file_obj)

            if mode == 'public':
                key_obj.make_public()
                url = key_obj.generate_url(expires_in=0, query_auth=False)
            else:
                url = cls.generate_private_url(key_name=key_obj.key, **kwargs)

            return key_obj.key, url
        except Exception as e:
            print("error pushing file object to s3 : {}".format(e))
            return None, None
Example #6
    def push_via_file_path(cls,
                           file_path,
                           filename,
                           s3_dir,
                           mode='public',
                           **kwargs):
        """
        push a local file to s3
        :param file_path: the local path of the file
        :param filename: the name of the file stored locally
        :param s3_dir: the s3 directory to which the file is to be pushed
        :param mode: the mode of file storage public/private
        :return: the s3 key and url of the file
        """
        try:
            bucket = cls.get_s3_bucket(**kwargs)

            key_obj = Key(bucket)
            key_obj.key = "{}/{}".format(s3_dir, filename)
            key_obj.set_contents_from_filename(file_path)

            if mode == 'public':
                key_obj.make_public()
                url = key_obj.generate_url(expires_in=0, query_auth=False)
            else:
                url = cls.generate_private_url(key_name=key_obj.key, **kwargs)

            return key_obj.key, url
        except Exception as e:
            print("error pushing file to s3 : {}".format(e))
            return None, None
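A hedged usage sketch for the two classmethods above, assuming a hypothetical containing class S3Store whose get_s3_bucket resolves the target bucket from **kwargs:

    key, url = S3Store.push_via_file_path('/tmp/report.pdf', 'report.pdf',
                                          'reports', mode='public')
    if key is None:
        print('upload failed')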
Example #7
def s3sync(gmt_diff, local_path, remote_path):
    #todo: put your aws keys here
    conn = S3Connection('<aws access key>', '<aws secret key>')
    
    #todo: put your bucket name here
    bucket = conn.get_bucket('bucket_name_here')
    blist = bucket.list(remote_path)

    d={}
    for a in blist:
        date_obj = datetime.strptime(a.last_modified, '%Y-%m-%dT%H:%M:%S.000Z')
        d[a.key] = {'key':a.key, 'name':a.name.decode('latin-1'), 'last_modified':a.last_modified, 'modified_date':date_obj}
     
    for path, subdirs, files in os.walk(local_path):
        for name in files:
            file_path = os.path.join(path, name)
            i = file_path.find(remote_path)
            compare_path = file_path[i:]
            file_datetime = datetime.fromtimestamp(os.path.getmtime(file_path))
            file_datetime = file_datetime.replace(microsecond=0) - timedelta(hours=gmt_diff)

            if compare_path not in d or ('modified_date' in d[compare_path] and d[compare_path]['modified_date'] < file_datetime):
                k = Key(bucket)
                k.key = compare_path
                k.set_contents_from_filename(file_path)
                k.make_public()
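A usage sketch, assuming the access-key and bucket-name placeholders above have been filled in; the GMT offset and paths are illustrative. The function only uploads files whose local modification time is newer than the S3 copy:

    s3sync(gmt_diff=-5, local_path='./site/assets', remote_path='assets')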
Example #8
def upload_to_s3(filename, key=''):
    """
        Uploads given filename to s3 bucket and makes it public. Returns URL
        of uploaded resource.

        :param key: Key of the uploaded resource. Defaults to `filename`.
    """
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID,
        settings.AWS_SECRET_ACCESS_KEY)
    bucket = conn.create_bucket(SCREEN_DUMPS_BUCKET_NAME)
    k = Key(bucket)
    k.key = key if key else filename
    # Set key to desired screenshot.
    k.set_contents_from_filename(filename)

    # Give access to view screen.
    k.make_public()

    # Amazon replaces some characters
    name = k.name.replace('%', '%25')
    name = name.replace('://', '%3A/')
    name = name.replace('?', '%3F')
    name = name.replace('=', '%3D')
    name = name.replace('+', '%2B')

    # Url for public screen (without any expiration date)
    return S3_SERVER_NAME + SCREEN_DUMPS_BUCKET_NAME + '/' + name
Example #9
def add_issue():
    if request.method == 'POST':
        u = User.get_or_create(db.session, int(request.form['user']))

        if not u.can_publish:
            return "", 403

        date = dateutil.parser.parse(request.form['time'])
        date = date.astimezone(pytz.utc).replace(tzinfo=None)

        issue = Issue(reporter=u,
                      title=request.form['title'],
                      time=date,
                      description=request.form['description'],
                      urgency=int(request.form.get('urgency', 0)))

        db.session.add(issue)
        db.session.commit()

        pictures = request.files.getlist("pictures[]")
        # TODO: check extension

        for picture in pictures:
            k = Key(boto_bucket)
            k.set_contents_from_file(picture.stream)
            k.make_public()

            p = Picture(issue=issue, s3_name=k.name)
            db.session.add(p)
        db.session.commit()

        return ""
    else:
        return """
Example #10
def _write_files(app, static_url_loc, static_folder, files, bucket,
                 ex_keys=None, hashes=None):
    """ Writes all the files inside a static folder to S3. """
    new_hashes = []
    for file_path in files:
        asset_loc = _path_to_relative_url(file_path)
        key_name = _static_folder_path(static_url_loc, static_folder,
                                       asset_loc)
        msg = "Uploading %s to %s as %s" % (file_path, bucket, key_name)
        logger.debug(msg)

        exclude = False
        if app.config.get('S3_ONLY_MODIFIED', False):
            file_hash = hash_file(file_path)
            new_hashes.append((key_name, file_hash))

            if hashes and hashes.get(key_name, None) == file_hash:
                exclude = True

        if ex_keys and key_name in ex_keys or exclude:
            logger.debug("%s excluded from upload" % key_name)
        else:
            k = Key(bucket=bucket, name=key_name)
            # Set custom headers
            for header, value in app.config['S3_HEADERS'].iteritems():
                k.set_metadata(header, value)
            k.set_contents_from_filename(file_path)
            k.make_public()

    return new_hashes
Example #11
def _write_string_to_s3(key_path, s):
    conn = boto.connect_s3()
    bucket = conn.get_bucket('el.epton.org')
    k = Key(bucket)
    k.key = key_path
    k.set_contents_from_file(StringIO.StringIO(s))
    k.make_public()
Example #12
def _save_file_to_bucket(conn, bucket_name, remote_filename, local_file, **kwargs):
    """ Save the local_file to bucket_name as remote_filename. Also, any additional
    arguments passed as key-value pairs, are stored as file's metadata on S3."""
    # print "Establishing handle with bucket '%s'..." % bucket_name
    b = _get_bucket(conn, bucket_name)
    if b is not None:
        # print "Establishing handle with key object '%s'..." % remote_filename
        k = Key(b, remote_filename)
        print("Attempting to save file '%s' to bucket '%s'..." % (remote_filename, bucket_name))
        try:
            # Store some metadata (key-value pairs) about the contents of the file being uploaded
            # Note that the metadata must be set *before* writing the file
            k.set_metadata('date_uploaded', str(datetime.utcnow()))
            for args_key in kwargs:
                print("Adding metadata to file '%s': %s=%s" % (remote_filename, args_key, kwargs[args_key]))
                k.set_metadata(args_key, kwargs[args_key])
            print("Saving file '%s'" % local_file)
            k.set_contents_from_filename(local_file)
            print("Successfully added file '%s' to bucket '%s'." % (remote_filename, bucket_name))
            k.make_public()
        except S3ResponseError as e:
            print("Failed to save file local file '%s' to bucket '%s' as file '%s': %s" % ( local_file, bucket_name, remote_filename, e ))
            return False
        return True
    else:
        return False
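A short usage sketch, assuming the module's _get_bucket helper and an open boto connection; the metadata kwargs are illustrative. As the comment in the function notes, metadata must be set before the contents are written:

    conn = boto.connect_s3()
    ok = _save_file_to_bucket(conn, 'my-bucket', 'backups/db.sql',
                              '/tmp/db.sql', uploaded_by='cron')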
Example #13
def upload_image_and_return_url(id):
    """ Returns S3 image url after uploading the image of the given file name to s3.

    """
    try:
        temp_image_path = ConfigSectionMap('paths')['temp_image_path']
        key = "{0}".format(id)
        fn = "{0}/{1}".format(temp_image_path, id)
        bucket_name = ConfigSectionMap('amazon_s3')['bucket_name']
        image_url = "http://s3.amazonaws.com/{0}/{1}".format(bucket_name, key)

        # connect to the bucket
        conn = boto.connect_s3(ConfigSectionMap('amazon_s3')['access_key_id'],
                        ConfigSectionMap('amazon_s3')['secret_access_key'])
        bucket = conn.get_bucket(bucket_name)
        # create a key to keep track of our file in the storage
        k = Key(bucket)
        k.key = key
        k.set_contents_from_filename(fn)
        # we need to make it public so it can be accessed publicly
        k.make_public()
        # remove the file from the web server
        os.remove(fn)
        log.info("Image url : {0}".format(image_url))
        return image_url
    except Exception, err:
        logging.exception("Error Message {0}".format(err))
        return None
Example #14
def s3UploadFilePublic(local_file_name, remote_file_name):
    print("Uploading public '%s' as '%s'" % (local_file_name, remote_file_name))
    bucket = s3Bucket()
    k = Key(bucket)
    k.key = remote_file_name
    k.set_contents_from_filename(local_file_name, cb=ul_cb)
    k.make_public()
Example #15
def postbuild_osx(package_name, args, bucket, package_s3_key):
    if args.upload:
        url = package_s3_key.generate_url(expires_in=0, query_auth=False)
    else:
        # For testing "brew install" locally
        url = "http://127.0.0.1:8000/%s" % package_name

    print "Generating formula..."
    sha1 = sha1_file(package_name)
    formula_str = BREW_FORMULA.format(url=url, sha1=sha1)
    with open("kite.rb", "w") as f:
        f.write(formula_str)

    if args.upload:
        print "Uploading new brew formula..."
        formula_key = Key(bucket)
        formula_key.key = "kite.rb"
        formula_key.set_contents_from_string(formula_str)
        formula_key.make_public()
        formula_url = formula_key.generate_url(expires_in=0, query_auth=False)

        print "kite tool has been uplaoded successfully.\n" \
              "Users can install it with:\n    " \
              "brew install \"%s\"" % formula_url
    else:
        print "Did not upload to S3. " \
              "If you want to upload, run with --upload flag."
Example #16
def deploy():
    from flask import url_for

    with app.app.test_request_context():
        path = url_for('_stack_json')

    with app.app.test_request_context(path=path):
        view = app.__dict__['_stack_json']
        content = view()

    with open('www/live-data/stack.json', 'w') as f:
        f.write(content.data)

    if app_config.DEPLOYMENT_TARGET:
        for bucket in app_config.S3_BUCKETS:
            c = boto.connect_s3()
            b = c.get_bucket(bucket['bucket_name'])
            k = Key(b)
            k.key = 'live-data/stack.json'
            k.set_contents_from_filename(
                'www/live-data/stack.json',
                headers={
                    'cache-control':
                    'max-age=%i, no-cache, no-store, must-revalidate' %
                    app_config.MAX_AGE_CACHE_CONTROL_HEADER
                })
            k.make_public()
Example #17
def upload_file(url, user):
    try:
        # S3 Save
        conn = boto.connect_s3(AWS_ACCESS_KEY, AWS_SECRET_KEY)
        bucket = conn.get_bucket(BUCKET_NAME)
        k = Key(bucket)
        k.key = RE_GET_FILE.match(url).group(5)
        file_object = urllib2.urlopen(url)
        fp = StringIO.StringIO(file_object.read())
        k.set_contents_from_file(fp)
        k.make_public()
        # Save S3 url to user
        url_to_save = 'https://s3-us-west-2.amazonaws.com/'+ BUCKET_NAME + '/' + k.key

        project = Project.create(url=url_to_save, project_user_id=user.user_id, status='pre')
        if not user.team_id:
            user.project_id = project.proj_id
            user.save()
        else:
            project.team_id = user.team_id
            project.save()
            users = User.update(project_id=project.proj_id).where(User.team_id == user.team_id)
            users.execute()

        return True
    except:
        return False
Example #18
def upload_image(folder_name, file_uuid, image_file, type_known=False):
    """
    Creates a connection to the s3 service, then uploads the file which was
    passed to this function, using the uuid as the filename.

    :param type_known:
    :param image_file:
    :param folder_name:
    :param file_uuid:
    :return:
    """
    bucket = settings.AWS_STORAGE_BUCKET_NAME
    conn = connect_s3(settings.AWS_ACCESS_KEY_ID,
                      settings.AWS_SECRET_ACCESS_KEY)
    k = Key(conn.get_bucket(bucket))
    if type_known:
        key_string = "%s/%s" % (folder_name, file_uuid)
        k.content_type = 'image/%s' % file_uuid[file_uuid.find('.') + 1:]
    else:
        key_string = "%s/%s%s" % (folder_name, file_uuid, ".png")
        k.content_type = 'image/png'
    k.key = key_string

    if not isinstance(image_file, str):
        image_file.seek(0)
        k.set_contents_from_string(image_file.read())
    else:
        k.set_contents_from_string(image_file)
    k.make_public()
    image_uri = k.generate_url(expires_in=0, query_auth=False)
    return image_uri
Example #19
    def upload_image(img_str, img_name):
        AWS_ACCESS_KEY = '<aws access key>'
        AWS_ACCESS_SECRET_KEY = '<aws secret key>'
        try:
            conn = boto.s3.connect_to_region(
                'us-east-1',
                aws_access_key_id=AWS_ACCESS_KEY,
                aws_secret_access_key=AWS_ACCESS_SECRET_KEY,
                is_secure=False,
                calling_format=OrdinaryCallingFormat())

            bucket_name = 'course-360'

            bucket = conn.get_bucket(bucket_name, validate=False)
            k = Key(bucket)
            k.key = img_name
            temp = img_str.split(",", 1)
            img_str = temp[1]
            decode_img = base64.b64decode(img_str)
            k.set_metadata('Content-Type', 'image/jpeg')
            k.set_contents_from_string(decode_img)
            k.make_public()
            return True
        except Exception as e:
            raise e
Example #20
def push_picture_to_s3(source, id):
    try:
        import boto
        from boto.s3.key import Key
        # set boto lib debug to critical
        bucket_name = settings.BUCKET_NAME
        print(bucket_name + 'worked')
        # connect to the bucket
        conn = boto.connect_s3(settings.AWS_ACCESS_KEY_ID,
                               settings.AWS_SECRET_ACCESS_KEY)
        bucket = conn.get_bucket(bucket_name)
        print(conn)
        print(settings.AWS_ACCESS_KEY_ID)
        # go through each version of the file
        key = 'user-%s.png' % id
        print(key)
        #    fn = '/var/www/data/%s.png' % id
        # create a key to keep track of our file in the storage
        k = Key(bucket)
        k.key = key
        k.set_contents_from_file(source)
        # we need to make it public so it can be accessed publicly
        # using a URL like http://s3.amazonaws.com/bucket_name/key
        k.make_public()
        # remove the file from the web server
    except:
        print('error')
        pass
Example #21
File: build.py Project: tompao/influxdb
def upload_packages(packages, bucket_name=None, nightly=False):
    if debug:
        print "[DEBUG] upload_packages: {}".format(packages)
    try:
        import boto
        from boto.s3.key import Key
    except ImportError:
        print "!! Cannot upload packages without the 'boto' Python library."
        return 1
    print "Connecting to S3...".format(bucket_name)
    c = boto.connect_s3()
    if bucket_name is None:
        bucket_name = 'influxdb-nightly'
    bucket = c.get_bucket(bucket_name)
    print "\t - Using bucket: {}".format(bucket_name)
    for p in packages:
        name = os.path.basename(p)
        if bucket.get_key(name) is None or nightly:
            print "\t - Uploading {}...".format(name)
            sys.stdout.flush()
            k = Key(bucket)
            k.key = name
            k.set_contents_from_filename(p, replace=nightly)
            k.make_public()
        else:
            print "\t - Not uploading package {}, as it already exists.".format(p)
    print ""
    return 0
Example #22
File: event.py Project: elerion/atrium
        def event_from_facebook(args, data):
            event = g.Event(
                club=args['club'],
                facebook_id=data['id'],
                name=data['name'],
                start_date=arrow.get(data['start_time']).datetime,
            )

            if 'description' in data.keys():
                event.description = data['description'].replace('\n', '<br />')

            if 'end_time' in data.keys():
                event.end_date = arrow.get(data['end_time']).datetime
            else:
                event.end_date = arrow.get(data['start_time']).replace(hours=2).datetime

            if 'place' in data.keys():
                event.place = Place(
                    name=data['place']['name']
                )
                if 'location' in data['place'].keys():
                    event.address = data['place']['location']['street'] + ', ' + data['place']['location']['city'] \
                                  + ', ' + data['place']['location']['country']

            event.save()
            bucket = s3conn.get_bucket(current_app.config['AWS_S3_BUCKET'])
            key = Key(bucket)
            key.key = g.tenant + '/events/' + str(event.id)
            key.content_type = 'image/jpeg'
            key.set_contents_from_string(requests.get(data['cover']['source']).content)
            key.make_public()
            event.poster = 'https://' + current_app.config['AWS_S3_BUCKET'] + '.s3.amazonaws.com/' + g.tenant + '/events/' + str(event.id)
            event.save()
Example #23
def uploadFileToS3(destDir, fileName, request, _type, cust):
    bucket_name = BUCKET_NAME
    conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    bucket = conn.get_bucket(bucket_name)
    # create a key to keep track of our file in the storage
    k = Key(bucket)
    k.key = fileName
    f = request.FILES[_type]
    s = f.file.getvalue()
    k.set_metadata('original_filename', fileName)
    # f is an uploaded file object, not a path, so write its bytes directly
    k.set_contents_from_string(s, policy='public-read')
    # we need to make it public so it can be accessed publicly
    # using a URL like http://s3.amazonaws.com/bucket_name/key
    k.make_public()
    s3File = S3File(key=k,
                    user=request.user,
                    name=fileName,
                    type=_type,
                    customer=cust)
    s3File.save()
    # remove the file from the web server
    # os.remove(destDir+fileName)
    return s3File
Example #24
 def put(self, filename, fileuid):
     k = Key(self.__bucket)
     k.key = str(fileuid)
     k.set_contents_from_filename(filename)
     url = k.generate_url(expires_in=0, query_auth=False)
     k.make_public()
     return (fileuid, url)
Example #25
File: news.py Project: elerion/atrium
    def post(self, news_id):
        news = g.News.objects.with_id(news_id)

        if not current_user.is_admin() and not current_user.has_any_permission('club', news.club.id, ['admin', 'news']):
            return abort(401)

        parser = reqparse.RequestParser()
        parser.add_argument('media', type=werkzeug.datastructures.FileStorage, location='files')
        args = parser.parse_args()

        uid = str(uuid4())

        bucket = s3conn.get_bucket(current_app.config['AWS_S3_BUCKET'])
        key = Key(bucket)
        key.key = g.tenant + '/news/' + str(news.id) + '/' + uid
        key.content_type = args['media'].mimetype
        key.set_contents_from_file(args['media'].stream)
        key.make_public()

        news.update(add_to_set__medias=Media(
            name=uid,
            url='https://' + current_app.config['AWS_S3_BUCKET'] + '.s3.amazonaws.com/' + g.tenant + '/news/' + str(news.id) + '/' + uid
        ))

        return g.News.objects.with_id(news_id)
Example #26
    def upload_cache_to_amazon(self, filename, bucket_name, key, secret, public):
        if filename is None or bucket_name is None or key is None or secret is None:
            raise RobustusException(
                "In order to upload to amazon S3 you should specify filename,"
                'bucket, access key and secret access key, see "robustus upload_cache -h"'
            )

        if os.path.isdir(filename):
            raise RobustusException("Can't upload directory to amazon S3, please specify archive name")

        try:
            import boto
            from boto.s3.key import Key

            # set boto lib debug to critical
            logging.getLogger("boto").setLevel(logging.CRITICAL)

            # connect to the bucket
            conn = boto.connect_s3(key, secret)
            bucket = conn.get_bucket(bucket_name)

            # create a key to keep track of our file in the storage
            k = Key(bucket)
            k.key = filename
            k.set_contents_from_filename(filename)
            if public:
                k.make_public()
        except ImportError:
            raise RobustusException("To use S3 cloud install boto library into robustus virtual")
        except Exception as e:
            raise RobustusException(e.message)
Example #27
File: cmmt.py Project: kzkv/CommuteTime
def upload_image(image_url, image_name):
    """аплоад изображения"""
    try:
        # connect to the S3 bucket
        connection = boto.connect_s3()
        bucket = connection.get_bucket(config.AWS_STORAGE_BUCKET_NAME)
        key = Key(bucket)

        # assign the file name
        key.key = str(int(time())) + "-" + image_name + ".png"

        # read the source image
        file_object = urllib2.urlopen(image_url)
        file_data = StringIO.StringIO(file_object.read())

        # write to S3
        key.content_type = "image/png"
        key.set_contents_from_file(file_data)

        # make it publicly readable
        key.make_public()

        result_url = key.generate_url(0, expires_in_absolute=True, force_http=True, query_auth=False)
        return result_url

    except Exception, e:
        return e
Example #28
def photoupload():
    file = request.files['photo']
    token = request.form['token']
    filename = request.form['filename']
    conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    bucket = conn.get_bucket('noteable-paf14')
    k = Key(bucket)
    k.key = hashlib.sha224(file.read()).hexdigest() + '.jpg'
    file.seek(0)
    k.set_contents_from_string(file.read())
    k.make_public()
    url = k.generate_url(expires_in=0, query_auth=False)
    u = User.objects.get(token=token)
    # Evernote stuff
    dev_token = os.environ['EVERNOTE_KEY']
    client = EvernoteClient(token=dev_token)
    file.seek(0)
    ret = sendImageToEvernote(client, filename, u.evernote_id, file.read())
    file.seek(0)
    Note(name=filename, original_photo_url=url, evernote_guid=ret.guid, author=u).save()
    chunks = findRectangle(create_opencv_image_from_file(file))
    noteStore = client.get_note_store()
    result = noteStore.getNote(dev_token, ret.guid, False, False, True, False)
    widgets = identifyWidgets(chunks, xml = result.resources[0].recognition.body[result.resources[0].recognition.body.find('<recoIndex'):-1])
    thread = threading.Thread(target=getTextChunks, args=(client, dev_token, ret.guid, widgets))
    thread.start()
    return jsonify(imageurl=url, success=True)
Example #29
def set_metadata():
    """Take a list of files to be uploaded to s3 and gzip CSS, JS, and HTML,
    setting metadata for all files"""

    gzip_extensions = ['.html', '.js', '.css']

    upload_list = get_files(PUSH_FROM)
    conn = S3Connection(AWS_KEY, AWS_SECRET_KEY, is_secure=False)
    mybucket = conn.get_bucket(AWS_BUCKET)

    static_expires = expires_header(STATIC_EXPIRES)
    html_expires = expires_header(HTML_EXPIRES)

    for filename in upload_list:
        web_path = filename.split('build')[1]
        s = "\\"
        web_path = web_path.replace(s, "/")
        print web_path


    # define all necessary attributes of each file for s3
    for filename in upload_list:
        k = Key(mybucket)
        ext = os.path.splitext(filename)[1]
        if not ext:
            ext = '.html'
        web_path = filename.split('build')[1]
        s = "\\"
        web_path = web_path.replace(s,"/")
        # Set expires for HTML, remove extension if not index
        if ext == '.html':
            if 'index' not in web_path:
                k.key = AWS_DIRECTORY + os.path.splitext(web_path)[0]
            else:
                k.key = AWS_DIRECTORY + web_path
            k.set_metadata('Expires', html_expires)
        else:
            k.key = AWS_DIRECTORY + web_path  # strip leading 0
            k.set_metadata('Expires', static_expires)

        if ext in gzip_extensions:
            f = gzip_file(filename)
            k.set_metadata('Content-Encoding', 'gzip')
        else:
            f = filename

        try:
            k.set_metadata('Content-Type', mimetypes.types_map[ext])
        except KeyError:
            print ext
            if ext != ".ttf":
                print "bad code " + web_path
                continue
        etag_hash = hashlib.sha1(f + str(time.time())).hexdigest()
        k.set_metadata('ETag', etag_hash)
        k.set_contents_from_filename(f)
        k.make_public()

    print '\nPage successfully updated'
    print "On " + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
Example #30
def main(transfer_file,
         bucket_name,
         s3_key_name=None,
         use_rr=False,
         make_public=True):
    global bucket
    # open the wikipedia file
    if not s3_key_name:
        s3_key_name = os.path.basename(transfer_file)
    conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    bucket = conn.get_bucket(bucket_name)

    file_handle = open(transfer_file, 'rb')

    k = Key(bucket)
    k.key = s3_key_name

    k.set_contents_from_file(file_handle,
                             cb=progress,
                             num_cb=20,
                             reduced_redundancy=use_rr)
    if make_public:
        k.make_public()

    return '/'.join((bucket_name, s3_key_name))
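A usage sketch, assuming the module-level AWS credentials and progress callback exist; the file and bucket names are hypothetical. Reduced-redundancy storage is opted into via use_rr:

    s3_path = main('enwiki-latest-abstract.xml', 'my-wiki-dumps', use_rr=True)
    print(s3_path)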
Example #31
def push_picture_to_s3(source,id):
    try:
        import boto
        from boto.s3.key import Key
        # set boto lib debug to critical
        bucket_name = settings.BUCKET_NAME
        print(bucket_name+'worked')
        # connect to the bucket
        conn = boto.connect_s3(settings.AWS_ACCESS_KEY_ID,settings.AWS_SECRET_ACCESS_KEY)
        bucket = conn.get_bucket(bucket_name)
        print(conn)
        print(settings.AWS_ACCESS_KEY_ID)
        # go through each version of the file
        key = 'user-%s.png' % id
        print(key)
        #    fn = '/var/www/data/%s.png' % id
        # create a key to keep track of our file in the storage
        k = Key(bucket)
        k.key = key
        k.set_contents_from_file(source)
        # we need to make it public so it can be accessed publicly
        # using a URL like http://s3.amazonaws.com/bucket_name/key
        k.make_public()
        # remove the file from the web server
    except:
        print('error')
        pass
Example #32
def _write_files(app,
                 static_url_loc,
                 static_folder,
                 files,
                 bucket,
                 ex_keys=None,
                 hashes=None):
    """ Writes all the files inside a static folder to S3. """
    new_hashes = []
    for file_path in files:
        asset_loc = _path_to_relative_url(file_path)
        key_name = _static_folder_path(static_url_loc, static_folder,
                                       asset_loc)
        msg = "Uploading %s to %s as %s" % (file_path, bucket, key_name)
        logger.debug(msg)

        exclude = False
        if app.config.get('S3_ONLY_MODIFIED', False):
            file_hash = hash_file(file_path)
            new_hashes.append((key_name, file_hash))

            if hashes and hashes.get(key_name, None) == file_hash:
                exclude = True

        if ex_keys and key_name in ex_keys or exclude:
            logger.debug("%s excluded from upload" % key_name)
        else:
            k = Key(bucket=bucket, name=key_name)
            # Set custom headers
            for header, value in app.config['S3_HEADERS'].iteritems():
                k.set_metadata(header, value)
            k.set_contents_from_filename(file_path)
            k.make_public()

    return new_hashes
Example #33
def save_file_in_s3(filename, subdirectory, env, gzipped_file_path):
    if env == 'staging':
        BUCKET_NAME = STAGE_BUCKET_NAME
    else:
        BUCKET_NAME = PROD_BUCKET_NAME


    if subdirectory != '':
        remote_filepath = WP_PATH + subdirectory + '/'
    else:
        remote_filepath = WP_PATH + '/'

    print 'uploading -- %s --  to -- %s --' % (filename, BUCKET_NAME + ' : ' + remote_filepath)

    #set headers
    # css -> content-type: text/css, content-encoding: gzip
    # js -> content-type: application/javascript, content-encoding: gzip

    conn = S3Connection(ACCESS_KEY, SECRET)
    bucket = conn.get_bucket(BUCKET_NAME)
    k = Key(bucket)
    k.key = remote_filepath + filename
    k.set_metadata('Content-Encoding', 'gzip')
    k.set_contents_from_filename(gzipped_file_path)
    k.make_public()

    print '**** Deleting ' + gzipped_file_path + '****'
    os.remove(gzipped_file_path)
Example #34
def upload_s3(filename, fn, filetype, name):
    success = False
    try:
        from boto.s3.key import Key
        from boto.s3.connection import S3Connection

        conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
        bucket = conn.get_bucket(settings.AWS_STORAGE_BUCKET_NAME)

        if filename and fn and filetype and name:
            if 'image' == filetype:
                image = Image.open(filename)
                image.save(fn, image.format)
            else:
                destination = open(fn, 'wb+')
                for chunk in filename.chunks():
                    destination.write(chunk)
                destination.close()

            k = Key(bucket)
            k.key = name
            k.set_contents_from_filename(fn)
            k.make_public()
            os.remove(fn)
            success = True

    except Exception, e:
        print e

    return success
Example #35
def upload_packages(packages, bucket_name=None, overwrite=False):
    logging.debug("Uploading files to bucket '{}': {}".format(bucket_name, packages))
    try:
        import boto
        from boto.s3.key import Key
    except ImportError:
        logging.warn("Cannot upload packages without 'boto' Python library! Skipping.")
        return False
    logging.info("Connecting to S3.")
    c = boto.connect_s3()
    if bucket_name is None:
        bucket_name = DEFAULT_BUCKET
    bucket = c.get_bucket(bucket_name.split('/')[0])
    for p in packages:
        if '/' in bucket_name:
            # Allow for nested paths within the bucket name (ex:
            # bucket/folder). Assuming forward-slashes as path
            # delimiter.
            name = os.path.join('/'.join(bucket_name.split('/')[1:]),
                                os.path.basename(p))
        else:
            name = os.path.basename(p)
        if bucket.get_key(name) is None or overwrite:
            logging.info("Uploading file {}".format(name))
            k = Key(bucket)
            k.key = name
            k.set_contents_from_filename(p, replace=overwrite)
            k.make_public()
        else:
            logging.warn("Not uploading file {}, as it already exists in the target bucket.")
    return True
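A usage sketch for the variant above; the package and bucket names are hypothetical. The bucket name may carry a nested prefix (forward-slash delimited), which the function splits off into the key name:

    upload_packages(['dist/influxdb_1.0_amd64.deb'],
                    bucket_name='my-packages/nightly', overwrite=True)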
Example #36
    def upload(self, resource):
        """
        Upload a resource.
        
        :param resource: An instance of `django_s3.resource.Resource`
        """
        try:

            folder_name = url_pattern.match(
                resource.name).groupdict()['folder_name']

            key_holder = Key(self.__bucket)
            key_holder.key = "{}/{}/{}".format(
                settings.S3_CATEGORY_MAP[resource.category_code], folder_name,
                resource.name)
            key_holder.set_contents_from_filename(
                os.path.join(django_s3_settings.S3_UPLOAD_DIR_PATH,
                             resource.name))
            key_holder.make_public()
        except Exception as err:
            Transport.logger.error(
                _("Error uploading file: {}. Error: {}".format(
                    resource.name, err)))
            # Right now we don't know what exceptions are expected here, we propagate the error
            # up. If we found some exception then we'll add the proper handler.
            raise
Example #37
File: flask_s3.py Project: xymz/flask-s3
def _upload_file(file_path, bucket, key_name, headers={}, do_gzip=False):
    k = Key(bucket=bucket, name=key_name)
    for header, value in headers.items():
        if (header, value) != ('Content-Encoding', 'gzip'):
            k.set_metadata(header, value)
    mimetype = mimetypes.guess_type(file_path)[0]
    if mimetype:
        k.set_metadata('Content-Type', mimetype)
    with open(file_path, 'rb') as f:
        content = f.read()
        if do_gzip:
            k.set_metadata('Content-Encoding', 'gzip')
            gzipped = StringIO()
            with gzip.GzipFile(fileobj=gzipped, mode='w') as _gzip:
                _gzip.write(content)
            content = gzipped.getvalue()
    try:
        k.set_contents_from_string(content)
    except S3ResponseError:
        if not do_gzip:
            k.set_contents_from_filename(file_path)
        else:
            raise
    k.make_public()
    return k
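A usage sketch, assuming a boto bucket handle and hypothetical names; custom headers become key metadata, and do_gzip compresses the payload in memory before upload:

    bucket = boto.connect_s3().get_bucket('my-static-site')
    _upload_file('static/app.css', bucket, 'assets/app.css',
                 headers={'Cache-Control': 'max-age=86400'}, do_gzip=True)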
Example #38
    def upload_file(self, s3_url, wiki_url, options):
        # open the wikipedia file
        local_filename = os.path.basename(s3_url)
        filename, headers = urllib.urlretrieve(
            wiki_url, os.path.join('/tmp', local_filename))
        print local_filename
        print os.path.getsize(filename)
        print headers.gettype()

        file_handle = open(filename, 'rb')
        #meta = remote_file.info()
        #content_type = meta.gettype()

        #sys.stdout.write(u"Preparing to upload {0} to path {1}...\n".format(local_filename.decode('utf-8'), s3_url))

        k = Key(self.bucket)
        k.key = s3_url

        sys.stdout.write('Uploading...\n')
        k.set_contents_from_file(file_handle, cb=progress, num_cb=20)
        k.make_public()

        sys.stdout.write('Upload complete.\n')

        return os.path.join(self.conf['AWS_BUCKET'], s3_url)
Example #39
def s3UploadFilePublic(local_file_name, remote_file_name):
    print ("Uploading public '%s' as '%s'" % (local_file_name, remote_file_name))
    bucket = s3Bucket()
    k = Key(bucket)
    k.key = remote_file_name
    k.set_contents_from_filename(local_file_name, cb=ul_cb)
    k.make_public()
Example #40
    def save_job_results(geocoder, job_id):
        """
        Download and save to S3 results for completed jobs.
        """
        logging.info('Saving results for {} to S3'.format(job_id))
        finished_folder = 'geocode_finished_jobs'
        pending_folder = 'geocode_pending_jobs'

        connection = boto.connect_s3()
        bucket = connection.get_bucket(GEO_BUCKET)
        old_key = bucket.get_key('{}/{}'.format(pending_folder, job_id))

        new_name = old_key.get_contents_as_string()
        new_key = Key(bucket)
        new_key.key = '{}/{}'.format(finished_folder, new_name)

        results = geocoder.get_job_results(job_id)
        result_string = StringIO.StringIO()
        writer = DictWriter(result_string, fieldnames=results[0].keys())
        writer.writeheader()
        writer.writerows(results)
        result_string.seek(0)

        email_address = old_key.get_metadata('email')
        if email_address:
            new_key.set_metadata('email', email_address)
            send_email_notification(email_address,
                                    geocoder.get_job_statuses(job_id=job_id),
                                    new_name, 'finished')

        new_key.set_contents_from_string(result_string.getvalue())
        new_key.make_public()
        old_key.delete()
Example #41
File: util.py Project: kzmkv/SumatraPDF
def s3UploadDataPublic(data, remote_file_name):
    log("s3 upload data as '%s'" % remote_file_name)
    bucket = s3PubBucket()
    k = Key(bucket)
    k.key = remote_file_name
    k.set_contents_from_string(data)
    k.make_public()
Example #42
def upload_to_s3(filename, key=''):
    """
        Uploads given filename to s3 bucket and makes it public. Returns URL
        of uploaded resource.

        :param key: Key of the uploaded resource. Defaults to `filename`.
    """
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID,
                        settings.AWS_SECRET_ACCESS_KEY)
    bucket = conn.create_bucket(SCREEN_DUMPS_BUCKET_NAME)
    k = Key(bucket)
    k.key = key if key else filename
    # Set key to desired screenshot.
    k.set_contents_from_filename(filename)

    # Give access to view screen.
    k.make_public()

    # Amazon replaces some characters
    name = k.name.replace('%', '%25')
    name = name.replace('://', '%3A/')
    name = name.replace('?', '%3F')
    name = name.replace('=', '%3D')
    name = name.replace('+', '%2B')

    # Url for public screen (without any expiration date)
    return S3_SERVER_NAME + SCREEN_DUMPS_BUCKET_NAME + '/' + name
Example #43
def upload(bucket_name, local_filename, s3_path):
    connection = boto.connect_s3()
    bucket = Bucket(connection, bucket_name)
    key = Key(bucket)
    key.key = s3_path
    key.set_contents_from_filename(local_filename)
    key.make_public()
Example #44
def scrape_data_to_html():
    timestamp = datetime.fromtimestamp(time.time()).strftime("%H:%M:%S on %A, %d %B, %Y")
    all_drinkers = drinkers_table.scan()
    drinkers = []
    for drinker in all_drinkers:
        if drinker['code'] is None:
            drinker['code'] = "UNKNOWN"
        if drinker['name'] is None:
            drinker['name'] = "UNKNOWN"
        if drinker['volume_consumed'] is None:
            drinker['volume_consumed'] = 0
        if drinker['number_of_drinks'] is None:
            drinker['number_of_drinks'] = 0
        d = {}
        d['code'] = drinker['code']
        d['name'] = drinker['name']
        d['volume_consumed'] = drinker['volume_consumed']
        d['number_of_drinks'] = drinker['number_of_drinks']
        drinkers.append(d)
    loader = FileLoader('templates')
    template = loader.load_template('drinks.html.template')
    webpage = template.render(locals())
    bucket = s3.get_bucket('kegerator')
    key = Key(bucket)
    key.key = 'drinks.html'
    key.content_type = 'text/html'
    key.set_contents_from_string(webpage)
    key.make_public()
Example #45
def add_meow(request):
    if request.method == "POST":
        newfile = request.FILES.get('new_meow_image')
        key = Key(bucket)
        keyname = str(int(uuid.uuid4()))[:10] + newfile.name
        key.key = keyname
        key.set_contents_from_string(newfile.read())
        key.make_public()
            
        url = 'https://s3.amazonaws.com/kitty2013/' + keyname

        user = request.user
        new_meow_text = request.POST.get('new_meow')
        new_meow = Meow(text=new_meow_text,
                        user=user, image_url=url)

        new_meow.save()

        # Find these values at https://twilio.com/user/account
        tagged_username = request.POST.get('tag')
        tagged_user = User.objects.filter(username=tagged_username)[0]
        if(tagged_user):
            cell = tagged_user.userprofile.cell_phone

            account_sid = "ACb65bbe159b7b180894c412cd4d47d231"
            auth_token = "19d4f44997dbabfb6b15add63408682f"
            client = TwilioRestClient(account_sid, auth_token)
            message = client.messages.create(to="+%d" % cell, from_="+16083716550",
                    body="Hey %s, %s just tagged you in a picture" % (tagged_user.username, user.username))

        return redirect('/user/%s' % user.id)

    raise Http404
Example #46
def s3UploadFilePublic(local_file_name, remote_file_name):
    log("s3 upload '%s' as '%s'" % (local_file_name, remote_file_name))
    bucket = s3PubBucket()
    k = Key(bucket)
    k.key = remote_file_name
    k.set_contents_from_filename(local_file_name, cb=ul_cb)
    k.make_public()
Example #47
def upload_image_and_return_url(id):
    """ Returns S3 image url after uploading the image of the given file name to s3.

    """
    try:
        temp_image_path = ConfigSectionMap('paths')['temp_image_path']
        key = "{0}".format(id)
        fn = "{0}/{1}".format(temp_image_path, id)
        bucket_name = ConfigSectionMap('amazon_s3')['bucket_name']
        image_url = "http://s3.amazonaws.com/{0}/{1}".format(bucket_name, key)

        # connect to the bucket
        conn = boto.connect_s3(ConfigSectionMap('amazon_s3')['access_key_id'],
                        ConfigSectionMap('amazon_s3')['secret_access_key'])
        bucket = conn.get_bucket(bucket_name)
        # create a key to keep track of our file in the storage
        k = Key(bucket)
        k.key = key
        k.set_contents_from_filename(fn)
        # we need to make it public so it can be accessed publicly
        k.make_public()
        # remove the file from the web server
        os.remove(fn)
        log.info("Image url : {0}".format(image_url))
        return image_url
    except Exception, err:
        logging.exception("Error Message {0}".format(err))
        return None
Example #48
def s3UploadDataPublic(data, remote_file_name):
    log("s3 upload data as '%s'" % remote_file_name)
    bucket = s3PubBucket()
    k = Key(bucket)
    k.key = remote_file_name
    k.set_contents_from_string(data)
    k.make_public()
Example #49
def _upload_file(file_path, bucket, key_name, headers={}, do_gzip=False):
    k = Key(bucket=bucket, name=key_name)
    for header, value in headers.items():
        if (header, value) != ('Content-Encoding', 'gzip'):
            k.set_metadata(header, value)
    mimetype = mimetypes.guess_type(file_path)[0]
    if mimetype:
        k.set_metadata('Content-Type', mimetype)
    with open(file_path, 'rb') as f:
        content = f.read()
        if do_gzip:
            k.set_metadata('Content-Encoding', 'gzip')
            gzipped = StringIO()
            with gzip.GzipFile(fileobj=gzipped, mode='w') as _gzip:
                _gzip.write(content)
            content = gzipped.getvalue()
    try:
        k.set_contents_from_string(content)
    except S3ResponseError:
        if not do_gzip:
            k.set_contents_from_filename(file_path)
        else:
            raise
    k.make_public()
    return k
Example #50
def post_recapture(request, pk):
	post = get_object_or_404(Post, pk=pk)

	#Webcapture using phantomjs
	driver = webdriver.PhantomJS()
	driver.set_window_size(1024, 768)
	driver.get(post.final_url)
	regex = re.compile('[^a-zA-Z]')
	simpletitle = regex.sub('', post.title)
	driver.save_screenshot('/home/ubuntu/tmp/' + simpletitle + '.png')

	#uploading image to s3 using boto
	b = c.get_bucket('leech-bucket-lab3')
	k = Key(b)
	k.key = simpletitle + '.png'
	k.delete()

	k = Key(b)
	k.key = simpletitle + '.png'
	k.set_contents_from_filename('/home/ubuntu/tmp/' + simpletitle + '.png')
	k.make_public()
	post.webcapture = 'https://s3.amazonaws.com/leech-bucket-lab3/' + simpletitle + '.png'

	os.remove('/home/ubuntu/tmp/' + simpletitle + '.png')
	
	post.save()
	return redirect('blog.views.post_detail', pk=post.pk)
Example #51
File: util.py Project: kzmkv/SumatraPDF
def s3UploadFilePublic(local_file_name, remote_file_name):
    log("s3 upload '%s' as '%s'" % (local_file_name, remote_file_name))
    bucket = s3PubBucket()
    k = Key(bucket)
    k.key = remote_file_name
    k.set_contents_from_filename(local_file_name, cb=ul_cb)
    k.make_public()
Example #52
def upload_to_s3(self, local_file_path, sgdid, file_extension, s3_access_key, s3_secret_key, s3_bucket):
    try:
        conn = boto.connect_s3(s3_access_key, s3_secret_key)
        bucket = conn.get_bucket(s3_bucket)

        k = Key(bucket)
        k.key = str(sgdid) + "." + file_extension
        k.set_contents_from_filename(local_file_path)
        k.make_public()

        md5_local = md5(local_file_path)
        file_s3 = bucket.get_key(k.key)
        etag_md5_s3 = file_s3.etag.strip('"').strip("'")

        if (md5_local == etag_md5_s3):
            fdb = DBSession.query(Filedbentity).filter(Filedbentity.sgdid == sgdid).one_or_none()
            fdb.md5sum = etag_md5_s3
            fdb.s3_url = file_s3.generate_url(expires_in=0, query_auth=False)

            DBSession.flush()

            transaction.commit()

            os.remove(local_file_path)
        else:
            raise Exception('MD5sum check failed for: ' + local_file_path)
    except Exception as exc:
        raise self.retry(exc=exc)
Example #53
 def update(self, instance):
     conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
     b = conn.get_bucket(AWS_STORAGE_BUCKET_NAME)
     k = Key(b)
     #session = botocore.session.get_session()
     #client = boto3.resource('s3')
     #client = session.create_client('s3')
     driver = webdriver.PhantomJS(
         service_log_path=os.path.devnull)  # or add to your PATH
     driver.set_window_size(1024, 768)  # optional
     driver.get(str(instance.short_url))
     instance.page_title = driver.title
     filepath = '/tmp/' + str(instance.id) + '.png'
     k.key = 'captures/' + str(instance.id) + '.png'
     s3path = 'captures/' + str(instance.id) + '.png'
     driver.save_screenshot(filepath)
     k.set_contents_from_filename(filepath)
     k.make_public()
     driver.service.process.kill()
     #data = open(filepath, 'rb') # save a screenshot to disk
     #client.put_object(Key=s3path, Body=data, Bucket=AWS_STORAGE_BUCKET_NAME) #client.Bucket(AWS_STORAGE_BUCKET_NAME).put_object(Key=s3path, Body=data)
     instance.screen_capture = 'https://s3.amazonaws.com/%s' % AWS_STORAGE_BUCKET_NAME + '/' + s3path
     instance.destination_url = requests.get(instance.short_url).url
     instance.save()
     driver.service.process.kill()
     #driver.quit()
     os.remove(filepath)
     return instance
Example #54
def postbuild_osx(package_name, args, bucket, package_s3_key):
    if args.upload:
        url = package_s3_key.generate_url(expires_in=0, query_auth=False)
    else:
        # For testing "brew install" locally
        url = "http://127.0.0.1:8000/%s" % package_name

    print "Generating formula..."
    sha1 = sha1_file(package_name)
    formula_str = BREW_FORMULA.format(url=url, sha1=sha1)
    with open("kite.rb", "w") as f:
        f.write(formula_str)

    if args.upload:
        print "Uploading new brew formula..."
        formula_key = Key(bucket)
        formula_key.key = "kite.rb"
        formula_key.set_contents_from_string(formula_str)
        formula_key.make_public()
        formula_url = formula_key.generate_url(expires_in=0, query_auth=False)

        print "kite tool has been uplaoded successfully.\n" \
              "Users can install it with:\n    " \
              "brew install \"%s\"" % formula_url
    else:
        print "Did not upload to S3. " \
              "If you want to upload, run with --upload flag."
Example #55
def create_local_redirect(bucket, path, location):
    print 'attempt local_redirect', bucket.name, path, location
    key = Key(bucket)
    key.name = path
    key.set_contents_from_string('')
    key.set_redirect(location)
    key.make_public()
    print 'local_redirect', bucket.name, path, location
Example #56
def set_key_json_object(bucket, key, object):
    key_obj = bucket.get_key(key)
    if key_obj is None:
        key_obj = Key(bucket)
        key_obj.key = key

    key_obj.set_contents_from_string(simplejson.dumps(object))
    key_obj.make_public()
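A usage sketch with hypothetical names; the helper reuses an existing key when present, so repeated writes to the same key update the object in place:

    bucket = boto.connect_s3().get_bucket('my-config-bucket')
    set_key_json_object(bucket, 'status.json', {'healthy': True, 'version': 3})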
Example #57
def postbuild_linux(package_name, args, bucket, package_s3_key):
    if args.upload:
        print "Uploading again as kite-latest.linux.deb ..."
        latest = Key(bucket)
        latest.key = "kite-latest-linux.deb"
        latest.set_contents_from_filename(package_name)
        latest.make_public()
        print "Uploaded:", latest.generate_url(expires_in=0, query_auth=False)
Example #58
def upload_s3(local_fn, remote_fn, **kwargs):
    s3 = boto.connect_s3()
    bucket = s3.get_bucket("sauron-yeo")
    k = Key(bucket)
    k.key = remote_fn
    k.set_contents_from_filename(local_fn)
    k.make_public()
    return ["done"]