Example #1
def main(stream_url: str, stream_name: str, bucket_name: str, duration: str):
    temp_file = 'temp.m4a'

    print('beginning rip')

    code = subprocess.call(['ffmpeg',
                            '-i', stream_url,
                            '-t', duration,
                            '-acodec', 'copy',
                            '-absf', 'aac_adtstoasc',
                            temp_file])

    assert code == 0, 'stream rip failed with code ' + str(code)

    print('connecting to s3')
    conn = S3Connection(is_secure=False)  # AWS uses invalid certs
    bucket = conn.get_bucket(bucket_name)

    print('writing recorded file to s3')
    m4a = Key(bucket)
    m4a.name = datetime.datetime.utcnow().strftime(stream_name + '--%Y-%m-%d.m4a')
    m4a.content_type = MIME_TYPE
    m4a.metadata = {'Content-Type': MIME_TYPE}
    m4a.storage_class = 'STANDARD_IA'
    m4a.set_contents_from_filename(temp_file)
    m4a.close()

    print('generating new feed.xml from s3 bucket list')
    feed_xml = Key(bucket)
    feed_xml.name = 'feed.xml'
    feed_xml.content_type = 'application/rss+xml'
    feed_xml.set_contents_from_string(
        rss_xml(stream_name, bucket_name, bucket.list()))
    feed_xml.close()
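
A minimal invocation sketch, assuming the module-level imports and the rss_xml helper are in place; the URL, names, and duration below are placeholders (the duration string is handed straight to ffmpeg's -t flag):

if __name__ == '__main__':
    # hypothetical values for illustration only
    main('http://example.com/stream.m4a', 'morning-show', 'my-radio-archive', '3600')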
Example #2
    def init_bucket(self):
        bucket = self._connection.create_bucket(self._bucket_name)
        bucket.set_policy(json.dumps({
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Sid": "PublicReadGetObject",
                    "Effect": "Allow",
                    "Principal": "*",
                    "Action": ["s3:GetObject"],
                    "Resource": [
                        "arn:aws:s3:::{bucket}/*".format(
                            bucket=self._bucket_name)]
                }
            ]
        }))

        error_key = Key(bucket, 'error.html')
        error_key.content_type = "text/html"
        error_key.set_contents_from_string("""
        <!doctype html>
        <h1>It's all gone wrong!
        """)

        index_key = Key(bucket, "index.html")
        index_key.content_type = "text/html"
        index_key.set_contents_from_string("""
        <!doctype html>
        <h1>Welcome to gifshare
        """)

        bucket.set_website_configuration(WebsiteConfiguration(
            'index.html',
            'error.html',
        ))
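
Once set_website_configuration has run, the bucket is served from S3's website endpoint. A sketch of the resulting URL, assuming a us-east-1 bucket (the hostname pattern varies by region):

def website_url(bucket_name, region='us-east-1'):
    # S3 website endpoints generally follow this pattern
    return 'http://%s.s3-website-%s.amazonaws.com/' % (bucket_name, region)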
Example #3
def upload_image(folder_name, file_uuid, image_file, type_known=False):
    """
    Creates a connection to the s3 service then uploads the file which was
    passed
    to this function an uses the uuid as the filename.

    :param type_known:
    :param image_file:
    :param folder_name:
    :param file_uuid:
    :return:
    """
    bucket = settings.AWS_STORAGE_BUCKET_NAME
    conn = connect_s3(settings.AWS_ACCESS_KEY_ID,
                      settings.AWS_SECRET_ACCESS_KEY)
    k = Key(conn.get_bucket(bucket))
    if type_known:
        key_string = "%s/%s" % (folder_name, file_uuid)
        k.content_type = 'image/%s' % file_uuid[file_uuid.find('.') + 1:]
    else:
        key_string = "%s/%s%s" % (folder_name, file_uuid, ".png")
        k.content_type = 'image/png'
    k.key = key_string

    if not isinstance(image_file, str):
        image_file.seek(0)
        k.set_contents_from_string(image_file.read())
    else:
        k.set_contents_from_string(image_file)
    k.make_public()
    image_uri = k.generate_url(expires_in=0, query_auth=False)
    return image_uri
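
Note that deriving the subtype from the uuid's suffix produces non-registered values such as image/jpg for .jpg files. A sketch of a more robust guess using only the standard library, falling back to PNG as this function does:

import mimetypes

def guess_image_type(file_uuid):
    # mimetypes maps .jpg to the registered image/jpeg type
    return mimetypes.guess_type(file_uuid)[0] or 'image/png'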
Example #4
    def Upload(self):
        import sys
        import boto
        from ssl import SSLError
        from boto.s3.key import Key

        s3 = boto.connect_s3()
        bucket = s3.get_bucket(self.b, validate=False)
        self.lock.acquire()
        print "s3Upload", (self.f + "1.csv"), " to ", bucket
        self.lock.release()

        try:
            k = Key(bucket)
            print(k)
            k.key = self.f + "1.csv"
            k.content_type = 'text/html'
            k.set_contents_from_filename(str(self.f) + "1.csv")
            k.set_acl('public-read')
            k.key = self.f + "1.json"
            k.content_type = 'text/html'
            k.set_contents_from_filename(str(self.f) + "1.json")
            k.set_acl('public-read')
        except IOError as e:
            print "I/O error({0}): {1}".format(e.errno, e.strerror)
        except SSLError as e:
            print "SSL error({0}): {1}".format(e.errno, e.strerror)
        except:
            print "Unexpected error:", sys.exc_info()[0]
            raise
Example #5
def save_s3(bucket,
            filename,
            contents,
            systemfile,
            content_type=None,
            acl='public-read',
            meta=None,
            encode=None):
    from boto.dynamodb2.table import Item
    key = Key(bucket, filename)
    print 'new s3 key:', 'http://s3.amazonaws.com/' + bucket.name + (
        key.name if key.name.startswith('/') else '/' + key.name)
    if isinstance(meta, Item):
        meta = meta._data
    if isinstance(meta, dict):
        trim_meta = fixed.check_entity_size(meta)
        trim_meta = dict([(k, value) for (k, value) in trim_meta.items()
                          if value is not None and value])
        trim_meta = json.loads(json.dumps(trim_meta, cls=fixed.SetEncoder))
        print 'meta key length:', len(trim_meta.keys())
        key.metadata = trim_meta
    if content_type is not None:
        print 'set content type:', content_type
        key.content_type = content_type
    elif systemfile and systemfile.endswith('js.map'):
        print 'js map!'
        key.content_type = 'application/json'
    elif systemfile:
        gt = mimetypes.guess_type(systemfile)
        key.set_metadata('Content-Type', gt[0])
    if encode is not None and encode == 'gzip':
        key.set_metadata('Content-Encoding', 'gzip')
        gzmem = StringIO.StringIO()
        gzip_file = gzip.GzipFile(fileobj=gzmem, mode='w')
        if contents is not None:
            gzip_file.write(contents)
        elif systemfile is not None:
            with open(systemfile, 'rb') as outfile:
                gzip_file.write(outfile.read())
        gzip_file.close()
        key.set_contents_from_string(gzmem.getvalue())
        print 'gzip!'
    elif contents is not None:
        print 'from string'
        key.set_contents_from_string(contents)
    elif systemfile is not None:
        io = StringIO.StringIO(open(systemfile, 'r').read()).getvalue()
        print 'from disk:', systemfile, 'io:', len(io)
        key.set_contents_from_string(io)
    if acl is not None:
        print 'save acl:', acl
        key.set_acl(acl)
    print 'save complete:', key.name
    return key
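
A sketch of one way this helper might be called for a gzip-compressed upload; the bucket object and file names are placeholders:

key = save_s3(bucket, 'js/app.js',
              contents=open('build/app.js', 'rb').read(),
              systemfile=None,
              content_type='application/javascript',
              encode='gzip')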
Example #6
def upload():
    current_app.logger.info('Upload Image')
    login_key = request.form.get('login_key', False)
    # app_token = request.form.get('app_token', False)
    app_user = request.form.get('u', False)

    if login_key:
        user = get_user_name(login_key)
    else:
        user = app_user

    if 'file' in request.files:
        file = request.files['file']
        if is_image(file.filename):
            object_id = Picture.create(user, {}, file.filename)
        elif is_attachment(file.filename):
            object_id = Attachment.create(user, {}, file.filename)
        else:
            # otherwise object_id would be undefined below
            return jsonify({'upload': 'error'})

        filename = secure_filename(object_id + '-' + file.filename)
        c = connect_s3()
        bucket_name = get_config_value('BUCKET')
        bucket = c.create_bucket(bucket_name)
        k = Key(bucket)
        k.key = user + '/' + filename
        k.set_metadata('owner', user)
        extension = os.path.splitext(filename)[1]
        k.content_type = file.content_type
        current_app.logger.info('Extension: ' + str(extension))
        current_app.logger.info('file.content_type: ' + str(file.content_type))
        if extension.lower() == '.jpg':
            k.content_type = 'image/jpeg'
        if extension.lower() == '.png':
            k.content_type = 'image/png'
        if extension.lower() == '.gif':
            k.content_type = 'image/gif'
        current_app.logger.info('Extension: ' + str(extension))
        current_app.logger.info('file.content_type: ' + str(k.content_type))
        k.set_contents_from_string(file.read())
        k.make_public()
        url = 'http://%s/%s' % (bucket_name, k.key)
        current_app.logger.info(
            '########## url: ' + str(url) + ' ' + str(bucket)
        )
        if is_image(file.filename):
            Picture.add_url(object_id, url)
        elif is_attachment(file.filename):
            object_id = Attachment.add_url(object_id, file.filename)

        return jsonify({'upload': url})
    return jsonify({'upload': 'error'})
Example #7
def generate_thumbnail(file_id, content_type):
    bucketname = 's3.mediasnak.com'
    if content_type.startswith('image/'):
        botoconn = S3Connection(access_keys.key_id, access_keys.secret, is_secure=False)
        bucket = botoconn.create_bucket(bucketname)

        # Locate and download image to thumbnail, unless it's too big
        k = bucket.get_key('u/'+file_id)
        if k.size > 30000000:
            return
        data = k.get_contents_as_string()

        # Resize the image
        img = images.Image(image_data=data)
        img.resize(width=200, height=150)
        thumb = img.execute_transforms(output_encoding=images.JPEG)

        # Store the image on S3
        tk = Key(bucket, name='t/'+file_id)
        tk.content_type = 'image/jpeg'
        tk.set_contents_from_string(thumb)

        # No error-checking here as our calling function already did it for us
        file_entry = MediaFile.objects.get(file_id=file_id)
        file_entry.has_thumb = True
        file_entry.save()
Example #8
def upload_to_s3(file_obj, filename, path, content_type=None):
    """
    Method to upload a file to s3 and return a link.
    This method automatically tries to guess  of the file using filename if content_type is not passed.

    :param file_obj: File object (this must not be closed)
    :param path: Path where file is to be uploaded (this does not include filename)
    :param content_type: Content-type of the file.
    :return: AWS file url
    """
    # Connect to the bucket
    conn = boto.connect_s3(settings.AWS_ACCESS_KEY_ID,
                           settings.AWS_SECRET_ACCESS_KEY)
    bucket = conn.get_bucket(settings.AWS_STORAGE_BUCKET_NAME)

    key_path = "{}/{}".format(path, filename)

    k = Key(bucket)
    k.key = key_path

    if content_type is None:
        # content_type not passed; guess the type from the filename
        content_type = mimetypes.guess_type(filename)[0]

    # Now set type only if is not None
    if content_type:
        k.content_type = content_type

    file_obj.seek(0)
    k.set_contents_from_string(file_obj.read())
    k.make_public()

    return "https://{}/{}".format(settings.AWS_S3_CUSTOM_DOMAIN, key_path)
Example #9
    def post(self, *args, **kwargs):
        
        parse = reqparse.RequestParser()
        parse.add_argument('file', type=str, required=True)
        args = parse.parse_args()

        # create a file object of the image
        mimetype, file_data = parse_base64_string(args['file'])
        archive_file = StringIO(file_data)

        print 'reached here 1'

        # upload the file to s3 with a unique uuid
        key_name = str(uuid.uuid4()) + '.zip'
        conn = S3Connection(app.config['S3_ACCESS_KEY'], app.config['S3_SECRET'])
        bucket = conn.get_bucket(app.config['S3_UPLOAD_SET_ARCHIVES_BUCKET'])
        archive_s3 = Key(bucket)
        archive_s3.key = key_name
        archive_s3.content_type = 'application/zip'
        archive_s3.set_contents_from_string(archive_file.getvalue())
        archive_s3.set_acl('public-read')

        print 'reached here 2'

        # close the StringIO object
        archive_file.close()

        print 'reached here 3'
        
        # return the name of the S3 key
        return {'archive_s3_key': archive_s3.key}
Example #10
    def upload(self, filename, contents_bytes, content_type=None):
        file_key = Key(bucket=self.bucket, name=filename)

        # if file_key.exists():
        #     log.info("File already exists on S3: %s", filename)
        #     return

        if content_type:
            log.info("Setting content_type of %s as %s", filename, content_type)
            file_key.content_type = content_type

        log.info("Uploading %s", filename)
        file_key.set_contents_from_string(contents_bytes, replace=True)
        return True
Example #11
    def upload(self, path, content, invalidate=None):
        """
        Set the content, mime type and ACL for a key on S3. Before setting the
        check if the object is new or changed.

        Arguments:
            path: path for key
            content: content to set
            invalidate: CloudFront path to add to invalidation list. * will be
                        added to the end to make sure we invalidate the URL
                        path with a trailing slash and the html itself.
                        If None the path will be used.
        """
        changed = self.file_changed(path, content)

        if not changed:
            return

        key = Key(self.bucket)
        key.content_type = guess_mime_type(path)
        key.key = path
        key.set_contents_from_string(content)
        key.set_acl("public-read")

        print("uploaded: {0}".format(path))

        if invalidate is None:
            invalidate = path

        self.to_invalidate.append(invalidate + "*")
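
guess_mime_type is defined elsewhere in this project; a plausible sketch, assuming it simply wraps the standard library:

import mimetypes

def guess_mime_type(path):
    # fall back to a generic binary type when the extension is unknown
    return mimetypes.guess_type(path)[0] or 'application/octet-stream'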
Example #12
def rxnorm_crawler():
    """
    Crawl RxNorm weekly updates from NIH's webpage and store them in S3 bucket.
    """
    # Target webpage
    weburls = [
        'https://www.nlm.nih.gov/research/umls/rxnorm/docs/rxnormfiles.html',
        'https://www.nlm.nih.gov/research/umls/rxnorm/docs/rxnormarchive.html'
    ]
    for weburl in weburls:
        # Get contents of webpage
        conn = urllib2.urlopen(weburl)
        html = conn.read()
        # Find urls of all RxNorm files
        pattern = r'<a\s*href=[\'|"](.*?/kss/rxnorm/RxNorm_full_\d+.zip)[\'|"]>'
        rxnorm_urls = re.findall(pattern, html)
        for url in rxnorm_urls:
            r = requests.get(url)
            if r.status_code == 200:
                #upload the file
                file_name = re.findall(r'.*?(\d+.zip)', url)[0]
                k = Key(bucket)
                k.key = 'rxnorm/' + file_name
                k.content_type = r.headers['content-type']
                k.set_contents_from_string(r.content)
Example #13
def upload_to_s3(fp, key_name, extension, reduced_redundancy=False):
    """
		Upload the contents of file handle 'fp' to the S3 bucket specified by AWS_STORAGE_BUCKET_NAME,
		under the given filename. Return the public URL.
	"""

    # connect to S3 and send the file contents
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID,
                        settings.AWS_SECRET_ACCESS_KEY)
    bucket = conn.get_bucket(settings.AWS_STORAGE_BUCKET_NAME)
    k = Key(bucket)
    k.key = key_name
    k.content_type = MIME_TYPE_BY_EXTENSION.get(extension,
                                                'application/octet-stream')
    # print "uploading: %s" % key_name
    k.set_contents_from_file(fp,
                             reduced_redundancy=reduced_redundancy,
                             rewind=True)
    k.set_acl('public-read')

    # construct the resulting URL, which depends on whether we're using a CNAME alias
    # on our bucket
    if settings.AWS_BOTO_CALLING_FORMAT == 'VHostCallingFormat':
        return "http://%s/%s" % (settings.AWS_STORAGE_BUCKET_NAME, key_name)
    else:
        return "http://%s.s3.amazonaws.com/%s" % (
            settings.AWS_STORAGE_BUCKET_NAME, key_name)
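
MIME_TYPE_BY_EXTENSION is defined elsewhere in the project; a representative sketch (the entries, and whether keys carry a leading dot, are assumptions based on how extension is derived upstream):

MIME_TYPE_BY_EXTENSION = {
    '.jpg': 'image/jpeg',
    '.png': 'image/png',
    '.gif': 'image/gif',
    '.mp3': 'audio/mpeg',
}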
Example #14
    def uploadThumbToS3(self):
        #setup the bucket
        # c = boto.connect_s3(your_s3_key, your_s3_key_secret)

        bucket = 'imgcat-dev'
        c = boto.connect_s3()
        # b = c.get_bucket(bucket, validate=False)
        b = c.lookup(bucket)
        if b is None:
            logger.error("Bucket not found: " + bucket)
            return

        sys.stdout.flush()
        keyname = os.path.basename(os.path.normpath(self.localThumbFilename()))
        print 'gonna create key: ' , keyname
        k = Key(bucket=b, name=keyname)

        k.content_type = self.data['mimetype']
        k.set_contents_from_filename(self.localThumbFilename())
        k.set_acl('public-read')
        self.data['thumb_path'] = k.generate_url(expires_in=0, query_auth=False)

        logger.info("uploaded thumbnail:" + keyname + " to S3")
        sys.stdout.flush()
        self.data['load_status'] = 'thumbnailed'
        self.persist()
Example #15
def upload_sjson_to_s3(config, sjson_data):
    """
    Upload sjson data to s3.

    Arguments:
        config (dict): instance configuration
        sjson_data (list): transcript data to be uploaded to `s3`

    Returns:
        transcript name for 'edxval'
    """
    s3_conn = boto.connect_s3()
    bucket = s3_conn.get_bucket(config['aws_video_transcripts_bucket'])
    k = Key(bucket)
    k.content_type = 'application/json'

    transcript_name_without_instance_prefix, transcript_name_with_instance_prefix = construct_transcript_names(
        config)

    k.key = '{}.sjson'.format(transcript_name_with_instance_prefix)
    k.set_contents_from_string(json.dumps(sjson_data))
    k.set_acl('public-read')

    # transcript path is stored in edxval without `instance_prefix`
    return '{}.sjson'.format(transcript_name_without_instance_prefix)
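
construct_transcript_names is not shown; a hypothetical sketch consistent with how its two return values are used (the config keys here are assumptions, not the project's actual names):

def construct_transcript_names(config):
    # edxval stores the transcript path without the instance prefix
    name = config['transcript_name']
    return name, '{}/{}'.format(config['instance_prefix'], name)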
Example #16
 def clean_photo(self):
     if self.instance.pk:
         if 'photo' not in self.files:
             return self.instance.photo
     if self.files['photo'].content_type not in [
             'image/gif', 'image/jpeg', 'image/jpg', 'image/png'
     ]:
         raise ValidationError('Invalid Photo')
     uuid = u'{prefix:s}.{suffix:s}'.format(
         prefix=str(uuid4()),
         suffix=self.files['photo'].content_type.split('/')[-1],
     )
     try:
         photo = Key(
             S3Connection(
                 settings.AWS_ACCESS_KEY_ID,
                 settings.AWS_SECRET_ACCESS_KEY,
             ).get_bucket(settings.AWS_BUCKET))
         photo.content_type = self.files['photo'].content_type
         photo.key = uuid
         photo.set_contents_from_string(self.files['photo'].read())
         return u'https://d2k6ktnea3auzx.cloudfront.net/{uuid:s}'.format(
             uuid=uuid)
     except Exception:
         pass
     raise ValidationError('Invalid Photo')
Example #17
 def save_file(self, remote_file, local_file_path, content_type=None):
     k = Key(self.bucket)
     k.name = self.target_path+'/'+remote_file
     if content_type:
         k.content_type = content_type
     k.set_contents_from_filename(local_file_path)
     k.set_acl('public-read')
Example #18
def scrape_data_to_html():
    timestamp = datetime.fromtimestamp(time.time()).strftime("%H:%M:%S on %A, %d %B, %Y")
    all_drinkers = drinkers_table.scan()
    drinkers = []
    for drinker in all_drinkers:
        if drinker['code'] is None:
            drinker['code'] = "UNKNOWN"
        if drinker['name'] is None:
            drinker['name'] = "UNKNOWN"
        if drinker['volume_consumed'] is None:
            drinker['volume_consumed'] = 0
        if drinker['number_of_drinks'] is None:
            drinker['number_of_drinks'] = 0
        d = {}
        d['code'] = drinker['code']
        d['name'] = drinker['name']
        d['volume_consumed'] = drinker['volume_consumed']
        d['number_of_drinks'] = drinker['number_of_drinks']
        drinkers.append(d)
    loader = FileLoader('templates')
    template = loader.load_template('drinks.html.template')
    webpage = template.render(locals())
    bucket = s3.get_bucket('kegerator')
    key = Key(bucket)
    key.key = 'drinks.html'
    key.content_type = 'text/html'
    key.set_contents_from_string(webpage)
    key.make_public()
Example #19
    def _get_key(self):
        self._get_bucket()
        key = Key(self.bucket)
        key.name = self.filename
        key.content_type = 'application/json'

        self.key = key
Example #20
    def upload(self,
               data,
               content_type,
               make_public=True,
               bucket=app.config['S3_BUCKET_NAME']):
        bucket_name = bucket
        try:
            bucket = self.get_bucket(bucket_name)
        except S3ResponseError as e:
            if e.error_code == 'NoSuchBucket':
                bucket = self.create_bucket(bucket_name)
            else:
                raise

        s3key = Key(bucket)
        s3key.key = str(uuid.uuid4().get_hex().upper()
                        [0:16]) + '.' + content_type.split('/')[-1]
        s3key.content_type = content_type
        s3key.set_contents_from_string(data)
        if make_public:
            s3key.set_acl('public-read')
        url = 'https://s3.amazonaws.com/' + bucket_name + '/' + s3key.key
        return url
Example #21
File: s3.py Project: samuel/gypsy
    def _save(self, name, content, mimetype=None, public=None):
        if not mimetype:
            mimetype = guess_type(name)[0] or "application/x-binary"

        if hasattr(content, 'public'):
            public = content.public
        else:
            public = self.public if public is None else public

        key = Key(self.bucket, name.encode('utf-8'))
        key.content_type = mimetype
        headers = getattr(content, 'backend_headers', {})

        success = False

        for i in range(3):
            content.seek(0)
            retry(key.set_contents_from_file, content, headers=headers)

            if public:
                try:
                    retry(key.set_acl, 'public-read')
                except S3ResponseError as exc:
                    if exc.status == 404:
                        continue
                    raise

            # Make sure the file actually exists on S3
            # TODO: Maybe only do this if not setting the acl, as that should have the same effect?
            success = bool(retry(self.bucket.lookup, name.encode('utf-8')))
            if success:
                break

            logging.warning(u"Failed to write to S3: %s" % name)
Example #22
    def write_to_s3(self, prefix, fname, content, ctype='text/plain'):
        """
        Write ``content`` into S3 at ``prefix``/``fname``. If ``self.dry_run``,
        write to local disk instead. Return the resulting URL, either an S3
        URL or a local 'file://' URL.

        :param prefix: the prefix to write S3 files under, or local files under
        :type prefix: str
        :param fname: the file name to create
        :type fname: str
        :param content: the content to write into the file
        :type content: str
        :returns: URL to the created file
        :rtype: str
        """
        path = os.path.join(prefix, fname)
        if self.dry_run:
            path = os.path.abspath(path)
            logger.warning("DRY RUN: Writing s3-bound content to %s", path)
            dest_dir = os.path.dirname(path)
            if not os.path.exists(dest_dir):
                os.makedirs(dest_dir)
            with open(path, 'w') as fh:
                fh.write(content)
            return 'file://%s' % path
        # else write to S3
        logger.debug("Creating S3 key: %s (Content-Type: %s)", path, ctype)
        k = Key(self.bucket)
        k.content_type = ctype
        k.key = path
        k.set_contents_from_string(content)
        url = self.url_for_s3(path)
        logger.debug("Data written to %s", url)
        return url
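
A usage sketch exercising the dry-run branch; the uploader instance and arguments are placeholders:

uploader.dry_run = True
url = uploader.write_to_s3('output/2024-01-01', 'report.html',
                           '<html>...</html>', ctype='text/html')
# url is a file:// path here; with dry_run False it would be an S3 URL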
Example #23
    def bucket_create(self, key, val, metadata_dict=None):
        '''
        Create an object in the bucket, but only if not yet present (saves traffic).

        Parameters
        ----------
        key : str
        val : file-like object
        metadata_dict : dict

        Returns
        -------
        Key
        '''
        s3_key = Key(self.apk_bucket)

        s3_key.key = key
        # important: set metadata before the actual upload
        s3_key.metadata = metadata_dict if metadata_dict is not None else {}
        s3_key.content_type = 'application/vnd.android.package-archive'
        # replace=False skips the upload if the key already exists
        log.debug("uploading %s", s3_key.key)
        s3_key.set_contents_from_file(val, replace=False)

        return s3_key
Example #25
def acquire_run_id(comment='', force=False):

    key = _get_key(RUN_INFO_FILE)
    if key:
        run_info = json.loads(key.get_contents_as_string())
    else:
        key = Key(_get_bucket(), RUN_INFO_FILE)
        key.content_type = 'text/plain'
        run_info = {
            'last_run': 0,
            'active_run': None,
        }

    if run_info['active_run'] is not None:
        print 'Already an active run:', run_info['active_run']
        if not force:
            raise Exception('Already an active run')

    run_info['active_run'] = '%s on %s started at %s %s' % (
        os.environ.get('USER', 'unknown'), socket.gethostname(),
        str(datetime.datetime.utcnow()), comment)

    key.set_contents_from_string(json.dumps(run_info))

    return run_info['last_run'] + 1
Example #27
        def event_from_facebook(args, data):
            event = g.Event(
                club=args['club'],
                facebook_id=data['id'],
                name=data['name'],
                start_date=arrow.get(data['start_time']).datetime,
            )

            if 'description' in data.keys():
                event.description = data['description'].replace('\n', '<br />')

            if 'end_time' in data.keys():
                event.end_date = arrow.get(data['end_time']).datetime
            else:
                event.end_date = arrow.get(data['start_time']).replace(hours=2).datetime

            if 'place' in data.keys():
                event.place = Place(
                    name=data['place']['name']
                )
                if 'location' in data['place'].keys():
                    event.address = data['place']['location']['street'] + ', ' + data['place']['location']['city'] \
                                  + ', ' + data['place']['location']['country']

            event.save()
            bucket = s3conn.get_bucket(current_app.config['AWS_S3_BUCKET'])
            key = Key(bucket)
            key.key = g.tenant + '/events/' + str(event.id)
            key.content_type = 'image/jpeg'
            key.set_contents_from_string(requests.get(data['cover']['source']).content)
            key.make_public()
            event.poster = 'https://' + current_app.config['AWS_S3_BUCKET'] + '.s3.amazonaws.com/' + g.tenant + '/events/' + str(event.id)
            event.save()
Example #28
def upload_from_server(data_file,
                       upload_file_name,
                       bucket_name=None,
                       public=False,
                       content_type=None):
    """

    @param data_file: the file that you want to upload
    @param upload_file_name: the file path where to upload, eg: upload_folder/file_name.txt, or file_name.jpg
    @param public: visibility of file on S3

    @return: the url of the uploaded file
    """

    if data_file is None:
        raise OwnException(NO_FILE_SPECIFIED)

    conn = boto.connect_s3()
    # conn = boto.s3.connect_to_region(region_name='the_region')
    # conn = S3Connection('aws_key', 'aws_secret')
    bucket_name = __get_bucket_name(bucket_name)
    bucket = conn.get_bucket(bucket_name)
    k = Key(bucket)
    k.key = upload_file_name
    policy = 'public-read' if public else 'private'
    k.content_type = content_type
    k.set_contents_from_file(data_file, policy=policy)
    # k.set_contents_from_string(data_file, policy=policy)

    url = k.generate_url(expires_in=0, query_auth=False)

    return url
Example #30
def upload_images_to_s3(markup, archive, question_files_bucket):
    """Uploads all the images referenced in the markup to S3. Extracts the
    images from the '***_files' directory in the zip archive.

    Keyword Arguments:
    markup -- the string markup whose images need to be uploaded to s3.
    archive -- the archive object as returned by zipfile.
    question_files_bucket -- the S3 bucket to upload the images into.
    """

    ## Create a BS object from the markup
    soup = BeautifulSoup(markup, 'html.parser')

    ## Find all the image objects
    imgs = soup.find_all('img')

    ## Iterate over all the images, get the file path, upload the file to S3 and change the attribute to point to the S3 hosted image
    bucket = question_files_bucket
    for img in imgs:
        path = img.attrs['src']
        img_file = archive.open(path)
        img_s3 = Key(bucket)
        img_s3.key = ''.join([str(uuid4()), '_', os.path.basename(path)])
        img_s3.content_type = mimetypes.guess_type(path)[0]
        img_s3.set_contents_from_string(img_file.read())
        img_s3.set_acl('public-read')
        img_url = ''.join(['https://', app.config['S3_QUESTION_FILES_TEMP_BUCKET'], '.s3.amazonaws.com/', img_s3.key])
        img.attrs['src'] = img_url

    return str(soup)
Example #31
def s3_upload(acl, bucket, conn, content, content_type, path):
    """ Store an object in our an S3 bucket.

    :param acl:
        S3 ACL for the object
    :param bucket:
        S3 bucket to upload to
    :param conn:
        boto S3 connection used to look up the bucket
    :param content:
        a string representation of the object to upload
    :param content_type:
        a string MIMETYPE of the object that S3 should
        be informed of
    :param path:
        an object specific portion of the S3 key name
        to be passed to gen_url to generate the location
        in S3 of the new object

    :raise:
        IOError on any failure
    :return:
        S3 generated URL of the uploaded object
    """

    # obj is the object that will be uploaded
    obj = Key(conn.get_bucket(bucket))
    obj.content_type = content_type
    obj.key = path

    obj.set_contents_from_string(content)
    obj.set_acl(acl)

    return gen_url(bucket, path)
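
gen_url is not shown; a plausible sketch mirroring the virtual-hosted S3 URL scheme (the exact format is an assumption):

def gen_url(bucket, path):
    # assumption: objects are addressed through the bucket's virtual host
    return 'https://%s.s3.amazonaws.com/%s' % (bucket, path)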
Example #32
def get_badges():
    initial_data = requests.get('{}.json'.format(CFGOV_BASE)).json()
    BADGES['overall'] = initial_data['badge_url']
    commit_sha = initial_data['commit_sha']
    build_soup = bs(
        requests.get('{}/builds/{}'.format(COVERALLS, commit_sha)).text,
        'html.parser')
    results_div = build_soup.find(
        True, {'class': ['show-item', 'show-last-build-detail']})
    rows = results_div.findAll('tr')[1:]
    for row in rows:
        coverage = row.find('div').find('div').text
        if 'frontend' in row.find('a').text:
            BADGES['js'] = BADGE_URL.format(int(round(float(coverage), 2)))
        elif 'backend' in row.find('a').text:
            BADGES['python'] = BADGE_URL.format(int(round(float(coverage), 2)))
    if not (BADGES['js'] and BADGES['python']):
        print('No update -- only one set of tests was recorded')
        return
    with open('badges.html', 'w') as f:
        content = TEMPLATE.format(BADGES['overall'], BADGES['js'],
                                  BADGES['python']).encode('utf-8')
        f.write(content)
    with open('badges.json', 'w') as f:
        f.write(json.dumps(BADGES))
    s3 = boto.connect_s3(S3_KEY,
                         S3_SECRET,
                         calling_format=OrdinaryCallingFormat())
    bucket = s3.get_bucket(S3_BASE)
    prep = Key(bucket=bucket, name='build/badges/badges.html')
    prep.content_type = 'text/plain'
    prep.set_contents_from_filename('badges.html')
    print('badges updated {}'.format(datetime.datetime.now()))
    return
Example #33
    def store_file(self, **kwargs):
        content = kwargs['content']
        k = Key(self.bucket)
        filename = None
        if 'name' in kwargs:
            filename, extension = os.path.splitext(kwargs['name'])
            filename = filename.replace(" ", "")
            extension = extension.replace('.', '')
        if 'ext' in kwargs:
            extension = kwargs['ext']
        typ, ext = settings.ALLOWED_FILE_TYPES[extension]
        if "type" in kwargs:
            typ = kwargs['type']
        if 'random_name' in kwargs:
            filename = None
        k.key = "%s.%s" % (filename or uuid4().hex, ext)
        k.content_type = typ
        try:
            content = base64.decodestring(content.split('base64,')[1])
        except IndexError:
            content = base64.decodestring(content)

        k.set_contents_from_string(content)
        self.bucket.set_acl('public-read', k.key)
        return k.key
Example #34
def upload_image(image_url, image_name):
    """Upload an image."""
    try:
        # connect to the S3 bucket
        connection = boto.connect_s3()
        bucket = connection.get_bucket(config.AWS_STORAGE_BUCKET_NAME)
        key = Key(bucket)

        # assign the file name
        key.key = str(int(time())) + "-" + image_name + ".png"

        # read the source image
        file_object = urllib2.urlopen(image_url)
        file_data = StringIO.StringIO(file_object.read())

        # write it to S3
        key.content_type = "image/png"
        key.set_contents_from_file(file_data)

        # grant public read access
        key.make_public()

        result_url = key.generate_url(0, expires_in_absolute=True, force_http=True, query_auth=False)
        return result_url

    except Exception as e:
        return e
Example #35
    def post(self, news_id):
        news = g.News.objects.with_id(news_id)

        if not current_user.is_admin() and not current_user.has_any_permission('club', news.club.id, ['admin', 'news']):
            return abort(401)

        parser = reqparse.RequestParser()
        parser.add_argument('media', type=werkzeug.datastructures.FileStorage, location='files')
        args = parser.parse_args()

        uid = str(uuid4())

        bucket = s3conn.get_bucket(current_app.config['AWS_S3_BUCKET'])
        key = Key(bucket)
        key.key = g.tenant + '/news/' + str(news.id) + '/' + uid
        key.content_type = args['media'].mimetype
        key.set_contents_from_file(args['media'].stream)
        key.make_public()

        news.update(add_to_set__medias=Media(
            name=uid,
            url='https://' + current_app.config['AWS_S3_BUCKET'] + '.s3.amazonaws.com/' + g.tenant + '/news/' + str(news.id) + '/' + uid
        ))

        return g.News.objects.with_id(news_id)
Example #36
def upload():
    if request.method == 'POST':
        # Connect to Amazon S3
        s3 = boto.connect_s3(access_id, secret_access_key)

        # Get a handle to the S3 bucket
        bucket_name = 'emotipic'
        bucket = s3.get_bucket(bucket_name)
        k = Key(bucket)

        data_file = request.files.get('file')

        file_contents = data_file.read()
        k.key = os.path.join('uploads', data_file.filename)
        k.content_type = data_file.content_type

        print "Uploading some data to " + bucket_name + " with key: " + k.key
        k.set_contents_from_string(file_contents)

        # call api
        url = k.generate_url(expires_in=300, query_auth=False, force_http=True)

        face_headers = {
            'Content-Type': 'application/json',
            'Ocp-Apim-Subscription-Key': '3014f0d696a144d4bd875661e36057c3'
        }

        emotion_headers = {
            'Content-Type': 'application/json',
            'Ocp-Apim-Subscription-Key': 'b94b9a266d7546ef91e64be1380960c9'
        } 

        data = {
                'url': url 
        }

        print url

        data = json.dumps(data)

        faces = requests.post('https://api.projectoxford.ai/face/v0/detections?analyzesFaceLandmarks=true', data=data, headers=face_headers)

        faceRectangles = ''
        faces = faces.json()

        # parse faces
        for face in faces:
            faceRectangle = face['faceRectangle']
            faceRectangles += ';%s,%s,%s,%s' % (faceRectangle['left'], faceRectangle['top'], faceRectangle['width'], faceRectangle['height'])

        faceRectangles = faceRectangles[1:]

        emotions = requests.post('https://api.projectoxford.ai/emotion/v1.0/recognize?faceRectangles=' + faceRectangles, data=data, headers=emotion_headers)
        emotions = emotions.json()

        return json.dumps({
            'faces': faces,
            'emotions': emotions
        })
Example #37
 def _put_S3(self, msg):
     key = '/'.join([self.application, uuid.uuid4().hex])
     key_object = Key(self.bucket_conn)
     key_object.key = key
     key_object.content_type = 'application/json'
     key_object.set_contents_from_string(msg)
     key_object.close()
     return json.dumps({'Bucket': self.bucket_name, 'Key': key})
Example #38
    def set_resource_from_string(self, resource, data, content_type=None):
        bucket, s3_key = self.s3_storage_objects(resource)
        key = Key(bucket)
        key.key = s3_key
        if content_type is not None:
            key.content_type = content_type

        key.set_contents_from_string(data)
Example #40
def test_s3_content_type():
    conn = boto.connect_s3()
    bucket = conn.create_bucket('source-bucket')
    key = Key(bucket, 'test-file')
    key.content_type = 'text/plain'
    key.set_contents_from_string('Hello World')

    assert 'text/plain; charset=utf-8' == S3.get_content_type('source-bucket', 'test-file')
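
The S3 helper under test is not shown; assuming it is a thin boto wrapper, get_content_type might look like this sketch:

import boto

class S3(object):
    @staticmethod
    def get_content_type(bucket_name, key_name):
        # get_key issues a HEAD request and populates the key's metadata
        key = boto.connect_s3().get_bucket(bucket_name).get_key(key_name)
        return key.content_type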
Example #41
def write(filename, content):
    conn = S3Connection(aws_access_key, aws_secret_key)
    pb = conn.get_bucket(bucket_name)
    k = Key(pb)
    k.name = prefix + filename
    k.content_type = "text/javascript"
    k.set_contents_from_string(content, headers={"Cache-Control": "max-age=0"})
    k.set_acl("public-read")
Example #42
def s3bucket(ec2, env, source):
	""" Copy contents of static directory to s3 bucket """
	mime_types = {
		"eot" : "application/vnd.ms-fontobject",
		"ttf" : "font/truetype",
		"otf" : "font/opentype",
		"woff": "font/woff",
	}
	s3b = boto.connect_s3(ec2.access_key,ec2.secret_key)
	for machine in env:
		if 's3bucket' in machine.keys():
			print 'Copying static media for %s' % machine['name']
			s3bucket = machine['s3bucket']

			# Get the expires header value
			time_format = '%a, %d %b %Y %H:%M:%S'
			now = datetime.datetime.utcnow().strftime(time_format)
			expires = s3bucket.get('expires', now)
			try:
				datetime.datetime.strptime(expires, time_format)
			except ValueError:
				error('Improperly formatted datetime: %s' % expires)

			# Get or create bucket using the name
			name    = s3bucket.get('name','s3%s'%machine['name'])
			try: b = s3b.get_bucket(name)
			except: b = s3b.create_bucket(name)
			
			# Set ACL Public for all items in the bucket
			b.set_acl('public-read')

			k = Key(b)
			static_dir = os.path.join(source,'project','static')
			for root, dirs, files in os.walk(static_dir):
				if '.svn' in dirs: dirs.remove('.svn')
				key_root = root.split('static')[1]

				for file in files:
					filename = os.path.join(root,file)

					# Set the headers
					headers = {'Expires':expires}
					if '.gz' in file:
						headers.update({'Content-Encoding':'gzip'})

					if os.path.isfile(filename):
						# Set the mime-type
						ext = file.split('.')[-1]
						if ext in mime_types.keys():
							k.content_type = mime_types[ext]

						# Send the file
						k.key = os.path.join(key_root,file)
						print '\nTransfering %s' % filename
						k.set_contents_from_filename(filename, headers=headers, cb=s3_percent_cb, num_cb=10)
			print '\nTransfer complete'

	invalidate_cache(ec2, env, source)
Example #43
def postToS3(image, originalImage, topCaption, bottomCaption):
    #print "POST!"
    conn = S3Connection('AKIAJQ5EWIERMNJBOSMQ', 'CxMK3wAZ97lItqozZkNp8AmVoj58/TdrH+ClXNaB')
    
    imgHash = hashlib.md5()
    imgHash.update(image)
    imgHashStr = imgHash.hexdigest()
 
    origImgHash = hashlib.md5()
    origImgHash.update(originalImage)
    origImgHashStr = origImgHash.hexdigest()

    bucket = conn.create_bucket('memeyourfriends')
    k = Key(bucket)
    key = 'complete/' + imgHashStr  + '.jpg'
    k.key = key
    k.content_type = 'image/jpeg'
    k.set_metadata('top', topCaption)
    k.set_metadata('bot', bottomCaption)
    k.set_contents_from_string(image) 
    
    k2 = Key(bucket)
    key2 = 'raw/' + origImgHashStr + '.jpg'
    k2.key = key2
    k2.content_type = 'image/jpeg'
    k2.set_contents_from_string(originalImage)

    sdb = boto.connect_sdb('AKIAJQ5EWIERMNJBOSMQ', 'CxMK3wAZ97lItqozZkNp8AmVoj58/TdrH+ClXNaB')
    
    domain = sdb.create_domain('memeyourfriends')
    item = domain.new_item(imgHashStr)
    item['imgHash'] = imgHashStr
    item['imgHashPath'] = key
    item['origImgHash'] = origImgHashStr
    item['origImgHashPath'] = key2
    item['topCaption'] = topCaption
    item['bottomCaption'] = bottomCaption
    
    item.save()


    return key
Example #44
def test_s3_object_metadata():
    conn = boto.connect_s3()
    bucket = conn.create_bucket('source-bucket')
    key = Key(bucket, 'test-file')
    key.content_type = 'text/plain'
    key.set_contents_from_string('Hello World')

    s3 = S3()
    assert {'Content-Type': 'text/plain; charset=utf-8'} == s3.metadata('source-bucket', 'test-file')
Example #45
 def upload(self, filename, bucket, key_name, metadata=None):
     bucket = Bucket(self.conn, bucket)
     key = Key(bucket, key_name)
     key.content_type = "application/x-gzip"
     if metadata:
         for k, v in metadata.items():
             key.set_metadata(k, str(v))
     with open(filename, "rb") as fp:
         self.retry(key.set_contents_from_file, fp)
Example #46
def upload_view():
  f = request.files['file']
  conn = boto.connect_s3()
  bucket = conn.get_bucket('ym-remote-control')
  k = Key(bucket)
  k.key = str(uuid.uuid4()) + '.' + f.filename.split('.')[-1]
  k.content_type = mimetypes.guess_type(f.filename)[0]
  k.set_contents_from_file(f, headers={'Content-Type': k.content_type}, policy='public-read')
  return jsonify({'url': k.generate_url(300)})
Example #47
 def _save(self, bucket_name, end_point, File, mimetype=None):
     k = Key(self.connection.get_bucket(bucket_name))
     k.key = end_point
     if mimetype:
         k.content_type = mimetype
     if isinstance(File, str) or isinstance(File, unicode):
         k.set_contents_from_string(File)
     else:
         k.set_contents_from_file(File)
Example #48
 def upload(self, image_in_base64, file_name_with_extension,
            file_extension):
     k = Key(self.bucket)
     k.key = file_name_with_extension
     k.content_type = 'image/' + file_extension
     k.set_contents_from_string(base64.b64decode(image_in_base64),
                                policy='public-read')
     k.set_acl('public-read')
     return self.get_url(file_name_with_extension)
Example #49
 def call(self, phoneTo, tweet, voice):
     xml = xmlTemplate % (voice, tweet['user']['name'], tweet['text'])
     print(xml)
     k = Key(self.bucket)
     k.key = 'tweet_%s.xml' % tweet['id']
     k.content_type = 'text/xml'
     k.set_contents_from_string(xml)
     k.set_acl('public-read')
     self.client.calls.create(to=phoneTo, from_=phoneFrom, method='GET',
             url='https://s3.amazonaws.com/twinty/tweet_%s.xml' % tweet['id'])
Example #50
 def upload_index(self, content):
     """upload the index.html"""
     key_path = os.path.join(self.prefix, 'index.html')
     start = datetime.datetime.now()
     logger.info("Uploading index.html")
     k = Key(self.bucket)
     k.key = key_path
     k.content_type = 'text/html'
     k.set_contents_from_string(content)
     logger.info("Upload complete in %s", (datetime.datetime.now() - start))
Example #51
def save_to_s3():
    conn = boto.connect_s3(settings.AWSAccessKeyId, settings.AWSSecretKey)
    bucket = conn.get_bucket('tt-test-321', validate=True)
    k = Key(bucket)
    k.key = "what.txt"
    k.content_type = 'text/plain'
    with open('what.txt', 'r') as f:
        content = f.read()
    k.set_contents_from_string(content)
    k.set_acl('public-read')
Example #52
def createBlob(category, png_chunk):
    objectId = category + '/' + str(uuid.uuid4()) + '.png'

    k = Key(bucket)
    k.key = objectId
    k.content_type = 'image/png'
    k.set_contents_from_string(png_chunk)

    url = 'https://cocparser.s3.amazonaws.com/' + objectId
    return url
Example #53
def upload_photo(username, photo):
    data = base64.b64decode(photo)
    s3 = boto.connect_s3()
    bucket = s3.get_bucket(BUCKET_NAME)
    key = Key(bucket)
    key.content_type = 'image/jpeg'
    key.key = 'photos/%s/%s.jpg' % (username, random_hex())
    key.set_contents_from_string(data)
    key.close()
    key.make_public()
    return AWS_URL_TEMPLATE % key.key
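
The module-level AWS_URL_TEMPLATE and random_hex referenced above are not shown; plausible definitions (the URL format is an assumption):

import binascii
import os

# assumed format, matching the bucket's virtual-hosted URL
AWS_URL_TEMPLATE = 'https://' + BUCKET_NAME + '.s3.amazonaws.com/%s'

def random_hex():
    # 16 random hex characters
    return binascii.hexlify(os.urandom(8))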
Example #54
def push_file(src_path, s3_path, verbose=False, overwrite=False):
    key = _get_key(s3_path)
    if key is not None:
        if not overwrite:
            raise Exception('File already at %s' % s3_path)
        if verbose:
            print 'Overwriting existing %s.' % s3_path

    key = Key(_get_bucket(), s3_path)
    if s3_path.endswith('.TIF') or s3_path.endswith('.tif'):
        key.content_type = 'image/tiff'
    if s3_path.endswith('.jpg'):
        key.content_type = 'image/jpeg'
    if s3_path.endswith('.txt'):
        key.content_type = 'text/plain'

    bytes_uploaded = key.set_contents_from_filename(src_path)
    if verbose:
        print 'Uploaded %d bytes from %s to %s.' % (bytes_uploaded, src_path,
                                                    s3_path)
Example #55
 def flush(self):
     payload = ''
     while len(self.buffer) > 0:
         record = self.buffer.pop(0)
         if record.levelno >= self.level:
             payload += self.format(record)
     if payload:
         conn = boto.connect_s3()
         bucket = conn.get_bucket(self.bucket)
         key = Key(bucket)
         key.key = '/'.join((self.prefix, 'bang-%f' % time.time()))
         key.content_type = 'application/json'
         key.set_contents_from_string(payload)
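
A usage sketch; the handler class defining flush() above is not shown, so its name and constructor arguments here are hypothetical:

import logging

handler = S3LogHandler(bucket='my-log-bucket', prefix='app/logs')
logging.getLogger('app').addHandler(handler)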