Example #1
    def handle(self, *args, **options):
        context = Context({
            'object_list': Registration.objects.all(),
        })
        loader = Loader()
        template, _ = loader.load_template(
            'willard/registration_list_table.html')
        filedata = template.render(context)

        bucket_name = 'assets.sunlightfoundation.com'
        connection = S3Connection(settings.MEDIASYNC.get('AWS_KEY'),
                                  settings.MEDIASYNC.get('AWS_SECRET'))

        headers = {
            "x-amz-acl": "public-read",
            "Content-Type": 'text/csv',
        }

        # calculate md5 digest of filedata
        checksum = hashlib.md5(filedata)
        hexdigest = checksum.hexdigest()
        b64digest = base64.b64encode(checksum.digest())

        bucket = connection.get_bucket(bucket_name)
        key = Key(bucket)
        key.key = '/reporting/uploads/%s' % 'lobbyist_registrations.html'
        key.set_contents_from_string(filedata,
                                     headers=headers,
                                     md5=(hexdigest, b64digest))

        print key.generate_url(60 * 60 * 24 * 8).split('?')[0].replace(
            'https', 'http').replace('//', '/')
Example #2
def uploadFile(filename, files):
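    # store the uploaded file in S3 with a public-read ACL and return its public bucket URL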
    k = Key(bucket)
    k.key = filename
    k.set_contents_from_string(files.read())
    k.set_acl('public-read')
    k.generate_url(3600 * 24 * 7)
    return 'https://s3-ap-northeast-1.amazonaws.com/gangnam-proto-image/' + filename
Example #3
    def run(self):
        try:
            dest_host = bucket.get_website_endpoint()
            u = urlparse(self.url)
            keyname = u.path
            h = httplib2.Http()
            resp_origin, c_origin = h.request(u.geturl(), 'HEAD')
            resp_dest, c_dest = h.request('http://%s%s' % (dest_host, u.path), 'HEAD')
            if resp_origin['status'] != resp_dest['status']:
                if int(resp_origin['content-length']) > size_limit:
                    # big file, save to disk
                    logger('%s is larger than limit: %s, saving to disk\n' % (u.geturl(), resp_origin['content-length']))
                    save_path = '/tmp/' + os.path.basename(u.path)
                    urlretrieve(u.geturl(), save_path)
                    k = Key(bucket)
                    k.set_metadata("Content-Type", resp_origin['content-type'])
                    k.name = prefix + keyname
                    k.set_contents_from_file(open(save_path))
                    k.set_acl('public-read')
                    os.remove(save_path)
                    logger('%s synchronized\n' % k.generate_url(0, query_auth=False, force_http=True))
                else:
                    resp, content = h.request(self.url)
                    k = Key(bucket)
                    k.set_metadata("Content-Type", resp_origin['content-type'])
                    k.name = prefix + keyname
                    k.set_contents_from_string(content)
                    k.set_acl('public-read')
                    logger('%s synchronized\n' % k.generate_url(0, query_auth=False, force_http=True))
            else:
                logger('http://%s%s in sync\n' % (dest_host, u.path))

        except Exception, e:
            logger('could not copy url %s - %s\n' % (self.url, e))
Example #4
 def handle(self, *args, **options):
     file_name = args[0]
     connection = default_storage.connection
     bucket = connection.get_bucket(settings.AWS_STORAGE_BUCKET_NAME)
     key = Key(bucket)
     key.key = file_name
     key.set_contents_from_filename(file_name)
     print key.generate_url(60 * 60)
Example #5
    def upload_acc_fit_data(self):

        S3_ID = settings.S3_ID
        S3_KEY = settings.S3_KEY
        connection = S3Connection(S3_ID, S3_KEY)
        AWS_STORAGE_BUCKET_NAME = 'gladminds'
        bucket = connection.get_bucket(AWS_STORAGE_BUCKET_NAME)
        k1=Key(bucket)
        k1.key = "acc_data.csv"
        
        fp = open('acc_data.csv','w')
        
        query1 = "SELECT 'Mechanic ID','Permanent ID','First Name', 'District', 'Phone Number', 'State Name','Distributer ID', 'Unique Part Code', 'Points','Date of SMSed' UNION ALL SELECT  mem.mechanic_id, mem.permanent_id, mem.first_name, mem.district, mem.phone_number, st.state_name, distr.distributor_id, spart.unique_part_code, pp.points, acre.created_date FROM gm_accumulationrequest AS acre LEFT OUTER JOIN gm_member mem ON mem.id = acre.member_id LEFT OUTER JOIN gm_distributor AS distr ON mem.registered_by_distributor_id = distr.id LEFT OUTER JOIN gm_state AS st ON mem.state_id = st.id LEFT OUTER JOIN gm_accumulationrequest_upcs AS accup ON acre.transaction_id = accup.accumulationrequest_id LEFT OUTER JOIN gm_sparepartupc AS spart ON accup.sparepartupc_id = spart.id LEFT OUTER JOIN gm_sparepartmasterdata AS mdata ON mdata.id = spart.part_number_id LEFT OUTER JOIN gm_sparepartpoint AS pp ON mdata.id = pp.part_number_id WHERE mem.form_status =  'complete' GROUP BY acre.transaction_id ";
        
        query2 = "SELECT 'Mechanic ID','Permanent ID','First Name', 'District', 'Phone Number', 'State Name','Distributer ID', 'Unique Part Code',  'Part Number', 'Description','Points','Date of SMSed' UNION ALL SELECT  mem.mechanic_id, mem.permanent_id, mem.first_name, mem.district, mem.phone_number, st.state_name, distr.distributor_id, spart.unique_part_code,mdata.part_number, mdata.description, pp.points, acre.created_date FROM gm_accumulationrequest AS acre LEFT OUTER JOIN gm_member mem ON mem.id = acre.member_id LEFT OUTER JOIN gm_distributor AS distr ON mem.registered_by_distributor_id = distr.id LEFT OUTER JOIN gm_state AS st ON mem.state_id = st.id LEFT OUTER JOIN gm_accumulationrequest_upcs AS accup ON acre.transaction_id = accup.accumulationrequest_id LEFT OUTER JOIN gm_sparepartupc AS spart ON accup.sparepartupc_id = spart.id LEFT OUTER JOIN gm_sparepartmasterdata AS mdata ON mdata.id = spart.part_number_id LEFT OUTER JOIN gm_sparepartpoint AS pp ON mdata.id = pp.part_number_id WHERE mem.form_status =  'complete' GROUP BY acre.transaction_id ";
        
        cursor.execute(query1)
        rows1 = cursor.fetchall()
        myFile = csv.writer(fp)
        
        for r in rows1:
                myFile.writerow(r)
        
        fp.close()
        
        s3_key = Key(bucket)
        
        s3_key.key = 'acc_data.csv'
        s3_key.set_contents_from_filename('acc_data.csv')
        
        s3_key.set_acl('public-read')
        path = s3_key.generate_url(expires_in=0, query_auth=False)
        
        k2= Key(bucket)
        k2.key = "fitment_data.csv"
        
        fp1 = open('fitment_data.csv','w')
        
        cursor.execute(query2)
        rows2 = cursor.fetchall()
        myFile2 = csv.writer(fp1)
        
        for r1 in rows2:
                myFile2.writerow(r1)
        
        fp1.close()
        
        s3_key = Key(bucket)
        
        s3_key.key = 'fitment_data.csv'
        s3_key.set_contents_from_filename('fitment_data.csv')
        s3_key.set_acl('public-read')
        path = s3_key.generate_url(expires_in=0, query_auth=False)
        db.close()
Example #6
 def generate_and_store_encryption_key(self):
     bucket = self.__s3.create_bucket(self.__bucket_unique_id)
     bucket.set_policy(self.__get_bucket_policy)
     from boto.s3.key import Key
     key_object = Key(bucket)
     key_object.key = "key"
     encryption_key = self.__generate_encryption_key()
     key_object.set_contents_from_string(encryption_key, {"Referer": self.__get_referer_unique_id()}, True)
     expires_in_seconds = 1800
     key_object.generate_url(expires_in_seconds)
     Logger.log("info", "Encryption key uploaded to S3 bucket named {}".format(self.__bucket_unique_id))
Example #7
def upload_to_s3(file_path, public=False):
    print '------public-------', public
    if not zipfile.is_zipfile(file_path):
        try:
            import zlib
            compression = zipfile.ZIP_DEFLATED
        except:
            compression = zipfile.ZIP_STORED
        zip_file_path = '.'.join(file_path.split('.')[:-1]) + '.zip'
        file_name = file_path.split('/')[-1]
        zf = zipfile.ZipFile(zip_file_path, mode='w')
        try:
            zf.write(file_path, file_name, compress_type=compression)
        finally:
            zf.close()
        try:
            os.remove(file_path)
        except:
            pass
    else:
        zip_file_path = file_path

    try:
        # os.environ['S3_USE_SIGV4'] = 'True'
        host = 's3.ap-south-1.amazonaws.com'
        c = S3Connection(config['AWS_ACCESS_KEY'],
                         config['AWS_SECRET_KEY'],
                         host=host)
        b = c.get_bucket(config['PROCESSED_FILE_DIR_S3'].split('/')[0])
        k = Key(b)
        k.key = os.path.join(
            '/'.join(config['PROCESSED_FILE_DIR_S3'].split('/')[1:]),
            zip_file_path.split("/")[-1])
        f = open(zip_file_path, 'r+')
        file2 = cStringIO.StringIO()
        file2.write(f.read())
        k.set_contents_from_string(file2.getvalue())
        if bool(public):
            url = k.generate_url(expires_in=1 * 24 * 3600, query_auth=False)
        else:
            url = k.generate_url(expires_in=0, query_auth=False)
        c.close()

        # del os.environ['S3_USE_SIGV4']
        return url

    except Exception as Ex:
        trace = traceback.format_exc()
        app.logger.error(str(trace))
        #send_mail(alert_mails, "Some issue with the file upload to S3!", str(Ex))
        return False
Example #8
def upload(bucket, logs, remote_name, access_key, secret_key, valid_time):

	try:
		conn = S3Connection( access_key, secret_key )
		bucket = conn.get_bucket( bucket )
		key = Key( bucket )
		key.name = remote_name
		key.set_contents_from_filename( logs )

		key1 = Key( bucket )
		key1.name = remote_name
		print key1.generate_url( valid_time )

	except Exception, e:
		print "ERROR GENERATING KEY\n%s" % e
Example #9
def upload():
    if request.method == 'POST':
        file = request.files['file']
        if file and allowed_file(file.filename):
            # save to app/static/raw/
            filename = secure_filename(file.filename)
            path = os.path.join(UPLOAD_FOLDER, filename)
            file.save(path)
            
            # analyze!
            nlp_data = nlp(path)
            cv_data = cv(path)

            # upload video to s3
            conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
            bucket = conn.get_bucket('project-vidaio')
            k = Key(bucket)
            name, ext = os.path.splitext(filename)
            k.key = str(uuid4()) + ext
            k.set_contents_from_filename(path)
            k.set_acl('public-read')
            url = k.generate_url(expires_in=0, query_auth=False)

            # upload thumbnail to s3
            thumb_path = 'app/static/raw/thumb.jpg'
            k2 = Key(bucket)
            k2.key = str(uuid4()) + '.jpg'
            k2.set_contents_from_filename(thumb_path)
            k2.set_acl('public-read')
            thumb_url = k2.generate_url(expires_in=0, query_auth=False)

            # put in mongodb
            insert = {}
            insert['url'] = url
            insert['thumb'] = thumb_url
            for k,v in nlp_data.iteritems():
                insert[k] = v
            for k,v in cv_data.iteritems():
                insert[k] = v
            db.new_video(insert)

            # remove from app/static/raw
            os.remove(path)
            os.remove(thumb_path)
                    
        return redirect(url_for('home.video', url=url))
    else:
        return render_template('file_upload.html', title='File Upload')
Example #10
def upload():
    if request.method == 'POST':
        file = request.files['file']
        if file and allowed_file(file.filename):
            # save to app/static/raw/
            filename = secure_filename(file.filename)
            path = os.path.join(UPLOAD_FOLDER, filename)
            file.save(path)

            # analyze!
            nlp_data = nlp(path)
            cv_data = cv(path)

            # upload video to s3
            conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
            bucket = conn.get_bucket('project-vidaio')
            k = Key(bucket)
            name, ext = os.path.splitext(filename)
            k.key = str(uuid4()) + ext
            k.set_contents_from_filename(path)
            k.set_acl('public-read')
            url = k.generate_url(expires_in=0, query_auth=False)

            # upload thumbnail to s3
            thumb_path = 'app/static/raw/thumb.jpg'
            k2 = Key(bucket)
            k2.key = str(uuid4()) + '.jpg'
            k2.set_contents_from_filename(thumb_path)
            k2.set_acl('public-read')
            thumb_url = k2.generate_url(expires_in=0, query_auth=False)

            # put in mongodb
            insert = {}
            insert['url'] = url
            insert['thumb'] = thumb_url
            for k, v in nlp_data.iteritems():
                insert[k] = v
            for k, v in cv_data.iteritems():
                insert[k] = v
            db.new_video(insert)

            # remove from app/static/raw
            os.remove(path)
            os.remove(thumb_path)

        return redirect(url_for('home.video', url=url))
    else:
        return render_template('file_upload.html', title='File Upload')
Example #11
    def _upload(self, course_id, file_path, s3_bucket):
        """
        Upload a file.

        Args:
            course_id (unicode): The ID of the course.
            file_path (unicode): Absolute path to the file to upload.
            s3_bucket (unicode): Name of the S3 bucket where the file will be uploaded.

        Returns:
            str: URL to access the uploaded archive.

        """
        # Try to get the AWS credentials from settings if they are available
        # If not, these will default to `None`, and boto will try to use
        # environment vars or configuration files instead.
        aws_access_key_id = getattr(settings, 'AWS_ACCESS_KEY_ID', None)
        aws_secret_access_key = getattr(settings, 'AWS_SECRET_ACCESS_KEY', None)
        conn = boto.connect_s3(
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key
        )

        bucket = conn.get_bucket(s3_bucket)
        key_name = os.path.join(course_id, os.path.split(file_path)[1])
        key = Key(bucket=bucket, name=key_name)
        key.set_contents_from_filename(file_path)
        url = key.generate_url(self.URL_EXPIRATION_HOURS * 3600)

        # Store the key and url in the history
        self._history.append({'key': key_name, 'url': url})

        return url
Example #12
    def get_url(self, package):
        expire = package.data.get('expire', 0)
        changed = False
        if 'url' not in package.data or time.time() > expire:
            key = Key(self.bucket)
            key.key = self.get_path(package)
            expire_after = self.expire_after
            buffer_time = self.buffer_time

            # This is a bit of a hack. If we are using session-based
            # credentials, those might expire before the URL. We will need to
            # adjust the expire_after and buffer_time accordingly.
            # See issue: https://github.com/mathcamp/pypicloud/issues/38
            credential_expr = getattr(self.bucket.connection.provider,
                                      '_credential_expiry_time', None)
            expire_time = time.time() + expire_after
            if credential_expr is not None:
                seconds = calendar.timegm(credential_expr.utctimetuple())
                if seconds < expire_time:
                    # More hacks: boto refreshes session tokens 5 minutes
                    # before expiration, so we have to refresh url after that.
                    buffer_time = 4 * 60
                    expire_time = seconds

            url = key.generate_url(expire_time, expires_in_absolute=True)
            package.data['url'] = url
            expire = expire_time - buffer_time
            package.data['expire'] = expire
            changed = True
        return package.data['url'], changed
Example #13
def upload_from_server(data_file,
                       upload_file_name,
                       bucket_name=None,
                       public=False,
                       content_type=None):
    """

    @param data_file: the file that you want to upload
    @param upload_file_name: the file path where to upload, eg: upload_folder/file_name.txt, or file_name.jpg
    @param public: visibility of file on S3

    @return: the url of the uploaded file
    """

    if data_file is None:
        raise OwnException(NO_FILE_SPECIFIED)

    conn = boto.connect_s3()
    # conn = boto.s3.connect_to_region(region_name='the_region')
    # conn = S3Connection('aws_key', 'aws_secret')
    bucket_name = __get_bucket_name(bucket_name)
    bucket = conn.get_bucket(bucket_name)
    k = Key(bucket)
    k.key = upload_file_name
    policy = 'public-read' if public else 'private'
    k.content_type = content_type
    k.set_contents_from_file(data_file, policy=policy)
    # k.set_contents_from_string(data_file, policy=policy)

    url = k.generate_url(expires_in=0, query_auth=False)

    return url
Example #14
    def _upload(self, course_id, file_path, s3_bucket):
        """
        Upload a file.

        Args:
            course_id (unicode): The ID of the course.
            file_path (unicode): Absolute path to the file to upload.
            s3_bucket (unicode): Name of the S3 bucket where the file will be uploaded.

        Returns:
            str: URL to access the uploaded archive.

        """
        # Try to get the AWS credentials from settings if they are available
        # If not, these will default to `None`, and boto will try to use
        # environment vars or configuration files instead.
        aws_access_key_id = getattr(settings, 'AWS_ACCESS_KEY_ID', None)
        aws_secret_access_key = getattr(settings, 'AWS_SECRET_ACCESS_KEY', None)
        conn = boto.connect_s3(
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key
        )

        bucket = conn.get_bucket(s3_bucket)
        key_name = os.path.join(course_id, os.path.split(file_path)[1])
        key = Key(bucket=bucket, name=key_name)
        key.set_contents_from_filename(file_path)
        url = key.generate_url(self.URL_EXPIRATION_HOURS * 3600)

        # Store the key and url in the history
        self._history.append({'key': key_name, 'url': url})

        return url
Example #15
def publicUrlTest():
    result = 0
    obj = dsslib.getConnection(CALLER)
    b1 = obj.create_bucket('urlbucket1')
    k = Key(b1)
    k.key = 'obj1'
    k.set_contents_from_string('Data of URL object')
    print "Setting ACL on obj"
    k.set_acl('public-read')
    print "Setting ACL on bucket"
    b1.set_acl('public-read')

    m = Key(b1)
    m.key = 'obj1'
    urlname = m.generate_url(1000)
    print "\nThe obj URL is: " + str(urlname)
    urlname = b1.generate_url(1000)
    print "\nThe bucket URL is: " + str(urlname)

    for i in range(1, 21):
        time.sleep(1)
        if i % 5 == 0:
            print str(20 - i) + " Seconds left before Obj deletion"

    m.delete()
    print "Object deleted\n"

    for i in range(1, 21):
        time.sleep(1)
        if i % 5 == 0:
            print str(20 - i) + " Seconds left before bucket deletion"

    obj.delete_bucket('urlbucket1')
    print "Bucket deleted\n"
    return result
Example #16
def upload_maybe(fname):

    keyname = fname[len(INPUT_DIR)+1:]
    key = bucket.get_key(keyname)
    uploaded = False

    fname_md5 = hashlib.md5()
    with open(fname, 'r') as f:
        fname_md5.update(f.read())

    hsh = fname_md5.hexdigest()

    if key is None or key.md5 != hsh:
        h = headers
        if keyname.endswith('sw.js'):
            h = copy.deepcopy(headers)
            h['Service-Worker-Allowed'] = '/'
        key = Key(bucket)
        key.name = keyname
        key.set_contents_from_filename(fname, headers=h)
        key.set_acl("public-read")
        uploaded = True

    url = key.generate_url(expires_in=0, query_auth=False)

    uri = furl(url)
    try:
        uri.args.pop('x-amz-security-token')
    except:
        pass
    url = uri.url
    return (url, uploaded)
Example #17
 def upload_to_s3(self, filename, contents):
     s3 = boto.connect_s3(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
     bucket = s3.get_bucket(settings.S3_REPORT_BUCKET)
     key = Key(bucket)
     key.key = os.path.join(settings.S3_REPORT_PREFIX, filename)
     key.set_contents_from_string(contents)
     return key.generate_url(expires_in=REPORT_URL_EXPIRATION_TIME)
Example #18
def upload_content_to_s3(name, version, asset, bucket,
                         headers):  # pragma: no cover
    headers = dict(headers)  # clone the headers as it's mutable
    asset_name, asset_body, signature = asset
    s3_key = "{0}/v{1}/{2}".format(name, version, asset_name)

    key = Key(bucket)
    key.name = s3_key
    ext = os.path.splitext(asset_name)[-1][1:]
    if ext == "html":
        headers["Cache-Control"] = "public, max-age=86400"
    headers['Content-Type'] = MIME_EXTENSIONS.get(ext) or 'text/plain'
    if signature:
        encrypt_key, sig, x5u = _extract_entryption_info(signature)
        headers['X-amz-meta-content-signature'] = sig
        if encrypt_key:  # as the encrypt key is optional
            headers['X-amz-meta-encryption-key'] = encrypt_key
        if x5u:  # as x5u is optional
            headers['X-amz-meta-x5u'] = x5u
    key.set_contents_from_string(asset_body, headers=headers)
    key.set_acl("public-read")

    new_url = key.generate_url(expires_in=0, query_auth=False)
    # remove x-amz-security-token, which is inserted even if query_auth=False
    # ref: https://github.com/boto/boto/issues/1477
    url = furl(new_url)
    try:
        url.args.pop('x-amz-security-token')
    except:
        new_url = os.path.join(
            'https://%s.s3.amazonaws.com' % env.config.S3['content'], s3_key)
    else:
        new_url = url.url
    return new_url
Example #19
def upload_image(folder_name, file_uuid, image_file, type_known=False):
    """
    Creates a connection to the s3 service then uploads the file which was
    passed
    to this function an uses the uuid as the filename.

    :param type_known:
    :param image_file:
    :param folder_name:
    :param file_uuid:
    :return:
    """
    bucket = settings.AWS_STORAGE_BUCKET_NAME
    conn = connect_s3(settings.AWS_ACCESS_KEY_ID,
                      settings.AWS_SECRET_ACCESS_KEY)
    k = Key(conn.get_bucket(bucket))
    if type_known:
        key_string = "%s/%s" % (folder_name, file_uuid)
        k.content_type = 'image/%s' % file_uuid[file_uuid.find('.') + 1:]
    else:
        key_string = "%s/%s%s" % (folder_name, file_uuid, ".png")
        k.content_type = 'image/png'
    k.key = key_string

    if not isinstance(image_file, str):
        image_file.seek(0)
        k.set_contents_from_string(image_file.read())
    else:
        k.set_contents_from_string(image_file)
    k.make_public()
    image_uri = k.generate_url(expires_in=0, query_auth=False)
    return image_uri
Example #20
    def save(self, username, course_id, filepath):
        """Save certificate."""
        try:
            bucket = self.conn.get_bucket(self.bucket_name)
        except S3ResponseError as e:
            if e.status == 404:
                bucket = self.conn.create_bucket(self.bucket_name,
                                                 location=self.location)
                log.info("Cresate bucket(%s)", self.bucket_name)
            else:
                return json.dumps({"error": "{}".format(e)})

        try:
            s3key = Key(bucket)
            s3key.key = "{cid}/{name}.pdf".format(cid=course_id, name=username)

            # headers meta? encrypt_key true?
            s3key.set_contents_from_filename(filepath)
            url = s3key.generate_url(expires_in=0,
                                     query_auth=False,
                                     force_http=True)
        finally:
            s3key.close()

        return json.dumps({
            'download_url': url,
        })
Example #21
def send_ig_img_to_s3(remote_path):
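    # copy a remote Instagram image into S3 (if not already there) and return its public URL, or None on failure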
    conn = s3_connect()
    bucket = conn.get_bucket(settings.AWS_BUCKET_NAME)
    s3_filepath = 'media/instagram/%s' % basename(remote_path)
    remote_img = urllib2.urlopen(remote_path)
    k = Key(bucket)
    k.key = s3_filepath
    s3_url = None
    saved = None
    exists_in_s3 = bucket.get_key(s3_filepath)
    if not exists_in_s3:
        try:
            saved = k.set_contents_from_string(remote_img.read())
        except:
            logger.debug('Could not save %s to s3!' % basename(remote_path))
            return None
    elif exists_in_s3 or saved:
        try:
            s3_url = k.generate_url(expires_in=0, query_auth=False)
        except:
            logger.debug('Could not generate url for %s in s3!' %
                         basename(remote_path))
    if not s3_url:
        return None
    else:
        return s3_url
Example #22
    def push_via_file_path(cls,
                           file_path,
                           filename,
                           s3_dir,
                           mode='public',
                           **kwargs):
        """
        push a local file to s3
        :param file_path: the local path of the file
        :param filename: the name of the file stored locally
        :param s3_dir: the s3 directory to which the file is to be pushed
        :param mode: the mode of file storage public/private
        :return: the s3 key and url of the file
        """
        try:
            bucket = cls.get_s3_bucket(**kwargs)

            key_obj = Key(bucket)
            key_obj.key = "{}/{}".format(s3_dir, filename)
            key_obj.set_contents_from_filename(file_path)

            if mode == 'public':
                key_obj.make_public()
                url = key_obj.generate_url(expires_in=0, query_auth=False)
            else:
                url = cls.generate_private_url(key_name=key_obj.key, **kwargs)

            return key_obj.key, url
        except Exception as e:
            print("error pushing file to s3 : {}".format(e))
            return None, None
Example #23
    def push_via_file_object(cls,
                             file_obj,
                             filename,
                             s3_dir,
                             mode='private',
                             **kwargs):
        """
        push file object to s3 directory
        :param file_obj: the StringIO like file object to be pushed to s3
        :param filename: the name to store the object with
        :param s3_dir: the s3 directory to push the object to
        :param mode: private or public url to be generated
        :return: the s3 key and the url generated for the file
        """
        try:
            # point to the beginning of the file
            file_obj.seek(0)

            bucket = cls.get_s3_bucket(**kwargs)

            key_obj = Key(bucket)
            key_obj.key = "{}/{}".format(s3_dir, filename)
            key_obj.set_contents_from_file(file_obj)

            if mode == 'public':
                key_obj.make_public()
                url = key_obj.generate_url(expires_in=0, query_auth=False)
            else:
                url = cls.generate_private_url(key_name=key_obj.key, **kwargs)

            return key_obj.key, url
        except Exception as e:
            print("error pushing file object to s3 : {}".format(e))
            return None, None
Example #24
def _upload_file_dict_to_s3(file_dict, key_dict, path, name):
    '''
    Upload dictionaries of filenames to S3 urls (and filenames to S3 keys)
    to S3 using provided keyname.
    This is useful because the s3_files column on submissions is currently too
    small.

    Returns:
        public_url: URL to access uploaded list
    '''
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID,
                        settings.AWS_SECRET_ACCESS_KEY)
    bucketname = settings.S3_BUCKET
    bucket = conn.create_bucket(bucketname)

    data = {}
    data['files'] = file_dict
    data['keys'] = key_dict

    prefix = getattr(settings, 'S3_PATH_PREFIX')
    path = '{0}/{1}'.format(prefix, path)

    k = Key(bucket)
    k.key = '{path}/{name}'.format(path=path, name=name)
    k.set_contents_from_string(json.dumps(data))
    public_url = k.generate_url(60 * 60 * 24 * 365)  # URL timeout in seconds.

    return public_url
Example #25
def postbuild_osx(package_name, args, bucket, package_s3_key):
    if args.upload:
        url = package_s3_key.generate_url(expires_in=0, query_auth=False)
    else:
        # For testing "brew install" locally
        url = "http://127.0.0.1:8000/%s" % package_name

    print "Generating formula..."
    sha1 = sha1_file(package_name)
    formula_str = BREW_FORMULA.format(url=url, sha1=sha1)
    with open("kite.rb", "w") as f:
        f.write(formula_str)

    if args.upload:
        print "Uploading new brew formula..."
        formula_key = Key(bucket)
        formula_key.key = "kite.rb"
        formula_key.set_contents_from_string(formula_str)
        formula_key.make_public()
        formula_url = formula_key.generate_url(expires_in=0, query_auth=False)

        print "kite tool has been uplaoded successfully.\n" \
              "Users can install it with:\n    " \
              "brew install \"%s\"" % formula_url
    else:
        print "Did not upload to S3. " \
              "If you want to upload, run with --upload flag."
Example #26
def upload_to_s3(filename, bucket):
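    # upload the local file with a public-read policy and return a permanent, unsigned HTTP URL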
    c = boto.connect_s3()
    b = c.get_bucket(bucket)
    k = Key(b)
    k.key = filename
    k.set_contents_from_filename(filename, policy='public-read')
    return k.generate_url(expires_in=0, query_auth=False, force_http=True)
Example #27
def testPicture(decodedData,connection,cursor):
	#print decodedData['data']
	#Make the file locally so that it can be uploaded to S3
	fileName = str(uuid.uuid1()) + ".jpg"
	fh = open("images/" + fileName, "wb")
	fh.write(decodedData['data'].decode('base64'))
	fh.close()
	
	#upload the file to S3
	conn = S3Connection(apikeys.AWSAccessKeyId, apikeys.AWSSecretKey)
	bucket = conn.get_bucket("devcontest", False, None)
	k = Key(bucket)
	k.key = fileName
	#uploads file
	k.set_contents_from_filename("images/" + fileName, None, None)
	#sets to public
	k.set_acl('public-read')
	#gets a url back
	url = k.generate_url(expires_in=0,query_auth=False)
	conn.close()

	#putting urls into dynamodb
	conn2 = boto.dynamodb.connect_to_region(
        'us-east-1',
        aws_access_key_id=apikeys.AWSAccessKeyId,
        aws_secret_access_key=apikeys.AWSSecretKey)
	table = conn2.get_table('Picture')
	#nosql db uses key, value pair. key is location id and value is url
	item = table.new_item(hash_key=decodedData['location_id'], range_key=url)
	item.put()

	return url
Example #28
def uploadToS3(videoFile):
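  # upload the recorded video with a public-read ACL, delete the local copy, and return the public URL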
  txtout = "S3Upload started on :: " + videoFile
  dbgout(txtout)
  
  AWS_ACCESS_KEY_ID = S3_KEYID
  AWS_SECRET_ACCESS_KEY = S3_KEY

  dbgout("Connecting & gettig bucket")
  conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
  bucket = conn.get_bucket(S3_BUCKETNAME) 

  dbgout("Creating key and setting content")
  k = Key(bucket)
  nameoffile = videoFile.split('/')[-1]
  k.key = S3_SUBFOLDER + nameoffile
  dbgout("Saving file to :: " + k.key)

  dbgout("Uploading contents from :: " + videoFile)
  k.set_contents_from_filename(videoFile)

  # Needs to be readable so the alerted users can see it
  k.set_acl('public-read')
  url = k.generate_url(expires_in=0, query_auth=False)

  dbgout("Upload complete")

  # Delete the file off the pi
  os.remove(videoFile)
  dbgout("Original file deleted")

  return url
Example #29
def upload_to_s3(filename, bucket):
    c = boto.connect_s3()
    b = c.get_bucket(bucket)
    k = Key(b)
    k.key = filename
    k.set_contents_from_filename(filename, policy='public-read')
    return k.generate_url(expires_in=0, query_auth=False, force_http=True)
Example #30
 def post(self):
     username = get_jwt_identity()
     my_urls = []
     files = request.files.getlist("file")
     print(files)
     for file in files:
         if file and allowed_file(file.filename):
             filename = secure_filename(file.filename)
             print(filename)
             k = Key(bucket)
             k.key = filename
             k.set_contents_from_file(file)
             k.set_acl('public-read')
             my_url = k.generate_url(expires_in=0,
                                     query_auth=False,
                                     force_http=False)
             now = datetime.now()
             formatted_date = now.strftime('%Y-%m-%d %H:%M:%S')
             # save file record to sql
             mysql_conn = mysql.connect()
             cursor = mysql_conn.cursor()
             query = "INSERT INTO files(file_name, file_size, file_url, created_date, user_name) \
                 VALUES('%s', %d, '%s', '%s', '%s')" % (
                 filename, k.size, my_url, formatted_date, username)
             cursor.execute(query)
             cursor.connection.commit()
             cursor.close()
             my_urls.append({"name": filename, "url": my_url})
     return {"status": 200}, 200
Example #31
    def getBatchToRate(experiment_id):
        ret = {}
        session = dbConn().get_session(dbConn().get_engine())
        experiment = session.query(models.experiments).filter(
            models.experiments.id == experiment_id).first()
        if experiment is None:
            ret['errors'] = []
            ret['errors'].append("Invalid experiment id")
            return apiDecorate(ret, 400, "Invalid experiment id")
        batches = session.query(models.batch).filter(
            models.batch.experiment_id == experiment.resource_id).filter(
                models.batch.rating == None).all()
        finishedBatch = []

        for batch in batches:
            if (batch.isCompleted == False):
                continue
            botoConn = boto.connect_s3(datasetHandler.DREAM_key,
                                       datasetHandler.DREAM_secretKey,
                                       host="objects-us-west-1.dream.io")
            bucket = botoConn.get_bucket(datasetHandler.DREAM_Bucket,
                                         validate=False)
            curBatch = {}
            curBatch['id'] = batch.id
            link = "e_" + experiment.resource_id + "/" + str(
                batch.local_resource_id) + "_res.json"
            itemk = Key(bucket)
            itemk.key = link
            print link
            link = itemk.generate_url(3600, query_auth=True, force_http=True)
            curBatch['link'] = link
            print link
            finishedBatch.append(curBatch)
        ret['batches'] = finishedBatch
        return apiDecorate(ret, 200, "success")
Example #32
def upload_to_s3(file_path, path, name):
    '''
    Upload file to S3 using provided keyname.

    Returns:
        public_url: URL to access uploaded file
    '''
    try:
        conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
        bucketname = settings.S3_BUCKETNAME
        try:
            bucket = conn.create_bucket(bucketname.lower())
        except Exception:
            bucket = conn.get_bucket(bucketname.lower())
        prefix = getattr(settings, 'S3_PATH_PREFIX')
        path = u'{0}/{1}'.format(prefix, path)
        key = u'{path}/{name}'.format(path=removeNonAscii(path), name=removeNonAscii(name))
        k = Key(bucket)
        k.key = key
        k.set_contents_from_filename(file_path)
        k.set_acl("public-read")
        k.set_metadata('filename', removeNonAscii(name))
        public_url = k.generate_url(60*60*24*365) # URL timeout in seconds.

        return True, public_url
    except Exception:
        error = "Could not connect to S3."
        log.exception(error)
        return False, error
Example #33
def artifacts_upload(artifacts):
    """Upload the artifacts to S3"""
    try:
        urls = []
        bucket, headers = setup_s3()
        for artifact in artifacts:
            key = bucket.get_key(artifact["key"])
            if key is None or artifact.get("force_upload"):
                key = Key(bucket)
                key.name = artifact["key"]
                headers['Content-Type'] = "application/json"
                key.set_contents_from_string(artifact["data"], headers=headers)
                key.set_acl("public-read")
            # return urls
            url = key.generate_url(expires_in=0, query_auth=False)
            # remove x-amz-security-token, which is inserted even if query_auth=False
            # ref: https://github.com/boto/boto/issues/1477
            uri = furl(url)
            try:
                uri.args.pop('x-amz-security-token')
            except:
                pass
            urls.append(uri.url)
    except Exception as e:
        raise Exception("Failed to upload artifact: %s" % e)

    return urls
Example #34
 def _get_file_url(self, file_name):
     '''
     Generate a URL for the given S3 file name
     '''
     k = Key(self._get_s3_bucket())
     k.key = self.DATA_PATH + file_name
     return k.generate_url(0, query_auth=False, force_http=True)
Example #35
def publicUrlTest():
    result = 0
    userObj = dssSanityLib.getConnection()
    bucketpref = dssSanityLib.getsNewBucketName()
    b1 = userObj.create_bucket(bucketpref)

    k = Key(b1)
    k.key = 'userObj1'
    k.set_contents_from_string('Data of URL object')

    m = Key(b1)
    m.key = 'userObj1'
    urlname = m.generate_url(1000)
    print "\nThe userObj URL is: " + str(urlname)
    urlname = b1.generate_url(1000)
    print "\nThe bucket URL is: " + str(urlname)

    for i in range(1, 3):
        time.sleep(1)
        if i % 5 == 0:
            print str(2 - i) + " Seconds left before Obj deletion"
    m.delete()
    print "Object deleted\n"

    for i in range(1, 3):
        time.sleep(1)
        if i % 5 == 0:
            print str(2 - i) + " Seconds left before bucket deletion"
    userObj.delete_bucket(bucketpref)
    print "Bucket deleted\n"

    return result
Example #36
 def get_url(self, package):
     key = Key(self.bucket, self.get_path(package))
     url = key.generate_url(self.expire_after)
     if self.proxy_address:
         return proxy_address(url, self.proxy_address)
     else:
         return url
Example #37
def upload_to_s3(path, config):
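    # store the file under a random UUID-based key, make it public, and return an unsigned HTTP URL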
    conn = S3Connection(config['s3']['access_key'], config['s3']['secret_key'])
    bucket = conn.get_bucket(config['s3']['bucket'])
    key = Key(bucket, str(uuid.uuid1())+"."+path.split(".")[-1])
    key.set_contents_from_filename(path)
    key.set_canned_acl('public-read')
    return key.generate_url(0, query_auth=False, force_http=True)
Example #38
def download_and_upload_image(image, bucket):
    item_id = image['id']
    item_type = "PSScene4Band"
    asset_types = ["analytic", "analytic_xml"]
    for asset_type in asset_types:
        item_url = 'https://api.planet.com/data/v1/item-types/{}/items/{}/assets'.format(
            item_type, item_id)
        # Request a new download URL
        result = requests.get(item_url,
                              auth=HTTPBasicAuth(os.environ['$PL_API_KEY'],
                                                 ''))
        download_url = result.json()[asset_type]['location']
        if (asset_type == 'analytic'):
            output_file = item_id + '.tif'
        elif (asset_type == 'analytic_xml'):
            output_file = item_id + '.xml'
        #download
        file_object = urllib.request.urlopen(download_url)
        # processed_img = object_size.draw_boxes(file_object.read())
        # fp = io.BytesIO(processed_img)
        fp = io.BytesIO(file_object.read())
        #upload
        k = Key(bucket)
        k.key = output_file
        k.set_contents_from_file(fp)
        upload_image(file, k)
        url = k.generate_url(3600)
        image['link'] = url
        print(item_id, item_type, "uploaded")
Example #39
    def update_s3_template(self, filename, template, bucket=None, timeout=None):
        """
        Send a template up to s3

        :param bucket: bucket name
        :type bucket: str
        :param filename: filename to store template as key
        :type filename: str
        :param template: Cloud formation template
        :type template: str
        """
        if bucket == None:
            bucket = Cloudformation.default_bucket
        if timeout == None:
            timeout = Cloudformation.default_s3_timeout
        try:
            s3conn = boto.connect_s3()
            bucket = s3conn.get_bucket(bucket)
            key = Key(bucket)
            key.key = filename
            key.set_contents_from_string(template)
            return key.generate_url(expires_in=timeout, query_auth=False, force_http=True)
        except boto.exception.BotoServerError, ex:
            raise CloudformationException(
                "error occured while updating s3 bucket / filename %s / %s : %s" % (bucket, filename, ex.message)
            )
Example #40
def register(id, url=None):
    """Register a UUID key in the global S3 bucket."""
    k = Key(registration_s3_bucket())
    k.key = registration_key(id)
    k.set_contents_from_string(url or 'missing')
    reg_url = k.generate_url(expires_in=0, query_auth=False)
    return reg_url
Example #41
    def stage_out(self, req):
        try:
            try:
                if self.bucket is None:
                    self.bucket = self.get_bucket(req['coll_id'])

                key = Key(self.bucket, os.path.basename(req['pfn']))
                key.set_contents_from_filename(req['pfn'])
            except boto.exception.S3ResponseError as error:
                self.bucket = self.get_bucket(req['coll_id'])
                key = Key(self.bucket, os.path.basename(req['pfn']))
                key.set_contents_from_filename(req['pfn'])

            if key.size == req['pfn_size']:
                self.logger.debug("Successfully staged out %s" % req['pfn'])
                if self.signed_url:
                    req['pfn'] = key.generate_url(self.lifetime, method='GET')
                else:
                    req['pfn'] = 's3://%s:%s/%s/%s' % (self.hostname,
                                                       self.port,
                                                       self.get_bucket_name(req['coll_id']),
                                                       os.path.basename(req['pfn']))
                return req
            else:
                self.logger.debug("Failed to stage out %s: size mismatch(local size: %s, remote size: %s)" % (
                                  req['pfn'], req['pfn_size'], key.size))
        except Exception as error:
            self.logger.error("Failed to stageout request(%s): %s, %s" % (req, error, traceback.format_exc()))
Example #42
def _upload_file_dict_to_s3(file_dict, key_dict, keyname, bucketname):
    '''
    Upload dictionaries of filenames to S3 urls (and filenames to S3 keys)
    to S3 using provided keyname.
    This is useful because the s3_files column on submissions is currently too
    small.

    Returns:
        public_url: URL to access uploaded list
    '''
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID,
                        settings.AWS_SECRET_ACCESS_KEY)
    bucketname = _get_fully_qualified_bucketname(bucketname)
    bucket = conn.create_bucket(bucketname)

    data = {}
    data['files'] = file_dict
    data['keys'] = key_dict

    k = Key(bucket)
    k.key = keyname
    k.set_contents_from_string(json.dumps(data))
    public_url = k.generate_url(60 * 60 * 24 * 365)  # URL timeout in seconds.

    return public_url
Example #43
    def upload_to_s3(self, contents, path):
        """Upload submission results to S3

        TODO:
            - Use query_auth=False for `generate_url` if bucket is public

        """

        try:
            s3 = S3Connection(self.aws_access_key, self.aws_secret_key)
            bucket = s3.get_bucket(self.s3_bucket, validate=False)

            if self.s3_prefix:
                keyname = os.path.join(self.s3_prefix, path)
            else:
                keyname = path

            key = Key(bucket, keyname)
            key.set_contents_from_string(contents, replace=True)
            s3_url = key.generate_url(60*60*24)
        except Exception as e:
            log.error("Error uploading results to S3: %s", e)
            s3_url = False

        return s3_url
Example #44
def Seppuku(why):
    # Get the instance ID
    r = requests.get("http://169.254.169.254/latest/meta-data/instance-id")
    if r.status_code != 200:
        wf.logger.logger.error("Seppuku() unable to get instance ID")
        exit(3)
    instance_id = r.text

    # Declare our intent
    wf.logger.logger.error("Seppuku(%s): Instance is stopping because [%s]" % (instance_id, why))

    # Save a copy of the latest syslog to S3
    s3_conn = boto.connect_s3()
    bucket = s3_conn.get_bucket('wf-instance-logs')
    key = Key(bucket)
    key.key = "%s.txt" % instance_id
    wf.logger.logger.error("Seppuku(%s): copying log to %s" % (instance_id, key.generate_url(0)))
    key.set_contents_from_filename('/var/log/syslog')


    # Now commit Seppuku
    ec2_conn = boto.ec2.connect_to_region("us-west-1")
    # this can throw an exception.  Protect later.
    ec2_conn.terminate_instances(instance_ids=[instance_id])
    time.sleep(60*5)

    # What!  No sleep?  Then halt
    subprocess.check_call(["sudo", "halt"])
    time.sleep(60*5)
    exit(9)
Example #45
    def get_presignedurl (self, filename,
                     bucket=settings.AWS_BUCKET,
                     public=False, presigned_seconds = 604800):
            url=""
            try:
                conn= S3Connection(settings.AWS_ACCESS_KEY_ID,
                                   settings.AWS_SECRET_ACCESS_KEY)
                b = conn.get_bucket(bucket)
                k=Key(b)
                k.key=filename

                #mime = mimetypes.guess_type(filename)[0]
                #if mime==None:
                    #print "I couldn't guess MIME because"
                    #print "I couldn't detect a file ext."
                    #print "Using 'application/octet-stream'"
                    #print "as the default MIME instead."
                #    mime = "application/octet-stream"
        
                #print "MIME Type = %s" % (mime)
                #k.set_metadata("Content-Type", mime)
                #print k
                #x=k.set_contents_from_filename(local_filepath)
                
                #if public==True:
                #    k.set_acl("public-read")
                
                url = k.generate_url(presigned_seconds, 'GET', force_http=False)
                                     #headers={"Content-Type":"application/octet-stream"})

            except:
                print sys.exc_info()
                return url
            finally:
                return url
Example #46
 def _get_file_url(self, file_name):
     '''
     Generate a URL for the given S3 file name
     '''
     k = Key(self._get_s3_bucket())
     k.key = self.DATA_PATH + file_name
     return k.generate_url(0, query_auth=False, force_http=True)
Example #47
def restful_uploader():
    AWS_ACCESS_KEY_ID = 'AKIAIHNJNV3BX634MAZA'
    AWS_SECRET_ACCESS_KEY = 'ZHRgY6oPTk+hWrrxJSO6Vf2d+UGmJWx1dVimwkCm'

    bucket_name = AWS_ACCESS_KEY_ID.lower() + 'data-center'
    conn = boto.connect_s3(AWS_ACCESS_KEY_ID,
            AWS_SECRET_ACCESS_KEY)

    bucket = conn.create_bucket(bucket_name,
        location=boto.s3.connection.Location.DEFAULT)

    testfile = "valid_json.png"
    print 'Uploading %s to Amazon S3 bucket %s' % \
       (testfile, bucket_name)

    def percent_cb(complete, total):
        sys.stdout.write('.')
        sys.stdout.flush()

    k = Key(bucket)
    k.key = str(uuid.uuid1())+ "." +testfile.split('.')[1]
    k.set_contents_from_filename(testfile,
        cb=percent_cb, num_cb=10)
    k.set_acl('public-read')
    url = k.generate_url(expires_in=0, query_auth=False, force_http=True)
    print url

    payload = {'EventTag': 'AngelHack', 'gifImageUrl': url, 'note': 'Time is calling'}
    post_url = 'http://localhost:3000/events/gifs/558f71d4387f0e3740ab7a0f'
    r = requests.post(post_url, data=payload)
    print r.text
Example #48
def upload_maybe(fname):

    keyname = fname[len(INPUT_DIR) + 1:]
    key = bucket.get_key(keyname)
    uploaded = False

    fname_md5 = hashlib.md5()
    with open(fname, 'r') as f:
        fname_md5.update(f.read())

    hsh = fname_md5.hexdigest()

    if key is None or key.md5 != hsh:
        h = headers
        if keyname.endswith('sw.js'):
            h = copy.deepcopy(headers)
            h['Service-Worker-Allowed'] = '/'
        key = Key(bucket)
        key.name = keyname
        key.set_contents_from_filename(fname, headers=h)
        key.set_acl("public-read")
        uploaded = True

    url = key.generate_url(expires_in=0, query_auth=False)

    uri = furl(url)
    try:
        uri.args.pop('x-amz-security-token')
    except:
        pass
    url = uri.url
    return (url, uploaded)
Example #49
	def getUploadLink(self):
		k = Key(bucket)
		k.key = 'test-key' #Should be replaced dynamically with e.g. the uuid 
		k.set_contents_from_string('Hello World, I am Fred from Fribourg!')
		expires_in_seconds = 1800
		url = k.generate_url(expires_in_seconds)
		return url
Example #50
def postbuild_osx(package_name, args, bucket, package_s3_key):
    if args.upload:
        url = package_s3_key.generate_url(expires_in=0, query_auth=False)
    else:
        # For testing "brew install" locally
        url = "http://127.0.0.1:8000/%s" % package_name

    print "Generating formula..."
    sha1 = sha1_file(package_name)
    formula_str = BREW_FORMULA.format(url=url, sha1=sha1)
    with open("kite.rb", "w") as f:
        f.write(formula_str)

    if args.upload:
        print "Uploading new brew formula..."
        formula_key = Key(bucket)
        formula_key.key = "kite.rb"
        formula_key.set_contents_from_string(formula_str)
        formula_key.make_public()
        formula_url = formula_key.generate_url(expires_in=0, query_auth=False)

        print "kite tool has been uplaoded successfully.\n" \
              "Users can install it with:\n    " \
              "brew install \"%s\"" % formula_url
    else:
        print "Did not upload to S3. " \
              "If you want to upload, run with --upload flag."
Example #51
def upload():
    conn = boto.connect_s3(os.environ.get('MORPH_AWS_ACCESS_KEY_ID'),
                           os.environ.get('MORPH_AWS_SECRET_ACCESS_KEY'),
                           validate_certs=False)
    print "About to get_bucket..."
    bucket = conn.get_bucket('morph-upload-test')
    k = Key(bucket)
    k.key = 'popolo-test.txt'
    print "About to set_contents_from_string..."
    k.set_contents_from_string("I'm a banana!")
    print "About to set_metadata..."
    k.set_metadata('Content-Type', 'text/plain')
    print "About to make_public..."
    k.make_public()
    print "About to generate_url..."
    print k.generate_url(0, query_auth=False, force_http=True)
Example #52
def upload_image(image_url, image_name):
    """аплоад изображения"""
    try:
        # connect to the S3 bucket
        connection = boto.connect_s3()
        bucket = connection.get_bucket(config.AWS_STORAGE_BUCKET_NAME)
        key = Key(bucket)

        # assign the file name
        key.key = str(int(time())) + "-" + image_name + ".png"

        # read
        file_object = urllib2.urlopen(image_url)
        file_data = StringIO.StringIO(file_object.read())

        # write
        key.content_type = "image/png"
        key.set_contents_from_file(file_data)

        # make publicly readable
        key.make_public()

        result_url = key.generate_url(0, expires_in_absolute=True, force_http=True, query_auth=False)
        return result_url

    except Exception, e:
        return e
Example #53
def make_zip(directory):
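    # zip the directory, upload the archive with a public-read ACL, remove the local zip, and return a 24-hour presigned URL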
    if None in [settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY]:
        raise ImproperlyConfigured("AWS configuration not set.")

    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    bucket = conn.create_bucket(settings.AWS_BUCKET)

    filename = os.path.basename(directory) + ".zip"
    zip_file = zipfile.ZipFile(filename, "w")

    for root, dirs, files in os.walk(directory):
        for file in files:
            path = os.path.join(root, file)

            arcname = path.replace(directory, "")

            zip_file.write(path, arcname)

    zip_file.close()

    k = Key(bucket)
    k.key = filename
    k.set_contents_from_filename(filename)
    k.set_acl("public-read")

    os.remove(filename)

    return k.generate_url(24 * 60 * 60)
Example #54
def _upload_file_dict_to_s3(file_dict, key_dict, path, name):
    '''
    Upload dictionaries of filenames to S3 urls (and filenames to S3 keys)
    to S3 using provided keyname.
    This is useful because the s3_files column on submissions is currently too
    small.

    Returns:
        public_url: URL to access uploaded list
    '''
    if settings.S3_HOST is None:
        conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    else:
        conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY,
                            host=settings.S3_HOST,
                            calling_format=OrdinaryCallingFormat())
    bucketname = settings.S3_BUCKET
    bucket = conn.create_bucket(bucketname)

    data = {}
    data['files'] = file_dict
    data['keys'] = key_dict

    prefix = getattr(settings, 'S3_PATH_PREFIX')
    path = '{0}/{1}'.format(prefix, path)

    k = Key(bucket)
    k.key = '{path}/{name}'.format(path=path, name=name)
    public_url = k.generate_url(60*60*24*365)  # URL timeout in seconds.
    k.set_contents_from_string(json.dumps(data))

    return public_url
Example #55
 def onGetContactPictureResult(self, resultGetPictureIqProtocolEntity,
                               getPictureIqProtocolEntity):
     # write to file example:
     #print dir(resultGetPictureIqProtocolEntity)
     #print dir(getPictureIqProtocolEntity)
     #resultGetPictureIqProtocolEntity.writeToFile("/tmp/yowpics/%s_%s.jpg" % (getPictureIqProtocolEntity.getTo(), "preview" if resultGetPictureIqProtocolEntiy.isPreview() else "full"))
     #filename = "%s/%s-fullprofile.jpg"%(tempfile.gettempdir(),resultGetPictureIqProtocolEntity.getPictureId())
     #print filename
     #with open(filename, 'wb') as f:
     #    f.write(resultGetPictureIqProtocolEntity.getPictureData())
     id = re.sub(r"@.*", "", getPictureIqProtocolEntity.getTo())
     filename = "%s-profile.jpg" % (id)
     print("checking %s", filename)
     k = self.b.get_key(filename)
     if k:
         url = k.generate_url(expires_in=0, query_auth=False)
         print("%s exists: %s" % (filename, url))
     else:
         k = Key(self.b)
         k.key = filename
         k.set_contents_from_string(
             str(resultGetPictureIqProtocolEntity.getPictureData()))
         k.set_metadata('Content-Type', 'image/jpeg')
         k.set_acl('public-read')
         url = k.generate_url(expires_in=0, query_auth=False)
         print("%s doesn't exist, created: %s" % (k, url))
Example #56
def _upload_to_s3(file_to_upload, path, name):
    '''
    Upload file to S3 using provided keyname.

    Returns:
        public_url: URL to access uploaded file
    '''
    if settings.S3_HOST is None:
        conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    else:
        conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY,
                            host=settings.S3_HOST,
                            calling_format=OrdinaryCallingFormat())
    bucketname = settings.S3_BUCKET
    bucket = conn.create_bucket(bucketname)

    prefix = getattr(settings, 'S3_PATH_PREFIX')
    path = '{0}/{1}'.format(prefix, path)

    k = Key(bucket)
    k.key = '{path}/{name}'.format(path=path, name=name)
    public_url = k.generate_url(60*60*24*365)  # URL timeout in seconds.
    k.set_metadata('filename', file_to_upload.name)
    k.set_contents_from_file(file_to_upload)

    return public_url
Example #57
def upload_to_s3(file_path, path, name):
    """
    Upload file to S3 using provided keyname.

    Returns:
        public_url: URL to access uploaded file
    """
    try:
        conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
        bucketname = settings.S3_BUCKETNAME
        try:
            bucket = conn.create_bucket(bucketname.lower())
        except Exception:
            bucket = conn.get_bucket(bucketname.lower())
        prefix = getattr(settings, "S3_PATH_PREFIX")
        path = u"{0}/{1}".format(prefix, path)
        key = u"{path}/{name}".format(path=removeNonAscii(path), name=removeNonAscii(name))
        k = Key(bucket)
        k.key = key
        k.set_contents_from_filename(file_path)
        k.set_acl("public-read")
        k.set_metadata("filename", removeNonAscii(name))
        public_url = k.generate_url(60 * 60 * 24 * 365)  # URL timeout in seconds.

        return True, public_url
    except Exception:
        error = "Could not connect to S3."
        log.exception(error)
        return False, error
Example #58
def artifacts_upload(artifacts):
    """Upload the artifacts to S3"""
    try:
        urls = []
        bucket, headers = setup_s3()
        for artifact in artifacts:
            key = bucket.get_key(artifact["key"])
            if key is None or artifact.get("force_upload"):
                key = Key(bucket)
                key.name = artifact["key"]
                headers['Content-Type'] = "application/json"
                key.set_contents_from_string(artifact["data"], headers=headers)
                key.set_acl("public-read")
            # return urls
            url = key.generate_url(expires_in=0, query_auth=False)
            # remove x-amz-security-token, which is inserted even if query_auth=False
            # ref: https://github.com/boto/boto/issues/1477
            uri = furl(url)
            try:
                uri.args.pop('x-amz-security-token')
            except:
                pass
            urls.append(uri.url)
    except Exception as e:
        raise Exception("Failed to upload artifact: %s" % e)

    return urls