Example 1
    def upload(self, filename):
        
        #Save first to get id
        self.save()
        
        #access bucket and key
        self.conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
        self.bucket_obj = self.conn.get_bucket('media.dellarobbiathailand.com', True)
        k = Key(self.bucket_obj)
        
        #create and upload thumbnail
        url = self.create_thumbnail(filename)
        
        #get extension (os.path.splitext keeps the leading dot)
        _, ext = os.path.splitext(filename)
        #set key name
        k.key = "library/" + str(self.id) + ext
        #upload
        k.set_contents_from_filename(filename)
        #delete from system
        os.remove(filename)

        k.set_canned_acl('private')
        
        #save file data to model
        self.bucket = 'media.dellarobbiathailand.com'
        self.key = k.key
        self.url = url
        self.save()
        
        data = {'bucket':self.bucket,'key':self.key, 'url':self.url}        
        return data
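
Example 1 stores the object with a 'private' canned ACL, so later reads need a signed, expiring URL rather than a plain one. A minimal companion sketch, assuming the same model fields (self.bucket, self.key) and Django settings as above:

    def generate_signed_url(self, expires_in=3600):
        conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
        bucket = conn.get_bucket(self.bucket, validate=False)
        key = bucket.get_key(self.key)
        # query_auth defaults to True, so the URL carries a signature
        # and stops working after expires_in seconds
        return key.generate_url(expires_in)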
Example 2
def upload_to_s3(path, config):
    conn = S3Connection(config['s3']['access_key'], config['s3']['secret_key'])
    bucket = conn.get_bucket(config['s3']['bucket'])
    key = Key(bucket, str(uuid.uuid1())+"."+path.split(".")[-1])
    key.set_contents_from_filename(path)
    key.set_canned_acl('public-read')
    return key.generate_url(0, query_auth=False, force_http=True)
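
A hypothetical call site for the helper above; the nested dict mirrors the keys the snippet reads (config['s3']['access_key'] and friends), and all values are placeholders:

config = {
    's3': {
        'access_key': 'AKIA...',        # placeholder credentials
        'secret_key': '...',
        'bucket': 'example-bucket',     # assumed bucket name
    },
}
url = upload_to_s3('/tmp/report.pdf', config)
# expires_in=0 together with query_auth=False produces a permanent,
# unsigned URL, which only resolves because the key is 'public-read'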
Example 3
    def upload(self, local_copy, obj):
        print "UPLOAD: local_copy.path='" + local_copy.path + "' " + \
                "obj='" + obj.name + "'"
        k = Key(self.bucket)
        k.key = obj.name
        k.set_contents_from_filename(local_copy.path, obj.headers)
        k.set_canned_acl('public-read', None)
Example 4
def upload_to_s3(local_path, key=None):
    if key is None:
        key = os.path.basename(local_path)
    bucket = helper.get_bucket()
    k = Key(bucket)
    k.key = key
    k.set_contents_from_filename(local_path)
    k.set_canned_acl('public-read')
    k.set_metadata('Content-Type', helper.get_mimetype(k.key))
    return k
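
One caveat about Example 4: boto sends metadata with the upload request itself, so calling set_metadata after set_contents_from_filename only mutates the local Key object; the object already stored in S3 keeps its default Content-Type. A reordered sketch, assuming the same hypothetical helper module:

def upload_to_s3_typed(local_path, key=None):
    if key is None:
        key = os.path.basename(local_path)
    bucket = helper.get_bucket()
    k = Key(bucket)
    k.key = key
    # set the metadata before the PUT so S3 actually receives it
    k.set_metadata('Content-Type', helper.get_mimetype(key))
    # policy= applies the canned ACL in the same request,
    # replacing the separate set_canned_acl round trip
    k.set_contents_from_filename(local_path, policy='public-read')
    return k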
Example 5
def upload_to_s3(bucket, fname):
    """
    Get the basename of a file, then upload it to S3
    with the same name
    """
    key_name = os.path.basename(fname)
    key = Key(bucket)
    key.key = key_name
    key.set_contents_from_filename(fname)
    key.set_canned_acl('public-read')
Example 6
def uploadRepo(awsKey, secretKey, stack, logFile):
    bucketName = stack + "-" + ambariBucket
    try:
        conn = boto.connect_s3(aws_access_key_id=awsKey, aws_secret_access_key=secretKey)
        conn.create_bucket(bucketName, location=Location.DEFAULT)
        bucket = conn.get_bucket(bucketName)
        k = Key(bucket)
        k.key = 'ambari.repo'
        k.set_contents_from_filename("/etc/yum.repos.d/ambari.repo")
        k.set_canned_acl('public-read')
    except Exception as e:
        # file.write() needs a string, not the exception object
        logFile.write(str(e))
Example 7
    def refresh_original_file(self):
        #stream directly to s3 without saving to local filesystem
        request = urllib2.Request(self.original_url)
        response = urllib2.urlopen(request)
        conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
        bucket = conn.create_bucket(settings.AWS_STORAGE_BUCKET_NAME)
        k = Key(bucket)
        k.name = str(uuid.uuid4())+"."+self.assumed_extension
        k.set_contents_from_string(response.read(), {'Content-Type' : response.info().gettype()})
        k.set_canned_acl('public-read')
        self.set_original_file_s3_key(k.name)
        self.save()
Example 8
def snail_mail_bulk_pdf_task(pdf_name, get, **kwargs):
    """Save a PDF file for all open snail mail tasks"""
    # pylint: disable=too-many-locals
    # pylint: disable=unused-argument
    # pylint: disable=too-many-statements
    cover_info = []
    bulk_merger = PdfFileMerger(strict=False)

    snails = SnailMailTaskFilterSet(
        get,
        queryset=SnailMailTask.objects.filter(resolved=False).order_by(
            '-amount',
            'communication__foia__agency',
        ).preload_pdf(),
    ).qs[:100]

    blank_pdf = FPDF()
    blank_pdf.add_page()
    blank = StringIO(blank_pdf.output(dest='S'))
    for snail in snails.iterator():
        # generate the pdf and merge all pdf attachments
        pdf = SnailMailPDF(snail.communication, snail.category, snail.switch,
                           snail.amount)
        prepared_pdf, page_count, files, _mail = pdf.prepare()
        cover_info.append((snail, page_count, files))

        if prepared_pdf is not None:
            # append to the bulk pdf
            bulk_merger.append(prepared_pdf)
            # ensure we align for double sided printing
            if PdfFileReader(prepared_pdf).getNumPages() % 2 == 1:
                blank.seek(0)
                bulk_merger.append(blank)

    # prepend the cover sheet
    cover_pdf = CoverPDF(cover_info)
    cover_pdf.generate()
    if cover_pdf.page % 2 == 1:
        cover_pdf.add_page()
    bulk_merger.merge(0, StringIO(cover_pdf.output(dest='S')))

    bulk_pdf = StringIO()
    bulk_merger.write(bulk_pdf)
    bulk_pdf.seek(0)

    conn = S3Connection(settings.AWS_ACCESS_KEY_ID,
                        settings.AWS_SECRET_ACCESS_KEY)
    bucket = conn.get_bucket(settings.AWS_STORAGE_BUCKET_NAME)
    key = Key(bucket)
    key.key = pdf_name
    key.set_contents_from_file(bulk_pdf)
    key.set_canned_acl('public-read')
Example 9
    def upload_file(file_str, file_key):
        # connect to AWS S3
        s3 = S3Connection(const.AWS_ACCESS_KEY_ID, const.AWS_SECRET_ACCESS_KEY)

        # get a handle to the S3 bucket
        bucket_name = const.S3_BUCKET
        bucket = s3.get_bucket(bucket_name)
        k = Key(bucket)

        # Use Boto to upload the file to the S3 bucket
        k.key = file_key
        k.set_contents_from_string(file_str)
        k.set_canned_acl('public-read')
Example 10
def s3_upload(uploaded_file, id):
    s3conn = boto.connect_s3(AWS_ACCESS_KEY,AWS_SECRET_ACCESS_KEY)
    bucket = s3conn.get_bucket(S3_BUCKET)
    k = Key(bucket)
    k.key = 'id-' + str(id)
    k.content_type = uploaded_file.content_type

    if hasattr(uploaded_file,'temporary_file_path'):
        k.set_contents_from_filename(uploaded_file.temporary_file_path())
    else:
        k.set_contents_from_string(uploaded_file.read())
    k.set_canned_acl('public-read')
    return k.generate_url(expires_in=0, query_auth=False)
Example 11
def write_to_s3(account_number, bucket_name, key, data, encrypt=True):
    """
    Use STS to write to an S3 bucket

    :param account_number:
    :param bucket_name:
    :param data:
    """
    conn = assume_service(account_number, 's3')
    b = conn.get_bucket(bucket_name, validate=False)  # validate=False removes need for ListObjects permission

    k = Key(bucket=b, name=key)
    k.set_contents_from_string(data, encrypt_key=encrypt)
    k.set_canned_acl("bucket-owner-read")
Example 12
def uploadItemToS3(pathToImg,itemID):
    logging.info("Uploading %s (%s) to S3",itemID,pathToImg)

    bucketName = 'toast-artefacts'

    logging.info(s3Connection)

    bucket = s3Connection.get_bucket(bucketName)
    k = Key(bucket)
    k.key = 'generators/%s/%s.png' % (generatorName,itemID)
    k.set_contents_from_filename(pathToImg)
    k.set_canned_acl('public-read')

    return "https://s3-ap-southeast-2.amazonaws.com/%s/%s" % (bucketName,k.key)
Example 13
def s3upload(upload_bucket, filename, url):
    bucket = conn.get_bucket(upload_bucket)
    if bucket.get_key(filename) is None:
        print "Started downloading %s locally" % filename
        k = Key(bucket)
        k.key = filename
        file_contents = urllib2.urlopen(url)
        fp = cStringIO.StringIO(file_contents.read())
        print "Started uploading %s to s3" % filename
        k.set_contents_from_file(fp)
        k.set_canned_acl('public-read')
        print "Success uploading %s" % filename
    else:
        print "%s already uploaded" % filename
Example 14
    def upload_thumbnail(self, filename, key=None):

        k = Key(self.bucket_obj)

        #create key and upload
        k.key = key
        k.set_contents_from_filename(filename)
        #remove the original file
        os.remove(filename)
        #make thumbnail public; make_public() would just apply the
        #same 'public-read' canned ACL a second time
        k.set_canned_acl('public-read')
        #create the url and return it
        url = "http://media.dellarobbiathailand.com.s3.amazonaws.com/"+key
        return url
Example 15
def write_to_s3(account_number, bucket_name, key, data, encrypt=True):
    """
    Use STS to write to an S3 bucket

    :param account_number:
    :param bucket_name:
    :param data:
    """
    conn = assume_service(account_number, 's3')
    b = conn.get_bucket(
        bucket_name, validate=False
    )  # validate=False removes need for ListObjects permission

    k = Key(bucket=b, name=key)
    k.set_contents_from_string(data, encrypt_key=encrypt)
    k.set_canned_acl("bucket-owner-read")
Example 16
def s3_upload(uploaded_file, id):
    s3conn = boto.connect_s3(AWS_ACCESS_KEY, AWS_SECRET_ACCESS_KEY)
    bucket = s3conn.get_bucket(S3_BUCKET)

    k = Key(bucket)
    k.key = 'id-' + str(id)
    k.content_type = uploaded_file.content_type

    if hasattr(uploaded_file, 'temporary_file_path'):
        k.set_contents_from_filename(uploaded_file.temporary_file_path())
    else:
        k.set_contents_from_string(uploaded_file.read())

    k.set_canned_acl('public-read')

    return k.generate_url(expires_in=0, query_auth=False)
Example 17
    def create(cls, author_id, description, file_name, tag_list, choice_data):
        # create the post first
        # upload image to aws s3

        new_post = cls(author_id=author_id, description=description)
        db.session.add(new_post)
        db.session.commit()

        if file_name:
            k1 = Key(bucket)
            k1.key = hashlib.sha512(str(new_post.post_id)).hexdigest()
            k1.set_contents_from_file(file_name)
            k1.set_canned_acl("public-read")
            new_post.file_name = k1.key
            db.session.commit()

        # if specified tags, create tags
        if tag_list:
            tag_names = tag_list.split(",")
            for tag_name in tag_names:
                tag = Tag.get_tag_by_name(tag_name)
                if not tag:  # create a new tag if tag doesn't already exist
                    tag = Tag.create(tag_name=tag_name)
                TagPost.create(tag_id=tag.tag_id, post_id=new_post.post_id)

        # create choices
        for choice_text, choice_file in choice_data:
            if choice_file:
                if allowed_file(choice_file.filename):
                    new_choice = Choice.create(choice_text=choice_text, post_id=new_post.post_id)

                    # upload image to aws s3
                    k = Key(bucket)
                    k.key = hashlib.sha512(str(new_choice.choice_id)).hexdigest()
                    k.set_contents_from_file(choice_file)
                    k.set_canned_acl("public-read")

                    # stored the hashed file id as url
                    new_choice.file_name = k.key
                    db.session.commit()

                else:
                    flash("the file type you uploaded is not valid")
            else:
                Choice.create(choice_text=choice_text, post_id=new_post.post_id)
        return new_post
Example 18
def buildHostsFile(hostNames, awsKey, secretKey, stacks):
    print "Build /etc/hosts"
    print hostNames
    with open("/etc/hosts", "a") as hostsFile:
        for host in hostNames:
            hostIP = ""
            hostIP = str(host.split(".")[0])[3:].replace("-", ".")
            hostsFile.write(hostIP + "     " + host + "\n")

    bucketName = str(stacks) + "-ambari-repo"
    try:
        conn = boto.connect_s3(aws_access_key_id=awsKey, aws_secret_access_key=secretKey)
        bucket = conn.get_bucket(bucketName)
        print bucket
        k = Key(bucket)
        k.key = 'hosts'
        k.set_contents_from_filename("/etc/hosts")
        k.set_canned_acl('private')
    except Exception as e:
        pass
Example 19
def s3_upload_image(uploaded_file):
    s3conn = boto.connect_s3(AWS_ACCESS_KEY, AWS_SECRET_ACCESS_KEY)
    bucket = s3conn.get_bucket(S3_BUCKET)

    k = Key(bucket)

    # Unique id for every image uploaded
    # k.key = 'image-' + str(id)
    k.key = 'image-' + str(time.time()) + '-' + str(uuid.uuid4())
    print k.key
    k.content_type = uploaded_file.content_type

    if hasattr(uploaded_file, 'temporary_file_path'):
        k.set_contents_from_filename(uploaded_file.temporary_file_path())
    else:
        k.set_contents_from_string(uploaded_file.read())

    k.set_canned_acl('public-read')

    return k.generate_url(expires_in=0, query_auth=False)
Example 20
def getandprocesstask(task_queue, process_queue, table, s3_bucket, no_of_images):
    is_dup = 0
    if no_of_images < 60:
        try:
            msg = task_queue.get_messages()
            task_msg = msg[0].get_body()
            json_msg = json.loads(task_msg)
            task_queue.delete_message(msg[0])
            key = str(json_msg["task_id"])
            try:
                element = table.get_item(hash_key=key)
                is_dup = 1
            except Exception:
                is_dup = 0

            if is_dup == 0:
                element_data = {'Body': 'True'}
                element = table.new_item(hash_key=key, attrs=element_data)
                element.put()
                exe = 'wget ' + str(json_msg["task"])
                os.system(exe)
            else:
                print 'Duplicate task: ' + key
        except Exception:
            # no message available yet; wait and poll again
            print "Listening"
            time.sleep(1)
            getandprocesstask(task_queue, process_queue, table, s3_bucket, no_of_images)

    if no_of_images == 60:
        os.system("./setup.sh")
        tempcommand = 'mv output.mpg 0.mpg'
        os.system(tempcommand)
        k = Key(s3_bucket)
        # the Key attribute is lowercase 'key'; 'k.Key' would never reach S3
        k.key = "0.mpg"
        k.set_contents_from_filename("0.mpg")
        k.set_canned_acl('public-read')
        video_url = k.generate_url(0, query_auth=False, force_http=True)
        processtask(0, process_queue, video_url)
        tempcommand = 'rm -rf 0.mpg'
        os.system(tempcommand)
Example 21
def buildHostsFile(hostNames, awsKey, secretKey, stacks):
    print "Build /etc/hosts"
    print hostNames
    with open("/etc/hosts", "a") as hostsFile:
        for host in hostNames:
            hostIP = ""
            hostIP = str(host.split(".")[0])[3:].replace("-", ".")
            hostsFile.write(hostIP + "     " + host + "\n")

    bucketName = str(stacks) + "-ambari-repo"
    try:
        conn = boto.connect_s3(aws_access_key_id=awsKey,
                               aws_secret_access_key=secretKey)
        bucket = conn.get_bucket(bucketName)
        print bucket
        k = Key(bucket)
        k.key = 'hosts'
        k.set_contents_from_filename("/etc/hosts")
        k.set_canned_acl('private')
    except Exception as e:
        pass
Example 22
def uploadToS3(aws_access_key_id,
               aws_secret_access_key,
               file,
               bucket,
               key,
               callback=None,
               md5=None,
               reduced_redundancy=False,
               content_type=None):

    logger = logging.getLogger(config["logname"])
    logger.info('Starting upload.')

    try:
        size = os.fstat(file.fileno()).st_size
    except:
        # Not all file objects implement fileno(),
        # so we fall back on this
        file.seek(0, os.SEEK_END)
        size = file.tell()

    conn = boto.connect_s3(aws_access_key_id, aws_secret_access_key)
    bucket = conn.get_bucket(bucket, validate=True)
    k = Key(bucket)
    k.key = key
    if content_type:
        k.set_metadata('Content-Type', content_type)
    sent = k.set_contents_from_file(file,
                                    cb=callback,
                                    md5=md5,
                                    reduced_redundancy=reduced_redundancy,
                                    rewind=True)

    file.seek(0)
    k.set_canned_acl('public-read')

    if sent == size:
        return True
    return False
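
A minimal, assumed call site for uploadToS3; the credentials and bucket name are placeholders, and the callback receives the (bytes transmitted, total bytes) pair that boto passes to cb=:

def progress(transmitted, total):
    logging.getLogger(config["logname"]).info('%d / %d bytes', transmitted, total)

with open('/tmp/archive.tar.gz', 'rb') as fh:
    ok = uploadToS3('AKIA...', '...',       # placeholder credentials
                    fh,
                    'example-bucket',       # assumed bucket name
                    'backups/archive.tar.gz',
                    callback=progress,
                    content_type='application/gzip')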
Example 23
def imageProcess(cID, jID):
    try:
        os.system("./image.sh")
        file_name = str(cID) + str(jID) +".mpg"
        tempcommand = 'mv output.mpg ' + file_name
        os.system(tempcommand)
        print 'Uploading File'
        k = Key(bucket)
        k.key = file_name  # lowercase 'key'; 'k.Key' would silently do nothing
        k.set_contents_from_filename(file_name)
        k.set_canned_acl('public-read')
        url = k.generate_url(0, query_auth=False, force_http=True)
        print url
        m1 = Message()
        j = responseMessage(cID, jID, url)
        m1.set_body(j)
        processed_queue.write(m1)
        tempcommand = 'rm -rf ' + file_name
        os.system(tempcommand)
        global  imageCount
        imageCount = 0
    except Exception as e:
        print 'Interrupted' + str(e)
        imageCount = 0
Example 24
def uploadtoAMZN():

    # Upload files to Amazon S3
    # Change 'public-read' to 'private' if you want to manually set ACLs
    conn = boto.connect_s3()
    bucket = conn.get_bucket(bucket_name)
    k = Key(bucket)
    k.key = path.basename(filename)
    k.set_contents_from_filename(filename)
    k.set_canned_acl('public-read')

    if oggexists:
        k.key = path.basename(OggFilename)
        k.set_contents_from_filename(OggFilename)
        k.set_canned_acl('public-read')

    k.key = path.basename(iTunesFile)
    k.set_contents_from_filename(iTunesFile)
    k.set_canned_acl('public-read')
Example 25
    def upload(self, local_copy, obj):
        k = Key(self.bucket)
        k.key = obj.name
        k.set_contents_from_filename(local_copy.path, obj.headers)
        k.set_canned_acl('public-read', None)
Example 26
    def test_set_object_acl(self):
        bucket = self.conn.create_bucket(self.bucket_name)
        k = Key(bucket)
        k.key = self.key_name
        k.set_canned_acl('public-read')
        self.assertEqual(k.get_acl().to_xml(), self.prAcl(self.user1))
Example 27
# Connect to bucket
conn = S3Connection(access_key, secret_key)
bucket = conn.get_bucket(bucket_name)

qsa_urls = [] 
uploadFileNames = []

# Gather files to upload from source directory
for (dirpath, dirnames, filenames) in os.walk(sourceDir):
    uploadFileNames.extend(filenames)
    break

# Upload to S3
print 'Uploading ngram files to Amazon S3 bucket %s' % bucket_name
for filename in uploadFileNames:
    sourcepath = os.path.join(sourceDir, filename)
    destpath = os.path.join(destDir, filename)
     
    # Create new key in bucket and upload file to path
    k = Key(bucket)
    k.key = destpath
    k.set_contents_from_filename(sourcepath)
    
    # Make access to file private unless signature link is used
    k.set_canned_acl('private')
    qsa_urls.append(k.generate_url(link_expiration_time))

print 'Upload complete. Signature URLs for files: '
for url in qsa_urls:
    print url
Example 28
def edit_charge(charge_id):
    """ For updating charge information """ 
    charge_name = request.form.get('chargename')    
    date = request.form.get('date')
    descript = request.form.get('description')
    payer = request.form.get('whopaid')             
    currency = request.form.get('currency')
    paid = request.form.get('totalamt')
    user_amt = request.form.get('your-amt')         
    part_amt = request.form.get('part-amt')
    user_tot = request.form.get('your-total')
    part_tot = request.form.get('part-total')
    new_file = request.files.getlist('fileupload2')
    c_tags = request.form.get('hidden-tags')

    user = User.query.filter_by(user_id=session['user_id']).one()
    charge = Charge.query.filter_by(charge_id=charge_id).one()
    partner = User.query.filter(User.partner_id==user.partner_id, User.user_id != user.user_id).first()
    tags_available = Tag.query.filter(Tag.charges.any(charge_id=charge_id)).all()
    tags = Tag.query.filter_by(partner_id=user.partner_id).all()

    #upload any new files to s3
    for file in new_file:
        if file.mimetype == 'application/octet-stream':
            continue
        else:
            # check if uploaded file is one of the allowed types/extensions & make it secure
            if file and sep.allowed_file(file.filename):
                filename = secure_filename(file.filename)
                new_file = Files(file_name=filename,
                                 charge_id=charge_id)
                db.session.add(new_file)
                db.session.commit()
                k = Key(bucket)
                k.key = hashlib.sha512(str(new_file.file_id)).hexdigest()
                k.set_contents_from_file(file)
                k.set_canned_acl('public-read')
            else:
                flash("Invalid file type. The following are valid file types: txt, png, jpg, jpeg, gif, tiff")
                path = '/charge_edit_view/' + str(charge_id)
                return redirect(path)

    if payer == "youpaid":
        charge.payer = user.user_id
        charge.amt_owed = part_tot
    if payer == "partnerpaid":
        charge.payer = partner.user_id
        charge.amt_owed = user_tot
    if date:
        tran_date = datetime.strptime(date, "%m/%d/%Y")
        if tran_date != charge.transaction_datetime:
            charge.transaction_datetime = tran_date
    if charge_name != charge.charge_name:
        charge.charge_name = charge_name
    if paid != charge.amount:
        charge.amount = paid
    if currency != charge.currency_id: # TODO: UPDATE AFTER ADDING CURR CONVERSION
        charge.currency_id = currency
    if paid != charge.def_curr_amount: # TODO: UPDATE AFTER ADDING CURR CONVERSION
        charge.def_curr_amount = paid
    if descript != charge.description:
        charge.description = descript
    
    # update entries in tagscharges table - # TODO: need to update to use sets - don't need to overwrite every time
    charge.tags = []
    if c_tags:
        c_tags=c_tags.split(",")
        for c_tag in c_tags:
            c_tag_id = Tag.query.filter_by(tag_name=c_tag, 
                                           partner_id=user.partner_id).one()
            charge.tags.append(c_tag_id)
            db.session.commit()
    db.session.add(charge)
    db.session.commit()
    db.session.refresh(charge)
    return redirect("/dashboard")
Example 29
PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, PROJECT_ROOT)

FILE_NAME = 'latest_upload.dump'

# Full filesystem path to the project.
from myproject.settings_local_dev import AWS_SECRET_ACCESS_KEY, AWS_ACCESS_KEY_ID

conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
bucket_name = 'map-db-backup'

bucket = conn.create_bucket(bucket_name)

testfile = os.path.join(PROJECT_ROOT, FILE_NAME)

print 'Uploading %s to Amazon S3 bucket %s' % (testfile, bucket_name)


def percent_cb(complete, total):
    sys.stdout.write('.')
    sys.stdout.flush()


k = Key(bucket)
k.key = FILE_NAME + '_' + str(datetime.datetime.now().microsecond)
k.set_contents_from_filename(testfile, cb=percent_cb, num_cb=10)
k.set_canned_acl('public-read')

print '\n\nFile URL:\n\nhttps://s3.amazonaws.com/map-db-backup/%s' % k.key
Example 30
    def upload(self, local_copy, obj):
        k = Key(self.bucket)
        k.key = obj.name
        k.set_contents_from_filename(local_copy.path, obj.headers)
        k.set_canned_acl('public-read', None)
Example 31
def snail_mail_bulk_pdf_task(pdf_name, get, **kwargs):
    """Save a PDF file for all open snail mail tasks"""
    # pylint: disable=too-many-locals
    # pylint: disable=unused-argument
    cover_info = []
    bulk_merger = PdfFileMerger(strict=False)

    snails = SnailMailTaskFilterSet(
        get,
        queryset=SnailMailTask.objects.filter(resolved=False).order_by(
            'communication__foia__agency').preload_pdf(),
    ).qs

    blank_pdf = FPDF()
    blank_pdf.add_page()
    blank = StringIO(blank_pdf.output(dest='S'))
    for snail in snails:
        # generate the pdf and merge all pdf attachments
        pdf = SnailMailPDF(snail.communication, snail.category, snail.amount)
        pdf.generate()
        single_merger = PdfFileMerger(strict=False)
        single_merger.append(StringIO(pdf.output(dest='S')))
        files = []
        for file_ in snail.communication.files.all():
            if file_.get_extension() == 'pdf':
                try:
                    single_merger.append(file_.ffile)
                    files.append((file_, 'attached'))
                except (PdfReadError, ValueError):
                    files.append((file_, 'error'))
            else:
                files.append((file_, 'skipped'))
        single_pdf = StringIO()
        try:
            single_merger.write(single_pdf)
        except PdfReadError:
            cover_info.append((snail, None, files))
            continue
        else:
            cover_info.append((snail, pdf.page, files))

        # attach to the mail communication
        mail, _ = MailCommunication.objects.update_or_create(
            communication=snail.communication,
            defaults={
                'to_address': snail.communication.foia.address,
                'sent_datetime': timezone.now(),
            })
        single_pdf.seek(0)
        mail.pdf.save(
            '{}.pdf'.format(snail.communication.pk),
            ContentFile(single_pdf.read()),
        )

        # append to the bulk pdf
        single_pdf.seek(0)
        bulk_merger.append(single_pdf)
        # ensure we align for double sided printing
        if PdfFileReader(single_pdf).getNumPages() % 2 == 1:
            blank.seek(0)
            bulk_merger.append(blank)

    # prepend the cover sheet
    cover_pdf = CoverPDF(cover_info)
    cover_pdf.generate()
    if cover_pdf.page % 2 == 1:
        cover_pdf.add_page()
    bulk_merger.merge(0, StringIO(cover_pdf.output(dest='S')))

    bulk_pdf = StringIO()
    bulk_merger.write(bulk_pdf)
    bulk_pdf.seek(0)

    conn = S3Connection(settings.AWS_ACCESS_KEY_ID,
                        settings.AWS_SECRET_ACCESS_KEY)
    bucket = conn.get_bucket(settings.AWS_STORAGE_BUCKET_NAME)
    key = Key(bucket)
    key.key = pdf_name
    key.set_contents_from_file(bulk_pdf)
    key.set_canned_acl('public-read')
Example 32
version_key = Key(bucket)
version_key.key = 'version'
if version_key.exists() :
  version = int(version_key.get_contents_as_string())
  version += 1
else :
  version = 0
  
version_key.set_contents_from_string(str(version))

# upload the files
js_key = Key(bucket)
js_key.key = str(version) + '.js'
js_key.set_metadata('Cache-Control', 'max-age=31536000')
js_key.set_contents_from_filename("logic-compiled.js")
js_key.set_canned_acl('public-read')

css_key = Key(bucket)
css_key.key = str(version) + '.css'
css_key.set_metadata('Cache-Control', 'max-age=31536000')
css_key.set_contents_from_filename("combined.css")
css_key.set_canned_acl('public-read')

f = open("logic-compiled.html")
html = ''.join(f.readlines())
html = html.replace('COMPILEDCSS', str(version) + '.css')
html = html.replace('COMPILEDJS', str(version) + '.js')
html_key = Key(bucket)
html_key.key = 'logic.html'
html_key.set_metadata('Content-Type', 'text/html')
html_key.set_metadata('Cache-Control', 'max-age=60')
# upload the rebuilt page itself, following the js/css pattern above
html_key.set_contents_from_string(html)
html_key.set_canned_acl('public-read')
Example 33
PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, PROJECT_ROOT)

FILE_NAME = 'latest_upload.dump'

# Full filesystem path to the project.
from myproject.settings_local_dev import AWS_SECRET_ACCESS_KEY, AWS_ACCESS_KEY_ID


conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
bucket_name = 'map-db-backup'

bucket = conn.create_bucket(bucket_name)

testfile = os.path.join(PROJECT_ROOT, FILE_NAME)

print 'Uploading %s to Amazon S3 bucket %s' % (testfile, bucket_name)


def percent_cb(complete, total):
    sys.stdout.write('.')
    sys.stdout.flush()


k = Key(bucket)
k.key = FILE_NAME + '_' + str(datetime.datetime.now().microsecond)
k.set_contents_from_filename(testfile, cb=percent_cb, num_cb=10)
k.set_canned_acl('public-read')

print '\n\nFile URL:\n\nhttps://s3.amazonaws.com/map-db-backup/%s' % k.key
Example 34
    def test_set_object_acl(self):
        bucket = self.conn.create_bucket(self.bucket_name)
        k = Key(bucket)
        k.key = self.key_name
        k.set_canned_acl('public-read')
        self.assertEqual(k.get_acl().to_xml(), self.prAcl(self.user1))
Example 35
def add_charge():
    """ View to add a new charge """

    charge_name = request.form.get('chargename')    
    date = request.form.get('date')
    descript = request.form.get('description')
    payer = request.form.get('whopaid')             
    currency = request.form.get('currency')
    paid = request.form.get('totalamt')
    user_amt = request.form.get('your-amt')         
    part_amt = request.form.get('part-amt')
    user_tot = request.form.get('your-total')
    part_tot = request.form.get('part-total')
    files = request.files.getlist('fileupload')
    c_tags = request.form.get('hidden-tags')

    user = User.query.filter_by(user_id = session['user_id']).one()
    partner = User.query.filter(User.partner_id==user.partner_id, User.user_id != user.user_id).first()

    # convert payer to user_id; amt_owed is the amount owed to the payer
    if payer=="youpaid":
        payer_id = user.user_id
        amt_owed = part_tot
    else:
        payer_id = partner.user_id
        amt_owed = user_tot

    # convert date to datetime - set to datetime.now() if none
    if date:
        tran_date = datetime.strptime(date, "%m/%d/%Y")
    else:
        tran_date = datetime.now()
        
    new_charge = Charge(charge_name=charge_name,
                        payer=payer_id,
                        partner_id=user.partner_id,
                        amt_owed=amt_owed,
                        amount=paid,
                        currency_id=currency,
                        def_curr_amount=paid,
                        transaction_datetime=tran_date,
                        description=descript
                        )

    db.session.add(new_charge)
    db.session.commit()

    # create new rows in tagscharges table
    if c_tags:
        c_tags=c_tags.split(",")
        for c_tag in c_tags:
            c_tag_id = Tag.query.filter_by(tag_name=c_tag, 
                                           partner_id=user.partner_id).one()
            new_charge.tags.append(c_tag_id)
            db.session.commit()

    #upload documents to s3
    for file in files:
        if file.mimetype == 'application/octet-stream':
            continue
        else:
            # check if uploaded file is one of the allowed types/extensions & make it secure
            if file and sep.allowed_file(file.filename):
                filename = secure_filename(file.filename)
                new_file = Files(file_name=filename,
                                 charge_id=new_charge.charge_id)
                db.session.add(new_file)
                db.session.commit()
                k = Key(bucket)
                k.key = hashlib.sha512(str(new_file.file_id)).hexdigest()
                k.set_contents_from_file(file)
                k.set_canned_acl('public-read')
            else:
                flash("Invalid file type. The following are valid file types: txt, png, jpg, jpeg, gif, tiff")
                return redirect("/charge")
    return redirect("/dashboard")
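
Because Example 35 names each uploaded object after the SHA-512 digest of its Files row id, a hypothetical helper can rebuild the public URL from the database id alone (the bucket name is a placeholder):

def file_url(file_id, bucket_name='example-bucket'):
    digest = hashlib.sha512(str(file_id)).hexdigest()
    # the objects were stored with a 'public-read' ACL, so the plain URL resolves
    return 'https://%s.s3.amazonaws.com/%s' % (bucket_name, digest)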