Example #1
def S3Upload(upload_name, fileObj, bucket_name=None):
    print 'check upload args'
    if not bucket_name:
        raise ValueError('No Bucket Name')

    print 'conn'
    conn = S3.AWSAuthConnection(config.AWS_ACCESS_KEY_ID,
                                config.AWS_SECRET_ACCESS_KEY)

    content_type = mimetypes.guess_type(upload_name)[0]
    if not content_type:
        content_type = 'text/plain'
    print 'conn put'
    st = conn.put(bucket_name, upload_name, S3.S3Object(fileObj), {
        'x-amz-acl': 'public-read',
        'Content-Type': content_type
    })
    print 'end conn put'
    resp = st.http_response
    print 'resp', resp, resp.status
    if 200 != resp.status:
        print 'upload failed'
        print resp.msg
        return False
    print 'upload succeeded'
    return True
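
A minimal usage sketch for S3Upload, assuming the same config module is importable and the bucket already exists; the file name and bucket name below are hypothetical:

# Hypothetical call: 'report.pdf' and 'my-bucket' are placeholders.
data = open('report.pdf', 'rb').read()
if S3Upload('report.pdf', data, bucket_name='my-bucket'):
    print 'report.pdf is now publicly readable in my-bucket'
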
Example #2
 def connect_s3(self):
     if tornado.options.options.shunt_s3:
         logging.info('skipping s3 connection --shunt-s3')
         return
     aws_access_key_id = _utf8(tornado.options.options.aws_key)
     aws_secret_access_key = _utf8(tornado.options.options.aws_secret)
     self.conn = S3.AWSAuthConnection(aws_access_key_id,
                                      aws_secret_access_key)
Example #3
def upload(filename):
    conn = S3.AWSAuthConnection(config.S3_ACCESS_KEY, config.S3_SECRET)
    result = conn.check_bucket_exists(config.S3_BUCKET)
    if result.status != 200:
        result = conn.create_located_bucket(config.S3_BUCKET, S3.Location.DEFAULT)

    result = conn.put(config.S3_BUCKET, os.path.basename(filename),
                      read_file(filename))
    assert 200 == result.http_response.status

    print "File %s successfully backed up to S3 (with same filename)." % filename
Example #4
def upload_s3(fname, mimetype, uname=''):
	if not uname:
		uname = os.path.basename(fname)

	filedata = open(fname, 'rb').read()

	conn = S3.AWSAuthConnection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
	conn.put(settings.BUCKET_NAME, uname, S3.S3Object(filedata),
		{'x-amz-acl': 'public-read', 'Content-Type': mimetype})
Example #5
def list_files():
    conn = S3.AWSAuthConnection(config.S3_ACCESS_KEY, config.S3_SECRET)
    result = conn.check_bucket_exists(config.S3_BUCKET)
    if result.status != 200:
        result = conn.create_located_bucket(config.S3_BUCKET,
                                            S3.Location.DEFAULT)

    result = conn.list_bucket(config.S3_BUCKET)
    assert 200 == result.http_response.status
    print "Size\t\tKey"
    for entry in result.entries:
        print "%s\t%s" % (entry.size, entry.key)
Example #6
def retrieve(filename):
    conn = S3.AWSAuthConnection(config.S3_ACCESS_KEY, config.S3_SECRET)
    assert 200 == conn.check_bucket_exists(config.S3_BUCKET).status

    result = conn.get(config.S3_BUCKET, filename)
    assert 200 == result.http_response.status

    f = open(filename, "wb")  # binary mode: the object data may not be text
    f.write(result.object.data)
    f.close()

    print "File %s successfully retrieved (with same filename)." % filename
Example #7
 def __init__(self, access_key_id, secret_access_key, bucket,
              key_length=8, publish_domain=None, http=None,
              generate_key=generate_random_word):
     if publish_domain is None:
         publish_domain = '%s.%s' % (bucket, S3.DEFAULT_HOST)
     if http is None:
         http = httplib2.Http()
     self.conn = S3.AWSAuthConnection(access_key_id, secret_access_key)
     self.bucket = bucket
     self.key_length = key_length
     self.publish_domain = publish_domain
     self.http = http
     self.generate_key = generate_key
     if self.conn.check_bucket_exists(bucket).status == 404:
         self.conn.create_located_bucket(bucket, S3.Location.DEFAULT)
Example #8
def update_s3():
    conn = S3.AWSAuthConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    for line in sys.stdin:
        filename = os.path.normpath(line.rstrip('\n'))
        if filename == '.' or not os.path.isfile(filename):
            continue  # Skip this, because it's not a file.
        print "Uploading %s" % filename
        filedata = open(filename, 'rb').read()
        content_type = mimetypes.guess_type(filename)[0]
        if not content_type:
            content_type = 'text/plain'
        conn.put(BUCKET_NAME, filename, S3.S3Object(filedata), {
            'x-amz-acl': 'public-read',
            'Content-Type': content_type
        })
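
update_s3 reads file paths from standard input, one per line, so a driver script is just a call to it; the script name in the comment is hypothetical:

# Assuming this code lives in update_s3.py, a typical invocation is:
#   find . -type f | python update_s3.py
if __name__ == '__main__':
    update_s3()
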
Example #9
def list_s3(request):
    """
    List Amazon S3 bucket contents

    """
    if S3 is not None:
        conn = S3.AWSAuthConnection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
        generator = S3.QueryStringAuthGenerator(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY, calling_format=S3.CallingFormat.VANITY)
        generator.set_expires_in(300)
        bucket_entries = conn.list_bucket(settings.AWS_BUCKET_NAME).entries
        entries = []
        for entry in bucket_entries:
            entry.s3url = generator.get(settings.AWS_BUCKET_NAME, entry.key)
            entries.append(entry)
        return direct_to_template(request, 'export/list_s3.html', {'object_list': entries, 's3support': True})
    else:
        return direct_to_template(request, 'export/list_s3.html', {'object_list': [], 's3support': False})
Example #10
 def __init__(self, upload_to='', stored_file_implementation=StoredFile):
     # Try to work around a bug in the S3 code, which uses bad day names:
     # http://code.google.com/p/boto/issues/detail?id=140
     # (the workaround below doesn't actually work)
     #import locale
     #locale.setlocale(locale.LC_TIME, 'en_US.utf8')
     #print 'create S3 storage'
     import settings
     import S3
     self.upload_to = upload_to
     conn = S3.AWSAuthConnection(settings.AWS_ACCESS_KEY_ID,
                                 settings.AWS_SECRET_ACCESS_KEY)
     #_generator = S3.QueryStringAuthGenerator(settings.AWS_ACCESS_KEY_ID,
     #                                         settings.AWS_SECRET_ACCESS_KEY)
     if conn.check_bucket_exists(settings.AWS_BUCKET_NAME).status != 200:
         conn.create_located_bucket(settings.AWS_BUCKET_NAME,
                                    settings.AWS_LOCATION)
Example #11
def publish(filepath, s3bucket, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY,
            version):
    filename = filepath.split("/")[-1]
    s3key = "/".join([p['release.type'], p['project.key'], filename])

    print "Reading in content from %s" % filepath
    filedata = open(filepath, "rb").read()

    filehash = _sha(filedata).hexdigest()

    print "Preparing to upload %s to %s/%s" % (filename, s3bucket, s3key)

    content_type = mimetypes.guess_type(filename)[0]
    if content_type is None:
        content_type = 'text/plain'

    print "File appears to be %s" % content_type

    print "Connecting to S3..."
    conn = S3.AWSAuthConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)

    print "Checking if bucket %s exists..." % s3bucket
    check = conn.check_bucket_exists(s3bucket)
    if check.status == 200:
        print "Uploading %s to %s/%s" % (filename, s3bucket, s3key)
        print conn.put(
            s3bucket, s3key, S3.S3Object(filedata), {
                'Content-Type': content_type,
                'x-amz-acl': 'public-read',
                'x-amz-meta-project.name': 'Spring Python',
                'x-amz-meta-release.type': p['release.type'],
                'x-amz-meta-bundle.version': version,
                'x-amz-meta-package.file.name': filename
            }).message

        print "Uploading SHA1 digest to %s/%s" % (s3bucket, s3key + '.sha1')
        print conn.put(s3bucket, s3key + '.sha1',
                       S3.S3Object(filehash + ' ' + filename + "\n"), {
                           'Content-Type': content_type,
                           'x-amz-acl': 'public-read'
                       }).message
    else:
        print "Error code %s: Unable to publish" % check.status
Example #12
 def save(self):
     conn = S3.AWSAuthConnection(secrets.AWS_ID, secrets.AWS_SECRET_KEY)
     uploaded_filename = self.cleaned_data['filename'].name
     filename = '/%s/%s' % (self.project, uploaded_filename)
     content = self.cleaned_data['filename'].read()
     try:
         old_file = self.project.projectfile_set.get(
             filename=uploaded_filename)
         versions = old_file.projectfileversion_set.all().count()
         split_f = filename.rsplit('.', 1)
         name_no_ext = ''.join(split_f[:-1])
         filename = '%s-%s.%s' % (name_no_ext, versions + 1, split_f[-1])
         response = conn.put(defaults.bucket, filename, content)
         saved_file = old_file
         saved_file_revision = ProjectFileVersion(file=saved_file,
                                                  revision_name=filename,
                                                  user=self.user,
                                                  size=len(content))
         saved_file_revision.save()
         saved_file.current_revision = saved_file_revision
         saved_file.total_size += saved_file_revision.size
         saved_file.save()
     except ProjectFile.DoesNotExist:
         split_f = filename.rsplit('.', 1)
         name_no_ext = ''.join(split_f[:-1])
         filename = '%s-%s.%s' % (name_no_ext, 1, split_f[-1])
         response = conn.put(defaults.bucket, filename, content)
         saved_file = ProjectFile(project=self.project,
                                  filename=uploaded_filename,
                                  total_size=0)
         saved_file.save()
         saved_file_revision = ProjectFileVersion(file=saved_file,
                                                  revision_name=filename,
                                                  user=self.user,
                                                  size=len(content))
         saved_file_revision.save()
         saved_file.current_revision = saved_file_revision
         saved_file.total_size = saved_file_revision.size
         saved_file.save()
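
The rsplit logic above derives each versioned S3 key from the original name; a standalone sketch of the same naming scheme (the sample path is hypothetical):

split_f = '/myproject/readme.txt'.rsplit('.', 1)
name_no_ext = ''.join(split_f[:-1])
print '%s-%s.%s' % (name_no_ext, 2, split_f[-1])
# prints: /myproject/readme-2.txt

Note that this assumes the filename contains a dot; a name with no extension would produce a malformed key.
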
Example #13
def push_media_to_s3(subpath, content_type):
    """
    Upload a subpath of the media directory to S3.
    """
    if not settings.USE_S3:
        return
    import S3
    conn = S3.AWSAuthConnection(settings.S3_ACCESS_KEY, settings.S3_SECRET_KEY)
    localPath = os.path.join(settings.MEDIA_ROOT, subpath)
    obj = S3.S3Object(file(localPath).read())
    tries = 5
    while True:
        try:
            conn.put(settings.S3_BUCKET, settings.S3_PATH + subpath, obj, {
                'Content-Type': content_type,
                'x-amz-acl': 'public-read'
            })
        except:
            # Retry on any failure; re-raise once all five attempts fail.
            tries -= 1
            if not tries:
                raise
        else:
            return
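
The loop above retries immediately after a failure; a minimal sketch of the same pattern with a short pause between attempts (the helper name and the one-second delay are assumptions, not part of the original):

import time

def put_with_retries(conn, bucket, key, obj, headers, tries=5):
    # Same retry shape as push_media_to_s3, plus a pause between attempts.
    while True:
        try:
            conn.put(bucket, key, obj, headers)
        except:
            tries -= 1
            if not tries:
                raise
            time.sleep(1)  # hypothetical backoff before the next attempt
        else:
            return
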
Example #14
def files(request, project_name):
    """Files for a project. Shows the files uploaded for a project.
    Actions available:
    Add files:  Owner Participant
    """
    project = get_project(request, project_name)
    gen = S3.QueryStringAuthGenerator(secrets.AWS_ID, secrets.AWS_SECRET_KEY)
    addfileform = bforms.AddFileForm(project=project, user=request.user)
    if request.method == 'POST':
        if request.POST.has_key('Addfile'):
            addfileform = bforms.AddFileForm(project, request.user,
                                             request.POST, request.FILES)
            if addfileform.is_valid():
                addfileform.save()
                return HttpResponseRedirect('.')
        if request.POST.has_key('fileid'):
            fileid = int(request.POST['fileid'])
            file = ProjectFile.objects.get(project=project, id=fileid)
            conn = S3.AWSAuthConnection(secrets.AWS_ID, secrets.AWS_SECRET_KEY)
            for revision in file.projectfileversion_set.all():
                conn.delete(defaults.bucket, revision.revision_name)
            file.delete()
    payload = locals()
    return render(request, 'project/files.html', payload)
Example #15
def export_to_s3(request):
    """
    Dump the database and upload the dump to Amazon S3

    """
    if request.method == 'POST':
        if settings.DATABASE_ENGINE == 'mysql':
            cmd = MYSQLDUMP_CMD % (settings.DATABASE_HOST, settings.DATABASE_USER, settings.DATABASE_PASSWORD, settings.DATABASE_NAME)
        elif settings.DATABASE_ENGINE == 'sqlite3':
            cmd = SQLITE3DUMP_CMD % settings.DATABASE_NAME
        else:
            raise ImproperlyConfigured, "Sorry, django-export only supports mysql and sqlite3 database backends."
        stdin, stdout = os.popen2(cmd)
        stdin.close()
        file_name = 'dump_%s.sql.bz2' % time.strftime('%Y%m%d-%H%M')
        conn = S3.AWSAuthConnection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
        res = conn.put(settings.AWS_BUCKET_NAME, file_name, S3.S3Object(stdout.read()), {'Content-Type': 'application/x-bzip2',})
        if res.http_response.status == 200:
            request.user.message_set.create(message="%s" % _(u"%(filename)s saved on Amazon S3") % {'filename': file_name})
        else:
            request.user.message_set.create(message="%s" % _(u"Upload failed with %(status)s") % {'status': res.http_response.status})
        stdout.close()
        return HttpResponseRedirect('/admin/')
    return direct_to_template(request, 'export/export.html', {'what': _(u'Export Database to S3'), 's3support': (S3 is not None), 's3': True})
Example #16
 def conn(self):
     try:
         return self._conn
     except AttributeError:
         self._conn = S3.AWSAuthConnection(opts.key, opts.secret)
         return self._conn
Example #17
from urllib2 import URLError
import os
import re
import random
import secret as s
import fileinput
import S3
import time
import threading
import sys
import pdb
import json
import mapreduce
from claim_from_body import claims_from_html2

conn = S3.AWSAuthConnection(s.amazon_access, s.amazon_secret)

num_threads = 200

urls = []
downloaded = {}


def download_urls():
    threads = []
    for num in range(0, num_threads):
        t = DownloadThread()
        t.start()
        threads.append(t)
    for t in threads:
        t.join()
Example #18
import S3

AWS_ACCESS_KEY_ID = 'YOUR-ACCESS-KEY-ID'          # placeholder, not a real key
AWS_SECRET_ACCESS_KEY = 'YOUR-SECRET-ACCESS-KEY'  # placeholder, not a real key

# for subdomains (bucket.s3.amazonaws.com),
# the bucket name must be lowercase since DNS is case-insensitive
# BUCKET_NAME = "%s-test-bucket" % AWS_ACCESS_KEY_ID.lower();
BUCKET_NAME = "spajic"

conn = S3.AWSAuthConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
response = conn.list_bucket(BUCKET_NAME)
print response.entries[0].key
print response.entries[1].key
print response.entries[2].key

text = 'this is a test'
key = 'example.txt'
response = conn.put(BUCKET_NAME, key, text)
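
A hypothetical follow-up that reads the object back and verifies the round trip, using the same get/.object.data access shown in Example #6:

response = conn.get(BUCKET_NAME, key)
assert response.object.data == text
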
Example #19
import S3
conn = S3.AWSAuthConnection('YOUR-ACCESS-KEY-ID', 'YOUR-SECRET-ACCESS-KEY')
conn.delete('s3map', 'lib/s3map-1.0.jar')
Example #20
 def setUp(self):
     self.conn = S3.AWSAuthConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)