def get_s3_url(self):
    """Return a signed query-string S3 URL for this object's stored file."""
    import secrets
    import S3
    import defaults

    # Generator signs the request with the project's AWS credentials.
    query_gen = S3.QueryStringAuthGenerator(secrets.AWS_ID,
                                            secrets.AWS_SECRET_KEY)
    return query_gen.get(defaults.bucket, self.get_real_name())
def setUp(self):
    """Create the query-string generator and a matching S3 connection.

    The connection scheme (HTTPS vs. HTTP) follows the generator's
    ``is_secure`` flag so signed URLs and the test connection agree.
    """
    self.generator = S3.QueryStringAuthGenerator(AWS_ACCESS_KEY_ID,
                                                 AWS_SECRET_ACCESS_KEY)
    # Truth-test the flag directly; comparing '== True' is redundant (PEP 8).
    if self.generator.is_secure:
        self.connection = httplib.HTTPSConnection(self.generator.server_name)
    else:
        self.connection = httplib.HTTPConnection(self.generator.server_name)
def list_s3(request):
    """ List Amazon S3 bucket contents """
    if S3 is None:
        # S3 support library is unavailable; render an empty listing.
        return direct_to_template(request, 'export/list_s3.html',
                                  {'object_list': [], 's3support': False})

    conn = S3.AWSAuthConnection(settings.AWS_ACCESS_KEY_ID,
                                settings.AWS_SECRET_ACCESS_KEY)
    generator = S3.QueryStringAuthGenerator(
        settings.AWS_ACCESS_KEY_ID,
        settings.AWS_SECRET_ACCESS_KEY,
        calling_format=S3.CallingFormat.VANITY)
    generator.set_expires_in(300)  # signed URLs stay valid for 5 minutes

    entries = []
    for entry in conn.list_bucket(settings.AWS_BUCKET_NAME).entries:
        # Attach a signed download URL to each bucket entry.
        entry.s3url = generator.get(settings.AWS_BUCKET_NAME, entry.key)
        entries.append(entry)

    return direct_to_template(request, 'export/list_s3.html',
                              {'object_list': entries, 's3support': True})
def files(request, project_name):
    """Files for a project.

    Shows the files uploaded for a project.
    Actions available:
    Add files: Owner Participant
    """
    project = get_project(request, project_name)
    gen = S3.QueryStringAuthGenerator(secrets.AWS_ID, secrets.AWS_SECRET_KEY)
    addfileform = bforms.AddFileForm(project=project, user=request.user)
    if request.method == 'POST':
        # 'in' membership works on QueryDict in both Python 2 and 3;
        # dict.has_key() was removed in Python 3.
        if 'Addfile' in request.POST:
            addfileform = bforms.AddFileForm(project, request.user,
                                             request.POST, request.FILES)
            if addfileform.is_valid():
                addfileform.save()
                return HttpResponseRedirect('.')
        if 'fileid' in request.POST:
            fileid = int(request.POST['fileid'])
            # NOTE(review): 'file' shadows the builtin, but the name is kept
            # because locals() below supplies the template context.
            file = ProjectFile.objects.get(project=project, id=fileid)
            conn = S3.AWSAuthConnection(secrets.AWS_ID, secrets.AWS_SECRET_KEY)
            # Remove every stored revision from S3 before deleting the DB row.
            for revision in file.projectfileversion_set.all():
                conn.delete(defaults.bucket, revision.revision_name)
            file.delete()
    payload = locals()
    return render(request, 'project/files.html', payload)
import time import sys AWS_ACCESS_KEY_ID = '<INSERT YOUR AWS ACCESS KEY ID HERE>' AWS_SECRET_ACCESS_KEY = '<INSERT YOUR AWS SECRET ACCESS KEY HERE>' # remove these next two lines when you've updated your credentials. #print "update s3-driver.py with your AWS credentials" #sys.exit(); # convert the bucket to lowercase for vanity domains # the bucket name must be lowercase since DNS is case-insensitive BUCKET_NAME = AWS_ACCESS_KEY_ID.lower() + '-test-bucket' KEY_NAME = 'test-key' conn = S3.AWSAuthConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY) generator = S3.QueryStringAuthGenerator(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY) # Check if the bucket exists. The high availability engineering of # Amazon S3 is focused on get, put, list, and delete operations. # Because bucket operations work against a centralized, global # resource space, it is not appropriate to make bucket create or # delete calls on the high availability code path of your application. # It is better to create or delete buckets in a separate initialization # or setup routine that you run less often. if (conn.check_bucket_exists(BUCKET_NAME).status == 200): print '----- bucket already exists! -----' else: print '----- creating bucket -----' print conn.create_located_bucket(BUCKET_NAME, S3.Location.DEFAULT).message # to create an EU bucket
def setUp(self):
    """Build a non-secure (plain HTTP) generator and connection for tests."""
    # The third positional argument (False) disables HTTPS (is_secure flag).
    generator = S3.QueryStringAuthGenerator(
        AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, False)
    self.generator = generator
    self.connection = httplib.HTTPConnection(generator.server_name)