# shared imports for the snippets below
import json
import logging
import os
import re
import uuid
from io import StringIO

import boto
import boto.s3
from boto.s3.key import Key

logger = logging.getLogger(__name__)


def upload_to_s3(csv_file_contents, bucket_name):
    """Upload CSV contents to the S3 bucket bound via VCAP_SERVICES."""
    vcap_services = os.getenv("VCAP_SERVICES")
    # json.loads returns the full service map, not the flat credentials
    # dict; the S3 credentials live under s3[0].credentials (see the
    # JSON shape sketched below)
    bucket_creds = json.loads(vcap_services)["s3"][0]["credentials"]
    connection = boto.s3.connect_to_region(
        bucket_creds["region"],
        aws_access_key_id=bucket_creds["access_key_id"],
        aws_secret_access_key=bucket_creds["secret_access_key"],
        is_secure=True
    )
    bucket = connection.get_bucket(bucket_creds["bucket"])
    key = Key(bucket=bucket, name=bucket_name)
    f = StringIO(csv_file_contents)
    key.size = len(csv_file_contents)  # send_file needs the size set up front
    try:
        key.send_file(f)
        return "success"
    except Exception:
        return "failed"

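The parsing in this and the later upload_to_s3 variant assumes a Cloud Foundry VCAP_SERVICES payload shaped roughly like the following; the field names are taken from the code itself, the values are placeholders:

{
  "s3": [
    {
      "credentials": {
        "access_key_id": "...",
        "additional_buckets": [],
        "bucket": "...",
        "region": "...",
        "secret_access_key": "..."
      }
    }
  ]
}
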
def put(self, s3_path, local_path):
    '''Put file from local path to s3 path'''
    if s3_path is None:
        s3_path = ''
    full_key = os.path.join(self.s3_key_prefix, s3_path)
    key = Key(self.bucket)
    key.key = full_key
    # open in binary mode so the bytes are sent unmodified; send_file
    # also needs key.size set before streaming, and the with block
    # closes the handle even if the upload raises
    key.size = os.path.getsize(local_path)
    with open(local_path, 'rb') as f:
        key.send_file(f)

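Boto 2 also offers the higher-level Key.set_contents_from_filename, which opens the file, computes the size and MD5, and uploads in one call; where the extra control of send_file isn't needed, the method above can likely be reduced to a sketch like this (same class attributes assumed):

def put(self, s3_path, local_path):
    '''Put file from local path to s3 path (higher-level variant)'''
    key = Key(self.bucket)
    key.key = os.path.join(self.s3_key_prefix, s3_path or '')
    key.set_contents_from_filename(local_path)
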
def upload_to_s3(csv_file_contents, bucket_name):
    vcap = json.loads(os.getenv("VCAP_SERVICES"))
    creds = vcap["s3"][0]["credentials"]
    # connect_to_region takes only the region positionally; the
    # credentials must be passed as keyword arguments
    connection = boto.s3.connect_to_region(
        creds["region"],
        aws_access_key_id=creds["access_key_id"],
        aws_secret_access_key=creds["secret_access_key"]
    )
    bucket = connection.get_bucket(creds["bucket"])
    key = Key(bucket=bucket, name=bucket_name)
    f = StringIO(csv_file_contents)
    key.size = len(csv_file_contents)  # send_file needs the size set first
    try:
        key.send_file(f)
        return "success"
    except Exception:
        return "failed"

def test_dangling_delete_s3(self):
    # create a bucket and upload the test files to s3
    conn = boto.connect_s3()
    conn.create_bucket('test')
    bucket = conn.get_bucket('test')
    prefix = get_prefix(self.model)
    for f in self.files:
        k = Key(bucket, '%s/%s' % (f['prefix'], f['file_path']))
        k.size = os.path.getsize(f['file_path'])  # send_file needs the size set
        with open(f['file_path'], 'rb') as test_file:
            k.send_file(test_file)
    # create 2 files in the db
    self.factory.create_batch(2)
    result = omaha.limitation.handle_dangling_files(
        self.model, prefix, self.file_fields)
    remaining = [key.name for key in conn.get_bucket('test').get_all_keys()]
    for _file in result['data']:
        self.assertNotIn(_file, remaining)

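A test like this calls boto.connect_s3() without real credentials, which only makes sense against a mocked backend; a common setup (an assumption here, not shown in the snippet) is moto's mock_s3 decorator:

from moto import mock_s3

@mock_s3
def test_dangling_delete_s3(self):
    ...  # body as above; boto's calls hit moto's in-memory S3
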
def Upload_Object(self, path, bucket):
    """
    Uploads given file into bucket

    _path_: local path to file to upload. if _path_ is a directory,
            then an exception will be raised
    _bucket_: target bucket
    """
    if os.path.isdir(path):
        raise NotImplementedError("Can't upload directories")
    s3bucket = self._conn.get_bucket(bucket)
    s3obj = Key(s3bucket)
    s3obj.key = path
    s3obj.size = os.stat(path).st_size
    logger.debug("Calling send_file for object %s", path)
    # binary mode keeps the payload byte-for-byte; the with block
    # closes the handle even if send_file raises
    with open(path, 'rb') as fp:
        s3obj.send_file(fp)

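A minimal usage sketch, assuming the enclosing class (called S3Uploader here, a hypothetical name) stores a boto connection on self._conn:

uploader = S3Uploader()  # hypothetical wrapper with self._conn = boto.connect_s3()
uploader.Upload_Object('reports/data.csv', 'my-bucket')  # path and bucket are placeholders
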
def uploadImage(imageFile):
    try:
        # create unique name and get extension
        unique_name = str(uuid.uuid1())
        fileName, fileExtension = os.path.splitext(imageFile.name)
        # validate fileExtension (raw string avoids the invalid-escape warning)
        if not re.match(r'\.(gif|png|jpg|jpeg)$', fileExtension.lower()):
            raise Exception("unsupported image extension")
        # get bucket and upload image file
        bucket = getS3Bucket()
        key = Key(bucket)
        key.key = unique_name + fileExtension
        key.size = imageFile.size  # size must be set in order for this to work
        key.send_file(imageFile)
        return True, unique_name
    except Exception as e:
        print(e)
        return False, None

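The .name/.size attributes used above match Django's UploadedFile, so a view might call it like this (a sketch under that assumption; 'image' is a hypothetical form field name):

from django.http import HttpResponse, HttpResponseBadRequest

def upload_view(request):
    success, image_name = uploadImage(request.FILES['image'])
    if not success:
        return HttpResponseBadRequest("invalid image")
    return HttpResponse(image_name)  # image_name is the stored object's key prefix
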
def sendFile(self, key, fp):
    # send_file streams an already-open file object; the key's size has
    # to be set first or boto cannot compute the Content-Length header
    k = Key(self.BUCKET)
    k.key = key
    k.size = os.fstat(fp.fileno()).st_size
    k.send_file(fp)