Exemplo n.º 1
0
def store_uploaded_file(f, name):
    """Spool an uploaded file to /tmp, upload it to S3, and return its URL.

    :param f: uploaded file object exposing ``chunks()`` (Django upload).
    :param name: unused here — kept for backward compatibility with callers.
    :return: public HTTP URL of the stored ``.jpg`` object.
    """
    img_dir = '/tmp/image/'
    if not os.path.exists(img_dir):
        os.mkdir(img_dir)
    file_name = id_generator()
    path = '%s%s' % (img_dir, file_name)
    # Spool the upload to disk chunk by chunk.
    with open(path, 'wb+') as destination:
        for chunk in f.chunks():
            destination.write(chunk)

    calling_format = boto.s3.connection.OrdinaryCallingFormat()
    connection = boto.s3.connection.S3Connection(
        aws_access_key_id=settings.IMAGECRUD['access_key'],
        aws_secret_access_key=settings.IMAGECRUD['secret_key'],
        is_secure=False,
        host=settings.IMAGECRUD['s3_host'],
        port=settings.IMAGECRUD['s3_port'],
        calling_format=calling_format,
        path=settings.IMAGECRUD['s3_path'])

    # Only an S3-level failure should trigger bucket creation; the previous
    # bare except swallowed everything, including KeyboardInterrupt.
    try:
        bucket = connection.get_bucket(settings.IMAGECRUD['img_bucket'])
    except boto.exception.S3ResponseError:
        bucket = connection.create_bucket(settings.IMAGECRUD['img_bucket'])

    # Fresh random key name; extension is always .jpg regardless of input.
    key_name = '%s.jpg' % id_generator()
    key = bucket.new_key(key_name)
    key.set_contents_from_filename(path)
    key.set_canned_acl('public-read')
    key.close()

    return 'http://%s:%s%s/%s/%s' % (
        settings.IMAGECRUD['s3_host'], settings.IMAGECRUD['s3_port'],
        settings.IMAGECRUD['s3_path'], settings.IMAGECRUD['img_bucket'],
        key_name)
Exemplo n.º 2
0
def store_uploaded_file(f, name):
    """Persist an uploaded file locally, push it to S3, and return its URL.

    :param f: uploaded file object exposing ``chunks()`` (Django upload).
    :param name: unused here — presumably kept for the caller's signature.
    :return: public HTTP URL of the stored ``.jpg`` object.
    """
    img_dir = '/tmp/image/'
    if not os.path.exists(img_dir):
        os.mkdir(img_dir)
    file_name = id_generator()
    path = '%s%s' % (img_dir, file_name)
    # Spool the upload to disk chunk by chunk.
    with open(path, 'wb+') as destination:
        for chunk in f.chunks():
            destination.write(chunk)

    calling_format = boto.s3.connection.OrdinaryCallingFormat()
    connection = boto.s3.connection.S3Connection(
        aws_access_key_id=settings.IMAGECRUD['access_key'],
        aws_secret_access_key=settings.IMAGECRUD['secret_key'],
        is_secure=False,
        host=settings.IMAGECRUD['s3_host'],
        port=settings.IMAGECRUD['s3_port'],
        calling_format=calling_format,
        path=settings.IMAGECRUD['s3_path'])

    # NOTE(review): bare except — treats ANY failure as "bucket missing".
    try:
        bucket = connection.get_bucket(settings.IMAGECRUD['img_bucket'])
    except:
        bucket = connection.create_bucket(settings.IMAGECRUD['img_bucket'])

    # Fresh random key name; extension is always .jpg regardless of input.
    key_name = '%s.jpg' % id_generator()
    key = bucket.new_key(key_name)
    key.set_contents_from_filename(path)
    key.set_canned_acl('public-read')
    key.close()

    return 'http://%s:%s%s/%s/%s' % (
        settings.IMAGECRUD['s3_host'], settings.IMAGECRUD['s3_port'],
        settings.IMAGECRUD['s3_path'], settings.IMAGECRUD['img_bucket'],
        key_name)
Exemplo n.º 3
0
def _upload_file_to_bucket(connection, filename, new_name):
    """Upload *filename* under a generated path for *new_name* and
    return the public URL it will be served from."""
    remote_path = _generate_path_name_for(new_name)
    target_bucket = connection.get_bucket(BUCKET_NAME)
    entry = boto.s3.key.Key(target_bucket)
    entry.key = remote_path
    entry.set_contents_from_filename(filename)
    # World-readable so the URL below works without signing.
    entry.set_acl('public-read')
    return '{}/{}'.format(SERVER_URL, remote_path)
Exemplo n.º 4
0
 def get_object(self, file_name):
     """Download *file_name* from the default bucket to a local file.

     NOTE(review): the destination is hard-coded to 'my_image.jpeg'
     regardless of *file_name* — confirm this is intentional.
     """
     image = "my_image.jpeg"
     connection = boto.connect_s3(settings.AWS_ACCESS_KEY_ID,
                                  settings.AWS_SECRET_ACCESS_KEY)
     bucket = connection.get_bucket(self.BUCKETS.get('default'))
     # Get the Key object of the given key, in the bucket
     k = Key(bucket, file_name)
     # Get the contents of the key into a file
     k.get_contents_to_filename(image)
Exemplo n.º 5
0
 def __init__(self, bucket_name):
     """Open *bucket_name* in the fixed ap-south-1 region and precompute
     the public base URL for objects in it."""
     region = "ap-south-1"
     connection = boto.s3.connect_to_region(region,
        aws_access_key_id = AWS_ACCESS_KEY_ID,
        aws_secret_access_key = AWS_SECRET_ACCESS_KEY,
        is_secure=True,
        calling_format = boto.s3.connection.OrdinaryCallingFormat())
     self.bucket = connection.get_bucket(bucket_name)
     # Publicly addressable prefix for keys in this bucket.
     self._base_url = "https://s3-%s.amazonaws.com/%s/" % (region, bucket_name)
Exemplo n.º 6
0
 def __init__(self, bucket_name):
     """Open *bucket_name* in the fixed ap-south-1 region and precompute
     the public base URL for objects in it."""
     region = "ap-south-1"
     connection = boto.s3.connect_to_region(
         region,
         aws_access_key_id=AWS_ACCESS_KEY_ID,
         aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
         is_secure=True,
         calling_format=boto.s3.connection.OrdinaryCallingFormat())
     self.bucket = connection.get_bucket(bucket_name)
     # Publicly addressable prefix for keys in this bucket.
     self._base_url = "https://s3-%s.amazonaws.com/%s/" % (region,
                                                           bucket_name)
Exemplo n.º 7
0
    def delete(self, key):
        """Delete *key* from the default bucket.

        :param key: The filename that was saved.
        :return: dict ``{'success': bool, 'data': key}`` — success flag
                 plus the key that was targeted.
        """
        try:
            connection = boto.s3.connect_to_region(
                settings.AWS_LOCATION,
                aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
                aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY)

            bucket = connection.get_bucket(self.BUCKETS.get('default'))
            # The original re-fetched the bucket via get_bucket(bucket),
            # passing a Bucket object where a bucket *name* is expected;
            # operate on the bucket we already have.
            delete_content = Key(bucket)
            delete_content.key = key
            bucket.delete_key(delete_content)
            data = {'success': True, 'data': key}
        except Exception:
            # Best-effort: report failure rather than propagate.
            data = {'success': False, 'data': key}
        return data
def data():
    """Persist the posted form field 'data' as a new public S3 object.

    Returns (json, status): 201 on success, 500 with the error message
    on any failure.
    """
    try:
        payload = request.form["data"]
        connection = boto.connect_s3()
        # update with your S3 bucket name here
        bucket = connection.get_bucket("test", validate=False)
        entry = Key(bucket)
        # Random, collision-resistant object name.
        entry.key = uuid.uuid4()
        entry.set_contents_from_string(payload)
        entry.make_public()
        return jsonify({"status": "success"}), 201
    except Exception as exception:
        return jsonify({"status": "error", "message": str(exception)}), 500
Exemplo n.º 9
0
def _file_already_exists_in_server(server, filename):
    '''
    Test if the filename already exists in the S3 server.

    True if exists, false otherwise.
    '''
    pathname = _generate_path_name_for(filename)
    bucket_name = server.path
    connection = boto.connect_s3(
        aws_access_key_id=_get_aws_access_key_id(),
        aws_secret_access_key=_get_aws_secret_access_key())
    try:
        bucket = connection.get_bucket(bucket_name)
        # get_key returns None for a missing key.
        key = bucket.get_key(pathname)
    finally:
        # Release the HTTP connection even if the lookup fails.
        connection.close()
    return key is not None
def image():
	"""Store the posted 'data' form field as a new public S3 object.

	Returns (json, status): 201 with the generated guid on success,
	500 with the error message otherwise. Python 2 print statements
	are used for ad-hoc request logging.
	"""
	try:
		print request.form
		data = request.form["data"]
		connection = boto.connect_s3()
		bucket_name = "shashin-test"
		bucket = connection.get_bucket(bucket_name, validate = False)
		key = Key(bucket)
		# Random, collision-resistant object name.
		guid = uuid.uuid4()
		key.key = guid
		key.set_contents_from_string(data)
		key.make_public()
		return jsonify({"status" : "success", "guid" : guid}), 201
	except Exception as exception:
		print request.headers
		print str(exception)
		return jsonify({"status" : "error", "message" : str(exception)}), 500
Exemplo n.º 11
0
def image():
    """Store the posted 'data' form field as a new public S3 object.

    Returns (json, status): 201 with the generated guid on success,
    500 with the error message otherwise. Python 2 print statements
    are used for ad-hoc request logging.
    """
    try:
        print request.form
        data = request.form["data"]
        connection = boto.connect_s3()
        bucket_name = "shashin-test"
        bucket = connection.get_bucket(bucket_name, validate=False)
        key = Key(bucket)
        # Random, collision-resistant object name.
        guid = uuid.uuid4()
        key.key = guid
        key.set_contents_from_string(data)
        key.make_public()
        return jsonify({"status": "success", "guid": guid}), 201
    except Exception as exception:
        print request.headers
        print str(exception)
        return jsonify({"status": "error", "message": str(exception)}), 500
Exemplo n.º 12
0
def make_case_insensitive(bucket, access, secret, key):
    """ Get filename permutations """
    # filename_permutations() populates the module-level `filenames` list
    # with case variants of the basename — presumably; confirm in its def.
    global filenames
    filenames = []
    base = os.path.basename(key)
    directory = os.path.dirname(key)

    filename_permutations(base)

    # Third positional argument is is_secure=True.
    conn = boto.s3.connection.S3Connection(access, secret, True)
    target = conn.get_bucket(bucket)

    # Redirect every variant (except the canonical name) to the real key.
    for candidate in filenames:
        if candidate != base:
            redirect_key = target.new_key(os.path.join(directory, candidate))
            redirect_key.set_redirect(key)
Exemplo n.º 13
0
def _upload_file_to_bucket(server, filename):
    '''
    Upload the file to the bucket and return the URL to serve that file.

    Using the server, upload filename to a bucket. The bucket
    is in server.path. After that, use server.url to generate
    the URL that will be used to serve the image from now on
    and return that URL.
    '''
    _, filename_part = os.path.split(filename)
    pathname = _generate_path_name_for(filename_part)
    bucket_name = server.path
    connection = boto.connect_s3(
        aws_access_key_id=_get_aws_access_key_id(),
        aws_secret_access_key=_get_aws_secret_access_key())
    try:
        bucket = connection.get_bucket(bucket_name)
        key = boto.s3.key.Key(bucket)
        key.key = pathname
        key.set_contents_from_filename(filename)
        # World-readable so the returned URL works without signing.
        key.set_acl('public-read')
    finally:
        # Release the HTTP connection even if the upload fails.
        connection.close()
    return '{}/{}'.format(server.url, pathname)
Exemplo n.º 14
0
    def upload(self, image, key='image'):
        """Upload *image* to S3 via boto2, falling back to boto3 on failure.

        :param image: uploaded file wrapper; ``image.file`` is the stream.
        :param key: provisional key name for the first write attempt.
        :return: dict with 'success', 'data' (public URL) and 'file_key'
                 on success; the default failure dict otherwise.
        """
        response = {'success': False, 'data': ''}
        try:
            # no host no is_secure as not using ssl
            connection = boto.s3.connect_to_region(
                'ap-south-1',
                aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
                aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
                calling_format=boto.s3.connection.OrdinaryCallingFormat())

            bucket = connection.get_bucket(self.BUCKETS.get('default'))
            k = Key(bucket)
            k.key = key
            try:
                k.set_contents_from_file(image.file)
                file_key = S3Upload.generate_uid_for_file()
                upload_content = bucket.new_key(file_key)
                # rewind=True re-reads the stream from the start.
                upload_content.set_contents_from_file(image, rewind=True)
                upload_content.make_public()

            # NOTE(review): bare except — ANY failure above (not just the
            # upload itself) silently switches to the boto3 path below.
            except:
                image = modify_image(image)
                client = boto3.client('s3')

                # boto3.resource('s3').ObjectAcl('bucket_name', 'object_key').put(ACL='public-read')
                # file_key becomes the ETag with surrounding quote chars
                # stripped via [1:-2].
                file_key = client.put_object(
                    Body=image,
                    Bucket=self.BUCKETS.get('default'),
                    Key='Image.aws.txt',
                    ACL='public-read').get('ETag')[1:-2]
            return {
                'success': True,
                'data': settings.BUCKET_DOMAIN + file_key,
                'file_key': file_key
            }

        except Exception as e:
            # log the error in the critical section
            return response
Exemplo n.º 15
0
def delete(request, image_name):
    """Delete *image_name*'s S3 object, then its database row.

    NOTE(review): the outer ``try`` has no matching except/finally in
    this snippet — the function appears truncated as captured here.
    Python 2 ``except Exception, err`` syntax is used below.
    """
    try:
        image=Image.objects.get(name=image_name)
        path = image.path
        # The S3 key name is the last component of the stored path/URL.
        token = path.split('/')
        key_name = token[len(token)-1]
        try:
            calling_format=boto.s3.connection.OrdinaryCallingFormat()
            connection = boto.s3.connection.S3Connection(aws_access_key_id=settings.IMAGECRUD['access_key'],
                      aws_secret_access_key=settings.IMAGECRUD['secret_key'],
                      is_secure=False,
                      host=settings.IMAGECRUD['s3_host'],
                      port=settings.IMAGECRUD['s3_port'],
                      calling_format=calling_format,
                      path=settings.IMAGECRUD['s3_path'])

            bucket = connection.get_bucket(settings.IMAGECRUD['img_bucket'])
            bucket.delete_key(key_name)
        except Exception, err:
            return HttpResponse(err, status=500)
        image.delete()
        return HttpResponse(status=200)
Exemplo n.º 16
0
def delete(request, image_name):
    """Delete *image_name*'s S3 object, then its database row.

    NOTE(review): the outer ``try`` has no matching except/finally in
    this snippet — the function appears truncated as captured here.
    Python 2 ``except Exception, err`` syntax is used below.
    """
    try:
        image = Image.objects.get(name=image_name)
        path = image.path
        # The S3 key name is the last component of the stored path/URL.
        token = path.split('/')
        key_name = token[len(token) - 1]
        try:
            calling_format = boto.s3.connection.OrdinaryCallingFormat()
            connection = boto.s3.connection.S3Connection(
                aws_access_key_id=settings.IMAGECRUD['access_key'],
                aws_secret_access_key=settings.IMAGECRUD['secret_key'],
                is_secure=False,
                host=settings.IMAGECRUD['s3_host'],
                port=settings.IMAGECRUD['s3_port'],
                calling_format=calling_format,
                path=settings.IMAGECRUD['s3_path'])

            bucket = connection.get_bucket(settings.IMAGECRUD['img_bucket'])
            bucket.delete_key(key_name)
        except Exception, err:
            return HttpResponse(err, status=500)
        image.delete()
        return HttpResponse(status=200)
Exemplo n.º 17
0
    def load(self):
        """Populate self.keys with image keys, using a pickle cache.

        Long-lasting operation: iterates the whole bucket on a cache miss.
        """
        accepted_file_formats = ('.jpg', '.png')
        connection = establish_connection_to_S3()
        self.bucket = connection.get_bucket(self.name)

        pickled_bucket = self.name + '.pickle'
        cache_usable = os.path.isfile(pickled_bucket) and os.access(
            pickled_bucket, os.R_OK)
        if cache_usable:
            print('Loading pickled bucket')
            with open(pickled_bucket, 'rb') as file_with_bucket:
                self.keys = pickle.load(file_with_bucket)
        else:
            # Keep only image keys, then cache the list for next time.
            self.keys.extend(
                key for key in self.bucket
                if key.name.endswith(accepted_file_formats))
            print('Saving pickled bucket')
            with open(pickled_bucket, 'wb') as file_with_bucket:
                pickle.dump(self.keys, file_with_bucket,
                            pickle.HIGHEST_PROTOCOL)

        self.loaded = True
        return self
Exemplo n.º 18
0
prefix = sys.argv[1]

# configuration
# create a `.dho_access` file in your home dir
# paste the access key on the first line
# paste the secret key on the second line
# paste the bucket name on the third line
# Context manager guarantees the credentials file is closed (the original
# leaked the handle).
with open('%s/.dho_access' % expanduser("~"), 'r') as f:
    # required
    dho_access = f.readline().strip()
    dho_secret = f.readline().strip()
    dho_bucket = f.readline().strip()

print('Connecting to DreamObjects...')
connection = boto.connect_s3(aws_access_key_id=dho_access,
                             aws_secret_access_key=dho_secret,
                             host='objects-us-east-1.dream.io')

print('Getting bucket %s...' % dho_bucket)
bucket = connection.get_bucket(dho_bucket)

print('Getting objects prefixed "%s"' % prefix)
key_iter = bucket.list(prefix=prefix)

# Emit a signed, two-week, plain-HTTP URL for every matching object.
for key in key_iter:
    print(key)
    signed_url = key.generate_url(expires_in=60 * 60 * 24 * 14,
                                  query_auth=True,
                                  force_http=True)
    print(signed_url)
Exemplo n.º 19
0
import boto
import boto.s3
import boto.s3.connection
import os,sys,re
import utility

#The "set" content program for S3 using Python boto
#Mario Barrenechea - DSC Class, Fall 2012
# Usage: <program> <bucket> <key> [file]
# NOTE(review): Python 2 script; also truncated — the body of the final
# `if os.path.exists(...)` branch was cut off in this capture.
connection = utility.get_s3_connection_from_env()
sysargs = len(sys.argv)
if sysargs >= 3 and sysargs <= 4:
	bucket_name = sys.argv[1]
	key_name = sys.argv[2]
	#If there does exist such a bucket instance...
	if utility.check_if_bucket_exists(connection, bucket_name) == True:
		bucket_instance = connection.get_bucket(bucket_name)
		bucket_key = bucket_instance.get_key(key_name)
		#If there does exist such a bucket key...
		if utility.check_if_key_exists(bucket_instance, bucket_key.name) == True:
			#That means we should have <program name> <bucket> <key>
			if sysargs == 3:
				print "[INFO]: We have <%s><%s><%s>" % (sys.argv[0], sys.argv[1], sys.argv[2])
				user_input = raw_input("Accepting input from stdin: ")
				bucket_key.set_contents_from_string(user_input)
				print "[OK]: The contents was successfully sent to %s:%s" % (bucket_instance, bucket_key)

			#That means we should have <program name> <bucket> <key> [file]
			elif sysargs == 4:
				print "[INFO]: We have <%s><%s><%s>[%s]" % (sys.argv[0], sys.argv[1], sys.argv[2], sys.argv[3])
				filename = sys.argv[3]
				if os.path.exists(filename) == True:
Exemplo n.º 20
0
def _file_already_exists_in_server(connection, filename):
    """Return True when *filename*'s generated path exists in the bucket.

    :param connection: open boto S3 connection.
    :param filename: name fed to ``_generate_path_name_for``.
    """
    pathname = _generate_path_name_for(filename)
    bucket = connection.get_bucket(BUCKET_NAME)
    # get_key returns None for a missing key; PEP 8: compare to None
    # with identity, not equality.
    return bucket.get_key(pathname) is not None
Exemplo n.º 21
0
Prints duplicate keys in store __w__ directories
Check images are in order ie : 1.jpg, 2.jpg, 3.jpg, ......,
'''
import boto
import boto.s3.connection
from collections import Counter


# NOTE(review): live AWS credentials are hard-coded below — rotate these
# keys and load them from the environment or a config file instead.
connection = boto.s3.connect_to_region('ap-southeast-1',
					aws_access_key_id='AKIAI7CQBHPL42IO5NVA',
					aws_secret_access_key='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',
					is_secure=True,
					calling_format=boto.s3.connection.OrdinaryCallingFormat()
					)

bucket = connection.get_bucket('hyve-stores')

store_list = bucket.list()

store_dict = {}
# Example key layout:
# 379-chisel-spa-salon/photos/__w-200-400-600-800-1000-1200-1400__/99.jpg

# NOTE(review): this loop body appears truncated in this capture — the
# computed slug/img/pm values are never used below.
for key in store_list:
	key_name = key.name
	# Only keys inside a __w-…__ / __w__ directory are of interest.
	if '/__w-' in key_name or '/__w__' in key_name:
		slug = key_name[:key_name.find('/')]
		pm = key_name[key_name.find('/') + 1:]
		w = pm[pm.find('/') + 1:]

		img = w[w.find('/') + 1:]
		pm = pm[:pm.find('/')]
  finally:
    count_ins += 1

f.close()
print("Successfully created %s instances" % (count_success_ins))

# NOTE(review): credentials are hard-coded below — move them to
# configuration and rotate the keys.
connection = boto.s3.connection.S3Connection(
          aws_access_key_id='adf938e8df4841129a5ca7089ab6b0ee',
          aws_secret_access_key='66f187d931f04af5805e899421bbfbef',
          port=8888,
          host='swift.rc.nectar.org.au',
          is_secure=True,
          validate_certs=False,
          calling_format=boto.s3.connection.OrdinaryCallingFormat()
        )

# buckets = connection.get_all_buckets()

container_name = "twitter_container"
# Create the container when missing; HTTP 409 means it already exists.
try :
    b = connection.create_bucket(container_name)
except boto.exception.S3CreateError as e:
    if e.status == 409:
        print ("already exists the container: %s" % (container_name))
        pass
except:
    # Python 2 print statement; bare except catches everything else.
    print "Unexpected error:", sys.exc_info()[0]

b = connection.get_bucket('twitter_container', validate=False)
print ("Bucket: %s is ready" %(b.name))
Exemplo n.º 23
0
Arquivo: s3.py Projeto: jhance/changes
 def get_bucket(self, connection):
     # Thin accessor: resolve this instance's configured bucket name
     # on the supplied boto connection.
     return connection.get_bucket(self.bucket_name)
Exemplo n.º 24
0
def get_bucket(bucket):
    """Return (and memoize in BUCKET_CACHE) the Bucket for *bucket* name."""
    # PEP 8 idiom: `not in`, not `not x in`.
    if bucket not in BUCKET_CACHE:
        connection = get_connection(settings.SIMPLEFLOW_S3_HOST)
        BUCKET_CACHE[bucket] = connection.get_bucket(bucket)
    return BUCKET_CACHE[bucket]
Exemplo n.º 25
0
def newProfilePicture(picture, crop, oauth=False):
    """Crop an uploaded picture and publish three renditions to S3.

    Produces an 80x80 thumbnail, a 270x270 profile image, and a blurred
    500x500 cover image, then replaces the user's existing S3 folder.

    :param picture: uploaded image file (or an OAuth-provided one).
    :param crop: 'left,top,right,bottom' crop box as a comma-separated string.
    :param oauth: True when the picture came from an OAuth provider
                  (skips the filetype check and pre-resizes to 270x270).
    """
    if not oauth:
        if not imageFiletype(picture.filename):
            return

    # Get image and open with PIL
    picture = Image.open(picture)

    # If picture is from Facebook, resize image so that it's 270x270
    if oauth:
        picture = picture.resize((270, 270), Image.ANTIALIAS)

    # Get crop dimensions
    crop = crop.split(',')
    left = int(crop[0])
    top = int(crop[1])
    right = int(crop[2])
    bottom = int(crop[3])
    cropped = picture.crop((left, top, right, bottom))  # Crop picture

    # Small profile picture (80x80 JPEG, quality 80)
    smallProfilePicture = cropped.resize((80, 80),
                                         Image.ANTIALIAS).convert('RGB')
    smallProfilePictureBuffer = cStringIO.StringIO()
    smallProfilePicture.save(smallProfilePictureBuffer,
                             format='JPEG',
                             optimize=True,
                             quality=80)
    small = base64.b64encode(smallProfilePictureBuffer.getvalue())

    # Large profile picture (270x270 JPEG, quality 100)
    largeProfilePicture = cropped.resize((270, 270),
                                         Image.ANTIALIAS).convert('RGB')
    largeProfilePictureBuffer = cStringIO.StringIO()
    largeProfilePicture.save(largeProfilePictureBuffer,
                             format='JPEG',
                             optimize=True,
                             quality=100)
    large = base64.b64encode(largeProfilePictureBuffer.getvalue())

    # Blurred cover profile picture (500x500, Gaussian blur radius 10)
    coverProfilePicture = cropped.resize(
        (500, 500),
        Image.ANTIALIAS).convert('RGB').filter(ImageFilter.GaussianBlur(10))
    coverProfilePictureBuffer = cStringIO.StringIO()
    coverProfilePicture.save(coverProfilePictureBuffer,
                             format='JPEG',
                             optimize=True,
                             quality=100)
    cover = base64.b64encode(coverProfilePictureBuffer.getvalue())

    # Update picture value in database; a fresh random name also changes
    # all object URLs below.
    current_user.picture = uuid4().hex
    db.session.commit()

    # Connect to S3 bucket
    connection = boto.s3.connect_to_region(
        'us-east-1',
        aws_access_key_id=app.config['AWS_ACCESS_KEY_ID'],
        aws_secret_access_key=app.config['AWS_SECRET_ACCESS_KEY'],
        calling_format=boto.s3.connection.OrdinaryCallingFormat())

    bucket = connection.get_bucket('data.revisify.com', validate=False)

    # Delete existing user folder which contains the profile picture
    for key in bucket.list(prefix=str(current_user.id) + '/'):
        key.delete()

    # Create key and upload small profile picture
    smallKey = bucket.new_key('/'.join(
        [str(current_user.id), current_user.picture + '_80x80.jpeg']))
    smallKey.set_contents_from_string(base64.b64decode(small))
    smallKey.set_acl(
        'public-read')  # Set permissions to be publicly accessable

    # Create key and upload large profile picture
    largeKey = bucket.new_key('/'.join(
        [str(current_user.id), current_user.picture + '_270x270.jpeg']))
    largeKey.set_contents_from_string(base64.b64decode(large))
    largeKey.set_acl('public-read')

    # Create key and upload cover image
    coverKey = bucket.new_key('/'.join(
        [str(current_user.id), current_user.picture + '_cover.jpeg']))
    coverKey.set_contents_from_string(base64.b64decode(cover))
    coverKey.set_acl('public-read')
Exemplo n.º 26
0
    is_secure=False,
    validate_certs=False,
    calling_format=boto.s3.connection.OrdinaryCallingFormat())

################
# For objects
################
#bucket = connection.create_bucket('panasonic123')
#b1 = connection.get_bucket('panasonic123')

################
# For buckets
################

#bucket = connection.create_bucket('pandam1')
# NOTE(review): this snippet is truncated — `connection` and `Key` are
# defined on lines that were cut off above.
b = connection.get_bucket('pratap')
b.set_acl('private')

# Round-trip a test string through key 'testKey3'.
k = Key(b)
k.key = 'testKey3'
k.set_contents_from_string("This is a string of S3 2222")

print(k.get_contents_as_string())

# Same round-trip through key 'testKey1'.
k1 = Key(b)
k1.key = 'testKey1'
k1.set_contents_from_string("This is a string of S3 2222")

print(k1.get_contents_as_string())

k2 = Key(b)
Exemplo n.º 27
0
import config
from random import shuffle

import boto.s3.connection
from boto.s3.key import Key

# Connect to the Swift/S3 gateway using credentials from config.
connection = boto.s3.connection.S3Connection(
          aws_access_key_id=config.aws_access_key_id,
          aws_secret_access_key=config.aws_secret_access_key,
          port=8888,
          host='swift.rc.nectar.org.au',
          is_secure=True,
          validate_certs=False,
          calling_format=boto.s3.connection.OrdinaryCallingFormat()
        )
# The four regional buckets used below, in a fixed order.
bucket = [connection.get_bucket('global'),connection.get_bucket('globalbwd'),connection.get_bucket('australia'),connection.get_bucket('mediterranean')]
botokey={}
# One reusable Key object per region, indexed by display name.
botokey['Global']=Key(bucket[0])
botokey['GlobalBwd']=Key(bucket[1])
botokey['Australia']=Key(bucket[2])
botokey['Mediterranean']=Key(bucket[3])


class NotCached(Exception):
    """Signals a value was not found in the cache (name-based; callers
    are not visible in this snippet)."""


class NotWritten(Exception):
    """Signals a value could not be written (name-based; callers are
    not visible in this snippet)."""

def get_filename(closest_index, type):
    """Build the cache object name: ``<type>Closest_index<5-digit index>``."""
    # zfill keeps the index sortable as a fixed-width string.
    padded_index = str(closest_index).zfill(5)
    return '{}Closest_index{}'.format(type, padded_index)

def get_cached_results(closest_index,type):
    try:
Exemplo n.º 28
0
# Credentials come from the environment; KeyError here is intentional —
# the script cannot run without them.
s3_access_key = os.environ['S3_ACCESS_KEY_ID']
s3_secret_key = os.environ['S3_SECRET_ACCESS_KEY']

parser = argparse.ArgumentParser(description='set bucket acl')
parser.add_argument('--port', type=int, action='store', default=8000)
parser.add_argument('--host', type=str, action='store', default='localhost')
parser.add_argument('--name', type=str, action='store', default='mybucket')
parser.add_argument('--key', type=str, action='store', default=s3_access_key)
parser.add_argument('--secret', type=str, action='store', default=s3_secret_key)
parser.add_argument('--filename', type=str, action='store', default='acl')

args = parser.parse_args()

connection = boto.s3.connection.S3Connection(
    aws_access_key_id=args.key,
    aws_secret_access_key=args.secret,
    is_secure=False,
    port=args.port,
    host=args.host,
    calling_format=boto.s3.connection.OrdinaryCallingFormat(),
)

# Context manager closes the ACL file (the original leaked the handle);
# prints are the py2/py3-compatible single-argument form.
with open(args.filename, 'r') as f:
    acl = f.read()

print(acl)

# Apply the raw ACL XML, then read it back for verification.
bucket = connection.get_bucket(args.name)
bucket.set_xml_acl(acl)
result = bucket.get_acl()
print(result)
Exemplo n.º 29
0
        count_success_ins += 1
    finally:
        count_ins += 1

f.close()
print("Successfully created %s instances" % (count_success_ins))

# NOTE(review): credentials are hard-coded below — move them to
# configuration and rotate the keys.
connection = boto.s3.connection.S3Connection(
    aws_access_key_id='adf938e8df4841129a5ca7089ab6b0ee',
    aws_secret_access_key='66f187d931f04af5805e899421bbfbef',
    port=8888,
    host='swift.rc.nectar.org.au',
    is_secure=True,
    validate_certs=False,
    calling_format=boto.s3.connection.OrdinaryCallingFormat())

# buckets = connection.get_all_buckets()

container_name = "twitter_container"
# Create the container when missing; HTTP 409 means it already exists.
try:
    b = connection.create_bucket(container_name)
except boto.exception.S3CreateError as e:
    if e.status == 409:
        print("already exists the container: %s" % (container_name))
        pass
except:
    # Python 2 print statement; bare except catches everything else.
    print "Unexpected error:", sys.exc_info()[0]

b = connection.get_bucket('twitter_container', validate=False)
print("Bucket: %s is ready" % (b.name))
Exemplo n.º 30
0
## Connect to the gateway
# Certificate checking is disabled — the gateway presumably uses a
# self-signed cert. NOTE(review): this relies on a private ssl API.
ssl._https_verify_certificates(False)
connection = boto.connect_s3(
    aws_access_key_id=AWS_ACCESS_KEY,
    aws_secret_access_key=AWS_SECRET_KEY,
    host=HOST,
    port=PORT,
    is_secure=SSL,
    calling_format=boto.s3.connection.OrdinaryCallingFormat())

try:
    bucket = connection.get_bucket(BUCKET_NAME)
except Exception:
    # Narrowed from a bare except so Ctrl-C/SystemExit still propagate.
    report(NAG_CRITICAL,
           "ERROR: Couldn't use bucket {b}".format(b=BUCKET_NAME))

## Read some random stuff
try:
    # Binary mode: /dev/urandom is raw bytes, not decodable text.
    with open('/dev/urandom', 'rb') as ur:
        randomdata = ur.read(1024)
except Exception:
    report(NAG_UNKNOWN, "ERROR: Test couldn't read random data")

## Generate name
now = datetime.now()
time_string = now.isoformat()
object_name = "{b}_s3_{t}".format(b=BUCKET_NAME, t=time_string)
Exemplo n.º 31
0
 def tearDownClass(cls):
     """Empty the integration-test bucket, then delete the bucket itself."""
     connection = boto.connect_s3(aws_access_key_id=key_id,
                                  aws_secret_access_key=access_key)
     bucket = connection.get_bucket(TestIntegrationS3.mart)
     # A bucket must be empty before delete_bucket will succeed.
     for entry in bucket.list():
         entry.delete()
     connection.delete_bucket(TestIntegrationS3.mart)
Exemplo n.º 32
0
 def get_bucket(self, connection):
     # Thin accessor: look up self.bucket_name on the given connection.
     return connection.get_bucket(self.bucket_name)
Exemplo n.º 33
0
# start interactive screen capture
# Python 2 script: `print` statements and %-formatting throughout.
print 'Capturing screenshot...'
if not os.path.exists('/tmp/' + this_month):
    os.mkdir('/tmp/' + this_month)
# -i: interactive region selection (macOS screencapture).
subprocess.call(['screencapture', '-i', '/tmp/%s' % filename])

print 'Connecting to DreamObjects...'
connection = boto.connect_s3(
    aws_access_key_id=dho_access_key,
    aws_secret_access_key=dho_secret_key,
    host='objects.dreamhost.com'
)

print 'Getting target bucket...'
bucket = connection.get_bucket(dho_screenshots_bucket)
key = bucket.new_key(filename)
print 'Uploading to DreamObjects...'
# NOTE(review): the file handle opened here is never closed explicitly.
key.set_contents_from_file(open('/tmp/%s' % filename, 'rb'))
key.set_canned_acl('private')

# Signed link valid for three hours, served over plain HTTP.
signed_url = key.generate_url(
    expires_in=60*60*3,
    query_auth=True,
    force_http=True
)
print 'Screenshot available at:'
print '\t', signed_url

print 'Copying url to clipboard...'
os.system('echo "%s" | pbcopy' % signed_url)
Exemplo n.º 34
0
# start interactive screen capture
# Python 2 script: `print` statements and %-formatting throughout.
print 'Capturing screenshot...'
if not os.path.exists('/tmp/' + this_month):
    os.mkdir('/tmp/' + this_month)
# -i: interactive region selection (macOS screencapture).
subprocess.call(['screencapture', '-i', '/tmp/%s' % filename])

print 'Connecting to DreamObjects...'
connection = boto.connect_s3(
    aws_access_key_id=dho_access_key,
    aws_secret_access_key=dho_secret_key,
    host='objects.dreamhost.com'
)

print 'Getting target bucket...'
bucket = connection.get_bucket(dho_screenshots_bucket)
key = bucket.new_key(filename)
print 'Uploading to DreamObjects...'
# NOTE(review): the file handle opened here is never closed explicitly.
key.set_contents_from_file(open('/tmp/%s' % filename, 'rb'))
key.set_canned_acl('private')

# Signed link valid for 30 minutes, served over plain HTTP.
signed_url = key.generate_url(
    expires_in=1800,
    query_auth=True,
    force_http=True
)
print 'Screenshot available at:'
print '\t', signed_url

print 'Copying url to clipboard...'
os.system('echo "%s" | pbcopy' % signed_url)
Exemplo n.º 35
0
import argparse
import os

# The original used boto.s3.connection.S3Connection without importing
# boto.s3.connection (only AWSAuthConnection) — a NameError at runtime.
import boto.s3.connection
from boto.connection import AWSAuthConnection

# Credentials come from the environment; KeyError here is intentional.
s3_access_key = os.environ['S3_ACCESS_KEY_ID']
s3_secret_key = os.environ['S3_SECRET_ACCESS_KEY']

parser = argparse.ArgumentParser(description='s3_tenant_test')
parser.add_argument('--port', type=int, action='store', default=8000)
parser.add_argument('--key', type=str, action='store', default=s3_access_key)
parser.add_argument('--secret', type=str, action='store', default=s3_secret_key)
parser.add_argument('--tenant', type=str, action='store', default='testx')
parser.add_argument('--bucket', type=str, action='store', default='bucket1')
args = parser.parse_args()

print("connecting")
connection = boto.s3.connection.S3Connection(
    aws_access_key_id=args.key,
    aws_secret_access_key=args.secret,
    is_secure=False,
    port=args.port,
    host="localhost",
    calling_format=boto.s3.connection.OrdinaryCallingFormat())

# Tenant-qualified bucket name: "<tenant>:<bucket>".
print("bucket " + args.bucket + " tenant " + args.tenant)
bucket = connection.create_bucket(args.tenant + ":" + args.bucket)
print(bucket)

bucket = connection.get_bucket(args.tenant + ":" + args.bucket)
Exemplo n.º 36
0
# NOTE(review): this snippet is truncated — EC2_ACCESS and the `utility`
# import come from lines cut off above. Python 2 print statements below.
EC2_SECRET  = os.getenv('EC2_SECRET_KEY')
S3_URL = utility.parse_url(os.getenv('S3_URL'))
EC2_URL = utility.parse_url(os.getenv('EC2_URL'))

calling_format=boto.s3.connection.OrdinaryCallingFormat()

# S3_URL parses to (host, port, path) per the keyword usage below.
connection = boto.s3.connection.S3Connection(
    aws_access_key_id=EC2_ACCESS,
    aws_secret_access_key=EC2_SECRET,
    is_secure=False,
    host=S3_URL[0],
    port=S3_URL[1],
    calling_format=calling_format,
    path=S3_URL[2])

print "Connection is ", connection

#Run commands

# List every bucket visible to these credentials.
rs = connection.get_all_buckets()
for b in rs:
    print "bucket is ", b.name

print "Now, look for a specific bucket (dirk-bucket)"

bucket_instance = connection.get_bucket("dirk-bucket")
keys = bucket_instance.get_all_keys()
for k in keys:
    print "key is ", k

Exemplo n.º 37
0
def connect_to_bucket(connection):
    """Fetch the configured bucket on *connection*.

    validate=False skips the HEAD request that would otherwise verify
    the bucket exists, saving a round-trip.
    """
    bucket_name = Settings.s3_bucket_name
    return connection.get_bucket(bucket_name, validate=False)