Code Example #1
File: _helper.py Project: ehartung/senza
def check_s3_bucket(bucket_name: str, region: str):
    with Action("Checking S3 bucket {}..".format(bucket_name)):
        exists = False
        # Connect outside the try so `s3` stays bound for the create call
        # below even if the lookup fails; only swallow lookup errors.
        s3 = boto.s3.connect_to_region(region)
        try:
            exists = s3.lookup(bucket_name, validate=True)
        except Exception:
            pass
    if not exists:
        with Action("Creating S3 bucket {}...".format(bucket_name)):
            s3.create_bucket(bucket_name, location=region)
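
Several later examples repeat this lookup-then-create pattern. As a reusable sketch (the ensure_bucket helper is ours, not from senza), it can also cover the race where the bucket appears between the lookup and the create, which boto 2 reports as S3CreateError:

import boto.s3
import boto.exception

def ensure_bucket(s3, bucket_name, region):
    # lookup() returns None for a missing bucket instead of raising.
    bucket = s3.lookup(bucket_name)
    if bucket is None:
        try:
            bucket = s3.create_bucket(bucket_name, location=region)
        except boto.exception.S3CreateError:
            # Another writer created the bucket after our lookup; fetch it.
            bucket = s3.get_bucket(bucket_name)
    return bucket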
Code Example #2
File: test.py Project: tebriel/dd-trace-py
    def test_double_patch(self):
        s3 = boto.s3.connect_to_region("us-east-1")
        tracer = get_dummy_tracer()
        writer = tracer.writer
        Pin(service=self.TEST_SERVICE, tracer=tracer).onto(s3)

        patch()
        patch()

        # Create a bucket; even after patching twice, exactly one span should be emitted
        s3.create_bucket("cheese")
        spans = writer.pop()
        assert spans
        eq_(len(spans), 1)
Code Example #3
File: aws_tools.py Project: bkacar/asr-pipeline
def push_database_to_s3(dbpath, S3_BUCKET, S3_KEYBASE):
    """Pushes the SQL database file at dbpath to S3, storing it under
    the key S3_KEYBASE + "/sqldb" in the bucket S3_BUCKET."""
    print "\n. Pushing the database to S3"

    s3 = S3Connection()

    bucket = s3.lookup(S3_BUCKET)
    if bucket is None:
        bucket = s3.create_bucket(S3_BUCKET, location=S3LOCATION)
        bucket.set_acl('public-read')

    SQLDB_KEY = S3_KEYBASE + "/sqldb"
    key = bucket.get_key(SQLDB_KEY)
    if key is None:
        key = bucket.new_key(SQLDB_KEY)
    key.set_contents_from_filename(dbpath)
    key.set_acl('public-read')
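
set_contents_from_filename uploads the whole file in a single PUT, so a failed transfer has to start over. For a large database a multipart upload is more robust; a rough boto 2 sketch, not part of the original project (the 50 MB part size is an arbitrary choice; S3 requires at least 5 MB per part except the last):

from StringIO import StringIO

mp = bucket.initiate_multipart_upload(SQLDB_KEY)
with open(dbpath, 'rb') as f:
    for i, chunk in enumerate(iter(lambda: f.read(50 * 1024 * 1024), '')):
        mp.upload_part_from_file(StringIO(chunk), part_num=i + 1)
mp.complete_upload()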
Code Example #4
File: aws_tools.py Project: bkacar/asr-pipeline
def aws_update_status(message, S3_BUCKET, S3_KEYBASE):
    """Update the status field in AWS S3 for this job."""
    if S3_BUCKET is None:
        print "\n. Error, you haven't defined an S3 bucket."
        exit()
    if S3_KEYBASE is None:
        print "\n. Error, you haven't defined an S3 key base."
        exit()

    s3 = S3Connection()

    bucket = s3.lookup(S3_BUCKET)
    if bucket is None:
        bucket = s3.create_bucket(S3_BUCKET, location=S3LOCATION)
        bucket.set_acl('public-read')

    STATUS_KEY = S3_KEYBASE + "/status"
    key = bucket.get_key(STATUS_KEY)
    if key is None:
        key = bucket.new_key(STATUS_KEY)
    if key is None:
        print "\n. Error 39 - the key is None"
        exit()
    key.set_contents_from_string(message)
    key.set_acl('public-read')

    print "\n. S3 Status Update:", key.get_contents_as_string()
Code Example #5
File: multipartTransferTest.py Project: PureQsh/toil
# Note: the yield-based cleanup below implies this is used as a context
# manager, i.e. decorated with contextlib.contextmanager.
@contextmanager
def openS3(keySize=None):
    """
    Creates an AWS bucket. If keySize is given, a key of random bytes is created and its handle
    is yielded. If no keySize is given an empty bucket handle is yielded. The bucket and all
    created keys are cleaned up automatically.

    :param int keySize: Size of key to be created.
    """
    if keySize is not None and keySize < 0:
        raise ValueError('Key size must not be negative')
    with closing(boto.s3.connect_to_region(AWSMultipartCopyTest.region)) as s3:
        bucket = s3.create_bucket('multipart-transfer-test-%s' % uuid.uuid4(),
                                  location=region_to_bucket_location(AWSMultipartCopyTest.region))
        try:
            keyName = 'test'
            if keySize is None:
                yield bucket
            else:
                key = bucket.new_key(keyName)
                content = os.urandom(keySize)
                key.set_contents_from_string(content)

                yield bucket.get_key(keyName)
        finally:
            for key in bucket.list():
                key.delete()
            bucket.delete()
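
With the @contextmanager decorator applied as above, a caller would use the generator like this (a usage sketch, not from the toil test suite):

with openS3(keySize=1024) as key:
    # key is a boto Key holding 1024 random bytes.
    assert len(key.get_contents_as_string()) == 1024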
Code Example #6
File: monitor.py Project: apowers/openstack-nova
def store_graph(instance_id, filename):
    """
    Transmits the specified graph file to internal object store on cloud
    controller.
    """
    # TODO(devcamcar): Need to use an asynchronous method to make this
    #       connection. If boto has some separate method that generates
    #       the request it would like to make and another method to parse
    #       the response we can make our own client that does the actual
    #       request and hands it off to the response parser.
    s3 = boto.s3.connection.S3Connection(
        aws_access_key_id=FLAGS.aws_access_key_id,
        aws_secret_access_key=FLAGS.aws_secret_access_key,
        is_secure=False,
        calling_format=boto.s3.connection.OrdinaryCallingFormat(),
        port=FLAGS.s3_port,
        host=FLAGS.s3_host)
    bucket_name = '_%s.monitor' % instance_id

    # Object store isn't creating the bucket like it should currently
    # when it is first requested, so have to catch and create manually.
    try:
        bucket = s3.get_bucket(bucket_name)
    except Exception:
        bucket = s3.create_bucket(bucket_name)

    # The Key class lives in boto.s3.key, not at the top of the boto.s3 package.
    key = boto.s3.key.Key(bucket)
    key.key = os.path.basename(filename)
    key.set_contents_from_filename(filename)
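
The OrdinaryCallingFormat above matters here: a bucket name such as '_i-12345.monitor' begins with an underscore, so it is not a valid DNS label and the default virtual-host addressing would fail; path-style requests sidestep that. A stripped-down sketch of the same connection style (host and port are illustrative, not from nova):

from boto.s3.connection import S3Connection, OrdinaryCallingFormat

s3 = S3Connection(is_secure=False,
                  calling_format=OrdinaryCallingFormat(),
                  host='objectstore.example.internal',
                  port=3333)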
Code Example #7
File: aws_tools.py Project: bkacar/asr-pipeline
def aws_checkpoint(checkpoint, S3_BUCKET, S3_KEYBASE):
    """Update the checkpoint field in AWS S3 for this job."""
    if S3_BUCKET is None:
        print "\n. Error, you haven't defined an S3 bucket."
        exit()
    if S3_KEYBASE is None:
        print "\n. Error, you haven't defined an S3 key base."
        exit()

    s3 = S3Connection()

    bucket = s3.lookup(S3_BUCKET)
    if bucket is None:
        bucket = s3.create_bucket(S3_BUCKET, location=S3LOCATION)
        bucket.set_acl('public-read')

    CHECKPOINT_KEY = S3_KEYBASE + "/checkpoint"
    key = bucket.get_key(CHECKPOINT_KEY)
    if key is None:
        key = bucket.new_key(CHECKPOINT_KEY)
    if key is None:
        print "\n. Error 67 - the key is None"
        exit()
    key.set_contents_from_string(str(checkpoint))
    key.set_acl('public-read')
    print "\n. S3 Checkpoint:", key.get_contents_as_string()
Code Example #8
File: test.py Project: tebriel/dd-trace-py
    def test_s3_client(self):
        s3 = boto.s3.connect_to_region("us-east-1")
        tracer = get_dummy_tracer()
        writer = tracer.writer
        Pin(service=self.TEST_SERVICE, tracer=tracer).onto(s3)

        s3.get_all_buckets()
        spans = writer.pop()
        assert spans
        eq_(len(spans), 1)
        span = spans[0]
        eq_(span.get_tag(http.STATUS_CODE), "200")
        eq_(span.get_tag(http.METHOD), "GET")
        eq_(span.get_tag('aws.operation'), "get_all_buckets")

        # Issue a create-bucket command
        s3.create_bucket("cheese")
        spans = writer.pop()
        assert spans
        eq_(len(spans), 1)
        span = spans[0]
        eq_(span.get_tag(http.STATUS_CODE), "200")
        eq_(span.get_tag(http.METHOD), "PUT")
        eq_(span.get_tag('path'), '/')
        eq_(span.get_tag('aws.operation'), "create_bucket")

        # Get the created bucket
        s3.get_bucket("cheese")
        spans = writer.pop()
        assert spans
        eq_(len(spans), 1)
        span = spans[0]
        eq_(span.get_tag(http.STATUS_CODE), "200")
        eq_(span.get_tag(http.METHOD), "HEAD")
        eq_(span.get_tag('aws.operation'), "head_bucket")
        eq_(span.service, "test-boto-tracing.s3")
        eq_(span.resource, "s3.head")
        eq_(span.name, "s3.command")

        # Check the resource in case of an error
        try:
            s3.get_bucket("big_bucket")
        except Exception:
            spans = writer.pop()
            assert spans
            span = spans[0]
            eq_(span.resource, "s3.head")
Code Example #9
File: aws.py Project: achim/sevenseconds
def configure_s3_buckets(account_name: str, cfg: dict):
    account_id = get_account_id()
    for _, config in cfg.get("s3_buckets", {}).items():
        for region in config.get("regions", []):
            bucket_name = config["name"]
            bucket_name = bucket_name.replace("{account_id}", account_id).replace("{region}", region)
            s3 = boto.s3.connect_to_region(region)
            with Action("Checking S3 bucket {}..".format(bucket_name)):
                bucket = s3.lookup(bucket_name)
            if not bucket:
                with Action("Creating S3 bucket {}..".format(bucket_name)):
                    s3.create_bucket(bucket_name, location=region)
            with Action("Updating policy for S3 bucket {}..".format(bucket_name)):
                bucket = s3.lookup(bucket_name)
                policy_json = json.dumps(config.get("policy"))
                policy_json = policy_json.replace("{bucket_name}", bucket_name)
                bucket.set_policy(policy_json)
Code Example #10
    def _createExternalStore(self):
        import boto.s3
        s3 = boto.s3.connect_to_region(self.testRegion)
        try:
            return s3.create_bucket(bucket_name='import-export-test-%s' % uuid.uuid4(),
                                    location=region_to_bucket_location(self.testRegion))
        except:
            # panic() is expected to log the failure and re-raise it after
            # the connection is closed.
            with panic(log=logger):
                s3.close()
Code Example #11
def get_or_create_bucket():
    s3 = get_s3_connection()
    b = s3.lookup(AWS_BUCKET)
    if b is None:
        print('Creating bucket: ' + AWS_BUCKET + ' in region: ' + AWS_REGION + '...')
        b = s3.create_bucket(AWS_BUCKET, location=AWS_REGION)
    else:
        print('Found bucket: ' + AWS_BUCKET + '.')

    return b
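
lookup() suits this get-or-create pattern because, unlike get_bucket(), it returns None for a missing bucket instead of raising. A quick comparison, given a boto 2 S3Connection s3 (sketch):

import boto.exception

bucket = s3.lookup('maybe-missing')          # None if the bucket is absent
try:
    bucket = s3.get_bucket('maybe-missing')  # raises on a 404 instead
except boto.exception.S3ResponseError:
    bucket = None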
Code Example #12
def get_or_create_bucket():
    s3 = get_s3_connection()
    b = s3.lookup(AWS_BUCKET)
    if b is None:
        print('Creating bucket: ' + AWS_BUCKET + ' in region: ' + AWS_REGION + '...')
        LOCATION = AWS_REGION if AWS_REGION != 'us-east-1' else ''
        b = s3.create_bucket(AWS_BUCKET, location=LOCATION, policy='public-read')
        b.set_acl('public-read')
        b.configure_website('index.html', 'error.html')
        set_bucket_policy(b)
        set_bucket_role_policy()
    else:
        print('Found bucket: ' + AWS_BUCKET + '.')

    return b
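
The LOCATION line works around a boto 2 quirk: buckets in us-east-1 (US Standard) must be created with an empty location constraint, which is exactly what Location.DEFAULT is:

from boto.s3.connection import Location

# The US Standard (us-east-1) constraint is the empty string; every
# other region needs an explicit location.
assert Location.DEFAULT == ''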
Code Example #13
#Assignment-2

import boto 
import boto.s3
from boto.s3.connection import S3Connection
from boto.s3.key import Key
import sys
import time

#User credentials
aws_access_key_id = raw_input("Enter your access key")
aws_secret_access_key= raw_input("Enter your secret key")

#Bucket creation
s3 = boto.connect_s3(aws_access_key_id,aws_secret_access_key)
bucket = s3.create_bucket('sarathawss3',location=boto.s3.connection.Location.DEFAULT)

def percent_cb(complete, total):
    sys.stdout.write('.')
    sys.stdout.flush()

k = Key(bucket)
# Use a forward slash: S3 keys are flat strings, and '/' is the
# conventional folder separator (a backslash would be stored literally).
k.key = 'cloud/earthquake.csv'
print "Uploading..\n"

# File upload
starttime = time.time()
k.set_contents_from_filename('C:/Users/WELCOME/Desktop/cloud2/all_month.csv', cb=percent_cb, num_cb=10)
endtime = time.time()
totaltimetoupload = endtime - starttime
print "Time taken to upload file:", totaltimetoupload, "seconds"
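
boto 2 invokes the cb callback with (bytes transmitted so far, total bytes), at most num_cb times. A slightly more informative variant of percent_cb (a sketch, not from the original assignment):

def percent_cb(complete, total):
    if total:
        sys.stdout.write('\r%3.0f%%' % (100.0 * complete / total))
        sys.stdout.flush()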
Code Example #14
File: s3_import.py Project: Mechelix/bigsky
    import sys
    argv = argv or sys.argv
    stdin = stdin or sys.stdin
    stdout = stdout or sys.stdout
    stderr = stderr or sys.stderr

    try:
        argv = FLAGS(argv)[1:]
    except gflags.FlagsError, e:
        stderr.write("%s\\nUsage: %s update_id_addresses\\n%s\n" %
                     (e, sys.argv[0], FLAGS))
        return 1
    s3 = boto.s3.connect_to_region(FLAGS.bucket_region)

    try:
        bucket = s3.create_bucket(FLAGS.bucket, location=FLAGS.bucket_region)
    except boto.exception.S3CreateError:
        bucket = s3.get_bucket(FLAGS.bucket)
    visited = set(int(k.key) for k in bucket.list())
    
    sqs = boto.sqs.connect_to_region(FLAGS.region)
    q = sqs.create_queue(FLAGS.source)
    targets = map(sqs.create_queue, FLAGS.targets)
    
    incomings = q.get_messages()
    while incomings:
        for incoming in incomings:
            message = json.loads(incoming.get_body())
            if int(message['id']) in visited:
                print "Skipping", message
            else:
Code Example #15
File: __init__.py Project: stevenorum/dosidicus
def create_archive_from_directory(root_directory, *args, **kwargs):
    aws_access_key_id = kwargs.get('aws_access_key_id', kwargs.get('access', None))
    aws_secret_access_key = kwargs.get('aws_secret_access_key', kwargs.get('secret', None))
    region_name = kwargs.get('region', 'us-east-1')
    stage = kwargs.get('stage', 'alpha')
    stripped_root_directory = root_directory[:-1] if root_directory[-1] == '/' else root_directory
    project_name = kwargs.get('project_name', stripped_root_directory.split('/')[-1])

    autoconfigure = kwargs.get('autoconfigure', False)

    iam = boto.connect_iam(aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key)
    account_id = iam.get_user()['get_user_response']['get_user_result']['user']['arn'].split(':')[4]
    bucket_name = kwargs.get('bucket_name', '{}-dosidicus'.format(account_id))
    m = hashlib.md5()
    m.update('{}{}{}'.format(account_id,project_name,time.time()))
    version_id = m.hexdigest()

    archive_name = '{}-{}'.format(project_name, version_id)
    archive_file = os.path.join('/tmp','{}.zip'.format(archive_name))
    archive_key = os.path.split(archive_file)[1]

    root_length = len(stripped_root_directory)

    with zipfile.ZipFile(file=archive_file, mode='a') as newzip:
        for root, dirnames, filenames in os.walk(root_directory):
            for filename in filenames:
                file_path = os.path.join(root, filename)
                arc_path = file_path[root_length:]
                newzip.write(filename=file_path, arcname=arc_path)
        apply_relevant_configs(project_root=root_directory, region=region_name, stage=stage, zip=newzip)
        if autoconfigure:
            # Could just assign each one to a new variable, but grouping them together like this kinda feels good.
            values = {'STAGE': stage}
            for z in newzip.infolist():
                if z.filename.endswith('wsgi.py'):
                    values['WSGIPath'] = z.filename
                if z.filename.endswith('settings.py'):
                    values['DJANGO_SETTINGS_MODULE'] = z.filename[:-3].replace('/','.')
                if 'static/' in z.filename:
                    start_index = z.filename.find('static/')
                    end_index = start_index + 7
                    values['/static/'] = z.filename[:end_index]
            if values.get('DJANGO_SETTINGS_MODULE'):
                values['appname'] = values['DJANGO_SETTINGS_MODULE'].split('/')[0]

            config_contents = """option_settings:
  "aws:elasticbeanstalk:application:environment":
    STAGE: "{}"
    DJANGO_SETTINGS_MODULE: "{}"
    "PYTHONPATH": "/opt/python/current/app/{}:$PYTHONPATH"
  "aws:elasticbeanstalk:container:python":
    WSGIPath: {}
    NumProcesses: 3
    NumThreads: 20
  "aws:elasticbeanstalk:container:python:staticfiles":
    "/static/": "{}"
container_commands:
  01_migrate:
    command: "python manage.py migrate --noinput"
    leader_only: true
""".format(values['STAGE'], values['DJANGO_SETTINGS_MODULE'], values['appname'], values['WSGIPath'], values['/static/'])
            # if a user happens to already have a config file at .ebextensions/dosidicus.auto.config, well, that's really weird.
            newzip.writestr('.ebextensions/dosidicus.auto.config', config_contents)

    s3 = boto.s3.connect_to_region(region_name=region_name, aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key)

    if not s3.lookup(bucket_name=bucket_name):
        s3.create_bucket(bucket_name=bucket_name)

    bucket = s3.get_bucket(bucket_name=bucket_name)
    key = boto.s3.key.Key(bucket=bucket, name=archive_key)

    key.set_contents_from_filename(filename=archive_file)
    return bucket_name, archive_key, archive_name
Code Example #16
File: upload.py Project: WeiChengLiou/setAWS
def loadbkt(bktname):
    try:
        bkt = s3.get_bucket(bktname)
    except boto.exception.S3ResponseError:
        # get_bucket raises when the bucket is missing or inaccessible.
        bkt = s3.create_bucket(bktname, location=regionname)
    return bkt
Code Example #17
# LAB : ASSIGNMENT 2
# https://aws.amazon.com/articles/3998

import boto 
import boto.s3
from boto.s3.connection import S3Connection
from boto.s3.key import Key
import sys
import time


aws_access_key_id = raw_input("Enter your access key")
aws_secret_access_key= raw_input("Enter your secret key")

s3 = boto.connect_s3(aws_access_key_id,aws_secret_access_key)
bucket = s3.create_bucket('uravitejbucket',location=boto.s3.connection.Location.DEFAULT)


def percent_cb(complete, total):
    sys.stdout.write('.')
    sys.stdout.flush()

k = Key(bucket)
k.key = 'assignment2/earthquake.csv'
print "Uploading..\n"
starttime = time.time()
k.set_contents_from_filename('E:/Spring 2015/CSE6331-004/Ass2/all_month.csv', cb=percent_cb, num_cb=10)
endtime = time.time()
totaltimetoupload = endtime - starttime
print "Successfully uploaded"
print "Time taken to upload file:", totaltimetoupload
Code Example #18
parser.add_argument("-l", "--log", dest='log_level', default='WARNING',
                    choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
                    help="The logging level to use. [default: WARNING]")
args = parser.parse_args()

configure_logging(log, args.log_level)

def isSelected(region):
    return RegionMap[region].find(args.region) != -1

# execute business logic
credentials = {'aws_access_key_id': args.aws_access_key_id, 'aws_secret_access_key': args.aws_secret_access_key}
heading = "Creating S3 buckets named '" + args.bucket + "'"
locations = class_iterator(Location)
if args.region:
    heading += " (filtered by region '" + args.region + "')"
    locations = filter(isSelected, locations)

s3 = boto.connect_s3(**credentials)

print heading + ":"
for location in locations:
    region = RegionMap[location]
    pprint(region, indent=2)
    try:
        bucket_name = args.bucket + '-' + region
        print 'Creating bucket ' + bucket_name
        s3.create_bucket(bucket_name, location=getattr(Location, location))
    except boto.exception.BotoServerError, e:
        log.error(e.error_message)
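
class_iterator(Location) works because boto 2's Location is just a class of string constants, one per region (plus DEFAULT for US Standard). Listing them is a one-liner (sketch):

from boto.s3.connection import Location

print [name for name in dir(Location) if not name.startswith('_')]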
Code Example #19
    def __init__(self, bucket_name):
        s3 = boto.connect_s3()
        self.bucket = s3.create_bucket(bucket_name, policy='public-read')
        self.bucket.configure_website('index.html')
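
After configure_website, the bucket serves index.html from its S3 website endpoint; boto 2 can report that hostname directly (usage sketch, assuming site is an instance of the class this __init__ belongs to):

print site.bucket.get_website_endpoint()
# e.g. 'mybucket.s3-website-us-east-1.amazonaws.com'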