Code Example #1
    def setUp(self):
        trytond.tests.test_tryton.install_module('nereid_s3')
        self.static_file = POOL.get('nereid.static.file')
        self.static_folder = POOL.get('nereid.static.folder')

        # Mock S3Connection
        self.s3_api_patcher = patch(
            'boto.s3.connection.S3Connection', autospec=True
        )
        PatchedS3 = self.s3_api_patcher.start()

        # Mock S3Key
        self.s3_key_patcher = patch(
            'boto.s3.key.Key', autospec=True
        )
        PatchedS3Key = self.s3_key_patcher.start()

        PatchedS3.return_value = connection.S3Connection('ABCD', '123XYZ')
        PatchedS3.return_value.get_bucket = lambda bucket_name: Bucket(
            PatchedS3.return_value, 'tryton-test-s3'
        )

        PatchedS3Key.return_value = Key(
            Bucket(PatchedS3.return_value, 'tryton-test-s3'), 'some key'
        )
        PatchedS3Key.return_value.key = "some key"
        PatchedS3Key.return_value.get_contents_as_string = lambda *a: 'testfile'
        PatchedS3Key.return_value.set_contents_from_string = \
            lambda value: 'testfile'
Code Example #2
File: test_connection.py Project: tax/boto
    def test_basic_anon(self):
        auth_con = S3Connection()
        # create a new, empty bucket
        bucket_name = 'test-%d' % int(time.time())
        auth_bucket = auth_con.create_bucket(bucket_name)

        # try read the bucket anonymously
        anon_con = S3Connection(anon=True)
        anon_bucket = Bucket(anon_con, bucket_name)
        try:
            next(iter(anon_bucket.list()))
            self.fail("anon bucket list should fail")
        except S3ResponseError:
            pass

        # give bucket anon user access and anon read again
        auth_bucket.set_acl('public-read')
        try:
            next(iter(anon_bucket.list()))
            self.fail("not expecting contents")
        except S3ResponseError:
            self.fail("we should have public-read access.")
        except StopIteration:
            pass

        # cleanup
        auth_con.delete_bucket(auth_bucket)
Code Example #3
File: store.py Project: gitMbar/tilequeue
def make_s3_store(bucket_name,
                  aws_access_key_id=None, aws_secret_access_key=None,
                  path='osm', reduced_redundancy=False, date_prefix=''):
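    # Open the connection, wrap the named bucket (no validation request is made),
    # and hand both to the tile store.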
    conn = connect_s3(aws_access_key_id, aws_secret_access_key)
    bucket = Bucket(conn, bucket_name)
    s3_store = S3(bucket, date_prefix, path, reduced_redundancy)
    return s3_store
Code Example #4
def main():
    authenticate()

    connection = S3Connection(AWS_ACCESS_ID, os.environ['aws-secret-key'])
    bucket = Bucket(connection, S3_BUCKET)

    publish(bucket)
Code Example #5
 def execute(self, context, obj):
     connection = S3Connection()
     bucket = Bucket(connection=connection, name=context['bucket'])
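     # Copy the object's contents into a new key named '<name>.encrypted'.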
     key1 = Key(bucket=bucket, name=context['name'])
     key2 = Key(bucket=bucket, name=context['name'] + '.encrypted')
     key2.set_contents_from_string(key1.get_contents_as_string())
     return 'done'
Code Example #6
def load(context, url, callback):
    enable_http_loader = context.config.get('AWS_ENABLE_HTTP_LOADER',
                                            default=False)

    if enable_http_loader and url.startswith('http'):
        return http_loader.load_sync(context,
                                     url,
                                     callback,
                                     normalize_url_func=_normalize_url)

    url = urllib2.unquote(url)

    bucket = context.config.get('S3_LOADER_BUCKET', default=None)

    if not bucket:
        bucket, url = _get_bucket(url,
                                  root_path=context.config.S3_LOADER_ROOT_PATH)

    if _validate_bucket(context, bucket):
        bucket_loader = Bucket(connection=get_connection(context), name=bucket)
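        # Look up the key; any S3 error is logged and treated as a missing file.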
        file_key = None
        try:
            file_key = bucket_loader.get_key(url)
        except Exception as e:
            logger.warn("ERROR retrieving image from S3 {0}: {1}".format(
                url, str(e)))

        if file_key:
            callback(file_key.read())
            return
Code Example #7
def pull_from_hyperstore(key_name):
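    # Connect to an S3-compatible HyperStore endpoint over plain HTTP
    # and download a single object to a local file.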
    conn = boto.connect_s3(host='tims4.mobi-cloud.com',
                           port=80,
                           is_secure=False)
    bucket = Bucket(conn, bucket_name)
    gkey = Key(bucket=bucket, name=key_name)
    gkey.get_contents_to_filename("this.json")
Code Example #8
def load(context, url, callback):

    enable_http_loader = context.config.get('AWS_ENABLE_HTTP_LOADER',
                                            default=False)

    if enable_http_loader and 'http' in url:
        return http_loader.load(context, url, callback)

    url = urllib2.unquote(url)

    if context.config.S3_LOADER_BUCKET:
        bucket = context.config.S3_LOADER_BUCKET
    else:
        bucket, url = _get_bucket(url)
        if not _validate_bucket(context, bucket):
            return callback(None)

    bucket_loader = Bucket(
        connection=thumbor_aws.connection.get_connection(context), name=bucket)

    file_key = bucket_loader.get_key(url)
    if not file_key:
        return callback(None)

    return callback(file_key.read())
Code Example #9
def getNextResults(pid):
    try:
        conn = boto.connect_s3(host='tims4.mobi-cloud.com',
                               port=80,
                               is_secure=False)
    except Exception as e:
        conn = boto.connect_s3(
            aws_access_key_id="00c36f16c2600f70ae60",
            aws_secret_access_key="XsSbmCIfcYrX5NdCBj7n1QSaU2lhdgDJJBDlT7VE",
            host='tims4.mobi-cloud.com',
            port=80,
            is_secure=False)
    bucket = Bucket(conn, bucket_name)
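    # Fetch at most one key after the last result seen for this producer,
    # using the previous key name as the pagination marker.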
    results = bucket.get_all_keys(max_keys=1,
                                  headers=None,
                                  prefix="topics/" + str(my_topic) +
                                  "/ProducerID=" + str(pid) + "/",
                                  marker=idToLastResult[pid])
    if len(results) == 1:
        print(keyToFileName(results[0]))
    if len(results) > 0:
        idToLastResult[pid] = keyToFileName(results[0])
        results[0].get_contents_to_filename("this.json")
        return results[0]
    else:
        return None
Code Example #10
    def test_basic_anon(self):
        auth_con = S3Connection()
        # create a new, empty bucket
        bucket_name = 'test-%d' % int(time.time())
        auth_bucket = auth_con.create_bucket(bucket_name)

        # try read the bucket anonymously
        anon_con = S3Connection(anon=True)
        anon_bucket = Bucket(anon_con, bucket_name)
        try:
            next(iter(anon_bucket.list()))
            self.fail("anon bucket list should fail")
        except S3ResponseError:
            pass

        # give bucket anon user access and anon read again
        auth_bucket.set_acl('public-read')
        time.sleep(10)  # Was 5 seconds; that turned out not to be enough
        try:
            next(iter(anon_bucket.list()))
            self.fail("not expecting contents")
        except S3ResponseError as e:
            self.fail("We should have public-read access, but received "
                      "an error: %s" % e)
        except StopIteration:
            pass

        # cleanup
        auth_con.delete_bucket(auth_bucket)
Code Example #11
def main():
    opt_parser = OptionParser(usage=USAGE)
    opt_parser.add_option('-v',
                          '--verbose',
                          action='store_true',
                          default=False)
    opt_parser.add_option('-a', '--aws-creds', default=None)
    opts, args = opt_parser.parse_args()
    if not args:
        raise Exception(USAGE)

    if opts.verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.WARNING)
    logger.addHandler(logging.StreamHandler(sys.stderr))

    if opts.aws_creds:
        aws_access_key, aws_secret_key = get_aws_creds_file(opts.aws_creds)
    else:
        aws_access_key, aws_secret_key = get_aws_creds_env()

    s3_bucket_name = args[0]
    base_paths = args[1:]
    s3_cxn = S3Connection(aws_access_key, aws_secret_key)
    s3_bucket = Bucket(s3_cxn, s3_bucket_name)
    for base_path in base_paths:
        logger.debug('getting rotated ossec logs in %s', base_path)
        for log in get_logs(base_path):
            if not log.is_archived(s3_bucket):
                log.archive(s3_bucket)
            elif log.expired:
                log.remove()
Code Example #12
File: test_buckets.py Project: dmoror/eucaconsole
 def test_acl_permission_choices_for_create_bucket(self):
     bucket = Bucket()
     form = self.form_class(self.request, bucket_object=bucket)
     permission_choices = dict(form.get_permission_choices())
     self.assertEqual(permission_choices.get('FULL_CONTROL'), 'Full Control')
     self.assertEqual(permission_choices.get('READ'), 'View/Download objects')
     self.assertEqual(permission_choices.get('WRITE'), 'Create/delete objects')
Code Example #13
File: s3.py Project: nova0930/pegasus
def rmup(args):
    parser = option_parser("rmup URL [UPLOAD]")
    parser.add_option("-a", "--all", dest="all", action="store_true",
        default=False, help="Cancel all uploads for the specified bucket")
    options, args = parser.parse_args(args)

    if options.all:
        if len(args) < 1:
            parser.error("Specify bucket URL")
    else:
        if len(args) != 2:
            parser.error("Specify bucket URL and UPLOAD")
        upload = args[1]

    uri = parse_uri(args[0])

    if uri.bucket is None:
        raise Exception("URL must contain a bucket: %s" % args[0])
    if uri.key is not None:
        raise Exception("URL cannot contain a key: %s" % args[0])

    config = get_config(options)
    conn = get_connection(config, uri)

    # There is no easy way to do this with boto
    b = Bucket(connection=conn, name=uri.bucket)
    for up in b.list_multipart_uploads():
        if options.all or up.id == upload:
            info("Removing upload %s" % up.id)
            up.cancel_upload()
Code Example #14
def process_all(action, s3_key, s3_secret, bucket_name, prefix, local_folder,
                queue, thread_count, max_retry, zone):
    """
    Orchestrates the en-queuing and consuming threads in conducting:
    1. Local folder structure construction
    2. S3 key en-queuing
    3. S3 key uploading/downloading if file updated

    :param action:                  download or upload
    :param s3_key:                  Your S3 API Key
    :param s3_secret:               Your S3 API Secret
    :param bucket_name:             Your S3 bucket name
    :param prefix:                  The path to the S3 folder to be downloaded. Example: bucket_root/folder_1
    :param local_folder:            The local folder you wish to upload/download the files from/to
    :param queue:                   A ProcessKeyQueue instance to enqueue all the keys in
    :param thread_count:            The number of threads that you wish s3concurrent to use
    :param max_retry:               The max times for s3concurrent to retry uploading/downloading a key
    :param zone:                    The AWS region to connect to (e.g. 'us-west-2')
    :return:                        True if all keys were processed, False if interrupted in any way
    """

    # conn = S3Connection(s3_key, s3_secret)
    S3Connection.DefaultHost = 's3-' + zone + '.amazonaws.com'
    conn = boto.s3.connect_to_region(
        zone,
        aws_access_key_id=s3_key,
        aws_secret_access_key=s3_secret,
        is_secure=True,
        calling_format=boto.s3.connection.OrdinaryCallingFormat(),
    )

    bucket = Bucket(connection=conn, name=bucket_name)

    if action == 'download':
        target_function = enqueue_s3_keys_for_download
    else:
        target_function = enqueue_s3_keys_for_upload

    enqueue_thread = threading.Thread(target=target_function,
                                      args=(bucket, prefix, local_folder,
                                            queue))
    enqueue_thread.daemon = True
    enqueue_thread.start()

    queue.queuing_started()

    consume_thread = threading.Thread(target=consume_queue,
                                      args=(queue, action, thread_count,
                                            max_retry))
    consume_thread.daemon = True
    consume_thread.start()

    while not queue.all_processed:
        # report progress every 10 secs
        logger.info('{0} keys enqueued, and {1} keys {2}ed'.format(
            queue.enqueued_counter, queue.de_queue_counter, action))
        time.sleep(10)

    logger.info('{0} keys enqueued, and {1} keys {2}ed'.format(
        queue.enqueued_counter, queue.de_queue_counter, action))
Code Example #15
File: s3.py Project: lydiaauch/celery-s3
 def s3_bucket(self):
     conn = connect_to_region(
         self.aws_region,
         aws_access_key_id=self.aws_access_key_id,
         aws_secret_access_key=self.aws_secret_access_key,
         is_secure=True,
     )
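     # Wrap the bucket by name; boto makes no request until the bucket is used.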
     return Bucket(connection=conn, name=self.bucket_name)
Code Example #16
 def execute(self, context, obj):
     connection = S3Connection()
     bucket = Bucket(connection=connection, name=context['bucket'])
     key = Key(bucket=bucket, name=context['name'])
     if key.exists():
         return 'done'
     else:
         return 'missing'
Code Example #17
File: test_key.py Project: wuxi20/Pythonista
 def test_restore_header_with_ongoing_restore(self):
     self.set_http_response(status_code=200,
                            header=[('x-amz-restore',
                                     'ongoing-request="true"')])
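     # ongoing-request="true" marks a Glacier restore that is still in progress.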
     b = Bucket(self.service_connection, 'mybucket')
     k = b.get_key('myglacierkey')
     self.assertTrue(k.ongoing_restore)
     self.assertIsNone(k.expiry_date)
Code Example #18
    def test_500_retry(self, sleep_mock):
        self.set_http_response(status_code=500)
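        # With every response mocked as a 500, send_file should exhaust its
        # retries (sleep is patched out) and raise BotoServerError.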
        b = Bucket(self.service_connection, 'mybucket')
        k = b.new_key('test_failure')
        fail_file = StringIO('This will attempt to retry.')

        with self.assertRaises(BotoServerError):
            k.send_file(fail_file)
Code Example #19
File: test_key.py Project: wuxi20/Pythonista
 def test_restore_completed(self):
     self.set_http_response(
         status_code=200,
         header=[('x-amz-restore', 'ongoing-request="false", '
                  'expiry-date="Fri, 21 Dec 2012 00:00:00 GMT"')])
     b = Bucket(self.service_connection, 'mybucket')
     k = b.get_key('myglacierkey')
     self.assertFalse(k.ongoing_restore)
     self.assertEqual(k.expiry_date, 'Fri, 21 Dec 2012 00:00:00 GMT')
Code Example #20
File: store.py Project: apollo-mapping/tilequeue
def make_s3_store(bucket_name,
                  aws_access_key_id=None, aws_secret_access_key=None,
                  path='osm', reduced_redundancy=False, date_prefix='',
                  delete_retry_interval=60, logger=None):
    conn = connect_s3(aws_access_key_id, aws_secret_access_key)
    bucket = Bucket(conn, bucket_name)
    s3_store = S3(bucket, date_prefix, path, reduced_redundancy,
                  delete_retry_interval, logger)
    return s3_store
Code Example #21
File: monitor.py Project: s905060/inviso
    def __init__(self, jobflow, cluster_id, cluster_name, bucket, prefix, **kwargs):
        super(S3Mr2LogMonitor, self).__init__(**kwargs)

        self.jobflow = jobflow
        self.cluster_id = cluster_id
        self.cluster_name = cluster_name
        self.bucket = bucket
        self.prefix = prefix

        self.emr_logs = Bucket(boto.connect_s3(), bucket)
Code Example #22
 def get_attached_file_url(self):
     if self.attached_file:
         conn = S3Connection(
             aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
             aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
             is_secure=True)
         bucket = Bucket(conn, settings.AWS_MEDIA_BUCKET_NAME)
         fileKey = Key(bucket, self.attached_file)
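         # Presigned URL valid for 600 seconds (10 minutes).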
         return fileKey.generate_url(600)
     return None
Code Example #23
    def test_should_not_raise_kms_related_integrity_errors(self):
        self.set_http_response(
            status_code=200,
            header=[('x-amz-server-side-encryption-aws-kms-key-id', 'key'),
                    ('etag', 'not equal to key.md5')])
        bucket = Bucket(self.service_connection, 'mybucket')
        key = bucket.new_key('test_kms')
        file_content = StringIO('Some content to upload.')

        # Should not raise errors related to integrity checks:
        key.send_file(file_content)
Code Example #24
 def get_value(self, key):
     bucket = Bucket(self.conn, self.bucket_id)
     item = Key(bucket)
     item.key = key
     try:
         value = item.get_contents_as_string()
     except Exception:
         value = None
         exc_type, exc_value, exc_traceback = sys.exc_info()
         lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
         logging.warning(''.join('!! ' + line for line in lines))
     return value
Code Example #25
    def test_500_retry(self):
        self.set_http_response(status_code=500)
        b = Bucket(self.service_connection, 'mybucket')
        k = b.new_key('test_failure')
        fail_file = StringIO('This will attempt to retry.')

        try:
            k.send_file(fail_file)
            self.fail("This shouldn't ever succeed.")
        except BotoServerError:
            pass
Code Example #26
File: models.py Project: ebramanti/wardrobe
 def delete(self, *args, **kwargs):
     if settings.DEBUG:
         os.remove(self.image.path)
     else:
         conn = S3Connection(settings.AWS_ACCESS_KEY_ID,
                             settings.AWS_SECRET_ACCESS_KEY)
         b = Bucket(conn, settings.AWS_STORAGE_BUCKET_NAME)
         k = Key(b)
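         # Recover the object key from the image URL by stripping the bucket
         # prefix and any query string, then delete it from S3.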
         k.key = self.image.url.replace(
             'https://my_outfits.s3.amazonaws.com/', '').split('?')[0]
         b.delete_key(k)
     super(ImageUpload, self).delete(*args, **kwargs)
Code Example #27
 def test_parse_tagging_response(self):
     self.set_http_response(status_code=200)
     b = Bucket(self.service_connection, 'mybucket')
     api_response = b.get_tags()
     # The outer list is a list of tag sets.
     self.assertEqual(len(api_response), 1)
     # The inner list is a list of tags.
     self.assertEqual(len(api_response[0]), 2)
     self.assertEqual(api_response[0][0].key, 'Project')
     self.assertEqual(api_response[0][0].value, 'Project One')
     self.assertEqual(api_response[0][1].key, 'User')
     self.assertEqual(api_response[0][1].value, 'jsmith')
Code Example #28
File: tasks.py Project: lawandeneel/EXIFinator
def initialize_jobs(bucket_name):
    setup_context()
    jobs_count = 0
    conn = S3Connection(AWS_ACCESS_KEY, AWS_SECRET_KEY)
    bucket = Bucket(connection=conn, name=bucket_name)
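    # Queue one download job per key in the bucket.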
    for key in bucket.list():
        queue_job("tasks.Download", {
            "bucket_name": bucket_name,
            "key_name": key.key
        },
                  queue=download_queue)
        jobs_count += 1
    return jobs_count
Code Example #29
    def test__get_all_query_args(self):
        bukket = Bucket()

        # Default.
        qa = bukket._get_all_query_args({})
        self.assertEqual(qa, '')

        # Default with initial.
        qa = bukket._get_all_query_args({}, 'initial=1')
        self.assertEqual(qa, 'initial=1')

        # Single param.
        qa = bukket._get_all_query_args({
            'foo': 'true'
        })
        self.assertEqual(qa, 'foo=true')

        # Single param with initial.
        qa = bukket._get_all_query_args({
            'foo': 'true'
        }, 'initial=1')
        self.assertEqual(qa, 'initial=1&foo=true')

        # Multiple params with all the weird cases.
        multiple_params = {
            'foo': 'true',
            # Ensure Unicode chars get encoded.
            'bar': '☃',
            # Ensure unicode strings with non-ascii characters get encoded
            'baz': u'χ',
            # Underscores are bad, m'kay?
            'some_other': 'thing',
            # Change the variant of ``max-keys``.
            'maxkeys': 0,
            # ``None`` values get excluded.
            'notthere': None,
            # Empty values also get excluded.
            'notpresenteither': '',
        }
        qa = bukket._get_all_query_args(multiple_params)
        self.assertEqual(
            qa,
            'bar=%E2%98%83&baz=%CF%87&foo=true&max-keys=0&some-other=thing'
        )

        # Multiple params with initial.
        qa = bukket._get_all_query_args(multiple_params, 'initial=1')
        self.assertEqual(
            qa,
            'initial=1&bar=%E2%98%83&baz=%CF%87&foo=true&max-keys=0&some-other=thing'
        )
Code Example #30
    def remove_from_s3(self):
        """
        Removes file for this model instance from S3
        """
        conn = boto.connect_s3()

        # loop over buckets, we have more than one, and remove this playground.
        if app_config.S3_BUCKETS:
            for bucket in app_config.S3_BUCKETS:
                b = Bucket(conn, bucket)
                k = Key(b)
                k.key = '%s/playground/%s.html' % (app_config.PROJECT_SLUG,
                                                   self.slug)
                b.delete_key(k)