def new_data_to_publish(config, section, blob):
    # Get the metadata for our old chunk

    # If necessary, fetch the existing data from S3; otherwise open a local file
    if ((config.has_option('main', 's3_upload')
         and config.getboolean('main', 's3_upload'))
        or (config.has_option(section, 's3_upload')
            and config.getboolean(section, 's3_upload'))):
        conn = boto.s3.connection.S3Connection()
        bucket = conn.get_bucket(config.get('main', 's3_bucket'))
        s3key = config.get(section, 's3_key') or config.get(section, 'output')
        key = bucket.get_key(s3key)
        if key is None:
            # most likely a new list
            print("{0} looks like it hasn't been uploaded to "
                  "s3://{1}/{2}".format(section, bucket.name, s3key))
            key = boto.s3.key.Key(bucket)
            key.key = s3key
            key.set_contents_from_string("a:1:32:32\n" + 32 * '1')
        current = tempfile.TemporaryFile()
        key.get_contents_to_file(current)
        current.seek(0)
    else:
        current = open(config.get(section, 'output'), 'rb')

    old = chunk_metadata(current)
    current.close()

    new = chunk_metadata(blob)

    return old['checksum'] != new['checksum']
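A minimal usage sketch for the function above (the config file, section name, and file names are illustrative assumptions; chunk_metadata is the helper already used in the body):

import configparser

# Hypothetical caller: decide whether a freshly generated list needs publishing.
config = configparser.ConfigParser()
config.read('lists.ini')  # assumed config with a [main] section and one section per list

section = 'tracking-list'  # assumed section name
with open('tracking-list.new', 'rb') as blob:  # the freshly built chunk file
    if new_data_to_publish(config, section, blob):
        print('checksum changed; publishing new data for', section)
    else:
        print('already up to date; nothing to publish for', section)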
def put_string_to_key(bucket, key_name, content, is_public, callback=None):
    """Write string to key in S3 bucket. If contents of existing key are
    unchanged, there will be no modification.
    Params:
        bucket (boto.s3 object): The bucket to write to.
        key_name (str): The key to write to (must include any applicable prefix).
        content (str): The content to write to the key.
        is_public (bool): Whether the new object should be publicly readable.
        callback (function): An optional progress callback.
    """
    key = bucket.get_key(key_name)
    if key:
        etag = key.etag.strip('"').lower()
        local_etag = hashlib.md5(content).hexdigest().lower()

        if etag == local_etag:
            # key contents haven't changed
            return

    key = bucket.new_key(key_name)
    mimetype = mimetypes.guess_type(key_name)[0]
    if mimetype:
        key.set_metadata('Content-Type', mimetype)

    policy = 'public-read' if is_public else None

    key.set_contents_from_string(content, policy=policy, cb=callback)
    key.close()
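Note that the no-op shortcut works only because, for objects uploaded in a single PUT, the S3 ETag is the MD5 hex digest of the body; multipart uploads get a composite ETag (an MD5 with a -N suffix), so very large objects would always look changed and be rewritten. Under Python 3 the hashlib.md5(content) call also needs bytes rather than str (e.g. content.encode('utf-8')). A minimal usage sketch, assuming credentials in the environment and an illustrative bucket name:

import boto.s3.connection

conn = boto.s3.connection.S3Connection()
bucket = conn.get_bucket('example-bucket')  # assumed bucket name

# The first call writes the object; the identical second call is a no-op
# because the stored ETag matches the locally computed MD5 digest.
put_string_to_key(bucket, 'reports/latest.html', '<html>...</html>', is_public=True)
put_string_to_key(bucket, 'reports/latest.html', '<html>...</html>', is_public=True)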
def test_create_stack_from_s3_url():
    s3_conn = boto.s3.connect_to_region("us-west-1")
    bucket = s3_conn.create_bucket("foobar", location="us-west-1")
    key = boto.s3.key.Key(bucket)
    key.key = "template-key"
    key.set_contents_from_string(dummy_template_json)
    key_url = key.generate_url(expires_in=0, query_auth=False)

    conn = boto.cloudformation.connect_to_region("us-west-1")
    conn.create_stack("new-stack", template_url=key_url)

    stack = conn.describe_stacks()[0]
    stack.stack_name.should.equal("new-stack")
    stack.get_template().should.equal(
        {
            "GetTemplateResponse": {
                "GetTemplateResult": {
                    "TemplateBody": dummy_template_json,
                    "ResponseMetadata": {
                        "RequestId": "2d06e36c-ac1d-11e0-a958-f9382b6eb86bEXAMPLE"
                    },
                }
            }
        }
    )
    def test_delete_key(self):
        bucket = self.s3.get_bucket(self.old_style_bucket_id)
        key = bucket.new_key('testkey')
        key.set_contents_from_string('test')
        key.close()
        self.assertEqual([key.name for key in bucket.get_all_keys()], ['testkey'])
        _delete_key(bucket, 'testkey')
        self.assertEqual([key.name for key in bucket.get_all_keys()], [])
def updates_join(config, email, postcode):
    email = email.lower().replace("/", "_")
    bucket = _get_s3_bucket(config)

    key = boto.s3.key.Key(bucket)
    key.key = "updates/" + str(email)
    key.set_contents_from_string(postcode)

    url = key.generate_url(expires_in=0, query_auth=False)
    def mkdir(self):
        s3 = self.server.get_s3_connection()
        s3.create_bucket()

        if not self.key_name.endswith('/'):
            self.key_name = self.key_name + '/'
        key = boto.s3.key.Key(bucket=self.bucket)
        key.key = self.key_name
        key.set_contents_from_string('')
    def upload(self, docid, page_text):
        try:
            key = boto.s3.key.Key(self.bucket, name=docid)
            headers = {'Content-Type': 'text/html', 'Content-Encoding': 'gzip'}
            key.set_contents_from_string(self._gzip(page_text),
                                         headers=headers)
            return True
        except Exception as ex:
            print("Failed to upload to S3", ex)
            raise
def add_cv(config, person_id, contents, filename):
    person_id = str(int(person_id))
    assert person_id != '0'  # person_id is a string here, so compare against '0'

    bucket = _get_s3_bucket(config)

    when = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S-")

    key = boto.s3.key.Key(bucket)
    key.key = "cvs/" + str(person_id) + "/" + when + filename
    key.set_contents_from_string(contents)
    key.set_acl('public-read')
def upload_gearbox_app(upload_release_file):
    name = parser.get('release', 'name')
    version = parser.get('release', 'version')
    s3_conn = boto.connect_s3()
    bucket = s3_conn.get_bucket(bucket_name)
    key = boto.s3.key.Key(bucket)
    key.key = '{0}/{1}.tar.gz'.format(name, version)
    key.set_contents_from_filename('gearbox_dist/{0}.tar.gz'.format(version))
    print("Uploaded gearbox update")
    if upload_release_file:
        key = boto.s3.key.Key(bucket)
        key.key = '{0}/LATEST'.format(name)
        key.set_contents_from_string(version)
def test_create_stack_from_s3_url():
    s3_conn = boto.s3.connect_to_region("us-west-1")
    bucket = s3_conn.create_bucket("foobar")
    key = boto.s3.key.Key(bucket)
    key.key = "template-key"
    key.set_contents_from_string(dummy_template_json)
    key_url = key.generate_url(expires_in=0, query_auth=False)

    conn = boto.cloudformation.connect_to_region("us-west-1")
    conn.create_stack("new-stack", template_url=key_url)

    stack = conn.describe_stacks()[0]
    stack.stack_name.should.equal("new-stack")
    stack.get_template().should.equal(dummy_template)
def test_create_stack_from_s3_url():
    s3_conn = boto.s3.connect_to_region('us-west-1')
    bucket = s3_conn.create_bucket("foobar")
    key = boto.s3.key.Key(bucket)
    key.key = "template-key"
    key.set_contents_from_string(dummy_template_json)
    key_url = key.generate_url(expires_in=0, query_auth=False)

    cf_conn = boto3.client('cloudformation', region_name='us-west-1')
    cf_conn.create_stack(
        StackName='stack_from_url',
        TemplateURL=key_url,
    )

    cf_conn.get_template(StackName="stack_from_url")['TemplateBody'].should.equal(dummy_template)
    def test_create_get_delete_object(self):
        # S3 Create, get and delete object
        bucket_name = rand_name("s3bucket-")
        object_name = rand_name("s3object-")
        content = 'x' * 42
        bucket = self.client.create_bucket(bucket_name)
        self.addResourceCleanUp(self.destroy_bucket,
                                self.client.connection_data, bucket_name)

        self.assertTrue(bucket.name == bucket_name)
        with contextlib.closing(boto.s3.key.Key(bucket)) as key:
            key.key = object_name
            key.set_contents_from_string(content)
            readback = key.get_contents_as_string()
            self.assertTrue(readback == content)
            bucket.delete_key(key)
            self.assertBotoError(self.s3_error_code.client.NoSuchKey,
                                 key.get_contents_as_string)
def _fetch_candidates(config):
    bucket = _get_s3_bucket(config)
    key_name = "cache/candidates.csv"

    url = "https://yournextmp.com/media/candidates.csv"
    r = requests.get(url)

    if r.status_code == 200:
        r.encoding = 'utf-8'
        text = r.text
        # save to bucket
        key = boto.s3.key.Key(bucket)
        key.key = key_name
        key.set_contents_from_string(text)
    else:
        print("couldn't read from YourNextMP; loading candidates from S3")
        key = bucket.get_key(key_name)
        text = key.get_contents_as_string().decode('utf-8')

    return csv.DictReader(io.StringIO(text))
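The function tries the live CSV first and falls back to the copy cached in S3, so callers can iterate the returned DictReader the same way in both cases. A usage sketch (the 'name' column is an assumption about the CSV layout):

# Hypothetical usage; 'name' is an assumed column in candidates.csv.
for row in _fetch_candidates(config):
    print(row['name'])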
def save_jail_report_to_s3(bucket, html, timestamp):
    """Uploads the jail report HTML to S3 with a timestamp.

    The timestamp is used to set the filename / key.

    :param bucket: The S3 bucket to write the report to.
    :type bucket: boto.s3.bucket.Bucket
    :param html: The contents of the retrieved report.
    :type html: str
    :param timestamp: When the report was retrieved, preferably in UTC.
    :type timestamp: datetime.datetime
    """
    key = boto.s3.key.Key(
        bucket=bucket,
        name=_make_jail_report_key_name(timestamp=timestamp),
    )
    log.debug('Saving report to key: %r', key)
    key.set_contents_from_string(
        string_data=html,
        headers={
            'Content-Type': 'text/html',
        },
    )
    log.info('Saved jail report to S3: %r', key)
    return key.name
def new_data_to_publish_to_s3(config, section, new):
    # Get the metadata for our old chunk

    # If necessary, fetch the existing data from S3; otherwise open a local file
    if ((config.has_option('main', 's3_upload')
         and config.getboolean('main', 's3_upload'))
            or (config.has_option(section, 's3_upload')
                and config.getboolean(section, 's3_upload'))):
        conn = boto.s3.connection.S3Connection()
        bucket = conn.get_bucket(config.get('main', 's3_bucket'))
        s3key = config.get(section, 's3_key') or config.get(section, 'output')
        key = bucket.get_key(s3key)
        if key is None:
            # most likely a new list
            print('{0} looks like it hasn\'t been uploaded to '
                  's3://{1}/{2}'.format(section, bucket.name, s3key))
            key = boto.s3.key.Key(bucket)
            key.key = s3key
            key.set_contents_from_string('a:1:32:32\n' + 32 * '1')
        current = tempfile.TemporaryFile()
        key.get_contents_to_file(current)
        key.set_acl('bucket-owner-full-control')
        if CLOUDFRONT_USER_ID is not None:
            key.add_user_grant('READ', CLOUDFRONT_USER_ID)
        current.seek(0)
    else:
        current = open(config.get(section, 'output'), 'rb')

    old = chunk_metadata(current)
    current.close()

    return old['checksum'] != new['checksum']
def _save_mug_shot_to_s3(bucket, inmate):
    if inmate.mug is None:
        raise ValueError('Must have image data in order to save.')
    # Compute the hash only once and save the result.
    image_hash = inmate.sha1
    key = boto.s3.key.Key(
        bucket=bucket,
        name='mugshots/{first}/{second}/{hash}.jpg'.format(
            first=image_hash[0:2],
            second=image_hash[2:4],
            hash=image_hash,
        ),
    )
    log.debug('Saving mugshot for inmate-ID %s to S3: %r', inmate.id, key)
    key.set_contents_from_string(
        string_data=inmate.mug,
        headers={
            'Cache-Control': 'max-age=31556952,public',
            'Content-Type': 'image/jpeg',
        },
        # If we've seen this before, keep the original timestamp.
        replace=False,
        policy='public-read',
    )
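The key layout above is content-addressed: the object name derives from the SHA-1 of the image, with the first two byte pairs used as two levels of fan-out so no single prefix accumulates an unbounded number of keys, and replace=False keeps the original upload timestamp when the same image is seen again. A sketch of the naming scheme (the hash value is illustrative):

# Illustrative hash; a real value comes from inmate.sha1.
image_hash = '5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8'
key_name = 'mugshots/{first}/{second}/{hash}.jpg'.format(
    first=image_hash[0:2],   # '5b'
    second=image_hash[2:4],  # 'aa'
    hash=image_hash,
)
# key_name == 'mugshots/5b/aa/5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8.jpg'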
    def put_content(self, path, content):
        path = self._init_path(path)
        key = self.makeKey(path)
        key.set_contents_from_string(
            content, encrypt_key=(self._config.s3_encrypt is True))
        return path
    def put_content(self, path, content):
        self._initialize_cloud_conn()
        path = self._init_path(path)
        key = self._key_class(self._cloud_bucket, path)
        key.set_contents_from_string(content, **self._upload_params)
        return path
    def store_archive(self):
        key = boto.s3.key.Key(self.__bucket__, self.__filename__)
        key.set_contents_from_string(json.dumps(self.__archive__))
    def put_content(self, path, content):
        path = self._init_path(path)
        key = boto.s3.key.Key(self._s3_bucket, path)
        key.set_contents_from_string(content)
        return path
    def put_content(self, path, content):
        path = self._init_path(path)
        key = boto.s3.key.Key(self._s3_bucket, path)
        key.set_contents_from_string(
            content, encrypt_key=(self._config.s3_encrypt is True))
        return path
    def upload(self, auth=False):
        key = boto.s3.key.Key(ctx.bucket, ctx.key_for(self.name))
        key.set_contents_from_string(
            str(self), headers={'Content-Type': 'application/json'}
        )
        return ctx.url_for(key, auth=auth)
def test():
    print('--- running AWS s3 examples ---')
    c = boto.s3.connection.S3Connection(conf.AWS_ACCESS_KEY, conf.AWS_SECRET_ACCESS_KEY)

    print('original bucket number:', len(c.get_all_buckets()))

    bucket_name = 'yet.another.s3.example.code'
    print('creating a bucket:', bucket_name)
    try:
        bucket = c.create_bucket(bucket_name)
    except boto.exception.S3CreateError as e:
        print(' ' * 4, 'error occurred:')
        print(' ' * 8, 'http status code:', e.status)
        print(' ' * 8, 'reason:', e.reason)
        print(' ' * 8, 'body:', e.body)
        return

    test_bucket_name = 'no.existence.yet.another.s3.example.code'
    print('if you just want to know whether the bucket(\'%s\') exists or not' % (test_bucket_name,),
          'and don\'t want to get this bucket')
    try:
        test_bucket = c.head_bucket(test_bucket_name)
    except boto.exception.S3ResponseError as e:
        if e.status == 403 and e.reason == 'Forbidden':
            print(' ' * 4, 'the bucket(\'%s\') exists but you don\'t have the permission.' % (test_bucket_name,))
        elif e.status == 404 and e.reason == 'Not Found':
            print(' ' * 4, 'the bucket(\'%s\') doesn\'t exist.' % (test_bucket_name,))

    print('or use lookup() instead of head_bucket() to do the same thing.',
          'it will return None if the bucket does not exist instead of throwing an exception.')
    test_bucket = c.lookup(test_bucket_name)
    if test_bucket is None:
        print(' ' * 4, 'the bucket(\'%s\') doesn\'t exist.' % (test_bucket_name,))

    print('now you can get the bucket(\'%s\')' % (bucket_name,))
    bucket = c.get_bucket(bucket_name)

    print('add some objects to bucket', bucket_name)
    keys = ['sample.txt', 'notes/2006/January/sample.txt', 'notes/2006/February/sample2.txt',
            'notes/2006/February/sample3.txt', 'notes/2006/February/sample4.txt', 'notes/2006/sample5.txt']
    print(' ' * 4, 'these key names are:')
    for name in keys:
        print(' ' * 8, name)

    filename = './_test_dir/sample.txt'
    print(' ' * 4, 'you can set contents of object(\'%s\') from filename(\'%s\')' % (keys[0], filename,))
    key = boto.s3.key.Key(bucket, keys[0])
    bytes_written = key.set_contents_from_filename(filename)
    assert bytes_written == os.path.getsize(filename), '    error occurred: broken file'

    print(' ' * 4, 'or set contents of object(\'%s\') by opened file object' % (keys[1],))
    key = boto.s3.key.Key(bucket, keys[1])
    with open(filename, 'rb') as fp:
        bytes_written = key.set_contents_from_file(fp)
    assert bytes_written == os.path.getsize(filename), '    error occurred: broken file'

    print(' ' * 4, 'you can also set contents of the remaining key objects from string')
    for name in keys[2:]:
        print(' ' * 8, 'key:', name)
        key = boto.s3.key.Key(bucket, name)
        s = 'This is the content of %s ' % (name,)
        key.set_contents_from_string(s)
        print(' ' * 8, '..contents:', key.get_contents_as_string())
        # use get_contents_to_filename() to save contents to a specific file in the filesystem.

    # print('You have %d objects in bucket %s' % ())

    print('list all objects added into \'%s\' bucket' % (bucket_name,))
    print(' ' * 4, 'list() automatically handles all of the result paging from S3.')
    print(' ' * 4, 'You just need to keep iterating until there are no more results.')
    print(' ' * 4, '---------------------------------------------------------------')
    bucket_size = 0
    for key in bucket.list():
        print(' ' * 4, key.name)
        bucket_size += key.size
    print(' ' * 4, 'bucket size:', bucket_size, 'bytes.')
    # do not calculate bucket size or number of objects when you have millions of objects in a bucket.

    p = 'notes/2006/'
    print('list objects that start with \'%s\'' % (p,))
    objs = bucket.list(prefix=p)
    for key in objs:
        print(' ' * 4, key.name)

    print('list objects or key prefixes like \'%s/*\', something like what\'s at the top of the \'%s\' folder' % (p, p,))
    objs = bucket.list(prefix=p, delimiter='/')
    for key in objs:
        print(' ' * 4, key.name)

    keys_per_page = 4
    print('manually handle the results paging from s3,', 'number of keys per page:', keys_per_page)
    print(' ' * 4, 'get page 1')
    objs = bucket.get_all_keys(max_keys=keys_per_page)
    for key in objs:
        print(' ' * 8, key.name)

    print(' ' * 4, 'get page 2')
    last_key_name = objs[-1].name  # the last key of the previous page is the marker used to retrieve the next page.
    objs = bucket.get_all_keys(max_keys=keys_per_page, marker=last_key_name)
    for key in objs:
        print(' ' * 8, key.name)
    """
    get_all_keys() is a lower-level method for listing the contents of a bucket.
    It closely models the actual S3 API and requires you to manually handle the paging of results.
    For a higher-level method that handles the details of paging for you, use the list() method.
    """

    print('you must delete all objects in the bucket \'%s\' before deleting the bucket' % (bucket_name,))
    print(' ' * 4, 'you can delete objects one by one')
    bucket.delete_key(keys[0])
    print(' ' * 4, 'or you can delete multiple objects using a single HTTP request with delete_keys().')
    bucket.delete_keys(keys[1:])

    # TODO: print('after previous deletion, we now have %d objects in bucket(\'%s\')' % (len(bucket.list()), bucket_name,))
    print('now you can delete the bucket \'%s\'' % (bucket_name,))
    c.delete_bucket(bucket_name)
    def mkdir(self):
        if not self.key_name.endswith('/'):
            self.key_name = self.key_name + '/'
        key = boto.s3.key.Key(bucket=self.bucket)
        key.key = self.key_name
        key.set_contents_from_string('')
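S3 has no real directories: both mkdir variants above emulate a folder by writing a zero-byte object whose key ends in '/', the convention most S3 browsers render as a directory. A sketch of the effect (bucket and prefix names are assumptions):

import boto.s3.connection
import boto.s3.key

conn = boto.s3.connection.S3Connection()
bucket = conn.get_bucket('example-bucket')  # assumed bucket name

key = boto.s3.key.Key(bucket=bucket)
key.key = 'reports/2016/'  # the trailing slash marks the "folder"
key.set_contents_from_string('')

# With a delimiter, the placeholder now shows up like a directory entry.
for entry in bucket.list(prefix='reports/', delimiter='/'):
    print(entry.name)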
    def worker(base_path):
        mtime = path = 0
        while True:
            try:
                mtime, queued_path = queue.get()

                path = queued_path
                if path is None:
                    return

                key = boto.s3.key.Key(bucket)

                if mtime is None:  # delete
                    try:
                        try:
                            key.key = bucket_prefix + path
                            key.delete()
                        except Exception:
                            logger.exception('deleting %r, retrying' % key.key)
                            time.sleep(9)
                            key.key = bucket_prefix + path
                            key.delete()
                    except Exception:
                        if index is not None:
                            # Failed to delete. Put the key back so we
                            # try again later
                            index[queued_path] = 1
                        raise

                elif mtime is GENERATE:
                    (path, s3mtime) = path
                    fspath = join(base_path, path.encode(encoding))
                    if exists(fspath):
                        # Someone created a file since we decided to
                        # generate one.
                        continue

                    fspath = dirname(fspath)
                    data = "Index of " + path[:-len(INDEX_HTML) - 1]
                    data = [
                        "<!-- generated -->",
                        "<html><head><title>%s</title></head><body>" % data,
                        "<h1>%s</h1><table>" % data,
                        "<tr><th>Name</th><th>Last modified</th><th>Size</th>"
                        "</tr>",
                    ]
                    for name in sorted(os.listdir(fspath)):
                        if name.startswith('.'):
                            continue  # don't index dot files
                        name_path = join(fspath, name)
                        if isdir(name_path):
                            name = name + '/'
                            size = '-'
                        else:
                            size = os.stat(name_path).st_size
                        mtime = time.ctime(os.stat(name_path).st_mtime)
                        name = name.decode(encoding)
                        data.append('<tr><td><a href="%s">%s</a></td>\n'
                                    '    <td>%s</td><td>%s</td></tr>' %
                                    (name, name, mtime, size))
                    data.append("</table></body></html>\n")
                    data = '\n'.join(data)

                    digest = hashlib.md5(data.encode(encoding)).hexdigest()
                    if digest != s3mtime:
                        # Note that s3mtime is either a previous
                        # digest, or 0 (because the path wasn't in s3), or
                        # an s3 upload time.  The test above
                        # works in all of these cases.
                        key.key = bucket_prefix + path
                        key.set_metadata('generated', 'true')
                        try:
                            key.set_contents_from_string(
                                data,
                                headers={'Content-Type': 'text/html'},
                            )
                        except Exception:
                            logger.exception(
                                'uploading generated %r, retrying' % path)
                            time.sleep(9)
                            key.set_contents_from_string(
                                data,
                                headers={'Content-Type': 'text/html'},
                            )

                        if s3mtime:
                            # update (if it was add, mtime would be 0)
                            if cloudfront:
                                invalidations.append(path)

                    if index is not None:
                        index[path] = digest

                else:  # upload
                    try:
                        if had_index:
                            # We only store mtimes to the nearest second.
                            # We don't have a fudge factor, so there's a
                            # chance that someone might update the file in
                            # the same second, so we check if a second has
                            # passed and sleep if it hasn't.
                            now = time_time_from_sixtuple(
                                time.gmtime(time.time()))
                            if not now > mtime:
                                time.sleep(1)

                        key.key = bucket_prefix + path
                        path = join(base_path, path)
                        try:
                            key.set_contents_from_filename(
                                path.encode(encoding))
                        except Exception:
                            logger.exception('uploading %r %r, retrying' %
                                             (mtime, path))
                            time.sleep(9)
                            key.set_contents_from_filename(
                                path.encode(encoding))

                    except Exception:
                        if index is not None:
                            # Upload failed. Remove from index so we
                            # try again later (if the path is still
                            # around).
                            index.pop(queued_path)
                        raise

            except Exception:
                logger.exception('processing %r %r' % (mtime, path))
            finally:
                queue.task_done()
Thanks!

Francis
Volunteer, Democracy Club CVs

P.S. If you want to upload something else as your CV, make
a Word or PDF document, and go here:
{link}
'''.format(link=link,
            linkedin_url=candidate['linkedin_url'],
            name=candidate['name'])

            print("=========================\n" + body)

            #candidate['email'] = '*****@*****.**'

            msg = flask_mail.Message(
                body=body,
                subject="Easily share your LinkedIn CV with your voters!",
                sender=("Democracy Club CVs", "*****@*****.**"),
                recipients=[(candidate['name'], candidate['email'])])

            main.mail.send(msg)

            # record sent
            bucket = lookups._get_s3_bucket(main.app.config)
            key = boto.s3.key.Key(bucket)
            key.key = "mailed/linkedin/" + str(candidate['id']) + ".sent"
            key.set_contents_from_string("sent")
    def upload_test_file(self):
        key = Key(name=KEY_NAME, bucket=self.bucket)
        key.set_contents_from_string(str(datetime.datetime.now()))
import sys
from pyspark import SparkContext

sc = SparkContext()
word_counts = sc.textFile(sys.argv[1]) \
    .flatMap(lambda line: line.split(' ')) \
    .map(lambda word: (word, 1)) \
    .reduceByKey(lambda count1, count2: count1 + count2) \
    .takeOrdered(50, lambda wc: -wc[1])  # tuple-parameter lambdas are Python 2 only

# Log results to S3
import json
import boto.s3, boto.s3.key
conn = boto.s3.connect_to_region("eu-west-1")
bucket = conn.get_bucket("oc-calculsdistribues")
key = boto.s3.key.Key(bucket, "words.txt")

key.set_contents_from_string(json.dumps(word_counts, indent=2))
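For completeness, a sketch of reading the uploaded counts back out of the same bucket (the reverse of the write above):

import json
import boto.s3

# Assumes the object written above exists and contains valid JSON.
conn = boto.s3.connect_to_region("eu-west-1")
bucket = conn.get_bucket("oc-calculsdistribues")
key = bucket.get_key("words.txt")
word_counts = json.loads(key.get_contents_as_string().decode('utf-8'))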