Example #1
 def upload_helper(self, num_kilobytes):
     key_name = str(uuid.uuid4())
     bucket = self.conn.create_bucket(self.bucket_name)
     # Hash one generated file to get the expected digest, then
     # regenerate the stream for the actual upload.
     md5_expected = md5_from_file(kb_file_gen(num_kilobytes))
     file_obj = kb_file_gen(num_kilobytes)
     key = Key(bucket, key_name)
     key.set_contents_from_file(file_obj,
                                md5=key.get_md5_from_hexdigest(md5_expected))
     # For a single-part upload, S3's ETag is the object's MD5 in quotes.
     self.assertEqual(md5_expected, remove_double_quotes(key.etag))
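The test leans on a few helpers from boto's integration-test suite. A minimal sketch of what they might look like, assuming only the behavior implied by the call sites (the bodies below are illustrative, not boto's actual code):

import hashlib
import io


def kb_file_gen(num_kilobytes):
    # Assumed helper: a file-like object holding num_kilobytes of data.
    return io.BytesIO(b'a' * 1024 * num_kilobytes)


def md5_from_file(file_obj):
    # Assumed helper: stream the file and return its MD5 hex digest.
    md5 = hashlib.md5()
    for chunk in iter(lambda: file_obj.read(8192), b''):
        md5.update(chunk)
    return md5.hexdigest()


def remove_double_quotes(etag):
    # Assumed helper: S3 wraps the ETag in double quotes; strip them.
    return etag.replace('"', '')

Note that get_md5_from_hexdigest() turns the hex digest into the (hex_md5, base64_md5) tuple that set_contents_from_file() accepts via its md5 argument; supplying it spares boto from re-reading the stream to compute the digest itself.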
Example #2
File: s3.py Project: ponty/paved
def upload_s3(file_path, bucket_name, file_key, force=False, acl='private'):
    """Upload a local file to S3.
    """
    file_path = path(file_path)
    bucket = open_s3(bucket_name)

    if file_path.isdir():
        # Upload the contents of the dir path.
        paths = file_path.listdir()
        paths_keys = zip(paths, ['%s/%s' % (file_key, p.name) for p in paths])
    else:
        # Upload just the given file path.
        paths_keys = [(file_path, file_key)]

    for p, k in paths_keys:
        headers = {}
        s3_key = bucket.get_key(k)
        if not s3_key:
            from boto.s3.key import Key
            s3_key = Key(bucket, k)

        content_type = mimetypes.guess_type(p)[0]
        if content_type:
            headers['Content-Type'] = content_type
        file_size = p.stat().st_size
        file_data = p.bytes()
        file_md5, file_md5_64 = s3_key.get_md5_from_hexdigest(
            hashlib.md5(file_data).hexdigest())

        # Check the hash.
        if s3_key.etag:
            s3_md5 = s3_key.etag.replace('"', '')
            if s3_md5 == file_md5:
                info('Hash is the same. Skipping %s' % file_path)
                continue
            elif not force:
                # Check if file on S3 is older than local file.
                s3_datetime = datetime.datetime(*time.strptime(
                    s3_key.last_modified, '%a, %d %b %Y %H:%M:%S %Z')[0:6])
                local_datetime = datetime.datetime.utcfromtimestamp(
                    p.stat().st_mtime)
                if local_datetime < s3_datetime:
                    info("File %s hasn't been modified since last " \
                         "being uploaded" % (file_key))
                    continue
        # File is newer, let's process and upload
        info("Uploading %s..." % (file_key))

        try:
            s3_key.set_contents_from_string(file_data,
                                            headers,
                                            policy=acl,
                                            replace=True,
                                            md5=(file_md5, file_md5_64))
        except Exception as e:
            error("Failed: %s" % e)
            raise
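A hypothetical invocation of the task above (the bucket name and paths are placeholders):

# Hypothetical usage: sync a local build directory under a key prefix.
upload_s3('build/static', 'my-bucket', 'static', acl='public-read')

Passing the (file_md5, file_md5_64) tuple to set_contents_from_string() serves two purposes: boto does not have to hash the data a second time, and the same hex digest is what gets compared against the S3 ETag to skip files that are already up to date.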
Example #3
 def upload_parts_helper(self, zipped_parts_and_md5s, expected_md5):
     key_name = str(uuid.uuid4())
     bucket = self.conn.create_bucket(self.bucket_name)
     upload = bucket.initiate_multipart_upload(key_name)
     key = Key(bucket, key_name)
     # Part numbers are 1-based; each part is uploaded with its own MD5
     # so S3 can verify it independently.
     for idx, (part, md5_of_part) in enumerate(zipped_parts_and_md5s):
         upload.upload_part_from_file(part, idx + 1,
                                      md5=key.get_md5_from_hexdigest(md5_of_part))
     upload.complete_upload()
     # The ETag of a multipart object is not the full-object MD5, so
     # read the key back and hash its contents instead.
     actual_md5 = md5_from_key(key)
     self.assertEqual(expected_md5, actual_md5)
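A sketch of how the test's inputs might be prepared, reusing the assumed helpers from Example #1 (S3 requires every part except the last to be at least 5 MB):

# Illustrative setup; kb_file_gen and md5_from_file are the assumed
# helpers sketched under Example #1.
parts = [kb_file_gen(5 * 1024) for _ in range(2)]  # two 5 MB parts
part_md5s = [md5_from_file(p) for p in parts]
for p in parts:
    p.seek(0)  # rewind so upload_part_from_file() reads from the start
zipped_parts_and_md5s = list(zip(parts, part_md5s))

Because a completed multipart upload's ETag is a hash of the per-part hashes rather than the MD5 of the whole object, the test verifies integrity by downloading the key and computing the digest with md5_from_key() instead of comparing ETags.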