Example #1
    def test_key_with_files(self):
        """
        test simple key 'from_file' and 'to_file' functions
        """
        log = logging.getLogger("test_key_with_files")
        bucket_name = "com-dougfort-test-key-with-files"
        key_name = "A" * 1024
        test_file_path = os.path.join(
            _test_dir_path, "test_key_with_files-original"
        )
        test_file_size = 1024 ** 2
        buffer_size = 1024

        log.debug("writing %s bytes to %s" % (
            test_file_size, test_file_path, 
        ))
        bytes_written = 0
        with open(test_file_path, "w") as output_file:
            while bytes_written < test_file_size:
                output_file.write(_random_string(buffer_size))
                bytes_written += buffer_size

        # create the bucket
        bucket = self._s3_connection.create_bucket(bucket_name)
        self.assertTrue(bucket is not None)
        self.assertEqual(bucket.name, bucket_name)

        # create an empty key
        write_key = Key(bucket)

        # set the name
        write_key.name = key_name
        self.assertFalse(write_key.exists())

        # upload some data
        with open(test_file_path, "r") as archive_file:
            write_key.set_contents_from_file(archive_file)        
        self.assertTrue(write_key.exists())

        # create another key with the same name 
        read_key = Key(bucket, key_name)

        # read back the data to a separate path so the comparison below
        # actually compares two files
        retrieve_file_path = os.path.join(
            _test_dir_path, "test_key_with_files-retrieved"
        )
        with open(retrieve_file_path, "w") as retrieve_file:
            read_key.get_contents_to_file(retrieve_file)
        self.assertTrue(
            filecmp.cmp(test_file_path, retrieve_file_path, shallow=False)
        )

        # delete the key
        read_key.delete()
        self.assertFalse(write_key.exists())
        
        # delete the bucket
        self._s3_connection.delete_bucket(bucket_name)
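
This example relies on module-level setup that is not shown: the boto Key import, a scratch directory _test_dir_path, and a _random_string helper (later examples similarly assume a test_dir_path and a create_unique_bucket method on the connection wrapper). A minimal sketch of the missing pieces, assuming the helper only needs printable data; the directory path and helper body are guesses, not the original code:

import filecmp
import logging
import os
import random
import string
import sys

from boto.s3.key import Key

# hypothetical scratch directory for the test files
_test_dir_path = "/tmp/boto-key-tests"


def _random_string(size):
    # `size` random ASCII letters, safe to write in text mode
    return "".join(random.choice(string.ascii_letters) for _ in range(size))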
Example #2
    def test_uninterrupted_resumable(self):
        """
        test get_contents_to_file without any interruption. 
        """
        log = logging.getLogger("test_uninterrupted_resumable")
        key_name = "test-key"
        test_file_path = os.path.join(test_dir_path, "test-original")
        test_file_size = 1024**2
        buffer_size = 1024

        log.debug("writing {0} bytes to {1}".format(test_file_size,
                                                    test_file_path))
        bytes_written = 0
        with open(test_file_path, "wb") as output_file:
            while bytes_written < test_file_size:
                output_file.write(os.urandom(buffer_size))
                bytes_written += buffer_size

        # create the bucket
        bucket = self._s3_connection.create_unique_bucket()
        self.assertTrue(bucket is not None)

        # create an empty key
        write_key = Key(bucket)

        # set the name
        write_key.name = key_name
        self.assertFalse(write_key.exists())

        # upload some data
        with open(test_file_path, "rb") as archive_file:
            write_key.set_contents_from_file(archive_file)
        self.assertTrue(write_key.exists())

        # create a ResumableDownloadHandler
        tracker_file_path = os.path.join(test_dir_path, "tracker-file")
        download_handler = ResumableDownloadHandler(
            tracker_file_name=tracker_file_path)

        # read back the data
        retrieve_file_path = os.path.join(test_dir_path, "test-retrieved")
        with open(retrieve_file_path, "wb") as retrieve_file:
            write_key.get_contents_to_file(
                retrieve_file, res_download_handler=download_handler)

        self.assertTrue(
            filecmp.cmp(test_file_path, retrieve_file_path, shallow=False))

        # delete the key
        write_key.delete()
        self.assertFalse(write_key.exists())

        # delete the bucket
        self._s3_connection.delete_bucket(bucket.name)
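
ResumableDownloadHandler is imported from boto.s3.resumable_download_handler; it persists progress in the tracker file so that a re-run can continue an aborted transfer with a ranged GET instead of starting over. A minimal standalone sketch (the tracker path and retry count here are arbitrary choices, not values from the test):

from boto.s3.resumable_download_handler import ResumableDownloadHandler

# the tracker file is what lets progress survive a crash or restart
handler = ResumableDownloadHandler(tracker_file_name="/tmp/download.tracker",
                                   num_retries=5)
# some_key.get_contents_to_file(fp, res_download_handler=handler)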
Example #3
    def test_interrupted_resumable(self):
        """
        test get_contents_to_file with a simulated interruption. 
        """
        log = logging.getLogger("test_interrupted_resumable")
        key_name = "test-key"
        test_file_path = os.path.join(
            test_dir_path, "test-original"
        )
        test_file_size = 1024 ** 2
        interrupted_size = 1024 * 42

        test_data = os.urandom(test_file_size)

        log.debug("writing {0} bytes to {1}".format(test_file_size, 
                                                    test_file_path))
        with open(test_file_path, "wb") as output_file:
            output_file.write(test_data)

        # create the bucket
        bucket = self._s3_connection.create_unique_bucket()
        self.assertTrue(bucket is not None)

        # create an empty key
        write_key = Key(bucket)

        # set the name
        write_key.name = key_name
        # self.assertFalse(write_key.exists())

        # upload some data
        with open(test_file_path, "rb") as archive_file:
            write_key.set_contents_from_file(archive_file)        
        self.assertTrue(write_key.exists())

        # create a ResumableDownloadHandler
        tracker_file_path = os.path.join(
            test_dir_path, "tracker-file"
        )
        download_handler = ResumableDownloadHandler(
            tracker_file_name=tracker_file_path
        )

        retrieve_file_path = os.path.join(
            test_dir_path, "test-retrieved"
        )

        # copy the first part of the data to the retrieve file to simulate
        # a retrieve that was interrupted after interrupted_size bytes
        with open(retrieve_file_path, "wb") as output_file:
            output_file.write(test_data[:interrupted_size])

        # spoof the resumable handler into thinking it has a retrieve
        # in progress
        download_handler._save_tracker_info(write_key)

        # resume the retrieve; append mode preserves the partial data,
        # so the handler only has to fetch the remaining bytes
        with open(retrieve_file_path, "ab") as retrieve_file:
            write_key.get_contents_to_file(
                retrieve_file, res_download_handler=download_handler)

        # read back the retrieved data
        with open(retrieve_file_path, "rb") as retrieve_file:
            retrieved_data = retrieve_file.read()

        self.assertEqual(len(retrieved_data), len(test_data))
        self.assertTrue(retrieved_data == test_data)

        # delete the key
        write_key.delete()
        self.assertFalse(write_key.exists())
        
        # delete the bucket
        self._s3_connection.delete_bucket(bucket.name)
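
The spoof works because _save_tracker_info records the key's etag in the tracker file; on the next call the handler treats the bytes already present in the destination file as completed progress and requests only the remainder. Roughly, paraphrasing boto's behaviour (an illustrative sketch, not boto's actual source):

import os


def _should_resume(key, fp, saved_etag):
    # bytes already in fp count as progress; this is why the test must
    # open the destination in append mode instead of truncating it
    fp.seek(0, os.SEEK_END)
    cur_file_size = fp.tell()
    # resume only if the object is unchanged since the tracker was written
    return cur_file_size > 0 and saved_etag == key.etag.strip('"')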
Example #4
    def test_key_with_files_and_callback(self):
        """
        test simple key 'from_file' and 'to_file' functions
        """
        def _archive_callback(bytes_sent, total_bytes):
            sys.stderr.write("archived {0} out of {1}\n".format(
                bytes_sent, total_bytes))

        def _retrieve_callback(bytes_sent, total_bytes):
            sys.stderr.write("retrieved {0} out of {1}\n".format(
                bytes_sent, total_bytes))

        log = logging.getLogger("test_key_with_files")
        bucket_name = "com-dougfort-test-key-with-files-and-callback"
        key_name = "A" * 1024
        test_file_path = os.path.join(
            _test_dir_path, "test_key_with_files-original"
        )
        test_file_size = 1024 ** 2
        buffer_size = 1024

        log.debug("writing %s bytes to %s" % (
            test_file_size, test_file_path, 
        ))
        bytes_written = 0
        with open(test_file_path, "w") as output_file:
            while bytes_written < test_file_size:
                output_file.write(_random_string(buffer_size))
                bytes_written += buffer_size

        # create the bucket
        bucket = self._s3_connection.create_bucket(bucket_name)
        self.assertTrue(bucket is not None)
        self.assertEqual(bucket.name, bucket_name)

        # create an empty key
        write_key = Key(bucket)

        # set the name
        write_key.name = key_name
        self.assertFalse(write_key.exists())

        # upload some data
        with open(test_file_path, "r") as archive_file:
            write_key.set_contents_from_file(
                archive_file, cb=_archive_callback
            )        
        self.assertTrue(write_key.exists())

        # create another key with the same name 
        read_key = Key(bucket, key_name)

        # read back the data to a separate path so the comparison below
        # actually compares two files
        retrieve_file_path = os.path.join(
            _test_dir_path, "test_key_with_files-retrieved"
        )
        # 2011-08-08 dougfort boto aborts if you don't tell it the size
        read_key.size = test_file_size
        with open(retrieve_file_path, "w") as retrieve_file:
            read_key.get_contents_to_file(
                retrieve_file, cb=_retrieve_callback
            )      
        self.assertTrue(
            filecmp.cmp(test_file_path, retrieve_file_path, shallow=False)
        )

        # delete the key
        read_key.delete()
        self.assertFalse(write_key.exists())
        
        # delete the bucket
        self._s3_connection.delete_bucket(bucket_name)
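
boto calls these callbacks as (bytes_transferred, total_bytes); the optional num_cb argument caps how many times the callback fires over the course of a transfer. For example (num_cb=20 is an arbitrary choice, not part of the original test):

with open(test_file_path, "rb") as archive_file:
    write_key.set_contents_from_file(archive_file,
                                     cb=_archive_callback,
                                     num_cb=20)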