def test_key_with_files_and_callback(self):
    """
    Round-trip test of Key.set_contents_from_file / get_contents_to_file
    with progress callbacks: write a random file, upload it, download it
    to a *different* path, and verify the two files are byte-identical.
    """
    def _archive_callback(bytes_sent, total_bytes):
        # progress reporting for the upload
        print >> sys.stderr, "archived", str(bytes_sent), "out of", \
            str(total_bytes)

    def _retrieve_callback(bytes_sent, total_bytes):
        # progress reporting for the download
        print >> sys.stderr, "retrieved", str(bytes_sent), "out of", \
            str(total_bytes)

    log = logging.getLogger("test_key_with_files")
    bucket_name = "com-dougfort-test-key-with-files-and-callback"
    key_name = "A" * 1024
    test_file_path = os.path.join(
        _test_dir_path, "test_key_with_files-orignal"
    )
    test_file_size = 1024 ** 2
    buffer_size = 1024

    log.debug("writing %s bytes to %s" % (
        test_file_size, test_file_path,
    ))
    bytes_written = 0
    with open(test_file_path, "w") as output_file:
        while bytes_written < test_file_size:
            output_file.write(_random_string(buffer_size))
            bytes_written += buffer_size

    # create the bucket
    bucket = self._s3_connection.create_bucket(bucket_name)
    self.assertTrue(bucket is not None)
    self.assertEqual(bucket.name, bucket_name)

    # create an empty key and set its name
    write_key = Key(bucket)
    write_key.name = key_name
    self.assertFalse(write_key.exists())

    # upload the test file
    with open(test_file_path, "r") as archive_file:
        write_key.set_contents_from_file(
            archive_file, cb=_archive_callback
        )
    self.assertTrue(write_key.exists())

    # create another key with the same name to read the data back
    read_key = Key(bucket, key_name)

    # BUG FIX: this previously used the same filename as test_file_path,
    # so the download overwrote the original and filecmp compared a file
    # to itself (always True). Download to a distinct path instead.
    retrieve_file_path = os.path.join(
        _test_dir_path, "test_key_with_files-retrieve"
    )
    # 2011-08-08 dougfort boto aborts if you don't tell it the size
    read_key.size = test_file_size
    with open(retrieve_file_path, "w") as retrieve_file:
        read_key.get_contents_to_file(
            retrieve_file, cb=_retrieve_callback
        )
    # shallow=False forces a full content comparison, not just os.stat
    self.assertTrue(
        filecmp.cmp(test_file_path, retrieve_file_path, shallow=False)
    )

    # delete the key
    read_key.delete()
    self.assertFalse(write_key.exists())

    # delete the bucket
    self._s3_connection.delete_bucket(bucket_name)