def test_content(self):
    """The bytes stored in a largefile round-trip unchanged."""
    byte_count = randint(512, 1024)
    payload = factory.make_bytes(size=byte_count)
    largefile = factory.make_LargeFile(payload, size=byte_count)
    with largefile.content.open("rb") as fd:
        stored = fd.read()
    self.assertEqual(payload, stored)
def test_progress_accumulates_all_files(self):
    """Set progress is the combined percentage across every file."""
    resource = factory.make_BootResource()
    resource_set = factory.make_BootResourceSet(resource)
    sizes = [random.randint(512, 1024) for _ in range(3)]
    total_sizes = [random.randint(1025, 2048) for _ in range(3)]
    types = [
        BOOT_RESOURCE_FILE_TYPE.ROOT_IMAGE,
        BOOT_RESOURCE_FILE_TYPE.BOOT_KERNEL,
        BOOT_RESOURCE_FILE_TYPE.BOOT_INITRD,
    ]
    # Pair each size with a total size and filetype; the latter two are
    # taken back-to-front, mirroring list.pop() order.
    for size, total_size, filetype in zip(
        sizes, reversed(total_sizes), reversed(types)
    ):
        content = factory.make_bytes(size=size)
        largefile = factory.make_LargeFile(content=content, size=total_size)
        factory.make_BootResourceFile(
            resource_set, largefile, filename=filetype, filetype=filetype)
    expected = 100.0 * sum(sizes) / float(sum(total_sizes))
    self.assertAlmostEqual(expected, resource_set.progress)
def test_empty_content(self):
    """A zero-byte largefile reads back as the empty byte string."""
    expected = b""
    largefile = factory.make_LargeFile(expected, size=0)
    with largefile.content.open("rb") as fd:
        stored = fd.read()
    self.assertEqual(expected, stored)
def test_POST_creates_boot_resource_with_empty_largefile(self):
    """POSTing sha256/size of a deleted largefile creates a fresh,
    incomplete largefile with the same identity."""
    self.become_admin()
    # Make a throwaway largefile purely to obtain a random sha256 and
    # size; delete it so the new resource cannot pick up its content.
    largefile = factory.make_LargeFile()
    with post_commit_hooks:
        largefile.delete()
    params = {
        'name': factory.make_name('name'),
        'architecture': make_usable_architecture(self),
        'sha256': largefile.sha256,
        'size': largefile.total_size,
    }
    response = self.client.post(reverse('boot_resources_handler'), params)
    self.assertEqual(http.client.CREATED, response.status_code)
    parsed_result = json_load_bytes(response.content)
    resource = BootResource.objects.get(id=parsed_result['id'])
    rfile = resource.sets.first().files.first()
    recreated = rfile.largefile
    self.assertEqual(
        (largefile.sha256, largefile.total_size, False),
        (recreated.sha256, recreated.total_size, recreated.complete))
def test_get_or_create_file_from_content_returns_same_largefile(self):
    """Already-stored content is deduplicated to the existing LargeFile."""
    existing = factory.make_LargeFile()
    stream = existing.content.open('rb')
    self.addCleanup(stream.close)
    found = LargeFile.objects.get_or_create_file_from_content(stream)
    self.assertEqual(existing, found)
def make_empty_resource_file(self, rtype=None, content=None):
    """Create a BootResourceFile whose largefile has no content yet.

    Returns the resource file and the bytes its largefile is expected
    to eventually hold.
    """
    if content is None:
        content = factory.make_bytes(1024)
    total_size = len(content)
    # A temporary largefile supplies the sha256, total_size and the
    # canonical content bytes...
    seed_file = factory.make_LargeFile(content=content, size=total_size)
    sha256 = seed_file.sha256
    with seed_file.content.open('rb') as stream:
        content = stream.read()
    with post_commit_hooks:
        seed_file.delete()
    # ...then it is recreated with the same identity but an empty blob.
    blank_blob = LargeObjectFile()
    blank_blob.open().close()
    empty_file = LargeFile.objects.create(
        sha256=sha256, total_size=total_size, content=blank_blob)
    if rtype is None:
        rtype = BOOT_RESOURCE_TYPE.UPLOADED
    resource = factory.make_BootResource(rtype=rtype)
    resource_set = factory.make_BootResourceSet(resource)
    rfile = factory.make_BootResourceFile(resource_set, empty_file)
    return rfile, content
def test_delete_does_nothing_if_linked(self):
    """A largefile still referenced by a resource file survives delete()."""
    linked = factory.make_LargeFile()
    resource = factory.make_BootResource()
    resource_set = factory.make_BootResourceSet(resource)
    factory.make_BootResourceFile(resource_set, linked)
    linked.delete()
    self.assertTrue(LargeFile.objects.filter(id=linked.id).exists())
def test_deletes_content_asynchronously(self):
    """Deleting a largefile defers removal of its large-object content."""
    self.patch(signals.largefiles, "delete_large_object_content_later")
    doomed = factory.make_LargeFile()
    self.addCleanup(doomed.content.unlink)
    with post_commit_hooks:
        doomed.delete()
    self.assertThat(
        signals.largefiles.delete_large_object_content_later,
        MockCalledOnceWith(doomed.content))
def make_complete_boot_resource_set(self, resource):
    """Build a resource set for `resource` holding one complete file."""
    resource_set = factory.make_BootResourceSet(resource)
    name = factory.make_name('name')
    kind = factory.pick_enum(BOOT_RESOURCE_FILE_TYPE)
    backing = factory.make_LargeFile()
    factory.make_BootResourceFile(
        resource_set, backing, filename=name, filetype=kind)
    return resource_set
def test_deletes_content_asynchronously_for_queries_too(self):
    """Queryset bulk deletion defers content removal for each largefile."""
    self.patch(signals.largefiles, "delete_large_object_content_later")
    for _ in range(2):
        largefile = factory.make_LargeFile()
        self.addCleanup(largefile.content.unlink)
    with post_commit_hooks:
        LargeFile.objects.all().delete()
    self.assertThat(
        signals.largefiles.delete_large_object_content_later,
        MockCallsMatch(call(ANY), call(ANY)))
def test_progress_handles_zero_division(self):
    """A set whose only file holds no content reports zero progress."""
    resource = factory.make_BootResource()
    resource_set = factory.make_BootResourceSet(resource)
    filetype = BOOT_RESOURCE_FILE_TYPE.ROOT_IMAGE
    declared = random.randint(1025, 2048)
    empty = factory.make_LargeFile(content=b"", size=declared)
    factory.make_BootResourceFile(
        resource_set, empty, filename=filetype, filetype=filetype)
    self.assertEqual(0, resource_set.progress)
def test_POST_validates_size_matches_total_size_for_largefile(self):
    """A size that disagrees with the stored largefile is rejected."""
    self.become_admin()
    existing = factory.make_LargeFile()
    params = {
        'name': factory.make_name('name'),
        'architecture': make_usable_architecture(self),
        'sha256': existing.sha256,
        # One byte off the real total size: must fail validation.
        'size': existing.total_size + 1,
    }
    response = self.client.post(reverse('boot_resources_handler'), params)
    self.assertEqual(http.client.BAD_REQUEST, response.status_code)
def test_boot_resource_set_to_dict(self):
    """The dict form mirrors the set's fields and its nested files."""
    resource = factory.make_BootResource()
    resource_set = factory.make_BootResourceSet(resource)
    total_size = random.randint(1024, 2048)
    content = factory.make_bytes(random.randint(512, 1023))
    largefile = factory.make_LargeFile(content=content, size=total_size)
    rfile = factory.make_BootResourceFile(resource_set, largefile)
    as_dict = boot_resource_set_to_dict(resource_set)
    self.assertEqual(resource_set.version, as_dict['version'])
    self.assertEqual(resource_set.label, as_dict['label'])
    self.assertEqual(resource_set.total_size, as_dict['size'])
    # Content is shorter than total_size, so the set is incomplete.
    self.assertEqual(False, as_dict['complete'])
    self.assertEqual(resource_set.progress, as_dict['progress'])
    self.assertEqual(
        boot_resource_file_to_dict(rfile),
        as_dict['files'][rfile.filename])
def test_total_size(self):
    """total_size sums the declared size of every file in the set."""
    resource = factory.make_BootResource()
    resource_set = factory.make_BootResourceSet(resource)
    sizes = [random.randint(512, 1024) for _ in range(3)]
    types = [
        BOOT_RESOURCE_FILE_TYPE.ROOT_IMAGE,
        BOOT_RESOURCE_FILE_TYPE.BOOT_KERNEL,
        BOOT_RESOURCE_FILE_TYPE.BOOT_INITRD,
    ]
    # Types are consumed back-to-front, mirroring list.pop() order.
    for size, filetype in zip(sizes, reversed(types)):
        largefile = factory.make_LargeFile(size=size)
        factory.make_BootResourceFile(
            resource_set, largefile, filename=filetype, filetype=filetype)
    self.assertEqual(sum(sizes), resource_set.total_size)
def test__schedules_unlink(self):
    # We're going to capture the delayed call that
    # delete_large_object_content_later() creates.
    clock = self.patch(largefile_module, "reactor", Clock())
    with transaction.atomic():
        largefile = factory.make_LargeFile()
        oid = largefile.content.oid
    # Firing the post-commit hooks lets the deletion signal schedule
    # the asynchronous content removal on the (patched) reactor.
    with post_commit_hooks:
        largefile.delete()
    # Deleting `largefile` resulted in a call being scheduled.
    delayed_calls = clock.getDelayedCalls()
    self.assertThat(delayed_calls, HasLength(1))
    [delayed_call] = delayed_calls
    # It is scheduled to be run on the next iteration of the reactor.
    self.assertFalse(delayed_call.called)
    self.assertThat(
        delayed_call,
        MatchesStructure(
            func=MatchesStructure.byEquality(__name__="unlink"),
            args=MatchesListwise([Is(largefile.content)]),
            kw=Equals({}),
            time=Equals(0),
        ),
    )
    # Call the delayed function ourselves instead of advancing `clock` so
    # that we can wait for it to complete (it returns a Deferred).
    func = wait_for(30)(delayed_call.func)  # Wait 30 seconds.
    func(*delayed_call.args, **delayed_call.kw)
    # The content has been removed from the database: opening the large
    # object by its old OID must now fail.
    with transaction.atomic():
        error = self.assertRaises(
            psycopg2.OperationalError, LargeObjectFile(oid).open, "rb"
        )
        self.assertDocTestMatches(
            "ERROR: large object ... does not exist", str(error)
        )
def test_boot_resource_file_to_dict(self):
    """The dict form exposes file metadata plus its upload URI."""
    size = random.randint(512, 1023)
    total_size = random.randint(1024, 2048)
    content = factory.make_bytes(size)
    largefile = factory.make_LargeFile(content=content, size=total_size)
    resource = factory.make_BootResource(rtype=BOOT_RESOURCE_TYPE.UPLOADED)
    resource_set = factory.make_BootResourceSet(resource)
    rfile = factory.make_BootResourceFile(resource_set, largefile)
    as_dict = boot_resource_file_to_dict(rfile)
    self.assertEqual(rfile.filename, as_dict['filename'])
    self.assertEqual(rfile.filetype, as_dict['filetype'])
    self.assertEqual(rfile.largefile.sha256, as_dict['sha256'])
    self.assertEqual(total_size, as_dict['size'])
    # Content is shorter than total_size, so the file is incomplete.
    self.assertEqual(False, as_dict['complete'])
    self.assertEqual(rfile.largefile.progress, as_dict['progress'])
    expected_uri = reverse(
        'boot_resource_file_upload_handler',
        args=[resource.id, rfile.id])
    self.assertEqual(expected_uri, as_dict['upload_uri'])
def test_progress_increases_from_0_to_100(self):
    """Progress tracks bytes written, climbing from 0% to 100%."""
    resource = factory.make_BootResource()
    resource_set = factory.make_BootResourceSet(resource)
    filetype = BOOT_RESOURCE_FILE_TYPE.ROOT_IMAGE
    total_size = 100
    largefile = factory.make_LargeFile(content=b"", size=total_size)
    factory.make_BootResourceFile(
        resource_set, largefile, filename=filetype, filetype=filetype)
    stream = largefile.content.open()
    self.addCleanup(stream.close)
    self.assertEqual(0, resource_set.progress)
    # Append one byte at a time, checking progress after every write.
    for written in range(1, total_size + 1):
        stream.write(b"a")
        largefile.size += 1
        largefile.save()
        self.assertAlmostEqual(
            100.0 * written / float(total_size), resource_set.progress)
def test_complete_returns_false_for_one_incomplete_file(self):
    """One partially-uploaded file makes the whole set incomplete."""
    resource = factory.make_BootResource()
    resource_set = factory.make_BootResourceSet(resource)
    types = [
        BOOT_RESOURCE_FILE_TYPE.ROOT_IMAGE,
        BOOT_RESOURCE_FILE_TYPE.BOOT_KERNEL,
        BOOT_RESOURCE_FILE_TYPE.BOOT_INITRD,
    ]
    # Two files whose content is complete...
    for _ in range(2):
        filetype = types.pop()
        factory.make_boot_resource_file_with_content(
            resource_set, filename=filetype, filetype=filetype)
    # ...and one whose content is shorter than its declared size.
    size = random.randint(512, 1024)
    total_size = random.randint(1025, 2048)
    filetype = types.pop()
    content = factory.make_bytes(size=size)
    partial = factory.make_LargeFile(content=content, size=total_size)
    factory.make_BootResourceFile(
        resource_set, partial, filename=filetype, filetype=filetype)
    self.assertFalse(resource_set.complete)
def test_POST_creates_boot_resource_with_already_existing_largefile(self):
    """POSTing a known sha256/size links the existing largefile."""
    self.become_admin()
    existing = factory.make_LargeFile()
    params = {
        'name': factory.make_name('name'),
        'architecture': make_usable_architecture(self),
        'sha256': existing.sha256,
        'size': existing.total_size,
    }
    response = self.client.post(reverse('boot_resources_handler'), params)
    self.assertEqual(http.client.CREATED, response.status_code)
    parsed_result = json_load_bytes(response.content)
    resource = BootResource.objects.get(id=parsed_result['id'])
    rfile = resource.sets.first().files.first()
    self.assertEqual(existing, rfile.largefile)
def test_has_file(self):
    """has_file() finds a stored largefile by its sha256."""
    stored = factory.make_LargeFile()
    self.assertTrue(LargeFile.objects.has_file(stored.sha256))
def test_complete_returns_True_when_content_is_complete(self):
    """A freshly-made largefile with full content is complete."""
    self.assertTrue(factory.make_LargeFile().complete)
def test_complete_returns_False_when_content_incomplete(self):
    """Content shorter than the declared total size is incomplete."""
    stored_bytes = randint(512, 1024)
    declared_bytes = randint(1025, 2048)
    payload = factory.make_bytes(size=stored_bytes)
    partial = factory.make_LargeFile(payload, size=declared_bytes)
    self.assertFalse(partial.complete)
def test_progress_of_empty_file(self):
    """An empty largefile reports zero progress, not a division error."""
    empty = factory.make_LargeFile(b"", size=0)
    self.assertEqual(0, empty.progress)
def test_progress(self):
    """Progress is the ratio of declared total size to stored bytes."""
    stored_bytes = randint(512, 1024)
    declared_bytes = randint(1025, 2048)
    payload = factory.make_bytes(size=stored_bytes)
    partial = factory.make_LargeFile(payload, size=declared_bytes)
    self.assertEqual(declared_bytes / float(stored_bytes), partial.progress)
def test_valid_returns_False_when_content_doesnt_have_equal_sha256(self):
    """Overwriting the content with other bytes breaks validity."""
    largefile = factory.make_LargeFile()
    with largefile.content.open("wb") as fd:
        fd.write(factory.make_bytes(size=largefile.total_size))
    self.assertFalse(largefile.valid)
def test_valid_returns_True_when_content_has_equal_sha256(self):
    """A freshly-made largefile's content matches its sha256."""
    self.assertTrue(factory.make_LargeFile().valid)
def test_get_file(self):
    """get_file() retrieves the stored largefile by its sha256."""
    stored = factory.make_LargeFile()
    self.assertEqual(stored, LargeFile.objects.get_file(stored.sha256))