def make_empty_resource_file(self, rtype=None, content=None):
    # Create a largefile to obtain generated content, its sha256, and
    # its total_size, then delete it so it can be replaced below.
    if content is None:
        content = factory.make_bytes(1024)
    total_size = len(content)
    largefile = factory.make_LargeFile(content=content, size=total_size)
    sha256 = largefile.sha256
    with largefile.content.open('rb') as stream:
        content = stream.read()
    with post_commit_hooks:
        largefile.delete()
    # Recreate the largefile with the same metadata but an empty large
    # object, i.e. a resource file whose content has not been uploaded yet.
    largeobject = LargeObjectFile()
    largeobject.open().close()
    largefile = LargeFile.objects.create(
        sha256=sha256, total_size=total_size, content=largeobject)
    if rtype is None:
        rtype = BOOT_RESOURCE_TYPE.UPLOADED
    resource = factory.make_BootResource(rtype=rtype)
    resource_set = factory.make_BootResourceSet(resource)
    rfile = factory.make_BootResourceFile(resource_set, largefile)
    return rfile, content
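# A hedged sketch (hypothetical; not part of the original suite) of how a test
# might consume the helper above: the returned resource file is backed by a
# `LargeFile` that advertises the expected sha256 and total_size while its
# large object holds no bytes yet. Assumes `hashlib` is imported and that the
# resource file exposes its `LargeFile` as `rfile.largefile`.
def test_make_empty_resource_file_has_metadata_but_no_content(self):
    rfile, content = self.make_empty_resource_file()
    largefile = rfile.largefile
    # The metadata matches the content that is expected to be uploaded...
    self.assertEqual(hashlib.sha256(content).hexdigest(), largefile.sha256)
    self.assertEqual(len(content), largefile.total_size)
    # ...but the large object itself was created empty.
    with largefile.content.open("rb") as stream:
        self.assertEqual(b"", stream.read())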
def get_or_create_file_from_content(self, content):
    """Return a `LargeFile` for the given content.

    Reads the data from `content`, calculating its sha256. If a
    largefile with that sha256 already exists, it is returned instead
    of a new one being created.

    :param content: File-like object.
    :return: `LargeFile`.
    """
    sha256 = hashlib.sha256()
    for data in content:
        sha256.update(data)
    hexdigest = sha256.hexdigest()
    largefile = self.get_file(hexdigest)
    if largefile is not None:
        return largefile
    length = 0
    content.seek(0)
    objfile = LargeObjectFile()
    with objfile.open("wb") as objstream:
        for data in content:
            objstream.write(data)
            length += len(data)
    return self.create(
        sha256=hexdigest, size=length, total_size=length, content=objfile)
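# A minimal sketch (hypothetical test; not from the original module) of the
# deduplication described in the docstring above: feeding the same bytes
# twice should return the same `LargeFile` row. Assumes `io` is imported and
# that `LargeFile.objects` is the manager defining
# get_or_create_file_from_content().
def test_same_content_returns_existing_largefile(self):
    payload = factory.make_bytes()
    first = LargeFile.objects.get_or_create_file_from_content(
        io.BytesIO(payload))
    second = LargeFile.objects.get_or_create_file_from_content(
        io.BytesIO(payload))
    # The second call finds the existing row by sha256 instead of
    # creating a new one.
    self.assertEqual(first.id, second.id)
    self.assertEqual(len(payload), first.total_size)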
def test_with_exit_calls_close(self):
    data = factory.make_bytes()
    large_object = LargeObjectFile()
    with large_object.open("wb") as stream:
        self.addCleanup(large_object.close)
        mock_close = self.patch(large_object, "close")
        stream.write(data)
    self.assertThat(mock_close, MockCalledOnceWith())
def test_unlink(self):
    data = factory.make_bytes()
    large_object = LargeObjectFile()
    with large_object.open("wb") as stream:
        stream.write(data)
    oid = large_object.oid
    large_object.unlink()
    self.assertEqual(0, large_object.oid)
    self.assertRaises(
        OperationalError, connection.connection.lobject, oid)
def test_stores_data(self):
    data = factory.make_bytes()
    test_name = factory.make_name("name")
    test_instance = LargeObjectFieldModel(name=test_name)
    large_object = LargeObjectFile()
    with large_object.open("wb") as stream:
        stream.write(data)
    test_instance.large_object = large_object
    test_instance.save()
    test_instance = LargeObjectFieldModel.objects.get(name=test_name)
    with test_instance.large_object.open("rb") as stream:
        saved_data = stream.read()
    self.assertEqual(data, saved_data)
def test_iterates_on_block_size(self):
    # The data size is a multiple of the block size (10 bytes) used by
    # the testing model, so iteration yields whole blocks.
    data = factory.make_bytes(10 * 2)
    test_name = factory.make_name("name")
    test_instance = LargeObjectFieldModel(name=test_name)
    large_object = LargeObjectFile()
    with large_object.open("wb") as stream:
        stream.write(data)
    test_instance.large_object = large_object
    test_instance.save()
    test_instance = LargeObjectFieldModel.objects.get(name=test_name)
    with test_instance.large_object.open("rb") as stream:
        offset = 0
        for block in stream:
            self.assertEqual(data[offset:offset + 10], block)
            offset += 10
def test__schedules_unlink(self):
    # We're going to capture the delayed call that
    # delete_large_object_content_later() creates.
    clock = self.patch(largefile_module, "reactor", Clock())

    with transaction.atomic():
        largefile = factory.make_LargeFile()
        oid = largefile.content.oid

    with post_commit_hooks:
        largefile.delete()

    # Deleting `largefile` resulted in a call being scheduled.
    delayed_calls = clock.getDelayedCalls()
    self.assertThat(delayed_calls, HasLength(1))
    [delayed_call] = delayed_calls

    # It is scheduled to be run on the next iteration of the reactor.
    self.assertFalse(delayed_call.called)
    self.assertThat(
        delayed_call,
        MatchesStructure(
            func=MatchesStructure.byEquality(__name__="unlink"),
            args=MatchesListwise([Is(largefile.content)]),
            kw=Equals({}),
            time=Equals(0),
        ),
    )

    # Call the delayed function ourselves instead of advancing `clock` so
    # that we can wait for it to complete (it returns a Deferred).
    func = wait_for(30)(delayed_call.func)  # Wait 30 seconds.
    func(*delayed_call.args, **delayed_call.kw)

    # The content has been removed from the database.
    with transaction.atomic():
        error = self.assertRaises(
            psycopg2.OperationalError, LargeObjectFile(oid).open, "rb"
        )
        self.assertDocTestMatches(
            "ERROR: large object ... does not exist", str(error)
        )
def test_to_python_returns_value_when_value_LargeObjectFile(self):
    field = LargeObjectField()
    obj_file = LargeObjectFile()
    self.assertEqual(obj_file, field.to_python(obj_file))
def test_get_db_prep_value_raises_error_when_oid_less_than_zero(self):
    oid = randint(-100, 0)
    field = LargeObjectField()
    obj_file = LargeObjectFile()
    obj_file.oid = oid
    self.assertRaises(AssertionError, field.get_db_prep_value, obj_file)
def test_get_db_prep_value_returns_oid_when_value_LargeObjectFile(self):
    oid = randint(1, 100)
    field = LargeObjectField()
    obj_file = LargeObjectFile()
    obj_file.oid = oid
    self.assertEqual(oid, field.get_db_prep_value(obj_file))
def test_insists_on_binary_mode(self):
    message = "Large objects must be opened in binary mode."
    with ExpectedException(ValueError, message):
        large_object = LargeObjectFile()
        large_object.open("w")