def test_delete_fail(self):
    """delete() must fail with ValueError while the blob is open for writing or reading."""
    # deletes should fail if being written to
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    yield self.assertFailure(blob_file.delete(), ValueError)
    writer.write(self.fake_content)
    writer.close()
    # deletes should fail if being read and not closed
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    self.assertTrue(blob_file.verified)
    # NOTE(review): reader bound to a name so it stays alive (not garbage collected)
    # while delete() is attempted — see the sibling asyncio variant of this test.
    f = blob_file.open_for_reading()
    yield self.assertFailure(blob_file.delete(), ValueError)
def test_delete_fail(self):
    """delete() must raise ValueError while the blob is open for writing or reading."""
    # Case 1: a writer is active -> delete() is rejected.
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    with self.assertRaises(ValueError):
        blob_file.delete()
    writer.write(self.fake_content)
    writer.close()

    # Case 2: an unclosed reader is active -> delete() is rejected.
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    self.assertTrue(blob_file.verified)
    # Keep the reader referenced; otherwise it would be garbage collected
    # and the open-reader state we are testing would vanish.
    reader = blob_file.open_for_reading()
    with self.assertRaises(ValueError):
        blob_file.delete()
def test_delete(self):
    """A fully written blob can be deleted; afterwards the same hash is unverified.

    Fix: the original bound both yields to an ``out`` variable that was never
    read (and immediately overwritten) — the dead assignments are removed.
    """
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    writer.write(self.fake_content)
    yield finished_d          # wait for the write to complete; result not needed
    yield blob_file.delete()  # delete must succeed once writing has finished
    # Re-opening the same hash after deletion must yield an unverified blob.
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash)
    self.assertFalse(blob_file.verified)
def _from_stream_descriptor_blob(cls, loop: asyncio.BaseEventLoop, blob_dir: str,
                                 blob: BlobFile) -> 'StreamDescriptor':
    """Parse and validate a stream descriptor from an on-disk blob.

    Reads the blob file as JSON, validates the blob list (terminator present,
    no zero-length data blobs, terminator has no hash, blob numbers contiguous),
    then builds a StreamDescriptor and checks its computed stream hash against
    the metadata.

    Raises:
        InvalidStreamDescriptorError: on malformed JSON or any validation failure.
    """
    assert os.path.isfile(blob.file_path)
    with open(blob.file_path, 'rb') as f:
        json_bytes = f.read()
    try:
        decoded = json.loads(json_bytes.decode())
    except json.JSONDecodeError:
        # A blob that doesn't decode is useless — remove it before raising.
        blob.delete()
        raise InvalidStreamDescriptorError("Does not decode as valid JSON")
    blobs = decoded['blobs']
    # Robustness fix: an empty blob list previously raised a bare IndexError
    # from blobs[-1]; report it as an invalid descriptor instead.
    if not blobs or blobs[-1]['length'] != 0:
        raise InvalidStreamDescriptorError("Does not end with a zero-length blob.")
    if any(blob_info['length'] == 0 for blob_info in blobs[:-1]):
        raise InvalidStreamDescriptorError("Contains zero-length data blob")
    if 'blob_hash' in blobs[-1]:
        raise InvalidStreamDescriptorError("Stream terminator blob should not have a hash")
    if any(i != blob_info['blob_num'] for i, blob_info in enumerate(blobs)):
        raise InvalidStreamDescriptorError("Stream contains out of order or skipped blobs")
    descriptor = cls(
        loop, blob_dir,
        binascii.unhexlify(decoded['stream_name']).decode(),
        decoded['key'],
        binascii.unhexlify(decoded['suggested_file_name']).decode(),
        [BlobInfo(info['blob_num'], info['length'], info['iv'], info.get('blob_hash'))
         for info in blobs],
        decoded['stream_hash'],
        blob.blob_hash
    )
    # The hash recomputed from the parsed fields must match the stored one.
    if descriptor.get_stream_hash() != decoded['stream_hash']:
        raise InvalidStreamDescriptorError("Stream hash does not match stream metadata")
    return descriptor