def testRemoteFile(self):
    """A file:// datafile must only verify once its backing file holds the
    expected content, at which point it also becomes local."""
    payload = urandom(1024)
    with NamedTemporaryFile() as tmp:
        # Register a new datafile whose URL points at the (still empty)
        # temporary file.
        datafile = Dataset_File(dataset=self.dataset)
        datafile.filename = 'background_task_testfile'
        datafile.size = len(payload)
        datafile.sha512sum = hashlib.sha512(payload).hexdigest()
        datafile.url = 'file://' + path.abspath(tmp.name)
        datafile.save()

        def fetch(df):
            # Re-read from the DB so we observe what verify_files() persisted.
            return Dataset_File.objects.get(id=df.id)

        # The file on disk is empty, so verification must keep failing.
        expect(fetch(datafile).verified).to_be(False)
        verify_files()
        expect(fetch(datafile).verified).to_be(False)
        expect(fetch(datafile).is_local()).to_be(False)

        # Supply the real content; the next pass should verify it.
        tmp.write(payload)
        tmp.flush()
        verify_files()
        expect(fetch(datafile).verified).to_be(True)
        expect(fetch(datafile).is_local()).to_be(True)
def testLocalFile(self):
    """A replica stored at the default (local) location should pass a
    verify_files() run."""
    payload = urandom(1024)
    uploaded = ContentFile(payload, 'background_task_testfile')

    # Register a new datafile backed by a file written into the dataset store.
    datafile = Dataset_File(dataset=self.dataset)
    datafile.filename = uploaded.name
    datafile.size = len(payload)
    datafile.sha512sum = hashlib.sha512(payload).hexdigest()
    datafile.save()
    replica = Replica(datafile=datafile,
                      url=write_uploaded_file_to_dataset(self.dataset,
                                                         uploaded),
                      location=Location.get_default_location())
    replica.save()

    def fetch_replica(df):
        # Re-read from the DB to observe verify_files()'s effect.
        return Replica.objects.get(datafile=df)

    # Saving auto-verified the replica; reset that so the task has work to do.
    replica.verified = False
    replica.save(update_fields=['verified'])
    expect(fetch_replica(datafile).verified).to_be(False)

    # One verification pass over a local copy should succeed.
    verify_files()
    expect(fetch_replica(datafile).verified).to_be(True)
def testRemoteFile(self):
    """A replica at an external location only verifies once its backing file
    holds the expected content; verification replaces it with a replica at
    the default (local) location that keeps the same id."""
    payload = urandom(1024)
    with NamedTemporaryFile() as tmp:
        datafile = Dataset_File(dataset=self.dataset)
        datafile.filename = 'background_task_testfile'
        datafile.size = len(payload)
        datafile.sha512sum = hashlib.sha512(payload).hexdigest()
        datafile.save()

        # Point a replica at the (still empty) temp file via an external
        # staging location rooted at the file's directory.
        file_url = 'file://' + path.abspath(tmp.name)
        dir_url = 'file://' + path.dirname(path.abspath(tmp.name))
        staging = self._get_or_create_local_location(
            'test-staging-xxx', dir_url, 'external', 10)
        replica = Replica(datafile=datafile, location=staging, url=file_url)
        replica.save()

        def fetch(rep):
            # None once the original replica row no longer exists.
            try:
                return Replica.objects.get(id=rep.id)
            except Replica.DoesNotExist:
                return None

        def fetch_local(df):
            # The replica at the default location, created by verification.
            return Replica.objects.get(
                datafile=df.id, location=Location.get_default_location())

        # Empty file: verification must fail and nothing becomes local.
        expect(fetch(replica).verified).to_be(False)
        verify_files()
        expect(fetch(replica).verified).to_be(False)
        expect(fetch(replica).is_local()).to_be(False)

        # Supply the content; the next pass should verify and localise it.
        tmp.write(payload)
        tmp.flush()
        verify_files()
        expect(fetch(replica).id).to_be(fetch_local(datafile).id)
        expect(fetch_local(datafile).verified).to_be(True)
        expect(fetch_local(datafile).is_local()).to_be(True)
def testLocalFile(self):
    """A datafile written into the dataset store should verify on the next
    verify_files() pass."""
    payload = urandom(1024)
    uploaded = ContentFile(payload, 'background_task_testfile')

    # Register a new datafile backed by a file in the dataset store.
    datafile = Dataset_File(dataset=self.dataset)
    datafile.filename = uploaded.name
    datafile.size = len(payload)
    datafile.sha512sum = hashlib.sha512(payload).hexdigest()
    datafile.url = write_uploaded_file_to_dataset(self.dataset, uploaded)
    datafile.save()

    def fetch(df):
        # Re-read from the DB so we observe what verify_files() persisted.
        return Dataset_File.objects.get(id=df.id)

    # Not verified yet...
    expect(fetch(datafile).verified).to_be(False)
    # ...but one pass over a local copy should do it.
    verify_files()
    expect(fetch(datafile).verified).to_be(True)