def makeImportExportTests(cls):
    """
    Attach generated import/export test methods to *cls*, one per
    (peer job store class, file size) combination.
    """

    def importExportFile(self, otherJobStore, size):
        # The job store under test uses the multipart test part size.
        self.master.partSize = cls.mpTestPartSize
        # Stage a random file of the requested size in the other store.
        sourceUrl, expectedHash = otherJobStore._getUrlForTestFile(size)
        self.addCleanup(otherJobStore._cleanUpExternalStore, sourceUrl)
        # Importing must produce a file whose hash matches the source.
        fileId = self.master.importFile(sourceUrl)
        self.assertEqual(self._hashJobStoreFileID(fileId), expectedHash)
        # Exporting back out must round-trip the same content.
        destinationUrl = otherJobStore._getUrlForTestFile()
        self.addCleanup(otherJobStore._cleanUpExternalStore, destinationUrl)
        self.master.exportFile(fileId, destinationUrl)
        self.assertEqual(self._hashJobStoreFileID(fileId),
                         otherJobStore._hashUrl(destinationUrl))

    # Map each peer job store test class by name for make_tests.
    storesByName = {}
    for storeTestClass in (FileJobStoreTest, AWSJobStoreTest, AzureJobStoreTest):
        storesByName[storeTestClass.__name__] = storeTestClass

    # Sizes bracket the multipart part-size boundary on both sides.
    sizes = {'zero': 0,
             'one': 1,
             'oneMiB': 2 ** 20,
             'partSizeMinusOne': cls.mpTestPartSize - 1,
             'partSize': cls.mpTestPartSize,
             'partSizePlusOne': cls.mpTestPartSize + 1}

    make_tests(importExportFile,
               targetClass=cls,
               otherJobStore=storesByName,
               size=sizes)
def makeImportOnlyTests(cls):
    """Attach a generated HTTP import test method to *cls*."""

    def importHttpFile(self):
        self.master.partSize = cls.mpTestPartSize
        # Download a small, stable file over HTTPS and hash it as the reference.
        srcUrl = 'https://raw.githubusercontent.com/BD2KGenomics/toil/master/Makefile'
        expected = hashlib.md5(urllib2.urlopen(srcUrl).read()).hexdigest()
        # The imported copy must hash identically to the reference.
        imported = self.master.importFile(srcUrl)
        self.assertEqual(self._hashJobStoreFileID(imported), expected)

    make_tests(importHttpFile, targetClass=cls)
def makeTests(cls):
    """Attach multipart-copy test methods to AWSMultipartCopyTest, one per pool size."""

    def multipartCopy(self, threadPoolSize):
        # Pad the key past a whole number of parts so some pool threads are reused.
        paddedKeySize = int((threadPoolSize * partSize) * 1.3)
        with openS3(paddedKeySize) as srcKey:
            with openS3() as dstBucket:
                copyKeyMultipart(srcKey, dstBucket, 'test', partSize)
                # The copied key must be byte-identical to the source key.
                self.assertEqual(srcKey.get_contents_as_string(),
                                 dstBucket.get_key('test').get_contents_as_string())

    poolSizes = {}
    for n in (1, 2, 16):
        poolSizes[str(n)] = n
    make_tests(multipartCopy, targetClass=AWSMultipartCopyTest, threadPoolSize=poolSizes)
def makeImportOnlyTests(cls):
    """Attach a generated HTTP import test method to *cls*."""

    def importHttpFile(self):
        self.master.partSize = cls.mpTestPartSize
        srcUrl = 'https://raw.githubusercontent.com/BD2KGenomics/toil/master/Makefile'
        # Reference hash comes from fetching the file directly.
        body = urllib2.urlopen(srcUrl).read()
        srcHash = hashlib.md5(body).hexdigest()
        # Import the same URL into the job store and compare hashes.
        jobStoreFileID = self.master.importFile(srcUrl)
        self.assertEqual(self._hashJobStoreFileID(jobStoreFileID), srcHash)

    make_tests(importHttpFile, targetClass=cls)
def makeTests(cls):
    """
    Attach multipart-copy test methods to AWSMultipartCopyTest, one per
    thread-pool size.

    :param type cls: the class being decorated (unused here; tests are
        generated onto AWSMultipartCopyTest directly)
    """

    def multipartCopy(self, threadPoolSize):
        # Key size is padded past a whole number of parts to ensure some
        # threads in the pool are reused.
        keySize = int((threadPoolSize * partSize) * 1.3)
        with openS3(keySize) as srcKey:
            with openS3() as dstBucket:
                copyKeyMultipart(srcKey, dstBucket, 'test', partSize)
                # The copied key must be byte-identical to the source key.
                self.assertEqual(srcKey.get_contents_as_string(),
                                 dstBucket.get_key('test').get_contents_as_string())

    # Fix: pass the target class by keyword, consistent with every other
    # make_tests call in this module (previously passed positionally).
    make_tests(multipartCopy,
               targetClass=AWSMultipartCopyTest,
               threadPoolSize={str(x): x for x in (1, 2, 16)})
def makeImportExportTests(cls):
    """
    Attach generated import/export test methods to *cls*, one per
    (active peer job store class, file size) combination. Peer test
    classes marked as skipped by unittest are excluded.
    """

    def importExportFile(self, otherJobStore, size):
        self.master.partSize = cls.mpTestPartSize
        # Stage a random source file of the given size in the other store.
        srcUrl, srcHash = otherJobStore._getUrlForTestFile(size)
        self.addCleanup(otherJobStore._cleanUpExternalStore, srcUrl)
        # Import it and check the content hash survived.
        jobStoreFileID = self.master.importFile(srcUrl)
        self.assertEqual(self._hashJobStoreFileID(jobStoreFileID), srcHash)
        # Export it back out and check the content hash again.
        dstUrl = otherJobStore._getUrlForTestFile()
        self.addCleanup(otherJobStore._cleanUpExternalStore, dstUrl)
        self.master.exportFile(jobStoreFileID, dstUrl)
        self.assertEqual(self._hashJobStoreFileID(jobStoreFileID),
                         otherJobStore._hashUrl(dstUrl))

    # Only generate tests against peer stores that are not skipped.
    activeStores = {}
    for jsCls in (FileJobStoreTest, AWSJobStoreTest, AzureJobStoreTest):
        if not getattr(jsCls, '__unittest_skip__', False):
            activeStores[jsCls.__name__] = jsCls

    make_tests(importExportFile,
               targetClass=cls,
               otherJobStore=activeStores,
               size={'zero': 0,
                     'one': 1,
                     'oneMiB': 2 ** 20,
                     'partSizeMinusOne': cls.mpTestPartSize - 1,
                     'partSize': cls.mpTestPartSize,
                     'partSizePlusOne': cls.mpTestPartSize + 1})
def makeImportExportTests(cls):
    """
    Attach generated import/export test methods to *cls*: a round-trip
    import/export test per (active peer store, size), and a shared-file
    import test per active peer store.
    """

    candidates = [FileJobStoreTest, AWSJobStoreTest, AzureJobStoreTest]
    # Exclude peer test classes that unittest marks as skipped.
    activeTestClassesByName = {}
    for candidate in candidates:
        if not getattr(candidate, '__unittest_skip__', False):
            activeTestClassesByName[candidate.__name__] = candidate

    def testImportExportFile(self, otherCls, size):
        """
        :param AbstractJobStoreTest.Test self: the current test case

        :param AbstractJobStoreTest.Test otherCls: the test case class for the job store
               to import from or export to

        :param int size: the size of the file to test importing/exporting with
        """
        self.master.partSize = cls.mpTestPartSize
        # Stage a test file of the given size in the other job store.
        peer = otherCls('test')
        externalStore = peer._externalStore()
        srcUrl, srcMd5 = peer._prepareTestFile(externalStore, size)
        # Import into the job store under test and verify content.
        jobStoreFileID = self.master.importFile(srcUrl)
        with self.master.readFileStream(jobStoreFileID) as stream:
            fileMD5 = hashlib.md5(stream.read()).hexdigest()
        self.assertEqual(fileMD5, srcMd5)
        # Export back into the other job store and verify content there.
        dstUrl = peer._prepareTestFile(externalStore)
        self.master.exportFile(jobStoreFileID, dstUrl)
        self.assertEqual(fileMD5, peer._hashTestFile(dstUrl))

    make_tests(testImportExportFile,
               targetClass=cls,
               otherCls=activeTestClassesByName,
               size={'zero': 0,
                     'one': 1,
                     'oneMiB': 2 ** 20,
                     'partSizeMinusOne': cls.mpTestPartSize - 1,
                     'partSize': cls.mpTestPartSize,
                     'partSizePlusOne': cls.mpTestPartSize + 1})

    def testImportSharedFile(self, otherCls):
        """
        :param AbstractJobStoreTest.Test self: the current test case

        :param AbstractJobStoreTest.Test otherCls: the test case class for the job store
               to import from or export to
        """
        self.master.partSize = cls.mpTestPartSize
        # Stage a small test file in the other job store.
        peer = otherCls('test')
        externalStore = peer._externalStore()
        srcUrl, srcMd5 = peer._prepareTestFile(externalStore, 42)
        # Importing with a shared file name returns no file ID.
        self.assertIsNone(self.master.importFile(srcUrl, sharedFileName='foo'))
        with self.master.readSharedFileStream('foo') as stream:
            fileMD5 = hashlib.md5(stream.read()).hexdigest()
        self.assertEqual(fileMD5, srcMd5)

    make_tests(testImportSharedFile,
               targetClass=cls,
               otherCls=activeTestClassesByName)
def makeImportExportTests(cls):
    """
    Attach generated import/export test methods to *cls*: a round-trip
    import/export test per (active peer store, size), and a shared-file
    import test per active peer store. Google is included as a peer.
    """

    # Only peer test classes that unittest has not marked as skipped.
    activeTestClassesByName = {
        testCls.__name__: testCls
        for testCls in [FileJobStoreTest,
                        AWSJobStoreTest,
                        AzureJobStoreTest,
                        GoogleJobStoreTest]
        if not getattr(testCls, '__unittest_skip__', False)}

    def testImportExportFile(self, otherCls, size):
        """
        :param AbstractJobStoreTest.Test self: the current test case

        :param AbstractJobStoreTest.Test otherCls: the test case class for the job store
               to import from or export to

        :param int size: the size of the file to test importing/exporting with
        """
        self.master.partSize = cls.mpTestPartSize
        # Stage a test file of the given size in the other job store.
        other = otherCls('test')
        store = other._externalStore()
        srcUrl, srcMd5 = other._prepareTestFile(store, size)
        # Import into the job store under test; content must match the source.
        jobStoreFileID = self.master.importFile(srcUrl)
        with self.master.readFileStream(jobStoreFileID) as f:
            fileMD5 = hashlib.md5(f.read()).hexdigest()
        self.assertEqual(fileMD5, srcMd5)
        # Export back into the other job store; content must still match.
        dstUrl = other._prepareTestFile(store)
        self.master.exportFile(jobStoreFileID, dstUrl)
        self.assertEqual(fileMD5, other._hashTestFile(dstUrl))

    # Sizes bracket the multipart part-size boundary on both sides.
    sizesByName = dict(zero=0,
                       one=1,
                       oneMiB=2 ** 20,
                       partSizeMinusOne=cls.mpTestPartSize - 1,
                       partSize=cls.mpTestPartSize,
                       partSizePlusOne=cls.mpTestPartSize + 1)
    make_tests(testImportExportFile,
               targetClass=cls,
               otherCls=activeTestClassesByName,
               size=sizesByName)

    def testImportSharedFile(self, otherCls):
        """
        :param AbstractJobStoreTest.Test self: the current test case

        :param AbstractJobStoreTest.Test otherCls: the test case class for the job store
               to import from or export to
        """
        self.master.partSize = cls.mpTestPartSize
        # Stage a small test file in the other job store.
        other = otherCls('test')
        store = other._externalStore()
        srcUrl, srcMd5 = other._prepareTestFile(store, 42)
        # Importing under a shared file name returns no file ID.
        self.assertIsNone(self.master.importFile(srcUrl, sharedFileName='foo'))
        with self.master.readSharedFileStream('foo') as f:
            fileMD5 = hashlib.md5(f.read()).hexdigest()
        self.assertEqual(fileMD5, srcMd5)

    make_tests(testImportSharedFile,
               targetClass=cls,
               otherCls=activeTestClassesByName)