def testFsCrawlerGlob(self):
    """
    Test the glob functionality.
    """
    # globbing a directory should find every file collected on disk
    dirCrawler = Crawler.create(PathHolder(self.__dir))
    globbed = dirCrawler.glob()
    expected = [path.rstrip("/") for path in self.collectFiles(self.__dir)]
    globbedPaths = [item.var("filePath") for item in globbed]
    self.assertCountEqual(expected, globbedPaths)

    # filtering by registered type names
    globbed = dirCrawler.glob(filterTypes=["turntable", "shotRender"])
    globbedPaths = [item.var("filePath") for item in globbed]
    self.assertCountEqual(globbedPaths, [self.__turntableFile, self.__shotRenderFile])

    # filtering by crawler class
    globbed = dirCrawler.glob(filterTypes=[ExrRenderCrawler])
    globbedPaths = [item.var("filePath") for item in globbed]
    expected = [path.rstrip("/") for path in self.collectFiles(self.__dir, "RND*.exr")]
    self.assertCountEqual(expected, globbedPaths)

    # filtering by extension name
    globbed = dirCrawler.glob(filterTypes=['exr'])
    globbedPaths = [item.var("filePath") for item in globbed]
    expected = [path.rstrip("/") for path in self.collectFiles(self.__dir, "*.exr")]
    self.assertCountEqual(expected, globbedPaths)

    # NOTE(review): this compares the 'exr' extension glob above against a
    # globFromParent filtered by ExrRenderCrawler — confirm the two sets are
    # expected to match for these fixtures
    fileCrawler = Crawler.create(PathHolder(self.__turntableFile))
    parentGlobbed = fileCrawler.globFromParent(filterTypes=[ExrRenderCrawler])
    parentGlobbedPaths = [item.var("filePath") for item in parentGlobbed]
    self.assertCountEqual(globbedPaths, parentGlobbedPaths)
def testRunConfiguration(self):
    """
    Test execution of the configuration.
    """
    loader = Loader()
    loader.loadFromDirectory(self.__exampleDirectory)
    self.assertEqual(len(loader.taskHolders()), 1)

    taskHolder = loader.taskHolders()[0]
    taskHolder.addVar(
        "prefix",
        self.__exampleTargetPrefixDirectory,
        True
    )

    # loading input data for the execution
    sequenceDirectory = os.path.join(self.__exampleDirectory, 'imageSequence')
    crawlerGroups = Crawler.group(
        FsCrawler.createFromPath(sequenceDirectory).globFromParent()
    )

    # only png groups are fed through the task holder
    resultCrawlers = []
    pngType = Crawler.registeredType('png')
    for group in crawlerGroups:
        if isinstance(group[0], pngType):
            resultCrawlers += taskHolder.run(group)

    # expected paths come from the static fixture data (blank lines dropped)
    targetFilePaths = sorted(
        line.strip() for line in self.__generatedData.split('\n') if line.strip()
    )
    # created paths are made relative to the prefix and normalized to '/'
    prefixLength = len(self.__exampleTargetPrefixDirectory) + 1
    createdFilePaths = sorted(
        crawler.var('fullPath')[prefixLength:].replace('\\', '/')
        for crawler in resultCrawlers
    )
    self.assertListEqual(targetFilePaths, createdFilePaths)
def testMayaSceneCrawler(self):
    """
    Test that the Maya Scene crawler test works properly.
    """
    # both fixture scenes (.ma and .mb) must resolve to MayaSceneCrawler
    for scenePath in (self.__maFile, self.__mbFile):
        self.assertIsInstance(Crawler.create(PathHolder(scenePath)), MayaSceneCrawler)
def testExrCrawler(self):
    """
    Test that the Exr crawler test works properly.
    """
    exrCrawler = Crawler.create(PathHolder(self.__exrFile))
    self.assertIsInstance(exrCrawler, ExrCrawler)

    # a directory path must not be classified as an exr
    directoryCrawler = Crawler.create(PathHolder(BaseTestCase.dataTestsDirectory()))
    self.assertNotIsInstance(directoryCrawler, ExrCrawler)
def testImageSequence(self):
    """
    Test that detection of an image sequence works properly.
    """
    # a single frame is not a sequence
    self.assertFalse(Crawler.create(PathHolder(self.__exrFile)).isSequence())
    # a frame-numbered file is detected as part of a sequence
    self.assertTrue(Crawler.create(PathHolder(self.__exrSeq)).isSequence())
    # an ambiguously named frame is still treated as a sequence
    self.assertTrue(Crawler.create(PathHolder(self.__exrAmbiguousSeq)).isSequence())
def testTextureCrawler(self):
    """
    Test that the Texture crawler test works properly.
    """
    # both exr and tif fixtures are recognized as textures
    for texturePath in (self.__exrFile, self.__tifFile):
        self.assertIsInstance(Crawler.create(PathHolder(texturePath)), TextureCrawler)

    # the bad fixture must not match the texture crawler
    self.assertNotIsInstance(Crawler.create(PathHolder(self.__badExrFile)), TextureCrawler)
def testCrawlerJson(self):
    """
    Test that you can convert a crawler to json and back.
    """
    original = Crawler.create(PathHolder(self.__turntableFile))
    restored = Crawler.createFromJson(original.toJson())

    # the round-trip must preserve vars, context vars and tags
    self.assertCountEqual(original.varNames(), restored.varNames())
    self.assertCountEqual(original.contextVarNames(), restored.contextVarNames())
    self.assertCountEqual(original.tagNames(), restored.tagNames())
def testIsSequence(self):
    """
    Test if a crawler is a sequence.
    """
    singleCrawler = Crawler.create(PathHolder(self.__singleFile))
    self.assertEqual(singleCrawler.isSequence(), False)
    self.assertEqual(singleCrawler.var("imageType"), "single")

    sequenceCrawler = Crawler.create(PathHolder(self.__sequenceFile))
    self.assertEqual(sequenceCrawler.isSequence(), True)
    self.assertEqual(sequenceCrawler.var("imageType"), "sequence")
def testCrawlerRegistration(self):
    """
    Test that you can register a new crawler.
    """
    class DummyCrawler(FileCrawler):
        @classmethod
        def test(cls, pathHolder, parentCrawler):
            # never matches any path; registration is what is under test
            return False

    Crawler.register("dummy", DummyCrawler)
    self.assertIn("dummy", Crawler.registeredNames())

    # the subclass must be reachable through both a name and a class query
    self.assertIn(DummyCrawler, Crawler.registeredSubclasses("file"))
    self.assertIn(DummyCrawler, Crawler.registeredSubclasses(FsCrawler))
def testImageSequenceVariables(self):
    """
    Test that the image sequence related variables are set properly.
    """
    # (fixture path, expected sequence name); frame and padding are the
    # same for both fixtures
    for sequencePath, expectedName in ((self.__exrSeq, "testSeq"), (self.__exrAmbiguousSeq, "test")):
        crawler = Crawler.create(PathHolder(sequencePath))
        self.assertEqual(crawler.var("imageType"), "sequence")
        self.assertEqual(crawler.var("name"), expectedName)
        self.assertEqual(crawler.var("frame"), 1)
        self.assertEqual(crawler.var("padding"), 4)
def test02Delivery(self):
    """
    Test the delivery configuration.
    """
    loader = Loader()
    loader.loadFromDirectory(self.__exampleDirectory)

    # pick the single task holder driven by the delivery config
    deliveryHolders = [
        holder for holder in loader.taskHolders()
        if os.path.basename(holder.var('contextConfig')) == 'deliveryConfig.json'
    ]
    self.assertEqual(len(deliveryHolders), 1)

    taskHolder = deliveryHolders[0]
    taskHolder.addVar("prefix", self.__exampleDeliveryPrefixDirectory, True)

    # loading input data for the ingestion
    platesDirectory = os.path.normpath(
        os.path.join(
            self.__exampleIngestionPrefixDirectory,
            'jobs/foo/seq/abc/shot/def/plates/bla/v001/1920x1080_exr'
        )
    )
    crawlerGroups = Crawler.group(
        FsCrawler.createFromPath(platesDirectory).glob()
    )

    # only plateExr groups are fed through the task holder
    resultCrawlers = []
    plateExrType = Crawler.registeredType('plateExr')
    for group in crawlerGroups:
        if isinstance(group[0], plateExrType):
            resultCrawlers += taskHolder.run(group)

    # the expected listing embeds today's date through the <date> token
    expectedData = self.__deliveryGeneratedData.replace(
        '<date>', datetime.today().strftime('%Y%m%d')
    )
    targetFilePaths = sorted(
        line.strip() for line in expectedData.split('\n') if line.strip()
    )
    # created paths are made relative to the prefix and normalized to '/'
    prefixLength = len(self.__exampleDeliveryPrefixDirectory) + 1
    createdFilePaths = sorted(
        crawler.var('fullPath')[prefixLength:].replace('\\', '/')
        for crawler in resultCrawlers
    )
    self.assertListEqual(targetFilePaths, createdFilePaths)
def testMovVariables(self):
    """
    Test that variables are set properly.
    """
    movCrawler = Crawler.create(PathHolder(self.__movFile))
    self.assertEqual(movCrawler.var("type"), "mov")
    self.assertEqual(movCrawler.var("category"), "video")
    self.assertEqual(movCrawler.var("width"), 1920)
    self.assertEqual(movCrawler.var("height"), 1080)
    self.assertEqual(movCrawler.var("firstFrame"), 1)
    self.assertEqual(movCrawler.var("lastFrame"), 12)

    # fixture without a timecode — presumably the range falls back to a
    # zero-based frame count (TODO confirm against the mov crawler)
    noTimecodeCrawler = Crawler.create(PathHolder(self.__movNoTimecodeFile))
    self.assertEqual(noTimecodeCrawler.var("firstFrame"), 0)
    self.assertEqual(noTimecodeCrawler.var("lastFrame"), 23)
def testTxtContents(self):
    """
    Test that txt files are parsed properly.
    """
    expectedContents = "testing txt file\nwith random data\n\n1 2 3\n"
    txtCrawler = Crawler.create(PathHolder(self.__txtFile))
    self.assertEqual(txtCrawler.contents(), expectedContents)
def testGroupSprintfTagSequence(self):
    """
    Test that the tag groupSprintf has been assigned to the image
    sequence crawler.
    """
    sequenceCrawler = Crawler.create(PathHolder(self.__sequenceFile))
    self.assertIn('groupSprintf', sequenceCrawler.tagNames())
    # the tag holds a printf-style pattern covering the whole sequence
    self.assertEqual(sequenceCrawler.tag('groupSprintf'), "testSeq.%04d.exr")
def testJsonVariables(self):
    """
    Test that variables are set properly.
    """
    jsonCrawler = Crawler.create(PathHolder(self.__jsonFile))
    self.assertEqual(jsonCrawler.var("type"), "json")
    self.assertEqual(jsonCrawler.var("category"), "ascii")
def testMayaSceneVariables(self):
    """
    Test that variables are set properly.
    """
    sceneCrawler = Crawler.create(PathHolder(self.__maFile))
    self.assertEqual(sceneCrawler.var("type"), "mayaScene")
    self.assertEqual(sceneCrawler.var("category"), "scene")
def testBadFile(self):
    """
    Test to show that file names with illegal characters are skipped.
    """
    directoryCrawler = Crawler.create(PathHolder(self.dataTestsDirectory()))
    childPaths = [child.var("filePath") for child in directoryCrawler.children()]
    self.assertNotIn(os.path.join(self.__dir, "bad file.txt"), childPaths)
def testCreation(self):
    """
    Test hashmap creation.

    Creating a crawler from a dict must produce an empty HashmapCrawler.
    """
    hashmap = Crawler.create({})
    # use a unittest assertion instead of a bare assert: bare asserts are
    # stripped under `python -O` and give no diagnostic on failure, and
    # every sibling test already uses the unittest assertion API
    self.assertIsInstance(hashmap, HashmapCrawler)
    self.assertEqual(len(hashmap), 0)
def testDirectoryVariables(self):
    """
    Test that the variables are set properly.
    """
    directoryCrawler = Crawler.create(PathHolder(self.__dir))
    self.assertEqual(directoryCrawler.var("width"), 640)
    self.assertEqual(directoryCrawler.var("height"), 480)
def testPngVariables(self):
    """
    Test that variables are set properly.
    """
    pngCrawler = Crawler.create(PathHolder(self.__pngFile))
    self.assertEqual(pngCrawler.var("type"), "png")
    self.assertEqual(pngCrawler.var("category"), "image")
    self.assertEqual(pngCrawler.var("imageType"), "single")
def testItemsData(self):
    """
    Test items, keys and values for the data.
    """
    # a single-entry hashmap exposes the dict-style accessors
    singleEntry = Crawler.create({"a": 1})
    self.assertEqual(list(singleEntry.items()), [("a", 1)])
    self.assertEqual(list(singleEntry.keys()), ["a"])
    self.assertEqual(list(singleEntry.values()), [1])
def testTextureVariables(self):
    """
    Test that variables are set properly.
    """
    exrTexture = Crawler.create(PathHolder(self.__exrFile))
    self.assertEqual(exrTexture.var("type"), "texture")
    self.assertEqual(exrTexture.var("category"), "texture")
    self.assertEqual(exrTexture.var("assetName"), "test")
    self.assertEqual(exrTexture.var("mapType"), "DIFF")
    self.assertEqual(exrTexture.var("udim"), 1001)
    self.assertEqual(exrTexture.var("variant"), "default")

    tifTexture = Crawler.create(PathHolder(self.__tifFile))
    self.assertEqual(tifTexture.var("assetName"), "test")
    self.assertEqual(tifTexture.var("mapType"), "BUMP")
    self.assertEqual(tifTexture.var("udim"), 1002)
    self.assertEqual(tifTexture.var("variant"), "default")
def testClearData(self):
    """
    Test clear the data in the hashmap.
    """
    hashmapCrawler = Crawler.create({"a": 1, "b": 2})
    hashmapCrawler.clear()
    # clearing must empty the hashmap entirely
    self.assertEqual(len(hashmapCrawler), 0)
def testExrWidthHeight(self):
    """
    Test that width and height variables are processed properly.
    """
    exrCrawler = Crawler.create(PathHolder(self.__exrFile))
    # width/height are not listed in varNames() yet still resolvable via
    # var() — presumably computed on demand (TODO confirm in Crawler)
    self.assertNotIn("width", exrCrawler.varNames())
    self.assertNotIn("height", exrCrawler.varNames())
    self.assertEqual(exrCrawler.var("width"), 1920)
    self.assertEqual(exrCrawler.var("height"), 1080)
def testJpgVariables(self):
    """
    Test that variables are set properly.
    """
    jpgCrawler = Crawler.create(PathHolder(self.__jpgFile))
    self.assertEqual(jpgCrawler.var("type"), "jpg")
    self.assertEqual(jpgCrawler.var("category"), "image")
    self.assertEqual(jpgCrawler.var("imageType"), "single")
    self.assertEqual(jpgCrawler.var("width"), 512)
    self.assertEqual(jpgCrawler.var("height"), 512)
def testDpxVariables(self):
    """
    Test that variables are set properly.
    """
    dpxCrawler = Crawler.create(PathHolder(self.__dpxFile))
    self.assertEqual(dpxCrawler.var("type"), "dpx")
    self.assertEqual(dpxCrawler.var("category"), "image")
    self.assertEqual(dpxCrawler.var("imageType"), "single")
    self.assertEqual(dpxCrawler.var("width"), 1920)
    self.assertEqual(dpxCrawler.var("height"), 1080)
def testInsertData(self):
    """
    Test insert data in the hashmap.
    """
    hashmapCrawler = Crawler.create({})
    hashmapCrawler["a"] = 1
    # the inserted key must be present and retrievable
    self.assertEqual(len(hashmapCrawler), 1)
    self.assertIn('a', hashmapCrawler)
    self.assertEqual(hashmapCrawler['a'], 1)
def testCrawlerClone(self):
    """
    Test that cloning crawlers works.
    """
    original = Crawler.create(PathHolder(self.__turntableFile))
    duplicate = original.clone()

    # the clone must carry the same vars, context vars and tags
    self.assertCountEqual(original.varNames(), duplicate.varNames())
    self.assertCountEqual(original.contextVarNames(), duplicate.contextVarNames())
    self.assertCountEqual(original.tagNames(), duplicate.tagNames())
def test01Ingestion(self):
    """
    Test the ingestion configuration.
    """
    loader = Loader()
    loader.loadFromDirectory(self.__exampleDirectory)

    # pick the single task holder driven by the ingest config
    ingestHolders = [
        holder for holder in loader.taskHolders()
        if os.path.basename(holder.var('contextConfig')) == 'ingestConfig.yaml'
    ]
    self.assertEqual(len(ingestHolders), 1)

    taskHolder = ingestHolders[0]
    taskHolder.addVar("prefix", self.__exampleIngestionPrefixDirectory, True)

    # loading input data for the ingestion
    platesDirectory = os.path.join(self.__exampleDirectory, 'plates')
    crawlerGroups = Crawler.group(
        FsCrawler.createFromPath(platesDirectory).globFromParent()
    )

    # only png groups are fed through the task holder
    resultCrawlers = []
    pngType = Crawler.registeredType('png')
    for group in crawlerGroups:
        if isinstance(group[0], pngType):
            resultCrawlers += taskHolder.run(group)

    # expected paths come from the static fixture data (blank lines dropped)
    targetFilePaths = sorted(
        line.strip() for line in self.__ingestedGeneratedData.split('\n') if line.strip()
    )
    # created paths are made relative to the prefix and normalized to '/'
    prefixLength = len(self.__exampleIngestionPrefixDirectory) + 1
    createdFilePaths = sorted(
        crawler.var('fullPath')[prefixLength:].replace('\\', '/')
        for crawler in resultCrawlers
    )
    self.assertListEqual(targetFilePaths, createdFilePaths)
def testJsonContents(self):
    """
    Test that json files are parsed properly.
    """
    expectedData = {
        "testList": [1, 1.2, "value"],
        "testDict": {"key": "value", "number": 1},
        "testString": "blah"
    }
    jsonCrawler = Crawler.create(PathHolder(self.__jsonFile))
    self.assertEqual(jsonCrawler.contents(), expectedData)