def testLevel1(self):
    """Exercise findFileGenerator at maxDepth == 1.

    Runs three scans of the top level of the test tree: all items,
    files only, and directories only, checking both that every yielded
    name is expected and that every expected name is yielded.
    """
    for depth in [1]:
        # Look for all top level items regardless of type.
        tst = f.findFileGenerator(self.tdir, maxDepth=depth)
        items = []
        expected = list(testDir[self.tdir].keys())
        for (x, o, p) in tst:
            items.append(o)
            assert o in expected, 'Item %s must be expected: %s' % (o, expected)
        for k in expected:
            assert k in items, 'Expected item %s must be found in %s' % (k, items)
        # look for only top level files
        items = []
        expected = ['f0', 'g0']
        t = f.findFileGenerator(self.tdir, acceptanceFunction=acceptFileOnly,
                                maxDepth=depth)
        for (x, o, p) in t:
            items.append(o)
            # BUG FIX: the original message mixed '+' concatenation with a
            # trailing "% depth" that bound only to str(expected), raising
            # TypeError instead of showing the message when the assert fired.
            assert o in expected, ('depth=%d,expect a top level file, '
                                   'got %s not in %s' % (depth, o, expected))
        for x in expected:
            assert x in items, 'depth=%d,expect both top level files' % depth
        # look for only top level directories
        items = []
        expected = ['0', '1', '2', '4']
        # list(...) keeps this working on Python 3, where dict.keys()
        # returns a view that does not support indexing.
        t = f.findFileGenerator(list(testDir.keys())[0],
                                acceptanceFunction=acceptDirOnly,
                                maxDepth=depth)
        for (x, o, p) in t:
            items.append(o)
            assert o in expected, 'depth=%d,expect a top level directory' % depth
        for x in expected:
            assert x in items, 'depth=%d,expect all top level directories' % depth
def testLevels(self):
    """Exercise findFileGenerator at several maxDepth values.

    For each depth, every name yielded must be in the expected list
    and every expected name must have been yielded.
    """
    # (maxDepth, expected item names) pairs; the original spelled out the
    # identical scan/verify loop four times — folded into one data table.
    cases = [
        (2, ['f0a', 'f0b', '0', '10', '11', 'f1a', 'f1b', '1', '20',
             'f2a', 'f2b', '2', '40', 'f4', '4', 'f0', 'g0']),
        (3, ['f0a', 'f0b', '0', '10', '11', 'f1a', 'f1b', '1', '200', '20',
             'f2a', 'f2b', '2', '400', 'f40', '40', 'f4', '4', 'f0', 'g0']),
        (4, ['f0a', 'f0b', '0', '10', '11', 'f1a', 'f1b', '1', '2000',
             '200', '20', 'f2a', 'f2b', '2', '4000', 'f400', '400', 'f40',
             '40', 'f4', '4', 'f0', 'g0']),
        (100, ['f0a', 'f0b', '0', '10', '11', 'f1a', 'f1b', '1', 'd0',
               'd1', '2000', '200', '20', 'f2a', 'f2b', '2', 'f4000',
               '4000', 'f400', '400', 'f40', '40', 'f4', '4', 'f0', 'g0']),
    ]
    for depth, expected in cases:
        items = []
        for (x, o, p) in f.findFileGenerator(self.tdir, maxDepth=depth):
            items.append(o)
            assert o in expected
        for o in expected:
            assert o in items
def testCompare(self):
    """Reversing the directory sort order must exactly reverse the results."""
    # NOTE: only valid for depth == 1, since deeper scans visit the
    # directories individually.
    forward = [name for (_, name, _) in
               f.findFileGenerator(self.tdir, maxDepth=1)]
    backward = [name for (_, name, _) in
                f.findFileGenerator(self.tdir, maxDepth=1,
                                    directorySortFunction=revcmp)]
    backward.reverse()
    assert forward == backward
def testDirAcceptance(self):
    """Walk the full tree while accept2Dirs prunes which directories are entered."""
    expected = ['0', '1', 'd0', 'd1', '2000', '200', '20', 'f2a', 'f2b',
                '2', '4', 'f0', 'g0']
    found = []
    walker = f.findFileGenerator(self.tdir, maxDepth=100,
                                 directoryAcceptanceFunction=accept2Dirs)
    for (_, name, _) in walker:
        found.append(name)
        assert name in expected
    for name in expected:
        assert name in found
def new_crashes(self):
    """Yield, for each ".json" file under search_root, the list of
    pathnames making up that crash: the raw crash file plus any
    same-prefix dump files sitting beside it."""
    json_files = findFileGenerator(self.config.search_root,
                                   lambda x: x[2].endswith(".json"))
    for dir_path, file_name, raw_crash_pathname in json_files:
        stem = os.path.splitext(file_name)[0]
        pathnames = [raw_crash_pathname]
        # collect sibling dump files sharing the crash's filename prefix
        pathnames.extend(
            os.path.join(dir_path, candidate)
            for candidate in os.listdir(dir_path)
            if candidate.startswith(stem)
            and candidate.endswith(self.config.dump_suffix)
        )
        # yield the pathnames of all the crash parts
        yield pathnames
def new_crashes(self):
    """Generate, per ".json" file under search_root, the crash's part
    pathnames: the raw crash file followed by its companion dumps."""
    def _is_json(entry):
        # entry is the (path, name, pathname) triple from findFileGenerator
        return entry[2].endswith(".json")

    for directory, filename, raw_crash_pathname in findFileGenerator(
        self.config.search_root, _is_json
    ):
        stem, _ = os.path.splitext(filename)
        parts = [raw_crash_pathname]
        for entry in os.listdir(directory):
            if not entry.startswith(stem):
                continue
            if entry.endswith(self.config.dump_suffix):
                parts.append(os.path.join(directory, entry))
        # yield the pathnames of all the crash parts
        yield parts
def an_iter():
    """Yield tuples of crash-part pathnames for each ".json" file found,
    optionally sleeping between crashes to pace submission."""
    cfg = self.config
    for folder, json_name, raw_crash_pathname in findFileGenerator(
        cfg.submitter.search_root, lambda x: x[2].endswith(".json")
    ):
        stem = os.path.splitext(json_name)[0]
        parts = [raw_crash_pathname]
        parts.extend(
            os.path.join(folder, name)
            for name in os.listdir(folder)
            if name.startswith(stem) and name.endswith(cfg.dump_suffix)
        )
        yield tuple(parts)
        # throttle between crashes when a delay was configured
        if cfg.submitter.delay:
            time.sleep(cfg.submitter.delay)
def new_crashes(self):
    """Yield an (args, kwargs) pair for each crash found under search_root.

    Normally new_crashes in a crashstorage class yields just a crash_id;
    here only pathnames are available, so each yield takes the form
    (((prefix, crash_pathnames),), {}) instead.
    """
    for folder, json_name, raw_crash_pathname in findFileGenerator(
        self.config.search_root, lambda x: x[2].endswith(".json")
    ):
        prefix = path.splitext(json_name)[0]
        parts = [raw_crash_pathname]
        for candidate in listdir(folder):
            is_companion_dump = (candidate.startswith(prefix)
                                 and candidate.endswith(self.config.dump_suffix))
            if is_companion_dump:
                parts.append(path.join(folder, candidate))
        yield (((prefix, parts), ), {})
def testLevel0(self):
    """A maxDepth of zero or less must yield nothing at all."""
    for depth in (-12, -1, 0):
        found = list(f.findFileGenerator(self.tdir, maxDepth=depth))
        assert not found, 'Expect nothing for 0 or negative. For %d, got %s' % (depth, found)