def __process_one_test(self, qde):
    frame(
        text=("report check for xfile = %s\n" % qde.xfile
              + "drivers = %s" % str(qde.drivers)),
        char='~').display()

    # Count the number of expected exemption regions
    xregions = 0
    for source_xrn in qde.xrnotes.values():
        for kind in xNoteKinds:
            xregions += len(source_xrn[kind])

    # We're looking at the last report produced, with the last
    # applicable xcov-level
    self.__setup_expectations(
        ntraces=len(qde.drivers),
        xcovlevel=xcovlevel_from[os.path.basename(qde.wdir)[0:3]],
        xregions=xregions)

    reports = ls(os.path.join(qde.wdir, "test.rep"))
    thistest.fail_if(
        len(reports) != 1,
        "expected 1 report, found %d" % len(reports))

    self.report = Tfile(reports[0], self.__process_line)

    [rpe.check() for rpe in self.rpElements]
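The xcovlevel_from lookup above keys on the first three characters of the working directory basename. A hypothetical sketch of such a table follows; the directory prefixes are guesses for illustration only, while the level values are the standard gnatcov --level settings.

# Hypothetical sketch of an xcovlevel_from table as consulted above.
# The three-character prefixes are assumptions, not the framework's
# real keys; only the gnatcov level names themselves are standard.
xcovlevel_from = {
    "st_": "stmt",
    "dc_": "stmt+decision",
    "mc_": "stmt+mcdc",
}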
def __init__(self, dotxcov_pattern):
    # xcov --annotate=xcov produces a set of .xcov annotated unit sources,
    # each featuring a synthetic note per line.
    self.elnotes = {}
    [self.listing_to_enotes(dotxcov) for dotxcov in ls(dotxcov_pattern)]
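For reference, a minimal sketch of the kind of line listing_to_enotes is expected to consume, assuming the usual xcov annotated layout of a line number, a one-character coverage mark and the source text. The helper and regex below are illustrative only, not the framework's parser.

import re

# Illustrative only: one xcov annotated line is assumed to look like
# "  12 +:  <source text>", with marks such as '.', '+', '-' or '!'.
xcov_line_re = re.compile(r"^ *(\d+) (.):(.*)$")

def parse_xcov_line(line):
    m = xcov_line_re.match(line)
    return (int(m.group(1)), m.group(2)) if m else None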
def check_unexpected_reports(self):
    """Check that we don't have unexpected reports or notes."""

    [thistest.fail_if(
        self.covctl.unexpected(s),
        "report note found for %s, not in expected list" % s)
     for s in self.ernotes]

    # Strip the trailing ".xcov" extension to recover the source name
    [thistest.fail_if(
        self.covctl.unexpected(s[:-len(".xcov")]),
        "%s report found, for source not in expected list" % s)
     for s in ls("*.xcov")]
def dirs(self, dirfilter):
    startlinks = [DirLink(r) for r in ls("%s/*.dtl" % self.rootp)]

    rootq = [
        (subdir, None) for subdir in (
            [dtl.fstarget for dtl in startlinks] if startlinks
            else [self.rootp])]

    while rootq:
        (rootp, parento) = rootq.pop()

        dt = DirTree_frompath(rootp)
        for diro in dt.dirs(dirfilter=dirfilter):
            yield diro

        dt.rooto.pdo = parento

        [rootq.append((self.__resolve(dl), diro))
         for diro in dt.linkmap for dl in dt.linkmap[diro]]
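A hedged usage sketch of this generator. The dirfilter protocol (a callable receiving each diro) and the fspath attribute are assumptions made for the sake of the example, not confirmed framework API.

# Hypothetical usage: walk every directory reachable from the document
# root, skipping "src" subdirectories. The filter protocol and the
# fspath attribute are assumptions here.
for diro in doctree.dirs(dirfilter=lambda d: not d.fspath.endswith("/src")):
    print(diro.fspath)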
def __expand_shared_controllers(self, drivers, cspecs):
    """Search and expand possible shared drivers and/or consolidation
    specs uptree for our local functional units."""

    # Shared drivers would be <updir>/test_<xx>*.(adb|c) for some possible
    # updir and every <xx> such that there is a src/<xx>*.(adb|c).
    # Likewise for consolidation specs, as <updir>/cons_<xx>*.txt

    # Gather the *set* of <xx> candidates first, then expand the
    # associated possible lists of drivers (each maybe empty). Beware not
    # to include child or sub units, as these don't mirror as such in the
    # set of test drivers.
    sxx = set(
        srcmatch.group(1) for srcmatch in (
            re.match(r"([a-z_]*).*\.(adb|c)", os.path.basename(src))
            for src in ls("src/*"))
        if srcmatch)

    # If there is no candidate body in src/, arrange to run all the
    # drivers. This is useful for test groups on GPR variations, for
    # example, where we typically want to run all the drivers and check
    # the analysis results against different sets of SCOS.
    if len(sxx) == 0:
        sxx = [""]

    for body in sxx:
        for prefix in ("../" * n for n in range(1, thistest.depth)):
            if drivers:
                self.__expand_drivers(
                    "%(p)ssrc/test_%(b)s*.adb %(p)ssrc/test_%(b)s*.c"
                    % {'p': prefix, 'b': body})
            if cspecs:
                self.__expand_cspecs("%ssrc/cons_%s*.txt" % (prefix, body))
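The pattern construction above can be hard to read in isolation; the standalone sketch below mirrors it for one assumed functional unit and depth, just to show which glob patterns get probed uptree. The depth value and the "engines" unit name are assumptions; only the pattern shapes match the real code.

# Standalone illustration of the uptree expansion above, with assumed
# values standing in for thistest.depth and one <xx> candidate.
depth = 3
body = "engines"

for prefix in ("../" * n for n in range(1, depth)):
    print("%(p)ssrc/test_%(b)s*.adb %(p)ssrc/test_%(b)s*.c"
          % {'p': prefix, 'b': body})
    print("%ssrc/cons_%s*.txt" % (prefix, body))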
import subprocess

from gnatpython.fileutils import ls

try:
    artifacts = ['obj/*', 'lib/obj/*', 'lib/lib/*',
                 'lib/pkg/obj/*', 'lib/pkg/lib/*']

    output = subprocess.check_output('gprbuild -v -p -P main.gpr -dn',
                                     shell=True)
    print(output)

    output = subprocess.check_output('ls -laR --full-time', shell=True)
    print(output)

    output = subprocess.check_output('gpr2clean -v -p -P main.gpr -r',
                                     shell=True)
    print(output)

    output = subprocess.check_output('ls -laR --full-time', shell=True)
    print(output)

    if ls(artifacts) == []:
        print("clean OK")
    else:
        print("clean not OK")
        print(ls(artifacts))

except Exception as E:
    # Unexpected exception. Just print the information we have.
    print('*** Error: %s' % str(E))
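The final check above relies on ls accepting a list of glob patterns and returning the matching paths, empty when nothing survives the clean. A minimal sketch of that assumed behaviour, under a different name to avoid confusion with the real gnatpython helper:

import glob

# Sketch of the ls behaviour the artifact check relies on (assumed:
# accept one glob or a list of globs, return the sorted list of
# matching paths, empty when nothing matches).
def ls_sketch(patterns):
    if isinstance(patterns, str):
        patterns = [patterns]
    return sorted(path for p in patterns for path in glob.glob(p))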
def __expand_cspecs(self, patterns):
    """Add to the list of consolidation specs to exercise the set of
    files corresponding to every glob pattern in PATTERNS."""

    [self.all_cspecs.extend(ls(p)) for p in to_list(patterns)]
def __expand_drivers(self, patterns):
    """Add to the list of drivers to exercise the set of files
    corresponding to every glob pattern in PATTERNS."""

    [self.all_drivers.extend(ls(p)) for p in to_list(patterns)]
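Both expanders lean on to_list to accept either a single string of space separated patterns or an already built list. A minimal sketch of that assumed behaviour (the real helper lives elsewhere in the suite utilities):

# Minimal sketch of the to_list behaviour the two expanders above rely
# on. Assumed semantics: None yields an empty list, a string is split
# on whitespace, anything else is returned as a list.
def to_list(blob):
    if blob is None:
        return []
    if isinstance(blob, str):
        return blob.split()
    return list(blob)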
def check_valid(options, args):

    # We are producing qualification material. Better know what we're
    # aiming at, always:
    exit_if(
        not options.dolevel,
        "Please specify an explicit dolevel (--dolevel).")

    # Generating docs can be pretty long. Better make sure the output
    # format was intentionally stated:
    exit_if(
        not options.docformat,
        "Please specify the desired output format (--docformat).")

    # Likewise for the git branch name:
    exit_if(
        not options.branchname,
        "Please specify the git branch name (--branch).")

    # Convey whether we are requested to produce a kit:
    options.kitp = options.rekit or not options.parts

    # Settle on the set of documents we are to produce:
    options.parts = (
        valid_parts if not options.parts else options.parts.split(','))

    [exit_if(
        part not in valid_parts,
        "Requested part '%s' is invalid, none of %s"
        % (part, str(valid_parts)))
     for part in options.parts]

    # Work dir
    exit_if(
        not options.workdir,
        "A work dir must be specified (--work-dir)")

    warn_if(
        options.kitp
        and os.path.exists(options.workdir)
        and ls("%s/*" % options.workdir),
        "producing kit within non-empty workdir")

    # Producing a STR requires a testsuite dir
    exit_if(
        'str' in options.parts and not options.testsuite_dir,
        "--testsuite-dir required when producing a STR")

    # GIT aspects:
    exit_if(
        options.gitpull and options.gitsource,
        "Specifying git source is incompatible with "
        "request to pull from current origin")

    # In case we produce TOR/LRM traceability matrices ...
    exit_if(
        not options.languages,
        "Please specify the qualified languages (--languages)")
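The validator leans on exit_if and warn_if helpers. Minimal sketches of their assumed behaviour follow (hard stop with a message versus a warning that lets processing continue); the real helpers live elsewhere in the kit-production script and may differ in detail.

import sys

# Minimal sketches, assuming exit_if aborts with a message and warn_if
# merely reports. These only capture the behaviour check_valid depends
# on, not the real implementations.
def exit_if(condition, message):
    if condition:
        print(message)
        sys.exit(1)

def warn_if(condition, message):
    if condition:
        print("warning: %s" % message)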