def __get_scovdata(self, scov_file):
    """Return a list of strings containing the SCOV_data.

    To simplify parsing, the leading comment markers are stripped.
    """
    # The langinfo corresponding to the language of SCOV_FILE
    lang_info = language_info(scov_file)

    # The scov data begins at the first line that starts with the
    # language's comment marker, followed by a '#'. Any line that
    # starts as a comment after this first '#' comment line is assumed
    # to be part of the scov data. Build a list of lines containing
    # the scov data stored in scov_file now.
    contents = []
    in_scovdata = False
    for line in lines_of(scov_file):
        # Take care of leading/trailing spaces to give the user
        # more flexibility. Also take care of the trailing new-line
        # character that we get from lines_of.
        #
        # BUGFIX: the original called line.strip() without binding the
        # result. str.strip() returns a new string, so neither the
        # surrounding whitespace nor the trailing newline was actually
        # removed, contrary to what the comment above promises.
        line = line.strip()

        if line.startswith(lang_info.comment + '#'):
            in_scovdata = True
        if in_scovdata and line.startswith(lang_info.comment):
            # Also take this opportunity to strip the leading comment
            # string as well as any space immediately following it.
            # This will simplify the parsing a little bit.
            contents.append(line[len(lang_info.comment):].lstrip())

    return contents
def close(self):
    """
    For each valid unit designated by one of our candidate lists,
    instantiate a UnitCX object and latch the list of instances.
    """
    # The LRE wrapping scheme depends on the source language. All the
    # sources in our list are assumed to share one, so probing the
    # first xpath is enough.
    ref_xpath = self.srlist[0].xpath

    # Wrap LREs so that we look for them in explicit anchors within
    # sources, not as arbitrary sections of source lines.
    for lx in self.lxset:
        self.__wrap_lre(lx, language_info(ref_xpath))

    # Build one unit coverage expectations object per source ref in
    # our list, and latch the result:
    self.uxset = [
        UnitCX(sref=sref, LXset=self.lxset) for sref in self.srlist
    ]
    return self.uxset
def locate_ali(self, source):
    """Return the fullpath of the ali file corresponding to the given
    SOURCE file. Return None if none was found.
    """
    # Whatever the kind of test we are (single or consolidation), we
    # expect every ALI file of interest to be associated with at least
    # one single test, and to be present in the "obj" subdirectory of
    # the associated binary dir.

    # Compute the bindir-relative path once, then probe the binary dir
    # of each of our drivers until we get a hit. There might actually
    # be several instances in the consolidation case; we assume they
    # are all identical (same sources exercised by multiple drivers in
    # typical situations), so the first hit wins.
    rel_ali = "obj/" + language_info(source).scofile_for(
        os.path.basename(source))

    candidates = (
        self.abdir_for(no_ext(main)) + rel_ali for main in self.drivers
    )
    return next((loc for loc in candidates if os.path.exists(loc)), None)
def gprfor(mains, prjid="gen", srcdirs="src", objdir=None, exedir=".",
           main_cargs=None, langs=None, deps=(), compiler_extra="",
           extra=""):
    """Generate a simple PRJID.gpr project file to build executables for
    each main source file in the MAINS list, sources in SRCDIRS.
    Inexistant directories in SRCDIRS are ignored. Assume the set of
    languages is LANGS when specified; infer from the mains otherwise.
    Add COMPILER_EXTRA, if any, at the end of the Compiler package
    contents. Add EXTRA, if any, at the end of the project file contents.
    Return the gpr file name.
    """
    # Turn the dependency list into the "with" clauses that will land at
    # the top of the generated project file.
    deps = '\n'.join(["with \"%s\";" % dep for dep in deps])

    # MAINS/SRCDIRS/LANGS may be passed as single strings; normalize.
    mains = to_list(mains)
    srcdirs = to_list(srcdirs)
    langs = to_list(langs)

    # Fetch the support project file template
    template = contents_of(os.path.join(ROOT_DIR, "template.gpr"))

    # Instantiate the template fields.

    # Turn the list of main sources into the proper comma separated
    # sequence of string literals for the Main GPR attribute.
    gprmains = ', '.join(['"%s"' % m for m in mains])

    # Likewise for source dirs. Filter on existence, to allow widening
    # the set of tentative dirs while preventing complaints from
    # gprbuild about inexistent ones. Note that ', '.join never produces
    # a trailing comma, even when the filtered list is empty, so the
    # rstrip(', ') the original performed here was a no-op and has been
    # removed.
    srcdirs = ', '.join(['"%s"' % d for d in srcdirs if os.path.exists(d)])

    # Determine the language(s) from the mains when not provided
    # explicitly.
    languages_l = langs or set([language_info(main).name for main in mains])
    languages = ', '.join(['"%s"' % l for l in languages_l])

    # The base project file we need to extend. This provides a default
    # last chance handler on which we rely to detect termination on
    # exception occurrence. (The original also computed a "baseref"
    # local from this value; it was never used and has been dropped.)
    basegpr = (("%s/support/base" % ROOT_DIR)
               if control.need_libsupport() else None)

    # Generate compilation switches:
    #
    # - For each language, add BUILDER.COMMON_CARGS as default switches.
    #
    # - If we have specific flags for the mains, append them. This is
    #   typically something like:
    #
    #    for Switches("test_blob.adb") use
    #      Compiler'Default_Switches("Ada") & ("-fno-inline")
    default_switches = ', '.join(
        ['"%s"' % switch for switch in BUILDER.COMMON_CARGS()])
    compswitches = (
        '\n'.join([
            'for Default_Switches ("%s") use (%s);' % (
                language, default_switches)
            for language in languages_l]) +
        '\n' +
        '\n'.join([
            'for Switches("%s") use \n'
            ' Compiler\'Default_Switches ("%s") & (%s);' % (
                main, language_info(main).name,
                ','.join(['"%s"' % carg for carg in to_list(main_cargs)]))
            for main in mains]) +
        '\n')

    # Now instantiate, dump the contents into the target gpr file and
    # return its name.
    gprtext = template % {
        'prjname': prjid,
        'extends': ('extends "%s"' % basegpr) if basegpr else "",
        'srcdirs': srcdirs,
        'exedir': exedir,
        'objdir': objdir or (exedir + "/obj"),
        'compswitches': compswitches,
        'languages': languages,
        'gprmains': gprmains,
        'deps': deps,
        'compiler_extra': compiler_extra,
        'pkg_emulator': gpr_emulator_package(),
        'extra': extra}
    return text_to_file(text=gprtext, filename=prjid + ".gpr")
def gprfor(mains, prjid="gen", srcdirs="src", objdir=None, exedir=".",
           main_cargs=None, langs=None, deps=(), scenario_extra="",
           compiler_extra="", extra=""):
    """
    Generate a simple PRJID.gpr project file to build executables for each
    main source file in the MAINS list, sources in SRCDIRS. Inexistant
    directories in SRCDIRS are ignored. Assume the set of languages is
    LANGS when specified; infer from the sources otherwise. Add
    SCENARIO_EXTRA, if any, at the beginning of the project files (for
    scenario variables). Add COMPILER_EXTRA, if any, at the end of the
    Compiler package contents. Add EXTRA, if any, at the end of the
    project file contents. Return the gpr file name.
    """
    # Turn the dependency list into the sequence of "with" clauses that
    # will land at the top of the generated project file.
    deps = '\n'.join('with "%s";' % dep for dep in deps)

    # MAINS/SRCDIRS/LANGS may come in as single strings; normalize to
    # lists.
    mains = to_list(mains)
    srcdirs = to_list(srcdirs)
    langs = to_list(langs)

    # Fetch the support project file template
    template = contents_of(os.path.join(ROOT_DIR, "template.gpr"))

    # Instantiate the template fields.

    # Turn the list of main sources into the proper comma separated
    # sequence of string literals for the Main GPR attribute. Emit no
    # Main attribute at all when there are no mains.
    gprmains = ', '.join('"%s"' % m for m in mains)
    if gprmains:
        gprmains = 'for Main use (%s);' % gprmains

    # Likewise for source dirs. Filter on existence, to allow widening
    # the set of tentative dirs while preventing complaints from gprbuild
    # about inexistent ones.
    srcdirs_list = [d for d in srcdirs if os.path.exists(d)]

    # Determine the language(s) from the sources if they are not
    # explicitly passed as parameters. Entries for which language_info
    # yields a falsy value (presumably files of unrecognized kinds —
    # TODO confirm against language_info) are filtered out.
    if not langs:
        lang_infos = [language_info(src)
                      for srcdir in srcdirs_list
                      for src in os.listdir(srcdir)]
        langs = set(li.name for li in lang_infos if li)

    srcdirs = ', '.join('"%s"' % d for d in srcdirs_list)
    languages = ', '.join('"%s"' % lang for lang in langs)

    # The base project file we need to extend, and the way to refer to it
    # from the project contents. This provides a default last chance
    # handler on which we rely to detect termination on exception
    # occurrence.
    basegpr = (("%s/support/base" % ROOT_DIR)
               if RUNTIME_INFO.need_libsupport else None)

    # If we have specific flags for the mains, append them. This is
    # typically something like:
    #
    #  for Switches("test_blob.adb") use
    #    Compiler'Default_Switches("Ada") & ("-fno-inline")
    compswitches = (
        '\n'.join([
            'for Switches("%s") use \n'
            ' Compiler\'Default_Switches ("%s") & (%s);' % (
                main, language_info(main).name,
                ','.join(['"%s"' % carg for carg in to_list(main_cargs)]))
            for main in mains]) + '\n')

    # Now instantiate, dump the contents into the target gpr file and
    # return its name.
    gprtext = template % {
        'prjname': prjid,
        'extends': ('extends "%s"' % basegpr) if basegpr else "",
        'scenario': scenario_extra,
        'srcdirs': srcdirs,
        'exedir': exedir,
        'objdir': objdir or (exedir + "/obj"),
        'compswitches': compswitches,
        'languages': languages,
        'gprmains': gprmains,
        'deps': deps,
        'compiler_extra': compiler_extra,
        'pkg_emulator': gpr_emulator_package(),
        'extra': extra}
    return text_to_file(text=gprtext, filename=prjid + ".gpr")