def __init__(self, distance, lib):
    """Resolve *lib* to an existing file on disk and record its metadata.

    distance -- distance (in dependency hops) from the root library
    lib      -- library path or name; '$VARS' and '~' are expanded, and
                Dso.find_library() is consulted when the expanded path
                does not exist
    Raises AssertionError when no existing path can be resolved.
    """
    import os.path as osp
    # expand environment variables first, then a leading '~'
    resolved = osp.expanduser(osp.expandvars(lib))
    if not osp.exists(resolved):
        # literal path is missing: fall back to a library lookup
        # (Dso.find_library semantics are defined elsewhere in this module)
        hit = Dso.find_library(resolved)
        if hit:
            resolved = hit
    assert osp.exists(resolved), "no such path [%s]" % (resolved, )
    # resolved path of the library
    self.lib = resolved
    # distance from the root library
    self.distance = distance
    # direct dependencies of this library
    self.deplibs = self._getLibs(resolved)
def __init__(self, distance, lib):
    """Resolve *lib* to an existing path and record its metadata.

    distance -- distance from the root library (stored as-is)
    lib      -- library path or name; environment variables and '~' are
                expanded, with Dso.find_library() as a fallback lookup
    Raises AssertionError when no existing path can be resolved.
    """
    import os.path as osp
    lib = osp.expanduser(osp.expandvars(lib))
    if not osp.exists(lib):
        # not an existing path: try Dso.find_library as a fallback
        # (its search semantics are defined elsewhere in this module)
        l = Dso.find_library(lib)
        if l:
            lib = l
    assert osp.exists(lib), "no such path [%s]" % (lib,)
    self.lib = lib  # path of library
    self.distance = distance  # distance from root lib
    self.deplibs = self._getLibs(lib)  # direct dependencies
# NOTE(review): fragment — the enclosing 'def' (a recursive type-visiting
# method of class DsoDb) and the 'class DsoDb' header lie outside this chunk.
# Indentation below is reconstructed; confirm against the original file.
if visitor:
    # user-supplied callback, invoked on every visited type
    visitor(rflx_type)
if rflx_type.IsClass() or rflx_type.IsStruct():
    # recurse over base classes first
    for i in range(rflx_type.BaseSize()):
        itype = rflx_type.BaseAt(i).ToType()
        itype_name = itype.Name(RflxEnums.DICTSCOPE)
        if itype_name != '' and not (itype_name in ctx):
            # first encounter: load this type's dictionary and remember it
            self.load_type(itype_name)
            ctx.add(itype_name)
        ctx.update(self.visit(itype, visitor, ctx))
    # then over data members
    for i in range(rflx_type.DataMemberSize()):
        itype = rflx_type.DataMemberAt(i).TypeOf()
        # pointers are dereferenced to their pointee for the load step
        itype = itype.RawType() if itype.IsPointer() else itype
        itype_name = itype.Name(RflxEnums.DICTSCOPE)
        if itype_name != '' and not (itype_name in ctx):
            self.load_type(itype_name)
            ctx.add(itype_name)
        # restore original type
        itype = rflx_type.DataMemberAt(i).TypeOf()
        itype_name = itype.Name(RflxEnums.DICTSCOPE)
        ctx.update(self.visit(itype, visitor, ctx))
return ctx

pass # class DsoDb

### clean-up ------------------------------------------------------------------
# module-level singleton database of known dsos/rootmaps
registry = _Dso.DsoDb()
def main(args):
    """a script to check the definitions of (reflex) plugins
    across multiple so-called 'rootmap' files
    """
    # NOTE(review): Python-2 syntax here (print statement, 'except E, err');
    # this appears to be the legacy variant of the py3 main() in this file.
    # exit status accumulated across all requested checks (0 == success)
    exitcode = 0
    print ":" * 80
    print "::: chk-rflx :::"
    import os
    import PyUtils.Dso as Dso
    # accepted duplicates: component name -> library basenames that are all
    # allowed to define it; anything else defined twice is an error
    _suppression_dct = {
        'TMath': ('libCore.so', 'libMathCore.so'),
        'string': ('libGaudiKernelDict.so', 'libCore.so',
                   'liblcg_PyCoolDict.so', 'libSTLAddRflx.so'),
        '__pf__::CNV_71_9631': (
            'libDataModelTestDataReadCnvPoolCnv.so',
            'libDataModelTestDataWriteCnvPoolCnv.so',
        ),
        '__pf__::CNV_71_9632': (
            'libDataModelTestDataReadCnvPoolCnv.so',
            'libDataModelTestDataWriteCnvPoolCnv.so',
        ),
        '__pf__::CNV_71_9633': (
            'libDataModelTestDataReadCnvPoolCnv.so',
            'libDataModelTestDataWriteCnvPoolCnv.so',
        ),
        '__pf__::CNV_71_9634': (
            'libDataModelTestDataReadCnvPoolCnv.so',
            'libDataModelTestDataWriteCnvPoolCnv.so',
        ),
        '__pf__::CNV_71_9639': (
            'libDataModelTestDataReadCnvPoolCnv.so',
            'libDataModelTestDataWriteCnvPoolCnv.so',
        ),
        ## FIXME !! this shouldn't be suppressed !!
        '__pf__::RootCollection': (
            'liblcg_RootCollection.so',
            'libAthAnalysisTools.so',
        ),
        ## !!
    }

    def print_db(db, detailedDump=False):
        # print a {component: [libraries]} mapping, components sorted;
        # detailedDump selects full paths vs basenames
        if detailedDump:
            fct = lambda x: x
        else:
            fct = os.path.basename
        keys = db.keys()
        keys.sort()
        for k in keys:
            print "%s:" % k
            libs = db[k]
            libs.sort()
            for lib in libs:
                print " ", fct(lib)
        return

    # NOTE(review): PyUtils.Dso is imported a second time here (redundant)
    import PyUtils.Dso as Dso
    dsodb = Dso.DsoDb()
    if args.capabilities:
        libname = args.capabilities
        try:
            capabilities = dsodb.capabilities(libname)
            print "::: capabilities of [%s]" % (libname, )
            print os.linesep.join([" %s" % c for c in capabilities])
        except ValueError, err:
            # presumably raised for an unknown library -- confirm in DsoDb
            exitcode = 1
        pass
    # (fragment: the function continues beyond this chunk)
# NOTE(review): fragment — this chunk begins inside a parser.add_option(...)
# call (presumably the '--pedantic' flag) whose opening line is outside the
# visible source.
    default=False,
    help="Pedantic mode: if a component is found in 2 libraries which have the same name (usual case of a developer working on a (set of) package(s)), it is still being reported as being duplicated"
)
parser.add_option("-l",
                  "--level",
                  dest="logLvl",
                  default="INFO",
                  help="Logging level (aka verbosity)")

(options, args) = parser.parse_args()

print(":" * 80)
print("::: checkPlugins :::")

# script exit status (0 == success)
sc = 0
dsoDb = Dso.DsoDb()

# a bare first positional argument is treated as the --capabilities library
if len(args) > 0 and args[0][0] != "-":
    options.capabilities = args[0]
    pass

if options.capabilities:
    libName = options.capabilities
    try:
        capabilities = dsoDb.capabilities(libName)
        print("::: capabilities of [%s]" % libName)
        print(os.linesep.join([" " + str(c) for c in capabilities]))
    except ValueError:
        # presumably raised for an unknown library -- confirm in DsoDb
        sc = 1
    pass
# @file PyUtils/bin/gen-typereg-dso.py
# @purpose a python script to workaround various limitations of rootmap files
#          and reflex/cint typename impedance mismatches
# @author Sebastien Binet <*****@*****.**>
# @date February 2009

__doc__ = '''a python script to workaround various limitations of rootmap files
and reflex/cint typename impedance mismatches.
'''
__version__ = '$Revision: 1.1 $'
__author__ = 'Sebastien Binet <*****@*****.**>'

if __name__ == "__main__":
    import os
    import sys

    import PyUtils.Dso as Dso

    # default output file name, overridden by an explicit CLI argument
    oname = 'typereg_dso_db.csv'
    if len(sys.argv) > 1:
        oname = sys.argv[1]
    else:
        # no explicit target: write into the project's InstallArea/share,
        # creating the directory on first use
        from PyCmt.Cmt import CmtWrapper
        proj_root = CmtWrapper().projects()[0]
        from PyUtils.path import path
        oname = path(proj_root) / "InstallArea" / "share" / oname
        if not os.path.exists(oname.dirname()):
            os.makedirs(oname.dirname())

    rflx_names = Dso.gen_typeregistry_dso(oname)
    sys.exit(0)
def main(args):
    """a script to check the definitions of (reflex) plugins
    across multiple so-called 'rootmap' files
    """
    # exit status accumulated across all requested checks (0 == success)
    exitcode = 0
    print(":" * 80)
    print("::: chk-rflx :::")
    import os
    import PyUtils.Dso as Dso

    # accepted duplicates: component name -> library basenames that are all
    # allowed to define it.  Any duplicate not fully covered by this table
    # is reported as an error.
    _suppression_dct = {
        'TMath': ('libCore.so', 'libMathCore.so'),
        'string': ('libGaudiKernelDict.so',
                   'libCore.so',
                   'liblcg_PyCoolDict.so',
                   'libSTLAddRflx.so'),
        '__pf__::CNV_71_9631': (
            'libDataModelTestDataReadCnvPoolCnv.so',
            'libDataModelTestDataWriteCnvPoolCnv.so',
        ),
        '__pf__::CNV_71_9632': (
            'libDataModelTestDataReadCnvPoolCnv.so',
            'libDataModelTestDataWriteCnvPoolCnv.so',
        ),
        '__pf__::CNV_71_9633': (
            'libDataModelTestDataReadCnvPoolCnv.so',
            'libDataModelTestDataWriteCnvPoolCnv.so',
        ),
        '__pf__::CNV_71_9634': (
            'libDataModelTestDataReadCnvPoolCnv.so',
            'libDataModelTestDataWriteCnvPoolCnv.so',
        ),
        '__pf__::CNV_71_9639': (
            'libDataModelTestDataReadCnvPoolCnv.so',
            'libDataModelTestDataWriteCnvPoolCnv.so',
        ),
        ## FIXME !! this shouldn't be suppressed !!
        '__pf__::RootCollection': (
            'liblcg_RootCollection.so',
            'libAthAnalysisTools.so',
        ),
        ## !!
    }

    def print_db(db, detailedDump=False):
        """print a {component: [libraries]} mapping, components sorted.
        `detailedDump` selects full library paths vs basenames.
        """
        if detailedDump:
            fct = lambda x: x
        else:
            fct = os.path.basename
        # FIX: the original did `keys = db.keys(); keys.sort()`, which raises
        # AttributeError on python3 (dict views have no .sort()).  sorted()
        # also avoids mutating the caller's value lists in place.
        for k in sorted(db.keys()):
            print("%s:" % k)
            for lib in sorted(db[k]):
                print(" ", fct(lib))
        return

    def _report_dups(dups):
        """print *dups* plus the suppression summary; return True when at
        least one non-suppressed (i.e. real) duplicate was found.
        (factored out of the 3 identical check_*_dups branches below)
        """
        suppression_log = []
        found_new = False
        for k in dups:
            v = dups[k]
            # mark as error only if it isn't a known dup'
            if k in _suppression_dct:
                suppressed = [os.path.basename(ii) in _suppression_dct[k]
                              for ii in v]
                if all(suppressed):
                    suppression_log.append(k[:])
                else:
                    # that's a new one !!
                    found_new = True
            else:
                # that's a new one !!
                found_new = True
        print_db(dups, args.detailed_dump)
        if len(suppression_log):
            print("-" * 40)
            print("## ignoring the following dups':")
            for k in suppression_log:
                print(" -", k)
            print("-" * 40)
        print("## all dups:", len(dups.keys()))
        print("## dups:", len(dups.keys()) - len(suppression_log))
        return found_new

    dsodb = Dso.DsoDb()

    if args.capabilities:
        libname = args.capabilities
        try:
            capabilities = dsodb.capabilities(libname)
            print("::: capabilities of [%s]" % (libname, ))
            print(os.linesep.join([" %s" % c for c in capabilities]))
        except ValueError:
            # presumably raised for an unknown library -- report via exitcode
            exitcode = 1
        pass

    if args.chk_dups:
        libname = args.chk_dups
        try:
            print("::: checking duplicates for [%s]..." % (libname, ))
            dups = dsodb.duplicates(libname, pedantic=args.pedantic)
            for k in dups:
                print(" -", k)
                print(os.linesep.join([" %s" % v for v in dups[k]]))
            if len(dups.keys()) > 0:
                exitcode = 1
        except ValueError:
            exitcode = 1
        pass

    if args.dump_content:
        print("::: dumping content of all known plugins...")
        entries = dsodb.content(pedantic=args.pedantic)
        print_db(entries, args.detailed_dump)
        print("::: known entries:", len(entries.keys()))

    if args.dump_libs:
        print("::: dumping all known libraries...")
        libs = dsodb.libs(detailedDump=args.detailed_dump)
        for lib in libs:
            print(" -", lib)
        print("::: known libs:", len(libs))

    if args.dump_dso:
        print("::: dumping all known dso/rootmap files...")
        # sorted() replaces the copy-then-.sort() of the original
        dso_files = sorted(dsodb.dsoFiles)
        for dso_file in dso_files:
            if not args.detailed_dump:
                dso_file = os.path.basename(dso_file)
            print(" -", dso_file)
        print("::: known dsos:", len(dso_files))

    if args.check_dict_dups:
        print(":: checking dict. duplicates...")
        if _report_dups(dsodb.dictDuplicates(pedantic=args.pedantic)):
            exitcode = 1

    if args.check_pf_dups:
        print("::: checking (plugin factories) components dups...")
        if _report_dups(dsodb.pfDuplicates(pedantic=args.pedantic)):
            exitcode = 1

    if args.check_all_dups:
        print("::: checking all components dups...")
        dups = dsodb.pfDuplicates(pedantic=args.pedantic)
        dups.update(dsodb.dictDuplicates(pedantic=args.pedantic))
        if _report_dups(dups):
            exitcode = 1

    if exitcode:
        print("::: ERROR !!")
    else:
        print("::: All good.")
    print(":" * 80)
    return exitcode