Example #1
def parse_and_filter(indir, namefilter=None, recursive=False, printlog=False):
    ''' read and parse headers and definitions of component and instrument files '''
    instr_files, comp_files = utils.get_instr_comp_files(indir, recursive)
    print("parsing root folder:", indir)
    comp_info_lst = []
    for f in comp_files:
        try:
            if printlog:
                print("parsing comp... %s" % f)
            info = CompParser(f).parse()
            info.filepath = os.path.abspath(f)
            comp_info_lst.append(info)
        except Exception:
            print("failed parsing file: %s" % f)
            comp_info_lst.append(CompParser(f).stub())
    print("comp files: %s" % str(len(comp_files)))

    instr_info_lst = []
    for f in instr_files:
        try:
            if printlog:
                print("parsing instr... %s" % f)
            info = InstrParser(f).parse()
            info.filepath = os.path.abspath(f)
            instr_info_lst.append(info)
        except Exception:
            print("failed parsing file: %s" % f)
            instr_info_lst.append(InstrParser(f).stub())
    print("instr files: %s" % str(len(instr_files)))

    if namefilter is not None:
        comp_info_lst = [
            c for c in comp_info_lst
            if re.search(namefilter.lower(), c.name.lower())
        ]
        instr_info_lst = [
            c for c in instr_info_lst
            if re.search(namefilter.lower(), c.name.lower())
        ]
        comp_files = [
            f for f in comp_files
            if re.search(namefilter.lower(),
                         os.path.splitext(os.path.basename(f))[0].lower())
        ]
        instr_files = [
            f for f in instr_files
            if re.search(namefilter.lower(),
                         os.path.splitext(os.path.basename(f))[0].lower())
        ]

    return comp_info_lst, instr_info_lst, comp_files, instr_files
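
A minimal usage sketch for parse_and_filter, assuming the function and its utils/parser dependencies are importable from this module; the library path and the "monitor" filter below are hypothetical:

# hypothetical call: keep only components/instruments whose parsed name matches "monitor"
comp_infos, instr_infos, comp_files, instr_files = parse_and_filter(
    "/usr/share/mcstas/resources",   # hypothetical library directory
    namefilter="monitor",            # case-insensitive regex applied to parsed names and file names
    recursive=True,
    printlog=True)
print("matched %d components and %d instruments" % (len(comp_infos), len(instr_infos)))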
Example #2
def initDynamicView(self):
    # load installed mcstas instruments:
    # construct args = [site, instr_name_lst[], instr_path_lst[]]
    args = []
    files_instr, files_comp = get_instr_comp_files(mccode_config.configuration["MCCODE_LIB_DIR"])

    # temporary list consisting of instrument files with site names:
    files_instr_and_site = []
    for f in files_instr:
        files_instr_and_site.append([f, get_instr_site(f)])

    # order instrument files by site:
    sites = {site for _, site in files_instr_and_site}
    for s in sites:
        # extract instrument file paths of this site
        instr_path_lst = [f for f, site in files_instr_and_site if site == s]
        # sort instruments of this site by file name
        instr_path_lst.sort(key=lambda instrpath: os.path.splitext(os.path.basename(instrpath))[0])
        # extract file names
        instr_name_lst = [os.path.splitext(os.path.basename(p))[0] for p in instr_path_lst]
        args.append([s, instr_name_lst, instr_path_lst])

    # sort sites
    args.sort(key=lambda arg: arg[0])

    # hand on for menu generation
    self.view.initMainWindowDynamicElements(args, self.handleNewFromTemplate)

    # load installed mcstas components:
    # args - [category, comp_names[], comp_parsers[]]
    args = []
    categories = {0: 'Source', 1: 'Optics', 2: 'Sample', 3: 'Monitor', 4: 'Misc', 5: 'Contrib', 6: 'Union', 7: 'Obsolete'}
    dirnames = {0: 'sources', 1: 'optics', 2: 'samples', 3: 'monitors', 4: 'misc', 5: 'contrib', 6: 'contrib/union', 7: 'obsolete'}
    for i in range(8):
        compnames = []
        parsers = []

        for f in files_comp:
            # the Union category (i == 6) matches on the trailing 'contrib/union' path,
            # the other categories match on the immediate parent directory name
            if i == 6:
                matched = re.search(dirnames[i], os.path.dirname(f))
            else:
                matched = re.search(dirnames[i], os.path.basename(os.path.dirname(f)))
            if matched:
                compnames.append(os.path.splitext(os.path.basename(f))[0])  # filename without extension - this is the component name
                parsers.append(ComponentParser(f))  # append a parser, for ease of parsing on-the-fly

        args.append([categories[i], compnames, parsers])

    # sort components in each category (using Python default string sort on filename)
    for arg in args:
        arg[1].sort()
        arg[2].sort(key=lambda parser: os.path.splitext(os.path.basename(parser.file))[0])

    # hand on for menu generation
    self.view.initCodeEditorComponentMenu(args)
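
The component args list handed to the view is a plain list of [category, comp_names, comp_parsers] triples; a small sketch of how a consumer could walk that structure (the view-side menu methods themselves are not shown here):

# sketch: summarize the component menu structure built above
for category, comp_names, comp_parsers in args:
    print("%-10s %3d components" % (category, len(comp_names)))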
Example #3
def repair_instr(localdir):
    '''
    Dev function used to alter instr file headers.
    '''
    local_instr_files, local_comp_files = utils.get_instr_comp_files(localdir)

    # first pass: verify that every instr file parses before editing anything
    files = []
    rows = []
    for filename in local_instr_files:
        try:
            print("parsing... %s" % filename)
            info = InstrParser(filename).parse()
            files.append(filename)
            rows.append(info)
        except Exception:
            print("failed parsing instr file: %s" % filename)
            quit()

    cnt = 0
    for filename in local_instr_files:
        f = open(filename, 'r')
        # read the first two instr file sections
        header = utils.read_header(f)
        define = utils.read_define_instr(f)

        # doc lines
        print('*****')
        print(filename)
        print()

        seen_P = False
        par_docs = []
        idxs = []
        idxs_remove = []
        lines = header.splitlines()
        for i in range(len(lines)):
            l = lines[i]

            # remove unwanted lines
            m = re.match(r'\* Release:', l)
            if m:
                idxs_remove.append(i)
                continue
            m = re.match(r'\* Version:', l)
            if m:
                idxs_remove.append(i)
                continue
            m = re.match(r'\* INPUT PARAMETERS:', l)
            if m:
                idxs_remove.append(i)
                continue

            # fast-forward to %P / %Parameters tag
            if not seen_P and re.match(r'\* %Parameters', l):
                seen_P = True
            elif not seen_P:
                continue
            # exit if we reach %L / %Link tag
            if re.match(r'\* %L', l):
                break

            l = l.lstrip('*').strip()
            m = re.match(
                r'(\w+):[ \t]*\[([ \w\/\(\)\\\~\-.,\":\%\^\|\{\};\*]+)\][ \t]*(.*)',
                l)
            if m:
                healthy_par_doc = (m.group(1), m.group(2), m.group(3).strip())
                par_docs.append(healthy_par_doc)
                idxs.append(i)
                continue
            elif re.match(r'(\w+):', l):
                # empty docstrings
                m3 = re.match(r'(\w+):[ \t]*$', l)
                if m3:
                    empty_par_doc = (m3.group(1), '', '')
                    par_docs.append(empty_par_doc)
                    idxs.append(i)
                    continue
                # no-unit docstrings
                m4 = re.match(
                    r'(\w+):[ \t]*([ \t\w,.\-\(\)\=\^\/:\"\'\%\<\>\|\{\};\*]+)$',
                    l)
                if m4:
                    limp_par_doc = (m4.group(1), '', m4.group(2).strip())
                    par_docs.append(limp_par_doc)
                    idxs.append(i)
                    continue
                # "inversed" docstrings
                m2 = re.match(
                    r'(\w+):[ \t]*(.*)[ \t]*\[([ \w\/\(\)\\\~\-.,\":\%\|\{\}]+)\]',
                    l)
                if m2:
                    par_doc = (m2.group(1), m2.group(3).strip(), m2.group(2))
                    par_docs.append(par_doc)
                    idxs.append(i)
                    continue

        # edit par doc lines, remove superfluous
        if True:
            if len(par_docs) == 0:
                continue
            l01 = max([len(p[0]) + len(p[1]) for p in par_docs])

            name, real_pars = utils.parse_define_instr(define)
            real_parnames = [par[1] for par in real_pars]

            # rewrite par doc lines, remove "bonus" doc lines without a parameter to document
            for i in range(len(par_docs)):
                p = par_docs[i]
                idx = idxs[i]

                # reorganize the docstring line
                format_str = '* %s: %-' + str(l01 - len(p[0]) + 3) + 's %s'
                l = format_str % (p[0], '[' + p[1] + ']', p[2])
                print(l)

                # replace l in lines:
                lines[idx] = l

                # flag superfluous doc lines for removal
                if p[0] not in real_parnames:
                    # (!!!!)
                    # TODO: take care of the ordering of idxs_remove. Today, we know that all previously
                    # removed lines are above, but this may change
                    # (!!!!)
                    idxs_remove.append(idx)

            # add a stub par doc line for each par that isn't represented (WARNING: do not use while removing lines!)
            if False:
                print()
                extra_pardoc_lines = []
                par_doc_names = [q[0] for q in par_docs]
                for i in range(len(real_pars)):
                    par_name = real_pars[i][1]
                    if par_name not in par_doc_names:
                        l = '* %s:' % par_name
                        print(l)
                        extra_pardoc_lines.append(l)
                # insert those extra lines ...
                good_idx = idxs[-1]
                for i in range(len(extra_pardoc_lines)):
                    l = extra_pardoc_lines[i]
                    lines.insert(good_idx + i + 1, l)

        # append the define section, then the remaining lines read from the file
        for l in define.splitlines():
            lines.append(l)
        for l in f:
            lines.append(l.rstrip('\n'))

        # remove unwanted lines:
        for idx in reversed(idxs_remove):
            del lines[idx]

        for l in lines:
            print(l)

        # dev dry-run: skip the (disabled) file write below
        continue

        f.close()
        f = open(filename, 'w')
        f.write('\n'.join(lines) + '\n')
        f.close()

        cnt += 1
        print(cnt)
    quit()
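
A small standalone sketch of the doc-line normalization performed above: match a 'name: [unit] description' header line and print it back in the aligned form. The sample line and the fixed column width are hypothetical; the pattern mirrors the "healthy" regex used in repair_instr:

import re

line = 'lambda: [AA]   mean wavelength emitted by the source'  # hypothetical header line, leading '*' already stripped
m = re.match(r'(\w+):[ \t]*\[([ \w\/\(\)\\\~\-.,\":\%\^\|\{\};\*]+)\][ \t]*(.*)', line)
if m:
    name, unit, desc = m.group(1), m.group(2), m.group(3).strip()
    # realign as '* name: [unit]   description', padding the unit column
    print('* %s: %-10s %s' % (name, '[' + unit + ']', desc))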
Example #4
def main(args):
    logging.basicConfig(level=logging.INFO)

    usedir = mccode_config.configuration["MCCODE_LIB_DIR"]

    if (args.dir is None and not args.install and args.searchterm is None
            and not args.manual and not args.comps and not args.web):
        ''' browse system docs and exit '''
        subprocess.Popen(
            '%s %s' %
            (mccode_config.configuration['BROWSER'],
             os.path.join(usedir,
                          mccode_config.get_mccode_prefix() + 'doc.html')),
            shell=True)
        quit()

    elif args.manual:
        ''' open manual and exit '''
        subprocess.Popen(
            '%s %s' %
            (mccode_config.configuration['BROWSER'],
             os.path.join(
                 usedir, 'doc', 'manuals',
                 mccode_config.configuration['MCCODE'] + '-manual.pdf')),
            shell=True)
        quit()

    elif args.comps:
        ''' open component manual and exit '''
        subprocess.Popen(
            '%s %s' %
            (mccode_config.configuration['BROWSER'],
             os.path.join(
                 usedir, 'doc', 'manuals',
                 mccode_config.configuration['MCCODE'] + '-components.pdf')),
            shell=True)
        quit()

    elif args.web:
        ''' open website and exit '''
        subprocess.Popen(
            '%s %s' % (mccode_config.configuration['BROWSER'], 'http://www.' +
                       mccode_config.configuration['MCCODE'] + '.org'),
            shell=True)
        quit()

    elif args.install:
        ''' write system doc files '''
        if args.searchterm:
            print("will not write filtered system docs, exiting...")
            quit()
        print("writing mccode distribution docs...")
        if args.dir:
            usedir = args.dir
            print("using custom dir: %s" % usedir)

        comp_infos, instr_infos, comp_files, instr_files = parse_and_filter(
            usedir, recursive=True, printlog=args.verbose)
        write_doc_files_or_continue(comp_infos, instr_infos, comp_files,
                                    instr_files, args.verbose)

        masterdoc = OverviewDocWriter(
            comp_infos, instr_infos, [], [],
            mccode_config.configuration['MCCODE_LIB_DIR'])
        text = masterdoc.create()

        mcdoc_html_filepath = os.path.join(
            usedir,
            mccode_config.get_mccode_prefix() + 'doc.html')
        try:
            write_file(mcdoc_html_filepath, text)
            print("master doc file: %s" % mcdoc_html_filepath)
        except Exception as e:
            print('ERROR writing master doc file: %s' % e)

    elif args.dir is not None or args.searchterm is not None:
        ''' filtered and/or local results '''
        flter = '.*'
        usedir = mccode_config.configuration['MCCODE_LIB_DIR']
        rec = True
        if args.searchterm:
            flter = args.searchterm

        # single, specific file
        if args.searchterm is not None and re.search(r'\.', args.searchterm):
            usedir2 = '.'
            if args.dir is not None:
                usedir2 = args.dir
            f = os.path.join(usedir2, args.searchterm)

            # find matching filenames
            instr_files, comp_files = utils.get_instr_comp_files(
                mccode_config.configuration['MCCODE_LIB_DIR'], True)
            comp_files = [
                f for f in comp_files if os.path.basename(f) == args.searchterm
            ]
            instr_files = [
                f for f in instr_files
                if os.path.basename(f) == args.searchterm
            ]

            # accumulate results
            results = []
            if os.path.isfile(f):
                results.append(f)
            results.extend(instr_files)
            results.extend(comp_files)
            if len(results) == 1:
                f = results[0]

                instr = re.search(r'[\w0-9]+\.instr', args.searchterm)
                comp = re.search(r'[\w0-9]+\.comp', args.searchterm)

                if instr:
                    f_html = os.path.splitext(f)[0] + ".html"
                    info = InstrParser(f).parse()
                    info.filepath = os.path.abspath(f)
                    write_doc_files_or_continue([], [info], [], [f])
                    subprocess.Popen(
                        '%s %s' %
                        (mccode_config.configuration['BROWSER'], f_html),
                        shell=True)
                elif comp:
                    f_html = os.path.splitext(f)[0] + ".html"
                    info = CompParser(f).parse()
                    info.filepath = os.path.abspath(f)
                    write_doc_files_or_continue([info], [], [f], [])
                    subprocess.Popen(
                        '%s %s' %
                        (mccode_config.configuration['BROWSER'], f_html),
                        shell=True)
                quit()
            # zero or multiple matches - fall back to general search-term mode
            else:
                flter = os.path.splitext(os.path.basename(args.searchterm))[0]

        # system
        comp_infos, instr_infos, comp_files, instr_files = parse_and_filter(
            usedir, flter, recursive=True)
        write_doc_files_or_continue(comp_infos, instr_infos, comp_files,
                                    instr_files)

        # local
        comp_infos_local = []
        instr_infos_local = []
        if args.dir is not None:
            usedir = args.dir
            comp_infos_local, instr_infos_local, comp_files, instr_files = parse_and_filter(
                args.dir, flter, recursive=False)
            write_doc_files_or_continue(comp_infos_local, instr_infos_local,
                                        comp_files, instr_files)

        if len(comp_infos_local) + len(instr_infos_local) + len(
                comp_infos) + len(instr_infos) == 0:
            print("no matches found")
            quit()

        masterdoc = OverviewDocWriter(comp_infos, instr_infos,
                                      comp_infos_local, instr_infos_local,
                                      usedir)
        text = masterdoc.create()

        mcdoc_html_filepath = os.path.join(
            '.',
            mccode_config.get_mccode_prefix() + 'doc.html')
        if args.verbose:
            print('writing local overview doc file... %s' %
                  mcdoc_html_filepath)
        write_file(mcdoc_html_filepath, text)

        subprocess.Popen(
            '%s %s' %
            (mccode_config.configuration['BROWSER'],
             os.path.join('.',
                          mccode_config.get_mccode_prefix() + 'doc.html')),
            shell=True)
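
main() only reads attributes off args (dir, install, searchterm, manual, comps, web, verbose); a hedged argparse sketch that would supply them. The option strings and help texts are assumptions inferred from those attribute names, not the tool's documented command line:

import argparse

parser = argparse.ArgumentParser(description='mcdoc-style documentation browser')
parser.add_argument('searchterm', nargs='?', help='name filter or a single .instr/.comp file name')
parser.add_argument('--dir', help='additional local directory to document')
parser.add_argument('--install', action='store_true', help='write system doc files')
parser.add_argument('--manual', action='store_true', help='open the user manual')
parser.add_argument('--comps', action='store_true', help='open the component manual')
parser.add_argument('--web', action='store_true', help='open the project website')
parser.add_argument('--verbose', action='store_true', help='print parsing progress')

if __name__ == '__main__':
    main(parser.parse_args())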
Example #5
def main(args):
    '''
    Behavior: See file header.
    '''
    logging.basicConfig(level=logging.INFO)

    usedir = args.crawl or mccode_config.configuration["MCCODE_LIB_DIR"]
    print("using source directory: " + usedir)
    '''
    # repair mode - do not run mcdoc, just the "repair" function
    if args.repair:
        #repair_instr(localdir)
        repair_comp(usedir)
        quit()
    '''

    # local files
    instr_files, comp_files = utils.get_instr_comp_files(usedir)

    # parse comp files
    comp_info_lst = []
    for f in comp_files:
        try:
            print("parsing... %s" % f)
            info = CompParser(f).parse()
            info.filepath = f
            comp_info_lst.append(info)
        except Exception:
            print("failed parsing file: %s" % f)
            comp_info_lst.append(CompParser(f).stub())
    print("parsed comp files: %s" % str(len(comp_files)))

    # parse instr files
    instr_info_lst = []
    for f in instr_files:
        try:
            print("parsing... %s" % f)
            info = InstrParser(f).parse()
            info.filepath = f
            instr_info_lst.append(info)
        except Exception:
            print("failed parsing file: %s" % f)
            instr_info_lst.append(InstrParser(f).stub())
    print("parsed instr files: %s" % str(len(instr_files)))

    # apply a name-filter (matches instr / comp name, not filename)
    if args.namefilter:
        comp_info_lst = [
            c for c in comp_info_lst
            if re.search(args.namefilter.lower(), c.name.lower())
        ]
        instr_info_lst = [
            c for c in instr_info_lst
            if re.search(args.namefilter.lower(), c.name.lower())
        ]
    '''
    # debug mode - write files with a header property each, then quit
    if args.debug:
        text = '\n'.join(['%4d: \n%s' % (i, files[i]) for i in range(len(files))])
        write_file('files', text)
        
        for i in range(utils.InstrCompHeaderInfo.__len__()-3):
            text = '\n'.join(['%4d: %s' % (j, instr_info_lst[j][i]) for j in range(len(instr_info_lst))])
            write_file(utils.InstrCompHeaderInfo.colname(i), text)
        for i in range(8, 10):
            text = '\n'.join(['%4d: \n%s' % (j, '\n'.join(['%-20s, %-10s, %s' % (str(k[0]), str(k[1]), str(k[2])) for k in instr_info_lst[j][i]])) for j in range(len(instr_info_lst))])
            write_file(utils.InstrCompHeaderInfo.colname(i), text)
        
        text = '\n'.join(['%4d: \n%s' % (j, '\n'.join(instr_info_lst[j][10])) for j in range(len(instr_info_lst))])
        #  '\n'.join(info.links)
        write_file(utils.InstrCompHeaderInfo.colname(10), text)
        quit()
    '''

    mcdoc_html = os.path.join(usedir, 'mcdoc.html')
    # try-catch section for file write
    try:
        # generate and save comp html doc pages
        for i in range(len(comp_info_lst)):
            p = comp_info_lst[i]
            f = comp_files[i]
            doc = CompDocWriter(p)
            text = doc.create()
            h = os.path.splitext(f)[0] + '.html'
            print("writing doc file... %s" % h)
            write_file(h, text)

        # generate and save instr html doc pages
        for i in range(len(instr_info_lst)):
            p = instr_info_lst[i]
            f = instr_files[i]
            doc = InstrDocWriter(p)
            text = doc.create()
            h = os.path.splitext(f)[0] + '.html'
            print("writing doc file... %s" % h)
            write_file(h, text)

        # write overview files, properly assembling links to instr- and html-files
        masterdoc = OverviewDocWriter(
            comp_info_lst, instr_info_lst,
            mccode_config.configuration['MCCODE_LIB_DIR'])
        text = masterdoc.create()
        print('writing master doc file... %s' % os.path.abspath(mcdoc_html))
        write_file(mcdoc_html, text)

    except Exception as e:
        print("Could not write to disk: %s" % e.__str__())

    # open a web-browser in a cross-platform way, unless --nobrowse has been enabled
    if not args.nobrowse:
        subprocess.Popen('%s %s' %
                         (mccode_config.configuration['BROWSER'], mcdoc_html),
                         shell=True)
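
The examples rely on a small write_file(path, text) helper; a minimal sketch of what it is assumed to do (the real helper may handle encoding or error reporting differently):

def write_file(path, text):
    ''' minimal sketch: write text to path as UTF-8 '''
    with open(path, 'w', encoding='utf-8') as fh:
        fh.write(text)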
Example #6
def mccode_test(branchdir, testdir, limitinstrs=None, instrfilter=None):
    ''' this main test function tests the given mccode branch/version '''

    # copy instr files and record info
    logging.info("Finding instruments in: %s" % branchdir)
    instrs, _ = utils.get_instr_comp_files(join(branchdir, "examples"),
                                           recursive=True,
                                           instrfilter=instrfilter)
    instrs.sort()

    # limit runs if required
    if limitinstrs:
        instrs = instrs[:limitinstrs]

    # max instr name length for pretty-output
    maxnamelen = 0
    for f in instrs:
        l = len(basename(f)) - 5
        if l > maxnamelen:
            maxnamelen = l

    # create test objects and copy instrument files
    logging.info("Copying instruments to: %s" % testdir)
    tests = []
    for f in instrs:
        # create the test folder for this instrument
        instrname = splitext(basename(f))[0]
        instrdir = join(testdir, instrname)
        mkdir(instrdir)

        # create a new file with the instr text in it, i.e. a local copy of the instrument file
        with open(f, encoding='utf-8') as fh:
            text = fh.read()
        f_new = join(instrdir, basename(f))
        with open(f_new, 'w', encoding='utf-8') as fh:
            fh.write(text)

        # create a test object for every test defined in the instrument header
        instrtests = create_instr_test_objs(sourcefile=f,
                                            localfile=f_new,
                                            header=text)
        tests = tests + instrtests

        # extract and record %Example info from text
        numtests = len([t for t in instrtests if t.testnb > 0])
        if numtests == 0:
            formatstr = "%-" + "%ds: NO TEST" % maxnamelen
            logging.debug(formatstr % instrname)
        elif numtests == 1:
            formatstr = "%-" + "%ds: TEST" % maxnamelen
            logging.debug(formatstr % instrname)
        else:
            formatstr = "%-" + "%ds: TESTS (%d)" % (maxnamelen, numtests)
            logging.debug(formatstr % instrname)

    # compile, record time
    logging.info("")
    logging.info("Compiling instruments [seconds]...")
    for test in tests:
        # if binary exists, set compile time = 0 and continue
        binfile = os.path.splitext(test.localfile)[0] + ".out"
        if os.path.exists(binfile):
            test.compiled = True
            test.compiletime = 0
        else:
            if test.testnb > 0 or (not args.skipnontest):
                log = LineLogger()
                t1 = time.time()
                cmd = "mcrun --info %s &> compile_stdout.txt" % test.localfile
                utils.run_subtool_noread(cmd,
                                         cwd=join(testdir, test.instrname))
                t2 = time.time()
                test.compiled = os.path.exists(binfile)
                test.compiletime = t2 - t1

                # log to terminal
                if test.compiled:
                    formatstr = "%-" + "%ds: " % maxnamelen + \
                      "{:3d}.".format(math.floor(test.compiletime)) + str(test.compiletime-int(test.compiletime)).split('.')[1][:2]
                    logging.info(formatstr % test.get_display_name())
                else:
                    formatstr = "%-" + "%ds: COMPILE ERROR using:\n" % maxnamelen
                    logging.info(formatstr % test.instrname + cmd)
            else:
                logging.info("Skipping compile of " + test.instrname)
        # save (incomplete) test results to disk
        test.save(infolder=join(testdir, test.instrname))

    # run, record time
    logging.info("")
    logging.info("Running tests...")
    for test in tests:
        if not test.compiled:
            formatstr = "%-" + "%ds:   NO COMPILE" % (maxnamelen + 1)
            logging.info(formatstr % test.instrname)
            continue

        # runable tests have testnb > 0
        if test.testnb <= 0:
            formatstr = "%-" + "%ds:   NO TEST" % (maxnamelen + 1)
            logging.info(formatstr % test.get_display_name())
            continue

        # run the test, record time and runtime success/fail
        t1 = time.time()
        global ncount, mpi
        if mpi is not None:
            cmd = "mcrun -s 1000 %s %s -n%s --mpi=%s -d%d &> run_stdout.txt" % (
                test.localfile, test.parvals, ncount, mpi, test.testnb)
        else:
            cmd = "mcrun -s 1000 %s %s -n%s -d%d  &> run_stdout.txt" % (
                test.localfile, test.parvals, ncount, test.testnb)
        retcode = utils.run_subtool_noread(cmd,
                                           cwd=join(testdir, test.instrname))
        t2 = time.time()
        didwrite = os.path.exists(
            join(testdir, test.instrname, str(test.testnb), "mccode.sim"))
        test.didrun = retcode != 0 or didwrite
        test.runtime = t2 - t1

        # log to terminal
        if test.didrun:
            formatstr = "%-" + "%ds: " % (maxnamelen+1) + \
                "{:3d}.".format(math.floor(test.runtime)) + str(test.runtime-int(test.runtime)).split('.')[1][:2]
            logging.info(formatstr % test.get_display_name())
        else:
            formatstr = "%-" + "%ds: RUNTIME ERROR" % (maxnamelen + 1)
            logging.info(formatstr % test.instrname + ", " + cmd)
            continue

        # test value extraction
        extraction = extract_testvals(
            join(testdir, test.instrname, str(test.testnb)), test.detector)
        if type(extraction) is tuple:
            test.testval = extraction[0]
        else:
            test.testval = -1

        # save test result to disk
        test.testcomplete = True
        test.save(infolder=join(testdir, test.instrname))

    #    cpu type: cat /proc/cpuinfo |grep name |uniq | cut -f2- -d:
    #    gpu type: nvidia-smi -L | head -1 |cut -f2- -d: |cut -f1 -d\(

    metalog = LineLogger()
    utils.run_subtool_to_completion(
        "cat /proc/cpuinfo |grep name |uniq | cut -f2- -d: | xargs echo",
        stdout_cb=metalog.logline)
    cpu_type = ",".join(metalog.lst)

    metalog = LineLogger()
    utils.run_subtool_to_completion(
        r"nvidia-smi -L | head -1 |cut -f2- -d: |cut -f1 -d\(",
        stdout_cb=metalog.logline)
    gpu_type = ",".join(metalog.lst)
    if "failed because" in gpu_type:
        gpu_type = "none"

    metalog = LineLogger()
    utils.run_subtool_to_completion("hostname", stdout_cb=metalog.logline)
    hostnamestr = ",".join(metalog.lst)

    metalog = LineLogger()
    utils.run_subtool_to_completion('echo "$USER"', stdout_cb=metalog.logline)
    username = ",".join(metalog.lst)

    metainfo = OrderedDict()
    metainfo["ncount"] = ncount
    metainfo["mpi"] = mpi
    metainfo["date"] = utils.get_datetimestr()
    metainfo["hostname"] = hostnamestr
    metainfo["user"] = username
    metainfo["cpu_type"] = cpu_type
    metainfo["gpu_type"] = gpu_type

    # displayname must be unique, we can return a dict, which eases comparison between tests
    obj = {}
    for t in tests:
        obj[t.get_display_name()] = t.get_json_repr()
    obj["_meta"] = metainfo
    return obj
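
mccode_test returns a dict keyed by unique test display names plus a "_meta" record, which makes results from two branches easy to diff. A hedged sketch of persisting such a result as JSON (paths are hypothetical; it assumes get_json_repr() yields JSON-serializable data):

import json

results = mccode_test("/path/to/branchdir", "/path/to/testdir", limitinstrs=5)
with open("testresults.json", "w", encoding="utf-8") as fh:
    json.dump(results, fh, indent=2)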