# Example #1
# 0
    def run(self, info):
        """Merge the split info files produced by parallel subjobs.

        Reads every file matching ``info[Keys.MERGE] + "_*"``, groups the
        configs by their parent subjob chain, merges each group with
        'append' priority, unifies the collected values and writes one
        merged file per group to ``info[Keys.MERGED] + "_<i>"``.

        Raises RuntimeError when no input file exists, or when a file's
        embedded subjob checksum disagrees with the number of files found.
        """
        inputs = sorted(glob.glob(info[Keys.MERGE] + "_*"))
        total = len(inputs)
        if total == 0:
            raise RuntimeError("No files to merge found!")

        grouped = {}
        for inputpath in inputs:
            logging.debug("Reading %s", inputpath)
            cfg = infohandler.get_handler(inputpath).read(inputpath)

            # the last subjob entry carries the expected number of files
            checksum = int(cfg[Keys.SUBJOBLIST][-1].split(Keys.SUBJOBSEP)[2])
            if checksum != total:
                raise RuntimeError(
                    "Number of inputfiles %d and checksum %d do not match" %
                    (total, checksum))

            # group by the parent subjob chain (everything but the last entry)
            parents = cfg[Keys.SUBJOBLIST][:-1]
            groupkey = self.parentjobs_to_str(parents)

            # drop one level of subjob nesting; remove the key entirely once
            # no parents remain
            cfg[Keys.SUBJOBLIST] = parents
            if not cfg[Keys.SUBJOBLIST]:
                del cfg[Keys.SUBJOBLIST]

            if groupkey not in grouped:
                grouped[groupkey] = cfg
            else:
                grouped[groupkey] = dicts.merge(grouped[groupkey], cfg,
                                                priority='append')

        # unify (only possible after everything has been collected)
        for cfg in grouped.values():
            for key in cfg.keys():
                if key == Keys.SUBJOBLIST:
                    cfg[key] = dicts.unify(cfg[key], unlist_single=False)
                elif isinstance(cfg[key], list):
                    cfg[key] = dicts.unify(cfg[key])

        # write one merged file per parent group
        for idx, cfg in enumerate(grouped.values()):
            outpath = info[Keys.MERGED] + '_' + str(idx)
            logging.debug("Writing out %s", outpath)
            infohandler.get_handler(outpath).write(cfg, outpath)

        return info
    def run(self, log, info):
        """Collect the per-engine ini outputs of one workflow stage.

        Determines the effectively enabled engines from ``info['ENGINES']``
        (an engine counts as used when ``info['RUN<ENGINE>'] == 'True'``),
        then for every dataset sample ``i`` reads each engine's
        ``<engine>.ini_<i>`` file, merges them with 'append' priority,
        unifies list values and writes the combined config to
        ``<info[Keys.MERGED]>_<i>``.

        Raises RuntimeError when a required engine infofile is missing.
        Returns the (possibly normalized) *info* dict.
        """
        used_engines = []
        log.debug("All available engines: %s", info['ENGINES'])
        for engine in info['ENGINES'].split(" "):
            key = 'RUN' + engine.upper()
            # engine toggles are stored as the string 'True', not a bool
            if key in info and info[key] == 'True':
                used_engines.append(engine)
        # lazy %-args instead of eager "%" formatting: the message is only
        # rendered when DEBUG logging is actually enabled
        log.debug("Effectively used engines: %s", used_engines)

        # normalize a single dataset code to a one-element list
        if not isinstance(info[Keys.DATASET_CODE], list):
            info[Keys.DATASET_CODE] = [info[Keys.DATASET_CODE]]
        runs = len(info[Keys.DATASET_CODE])
        log.debug("Number of samples: %d", runs)
        for i in range(runs):
            collectedconfig = {}
            for engine in used_engines:
                path = "%s.ini_%d" % (engine, i)
                if not os.path.exists(path):
                    raise RuntimeError("Required infofile not found " + path)
                # no 'else' needed after raise: guard-clause style
                log.debug("Found infofile %s", path)
                engineconfig = infohandler.get_handler(path).read(path)
                collectedconfig = dicts.merge(collectedconfig, engineconfig,
                                              priority='append')

            # unify only list-valued entries of the merged config
            for key in collectedconfig.keys():
                if isinstance(collectedconfig[key], list):
                    collectedconfig[key] = dicts.unify(collectedconfig[key])

            collector_path = "%s_%d" % (info[Keys.MERGED], i)
            infohandler.get_handler(info[Keys.MERGED]).write(collectedconfig,
                                                            collector_path)
            log.debug('Wrote outfile %s', collector_path)

        return info
# Example #3
# 0
    def run(self, log, info):
        """Merge split info files back into per-parent merged files.

        Every file matching ``info[Keys.MERGE] + "_*"`` is read, validated
        against the subjob checksum it carries, bucketed by its parent
        subjob chain and merged ('append' priority). After unification the
        buckets are written out as ``info[Keys.MERGED] + "_<i>"``.

        Raises RuntimeError on zero input files or a checksum mismatch.
        """
        filelist = sorted(glob.glob(info[Keys.MERGE] + "_*"))
        expected = len(filelist)
        if not filelist:
            raise RuntimeError("No files to merge found!")

        bucket = {}
        for filename in filelist:
            log.debug("Reading " + filename)
            entry = infohandler.get_handler(filename).read(filename)

            # the last subjob entry encodes how many split files must exist
            tail = entry[Keys.SUBJOBLIST][-1]
            count = int(tail.split(Keys.SUBJOBSEP)[2])
            if expected != count:
                raise RuntimeError("Number of inputfiles %d and checksum %d do not match" % (expected, count))

            # bucket key is the parent subjob chain without the last entry
            parents = entry[Keys.SUBJOBLIST][:-1]
            bucketkey = self.parentjobs_to_str(parents)

            # strip one subjob level; an empty list means no key at all
            entry[Keys.SUBJOBLIST] = parents
            if not entry[Keys.SUBJOBLIST]:
                del entry[Keys.SUBJOBLIST]

            if bucketkey not in bucket:
                bucket[bucketkey] = entry
            else:
                bucket[bucketkey] = dicts.merge(bucket[bucketkey], entry,
                                                priority='append')

        # unify (only possible after all files have been collected)
        for entry in bucket.values():
            for key in entry.keys():
                if key == Keys.SUBJOBLIST:
                    entry[key] = dicts.unify(entry[key], unlist_single=False)
                    continue
                if isinstance(entry[key], list):
                    entry[key] = dicts.unify(entry[key])

        # write back one file per bucket
        for idx, entry in enumerate(bucket.values()):
            target = info[Keys.MERGED] + '_' + str(idx)
            log.debug("Writing out " + target)
            infohandler.get_handler(target).write(entry, target)

        return info
# Example #4
# 0
    def run(self, log, info):
        """Collate several info files into one config dict.

        Splits the space-separated path list in ``info[Keys.COLLATE]``,
        reads each file with a handler chosen from that path string, merges
        everything onto a copy of *info* with 'append' priority, unifies
        every value and returns the resulting dict.
        """
        handler = get_handler(info[Keys.COLLATE])
        filepaths = info[Keys.COLLATE].split(" ")
        del info[Keys.COLLATE]
        merged = info.copy()

        # read in and merge every collate target
        for filepath in filepaths:
            log.debug('collating file [%s]' % filepath)
            merged = dicts.merge(merged, handler.read(filepath),
                                 priority='append')

        # unify every collected value
        for key in merged.keys():
            merged[key] = dicts.unify(merged[key])

        # write back: the collated config is simply returned
        return merged
# Example #5
# 0
    def run(self, info):
        """Collate a list of info files into a single returned config.

        ``info[Keys.COLLATE]`` holds a space-separated path list; each file
        is read via the handler derived from that string, merged onto a
        copy of *info* ('append' priority), then every value is unified.
        """
        handler = get_handler(info[Keys.COLLATE])
        targets = info[Keys.COLLATE].split(" ")
        del info[Keys.COLLATE]
        result = info.copy()

        # read in each target and fold it into the result
        for target in targets:
            logging.debug('collating file [%s]', target)
            result = dicts.merge(result,
                                 handler.read(target),
                                 priority='append')

        # unify all collected values
        for key in result.keys():
            result[key] = dicts.unify(result[key])

        # write back: caller receives the collated config
        return result