Exemple #1
0
class Config(object):
    """Read-only access to a project's configuration.

    Loads two JSON files below *base_dir*:
      - ``project_config.json`` -- project-wide settings (a dict).
      - ``<branch>/mincid.json`` -- per-branch settings; expected to be a
        two-element list: [0] a dict of defines, [1] the job descriptions.
    """

    def __init__(self, base_dir, branch_name):
        """Load the project-wide and the per-branch configuration.

        :param base_dir: directory containing the configuration files
        :param branch_name: branch name; mapped to a filesystem-safe
            directory component via RFSString.
        """
        self.__branch_name = RFSString(branch_name)

        with open(os.path.join(base_dir, "project_config.json"), "r") as fd:
            self.__project_config = json.load(fd)

        with open(os.path.join(base_dir, self.__branch_name.fs(),
                               "mincid.json"), "r") as fd:
            self.__branch_config = json.load(fd)

    def project_cfg(self, key):
        """Return the project-wide configuration value for *key*."""
        return self.__project_config[key]

    def branch_defines(self, key):
        """Return the branch 'define' value for *key*."""
        return self.__branch_config[0][key]

    def branch_jobs(self):
        """Return the branch job descriptions."""
        return self.__branch_config[1]

    def expand(self, val):
        """Expand *val*: a leading '$' makes it a reference into the
        branch defines; any other value (including "") is returned as-is.
        """
        # startswith() is safe for the empty string, where the original
        # val[0] indexing would raise IndexError.
        if not val.startswith("$"):
            return val
        return self.__branch_config[0][val[1:]]
Exemple #2
0
    def __init__(self, base_dir, branch_name):
        """Load the project-wide and the per-branch JSON configuration.

        :param base_dir: directory holding the configuration files
        :param branch_name: branch name (made filesystem-safe via RFSString)
        """
        self.__branch_name = RFSString(branch_name)

        project_path = os.path.join(base_dir, "project_config.json")
        with open(project_path, "r") as cfg_fd:
            self.__project_config = json.load(cfg_fd)

        branch_path = os.path.join(
            base_dir, self.__branch_name.fs(), "mincid.json")
        with open(branch_path, "r") as cfg_fd:
            self.__branch_config = json.load(cfg_fd)
Exemple #3
0
    def __init__(self, master_conf, desc_file):
        """Set up the per-build working area for one project.

        Reads the master and the project-description JSON files, creates a
        timestamped temp directory below the worker dir, copies both config
        files into its '.mincid' subdirectory and initializes the logger.

        :param master_conf: path to the mincid master JSON config
        :param desc_file: path to the project description JSON file
        """
        with open(master_conf, "r") as cfg_fd:
            self.__master_config = json.load(cfg_fd)
        with open(desc_file, "r") as cfg_fd:
            self.__config = json.load(cfg_fd)
        self.__name = self.__config['name']
        # Filesystem-safe representation of the project name.
        self.__rfs = RFSString(self.__name)

        timestamp = time.strftime("%Y%m%d-%H%M%S")
        self.__tmp_dir = os.path.join(
            self.__master_config["worker_dir"], self.__rfs.fs(), timestamp)
        os.makedirs(self.__tmp_dir, exist_ok=True)

        self.__working_dir = os.path.join(self.__tmp_dir, ".mincid")
        os.makedirs(self.__working_dir, exist_ok=True)
        # World-writable working dir -- presumably so jobs running under
        # other accounts can write here; TODO confirm.
        os.chmod(self.__working_dir, 0o777)

        # Keep copies of both configuration files next to the build.
        for src, dst_name in ((desc_file, "project_config.json"),
                              (master_conf, "mincid_master.json")):
            shutil.copyfile(src, os.path.join(self.__working_dir, dst_name))

        self.__logger = MLogger("Project", self.__name, self.__working_dir)
        self.__logger.info("Init project [%s]" % self.__name)
Exemple #4
0
class Project(object):
    """One CI project build: sets up a timestamped working directory,
    prunes old builds, and submits the branch-configuration build as a
    Slurm sbatch job."""

    def __init__(self, master_conf, desc_file):
        """Load configuration and create the per-build directories.

        :param master_conf: path to the mincid master JSON config
        :param desc_file: path to the project description JSON file
        """
        with open(master_conf, "r") as fd:
            self.__master_config = json.load(fd)
        with open(desc_file, "r") as fd:
            self.__config = json.load(fd)
        self.__name = self.__config['name']
        # Filesystem-safe representation of the project name.
        self.__rfs = RFSString(self.__name)

        # Timestamped build dir: <worker_dir>/<project>/<YYYYmmdd-HHMMSS>.
        self.__tmp_dir = os.path.join(
            self.__master_config["worker_dir"],
            self.__rfs.fs(),
            time.strftime("%Y%m%d-%H%M%S"))
        os.makedirs(self.__tmp_dir, exist_ok=True)

        self.__working_dir = os.path.join(self.__tmp_dir, ".mincid")
        os.makedirs(self.__working_dir, exist_ok=True)
        # World-writable working dir -- presumably so jobs running under
        # other accounts can write here; TODO confirm.
        os.chmod(self.__working_dir, 0o777)

        # Keep copies of both configuration files next to the build.
        shutil.copyfile(desc_file,
                        os.path.join(self.__working_dir, "project_config.json"))
        shutil.copyfile(master_conf,
                        os.path.join(self.__working_dir, "mincid_master.json"))
        self.__logger = MLogger("Project", self.__name, self.__working_dir)
        self.__logger.info("Init project [%s]" % self.__name)

    def cleanup(self):
        """Cleanup: removes old builds.

        Deletes the oldest build directories under the project's worker
        dir until fewer than 'max_build_cnt' entries remain.
        """
        bdir = os.path.join(
            self.__master_config["worker_dir"],
            self.__rfs.fs())
        ldirs = os.listdir(bdir)
        # Check if there is something to do.
        mbuild_cnt = int(self.__config['max_build_cnt'])
        if len(ldirs) < mbuild_cnt:
            self.__logger.info("Nothing to remove: should store [%d] existent [%d]"
                               % (mbuild_cnt, len(ldirs)))
            return
        # The %Y%m%d-%H%M%S directory names sort lexicographically in
        # chronological order, so index 0 is the oldest build.
        sldirs = sorted(ldirs)
        while len(sldirs) >= mbuild_cnt:
            self.__logger.info("Removing old build [%s]" % sldirs[0])
            shutil.rmtree(os.path.join(bdir, sldirs[0]))
            del sldirs[0]

    def process(self):
        """Submit the branch-configuration build via sbatch.

        Waits only for the sbatch *submission* command to finish; the
        job itself runs asynchronously under Slurm.
        """
        self.__logger.info("Start project [%s]" % self.__name)
        # sbatch's own stdout/stderr is captured into this file.
        stdouterr_filename = os.path.join(self.__working_dir,
                                          "sbatch_project.stdouterr")
        with open(stdouterr_filename, "w") as fd_stdouterr:
            p = subprocess.Popen(
                ["sbatch", "--job-name=%s+BranchesConfig" % self.__name,
                "--output=%s" % os.path.join(self.__working_dir,
                                             "slurm_build_project_%j.out"),
                 "--export=PYTHONPATH",
                 os.path.join(self.__master_config['mincid_install_dir'],
                              "build_branches_config.py"), self.__tmp_dir],
                stdout=fd_stdouterr, stderr=fd_stdouterr)
        p.wait()
        self.__logger.info("sbatch process return value [%d]" % p.returncode)
        self.__logger.info("Finished project startup [%s]" % self.__name)
Exemple #5
0
    def __start_variant(self, sname, image, variant_list):
        """Submit one build variant of stage *sname* as an sbatch job.

        Writes a variant-description JSON into a per-variant temp
        directory, submits build_variant.py via sbatch (turning the
        stage's 'depends_on' entries into --dependency=afterok job ids)
        and records the new job id in self.__jobids[sname].

        :param sname: stage name (key into the branch job config)
        :param image: image description dict; reads 'base' and, when
            present, 'prepare' and 'build_prepare'
        :param variant_list: list of variant identifiers (may be empty)
        """
        base = self.__config.expand(image['base'])
        self.__logger.info("Start variant [%s] [%s] [%s]"
                           % (sname, base, variant_list))
        stdouterr_filename = os.path.join(self.__branch_dir,
                                          "start_variants.stdouterr")

        dep_jobids = []
        # Collect jobids of all dependend stages
        if 'depends_on' in self.__config.branch_jobs()[sname]:
            for dep_stage in self.__config.branch_jobs()[sname]['depends_on']:
                dep_jobids.extend(self.__jobids[dep_stage])
            self.__logger.info("Dependent jobids [%s]" % dep_jobids)

        with open(stdouterr_filename, "w") as fd_stdouterr:
            # Create own temp dir, named from project, branch, stage,
            # base image and the variant list.
            rfsname = RFSString(self.__name)
            rfsbase = RFSString(base)
            variant_name = "%s+%s+%s+%s+%s" % \
                           (self.__config.project_cfg('name'),
                            rfsname.fs(),
                            sname, rfsbase.fs(), "_".join(variant_list))
            variant_tmp_dir = os.path.join(self.__variants_base_dir, variant_name)
            os.makedirs(variant_tmp_dir, exist_ok=True)

            # Description handed over to build_variant.py as JSON.
            variant_desc = {
                'name': variant_name,
                'base': base,
                'branch_name': self.__name,
                'global_tmp_dir': self.__tmp_dir,
                'directory': variant_tmp_dir
            }

            if variant_list:
                variant_desc['variant_list'] = variant_list

            # Pass through the stage's optional 'install' / 'run' steps.
            for pword in ('install', 'run'):
                if pword in self.__config.branch_jobs()[sname]:
                    variant_desc[
                        pword] = self.__config.branch_jobs()[sname][pword]

            if 'prepare' in image:
                variant_desc['prepare'] = image['prepare']

            if 'build_prepare' in image:
                variant_desc['build_prepare'] = image['build_prepare']

            variant_cfg_file_name = os.path.join(
                variant_tmp_dir, "variant.json")
            with open(variant_cfg_file_name, "w") as fd:
                json.dump(variant_desc, fd)

            subproc_slist = [
                "sbatch", "--job-name=%s" % variant_name,
                "--output=%s" % os.path.join(self.__working_dir,
                                             "slurm_build_branch_%j.out"),
                "--extra-node-info=1:2:1",
                "--export=PYTHONPATH"]
            if dep_jobids:
                dep_str = ":".join(str(x) for x in dep_jobids)
                self.__logger.info("New batch is dependent on [%s]" %
                                   dep_str)
                subproc_slist.append("--dependency=afterok:%s" % dep_str)
            subproc_slist.extend(
                [os.path.join(self.__master_config['mincid_install_dir'],
                              "build_variant.py"), variant_cfg_file_name])

            p = subprocess.Popen(subproc_slist,
                                 stdout=subprocess.PIPE, stderr=fd_stdouterr)

        res = p.stdout.read()
        p.wait()

        self.__logger.info("sbatch process return value [%d]" % p.returncode)

        # sbatch prints "Submitted batch job <id>"; take the last
        # whitespace-separated token instead of relying on a fixed
        # character offset (the original decoded[20:-1]).
        decoded = res.decode("UTF-8")
        jobnr = int(decoded.strip().split()[-1])
        self.__jobids[sname].append(jobnr)

        self.__logger.info("sbatch process id [%d]" % jobnr)
        self.__logger.info("Finished variant [%s] [%s] [%s]"
                           % (sname, base, variant_list))