Example #1
    def __init__(self, job_name="cloudal", **kwargs):
        self.keep_alive = kwargs.get('keep_alive')
        self.out_of_chart = kwargs.get('out_of_chart')
        self.oar_job_ids = kwargs.get('oar_job_ids')
        self.config_file_path = kwargs.get('config_file_path')
        self.configs = kwargs.get('configs')
        self.is_reservation = kwargs.get('is_reservation')
        self.no_deploy_os = kwargs.get('no_deploy_os')
        self.job_name = job_name

        self.max_deploy = MAX_RETRY_DEPLOY
        """self.oar_result containts the list of tuples (oar_job_id, site_name)
        that identifies the reservation on each site,
        which can be retrieved from the command line arguments or from make_reservation()
        """
        self.oar_result = list()
        """
        TODO:
            + write a function to check that all nodes in a job are alive
            + modify the make_reservation function to accept error_hosts
              -> make a replacement reservation, cancel the program, or ignore them
        """
        if self.oar_job_ids is not None:
            logger.info('Checking whether the given oar_job_ids are valid')
            for each in self.oar_job_ids.split(','):
                site_name, oar_job_id = each.split(':')
                oar_job_id = int(oar_job_id)
                # check validity of oar_job_id
                job_info = get_oar_job_info(oar_job_id=oar_job_id,
                                            frontend=site_name)
                if job_info is None or len(job_info) == 0:
                    logger.error(
                        "Job id: %s in %s is not a valid Grid5000 oar_job_id" %
                        (oar_job_id, site_name))
                    logger.error(
                        "Please rerun the script with a correct oar_job_id")
                    exit()
                self.oar_result.append((oar_job_id, site_name))
            if self.config_file_path and not self.configs:
                self.configs = parse_config_file(self.config_file_path)
            return
        elif self.configs and isinstance(self.configs, dict):
            logger.debug("Use configs instead of config file")
        elif self.config_file_path is None:
            logger.error(
                "Please provide either a provisioning config file or an oar_job_id."
            )
            exit()
        else:
            super(g5k_provisioner,
                  self).__init__(config_file_path=self.config_file_path)
        # parse clusters into the format G5K expects: {cluster_name: n_nodes}
        self.clusters = {
            each['cluster']: each['n_nodes']
            for each in self.configs['clusters']
        }
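
For illustration, the comprehension above turns the clusters list from the sample config in Example #6 into a simple name-to-count mapping (a minimal sketch, not part of the original code):

    configs = {'clusters': [{'cluster': 'dahu', 'n_nodes': 3}]}
    clusters = {each['cluster']: each['n_nodes'] for each in configs['clusters']}
    assert clusters == {'dahu': 3}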
Example #2
def test_define_parameters():
    dir_path = os.path.dirname(os.path.realpath(__file__))
    file_path = os.path.join(dir_path, "../../test_data/test.yaml")
    result = parse_config_file(file_path)
    actual = define_parameters(result['parameters'])
    assert isinstance(actual, dict)
    assert actual['iteration'] == range(1, 5)
    assert actual['duration'] == [10]
    assert actual['workloads'] == ['write']
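
Combined with the raw values in Example #6 (iteration: ['1..4'], duration: 10, workloads: 'write'), these assertions pin down the normalization contract of define_parameters. Below is a minimal sketch of that behavior, inferred from the tests alone; define_parameters_sketch is a hypothetical name, and the real cloudal implementation may differ:

    def define_parameters_sketch(parameters):
        # Normalize every parameter into an iterable of values:
        # an 'a..b' string wrapped in a list becomes a range,
        # a bare scalar becomes a one-element list, lists pass through.
        normalized = dict()
        for name, value in parameters.items():
            if (isinstance(value, list) and len(value) == 1
                    and isinstance(value[0], str) and '..' in value[0]):
                start, end = value[0].split('..')
                normalized[name] = range(int(start), int(end) + 1)
            elif not isinstance(value, list):
                normalized[name] = [value]
            else:
                normalized[name] = value
        return normalized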
Example #3
    def run(self):
        logger.debug('Parse and convert configs for G5K provisioner')
        self.configs = parse_config_file(self.args.config_file_path)
        kube_master_site = self.create_configs()

        logger.info('''Your topology:
                        Antidote DCs: %s
                        n_antidotedb_per_DC: %s ''' %
                    (len(self.configs['exp_env']['antidote_clusters']),
                     self.configs['exp_env']['n_antidotedb_per_dc']))

        kube_namespace = 'elmerfs-exp'
        self.setup_env(kube_master_site, kube_namespace)
Example #4
    def run(self):
        logger.debug("Parse and convert configs for G5K provisioner")
        self.configs = parse_config_file(self.args.config_file_path)
        kube_master_site = self.create_configs()

        logger.info("""Your topology:  %s""" %
                    self.configs["exp_env"]["antidote_clusters"])

        # Expand the latency parameter into a logarithmic-scale set of values
        if self.configs["parameters"]["latency_interval"] == "logarithmic scale":
            start, end = self.configs["parameters"]["latency"]
            latency = [start, end]
            log_start = int(math.ceil(math.log(start)))
            log_end = int(math.ceil(math.log(end)))
            for i in range(log_start, log_end):
                latency.append(int(math.exp(i)))
                latency.append(int(math.exp(i + 0.5)))
            del self.configs["parameters"]["latency_interval"]
            self.configs["parameters"]["latency"] = list(set(latency))

        if self.configs["parameters"]["benchmarks"] == "performance":
            self.configs["parameters"]["n_nodes_run_per_dc"] = list(
                range(
                    1, self.configs["exp_env"]["antidote_clusters"][0]
                    ["n_antidotedb_per_dc"] + 1))

        sweeper = create_combs_queue(
            result_dir=self.configs["exp_env"]["results_dir"],
            parameters=self.configs["parameters"],
        )
        kube_namespace = "elmerfs-exp"
        oar_job_ids = None
        while len(sweeper.get_remaining()) > 0:
            if oar_job_ids is None:
                oar_job_ids, kube_master, elmerfs_hosts = self.setup_env(
                    kube_master_site, kube_namespace)

            comb = sweeper.get_next()
            sweeper = self.run_workflow(
                elmerfs_hosts=elmerfs_hosts,
                kube_master=kube_master,
                kube_namespace=kube_namespace,
                comb=comb,
                sweeper=sweeper,
            )

            if not is_job_alive(oar_job_ids):
                oardel(oar_job_ids)
                oar_job_ids = None
        logger.info("Finish the experiment!!!")
Example #5
    def run(self):
        logger.debug('Parse and convert configs for G5K provisioner')
        self.configs = parse_config_file(self.args.config_file_path)

        # Add the number of Antidote DC as a parameter
        self.configs['parameters']['n_dc'] = len(
            self.configs['exp_env']['clusters'])

        logger.debug('Normalize the parameter space')
        self.normalized_parameters = define_parameters(
            self.configs['parameters'])

        logger.debug('Normalize the given configs')
        kube_master_site = self.create_configs()

        logger.info('''Your largest topology:
                        Antidote DCs: %s
                        n_antidotedb_per_DC: %s  ''' %
                    (len(self.configs['exp_env']['clusters']),
                     max(self.normalized_parameters['n_nodes_per_dc'])))

        sweeper = create_combs_queue(
            result_dir=self.configs['exp_env']['results_dir'],
            parameters=self.configs['parameters'],
        )
        kube_namespace = 'elmerfs-exp'
        oar_job_ids = None
        while len(sweeper.get_remaining()) > 0:
            if oar_job_ids is None:
                oar_job_ids, kube_master = self.setup_env(
                    kube_master_site, kube_namespace)

            comb = sweeper.get_next()
            sweeper = self.run_workflow(
                kube_master=kube_master,
                kube_namespace=kube_namespace,
                comb=comb,
                sweeper=sweeper,
            )

            if not is_job_alive(oar_job_ids):
                oardel(oar_job_ids)
                oar_job_ids = None
        logger.info('Finished the experiment!')
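
Both experiment loops above rely on is_job_alive to decide when the reservation must be remade. A hedged sketch of such a check, assuming oar_job_ids is the list of (oar_job_id, site_name) tuples built in Example #1 and that execo_g5k's get_oar_job_info reports a 'state' field (the actual cloudal implementation may differ):

    from execo_g5k import get_oar_job_info

    def is_job_alive(oar_job_ids):
        # The job set counts as alive only while every job is still queued or running
        for oar_job_id, site_name in oar_job_ids:
            job_info = get_oar_job_info(oar_job_id=oar_job_id, frontend=site_name)
            if not job_info or job_info.get('state') not in ('Waiting', 'Running'):
                return False
        return True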
Example #6
def test_parse_config_file_valid_input():
    expected = {
        'walltime': 23400,
        'starttime': None,
        'cloud_provider_image': 'debian10-x64-big',
        'clusters': [{
            'cluster': 'dahu',
            'n_nodes': 3
        }],
        'parameters': {
            'iteration': ['1..4'],
            'duration': 10,
            'workloads': 'write'
        }
    }

    dir_path = os.path.dirname(os.path.realpath(__file__))
    file_path = os.path.join(dir_path, "../test_data/test.yaml")
    result = parse_config_file(file_path)
    assert result == expected
Example #7
    def run(self):
        logger.debug('Parse and convert configs for OVH provisioner')
        self.configs = parse_config_file(self.args.config_file_path)
        # Add the number of Antidote DC as a parameter
        self.configs['parameters']['n_dc'] = len(
            self.configs['exp_env']['clusters'])

        logger.debug('Normalize the parameter space')
        self.normalized_parameters = define_parameters(
            self.configs['parameters'])

        logger.debug('Normalize the given configs')
        kube_master_site = self.create_configs()

        logger.info('''Your largest topology:
                        Antidote DCs: %s
                        n_antidotedb_per_DC: %s
                        n_fmke_client_per_DC: %s ''' %
                    (len(self.configs['exp_env']['clusters']),
                     max(self.normalized_parameters['n_nodes_per_dc']),
                     max(self.normalized_parameters['n_fmke_client_per_dc'])))

        logger.info('Creating the combination list')
        sweeper = create_paramsweeper(
            result_dir=self.configs['exp_env']['results_dir'],
            parameters=self.normalized_parameters)

        kube_namespace = 'fmke-exp'
        node_ids_file = None
        while len(sweeper.get_remaining()) > 0:
            if node_ids_file is None:
                kube_master, node_ids_file = self.setup_env(
                    kube_master_site, kube_namespace)
            comb = sweeper.get_next()
            sweeper = self.run_exp_workflow(kube_namespace=kube_namespace,
                                            kube_master=kube_master,
                                            comb=comb,
                                            sweeper=sweeper)
            # if not is_nodes_alive(node_ids_file):
            #     node_ids_file = None
        logger.info('Finished the experiment!')
Example #8
    def run(self):
        logger.debug('Parse and convert configs for G5K provisioner')
        self.configs = parse_config_file(self.args.config_file_path)
        kube_master_site = self.create_configs()

        logger.info('''Your topology:
                        Antidote DCs: %s
                        n_antidotedb_per_DC: %s
                        n_fmke_per_DC: %s
                        n_fmke_client_per_DC: %s ''' % (
            len(self.configs['exp_env']['clusters']),
            self.configs['exp_env']['n_antidotedb_per_dc'],
            self.configs['exp_env']['n_fmke_app_per_dc'],
            self.configs['exp_env']['n_fmke_client_per_dc'])
        )

        logger.debug('Creating the combination list')
        sweeper = create_combs_queue(result_dir=self.configs['exp_env']['results_dir'],
                                     parameters=self.configs['parameters'])

        kube_namespace = 'fmke-exp'
        oar_job_ids = None
        while len(sweeper.get_remaining()) > 0:
            if oar_job_ids is None:
                kube_master, oar_job_ids = self.setup_env(kube_master_site, kube_namespace)

            comb = sweeper.get_next()
            sweeper = self.run_workflow(kube_namespace=kube_namespace,
                                        kube_master=kube_master,
                                        comb=comb,
                                        sweeper=sweeper)

            if not is_job_alive(oar_job_ids):
                oardel(oar_job_ids)
                oar_job_ids = None
        logger.info('Finished the experiment!')
Example #9
    def run(self):
        logger.debug('Parse and convert configs for G5K provisioner')
        self.configs = parse_config_file(self.args.config_file_path)

        logger.debug('Creating the combination list')
        sweeper = create_combs_queue(
            result_dir=self.configs['exp_env']['results_dir'],
            parameters=self.configs['parameters'])

        oar_job_ids = None
        logger.info('Running the experiment workflow')
        while len(sweeper.get_remaining()) > 0:
            if oar_job_ids is None:
                logger.info('Setting the experiment environment')
                oar_job_ids = self.setup_env()

            comb = sweeper.get_next()
            sweeper = self.run_workflow(comb=comb, sweeper=sweeper)

            if not is_job_alive(oar_job_ids):
                oardel(oar_job_ids)
                oar_job_ids = None

        logger.info('Finished the experiment!')
Example #10
def test_parse_config_file_wrong_input(file_path, message):
    with pytest.raises(IOError) as exc_info:
        parse_config_file(file_path)
    assert message in str(exc_info.value)
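
Together with Example #6, this test fixes the contract of parse_config_file: a readable YAML path yields a plain dict, while a bad path raises IOError. A minimal sketch satisfying both tests (the real implementation may also post-process fields such as walltime):

    import os
    import yaml

    def parse_config_file(config_file_path):
        # Reject missing paths explicitly so callers get a clear IOError
        if config_file_path is None or not os.path.exists(config_file_path):
            raise IOError('Please provide a valid config file path, got: %s' % config_file_path)
        with open(config_file_path) as f:
            return yaml.safe_load(f)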
Example #11
    def __init__(self, config_file_path):
        self.configs = parse_config_file(config_file_path)