Example #1
    def _initialize_conf(self):
        """Merge locally-specified configuration files with default files
        from the distribution"""

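        # Note: the trailing space baked into ".template " is what
        # separates cp's source and destination arguments once the
        # strings are concatenated.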
        action = Remote(
            "cp " + os.path.join(self.conf_dir, MR_CONF_FILE + ".template ") +
            os.path.join(self.conf_dir, MR_CONF_FILE), self.hosts)
        action.run()

        super(HadoopV2Cluster, self)._initialize_conf()
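The example fires the copy without inspecting its outcome. When the result matters, an execo Remote action exposes an aggregate ok flag plus per-process details; a minimal sketch, with hypothetical hosts and paths:

from execo import Remote

hosts = ["node-1.example.com", "node-2.example.com"]  # hypothetical
action = Remote("cp /tmp/app.conf.template /tmp/app.conf", hosts)
action.run()
if not action.ok:
    # Report the hosts on which the copy failed.
    for p in action.processes:
        if not p.ok:
            print("failed on %s (exit code %s)" % (p.host.address,
                                                   p.exit_code))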
Example #2
    def bootstrap(self, tar_file):
        """Install Hadoop in all cluster nodes from the specified tar.gz file.

        Args:
          hadoop_tar_file (str):
            The file containing Hadoop binaries.
        """

        if super(HadoopV2Cluster, self).bootstrap(tar_file):
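            # Copy the template into place only when the base class
            # reports a successful bootstrap.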
            action = Remote(
                "cp " +
                os.path.join(self.conf_dir, MR_CONF_FILE + ".template ") +
                os.path.join(self.conf_dir, MR_CONF_FILE), self.hosts)
            action.run()
Example #3
def check_if_file_exists(file_name, nodes, connection_params):
    """
    Check if a file exists on a set of nodes.

    :param file_name: path of the file to test on each node
    :param nodes: the nodes to check
    :param connection_params: execo connection parameters
    :return: the set of addresses of the nodes that do NOT have the file
    """
    r = Remote(cmd="test -e " + file_name,
               hosts=nodes,
               connection_params=connection_params,
               process_args={"nolog_exit_code": True}
               )
    r.run()
    not_ok = [p for p in r.processes if p.ok is not True]
    return {p.host.address for p in not_ok}
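A possible call site, with hypothetical node names (any iterable of execo Host objects should work):

from execo import Host
from execo.config import default_connection_params

nodes = [Host("node-1.example.com"), Host("node-2.example.com")]
missing = check_if_file_exists("/etc/oar/oar.conf", nodes,
                               default_connection_params)
if missing:
    print("file missing on: %s" % ", ".join(sorted(missing)))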
Example #4
    def prepare_bench(self):
        """Bench configuration and compilation, copy binaries to frontends.

        Return True if the preparation is ok.
        """
        logger.info("preparation: configure and compile benchmark")
        # The involved sites. We will do the compilation on the first of them.
        sites = list(set(map(get_cluster_site, self.parameters['cluster'])))
        # Generate the bench compilation configuration.
        bench_list = '\n'.join(['lu\t%s\t%s' % (size, n_core)
                                for n_core in self.parameters['n_core']
                                for size in self.parameters['size']])
        # Reserve a node, because compiling on the frontend is forbidden
        # and because we need mpif77.
        jobs = oarsub([(OarSubmission(resources="nodes=1",
                                      job_type='allow_classic_ssh',
                                      walltime='0:10:00'), sites[0])])
        if not jobs[0][0]:
            return False
        try:
            logger.info("copying bench archive to %s" % (sites[0],))
            Put([sites[0]], ['NPB3.3-MPI.tar.bz2']).run()
            logger.info("extracting bench archive on %s" % (sites[0],))
            Remote('tar -xjf NPB3.3-MPI.tar.bz2', [sites[0]]).run()
            logger.info("waiting for job start %s" % (jobs[0],))
            wait_oar_job_start(*jobs[0], prediction_callback=pred_cb)
            logger.info("getting nodes of %s" % (jobs[0],))
            nodes = get_oar_job_nodes(*jobs[0])
            logger.info("configuring bench compilation")
            Remote('echo "%s" > ~/NPB3.3-MPI/config/suite.def' % bench_list,
                   nodes).run()
            logger.info("compiling bench")
            compilation = Remote('cd NPB3.3-MPI && make clean && make suite',
                                 nodes).run()
            logger.info("compilation finished")
        except Exception:
            logger.error("unable to compile bench")
            return False
        finally:
            oardel(jobs)
        # Copy the binaries to all the other frontends.
        frontends = sites[1:]
        rsync = Remote('rsync -avuP ~/NPB3.3-MPI/ {{frontends}}:NPB3.3-MPI',
                       [get_host_site(nodes[0])] * len(frontends))
        rsync.run()
        return compilation.ok and rsync.ok
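The final rsync relies on execo's {{...}} command substitutions: as I read execo's substitution rules, the pattern is evaluated per process and, when it yields a list, process i receives element i, so each copy of the source frontend is paired with a distinct destination. A minimal sketch of the same mechanism, with hypothetical frontend names:

from execo import Remote

destinations = ["frontend-b", "frontend-c"]  # hypothetical
# One process per element of the host list; {{destinations}} expands
# to destinations[i] in the command of the i-th process.
sync = Remote("rsync -avuP ~/NPB3.3-MPI/ {{destinations}}:NPB3.3-MPI",
              ["frontend-a"] * len(destinations))
sync.run()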
Example #5
    def run(self):
        """Run the experiment"""
        already_configured = self.options.already_configured
        reservation_job_id = int(self.options.reservation_id) \
            if self.options.reservation_id is not None else None
        is_a_test = self.options.is_a_test

        if is_a_test:
            logger.warn('THIS IS A TEST! This run will use only a few '
                        'resources')

        # make the result folder writable for all
        os.chmod(self.result_dir, 0o777)
        # Import configuration
        with open(self.args[0]) as config_file:
            config = json.load(config_file)
        # backup configuration
        copy(self.args[0], self.result_dir)

        site = config["grid5000_site"]
        resources = config["resources"]
        nb_experiment_nodes = config["nb_experiment_nodes"]
        walltime = str(config["walltime"])
        env_name = config["kadeploy_env_name"]
        workloads = config["workloads"]
        # Check that the workload files exist (this supposes that the same
        # NFS mount point is present on the remote and the local environment).
        for workload_file in workloads:
            with open(workload_file):
                pass
            # copy the workloads files to the results dir
            copy(workload_file, self.result_dir)

        # define the workloads parameters
        self.parameters = {
            'workload_filename': workloads
        }
        logger.info('Workloads: {}'.format(workloads))

        # define the iterator over the parameters combinations
        self.sweeper = ParamSweeper(os.path.join(self.result_dir, "sweeps"),
                                    sweep(self.parameters))

        # A previous run (resumed with -c result_dir) may have already
        # done some combinations, which are then skipped.
        logger.info('Skipped parameters: {}'.format(
            str(self.sweeper.get_skipped())))

        logger.info('Number of parameter combinations: {}'.format(
            str(len(self.sweeper.get_remaining()))))
        logger.info('Combinations: {}'.format(
            str(self.sweeper.get_remaining())))

        if reservation_job_id is not None:
            jobs = [(reservation_job_id, site)]
        else:
            jobs = oarsub([(OarSubmission(resources=resources,
                                          job_type='deploy',
                                          walltime=walltime), site)])
        job_id, site = jobs[0]
        if job_id:
            try:
                logger.info("waiting job start %s on %s" % (job_id, site))
                wait_oar_job_start(
                    job_id, site, prediction_callback=prediction_callback)
                logger.info("getting nodes of %s on %s" % (job_id, site))
                nodes = get_oar_job_nodes(job_id, site)
                # sort the nodes
                nodes = sorted(nodes, key=lambda node: node.address)
                # get only the necessary nodes under the switch
                if nb_experiment_nodes > len(nodes):
                    raise RuntimeError('The number of nodes in the '
                                       'reservation ({}) does not match the '
                                       'requested resources '
                                       '({})'.format(len(nodes),
                                                     nb_experiment_nodes))
                nodes = nodes[:nb_experiment_nodes]
                logger.info("deploying nodes: {}".format(str(nodes)))
                deployed, undeployed = deploy(
                    Deployment(nodes, env_name=env_name),
                    check_deployed_command=already_configured)
                if undeployed:
                    logger.warn(
                        "NOT deployed nodes: {}".format(str(undeployed)))
                    raise RuntimeError('Deployment failed')

                if not already_configured:

                    # install OAR
                    install_cmd = "apt-get update; apt-get install -y "
                    node_packages = "oar-node"
                    logger.info(
                        "installing OAR nodes: {}".format(str(nodes[1:])))
                    install_oar_nodes = Remote(
                        install_cmd + node_packages,
                        nodes[1:],
                        connection_params={'user': '******'})
                    install_oar_nodes.start()

                    server_packages = ("oar-server oar-server-pgsql oar-user "
                                       "oar-user-pgsql postgresql python3-pip "
                                       "libjson-perl postgresql-server-dev-all")
                    install_oar_sched_cmd = """
                    mkdir -p /opt/oar_sched; \
                    cd /opt/oar_sched; \
                    git clone https://github.com/oar-team/oar3.git; \
                    cd oar3; \
                    git checkout dce942bebc2; \
                    pip3 install -e .; \
                    cd /usr/lib/oar/schedulers; \
                    ln -s /usr/local/bin/kamelot; \
                    pip3 install psycopg2
                    """
                    logger.info("installing OAR server node: {}".format(str(nodes[0])))
                    install_master = SshProcess(install_cmd + server_packages +
                                                ";" + install_oar_sched_cmd, nodes[0],
                                                connection_params={'user': '******'})
                    install_master.run()
                    install_oar_nodes.wait()

                    if not install_master.ok:
                        Report(install_master)

                    configure_oar_cmd = """
                    sed -i \
                        -e 's/^\(DB_TYPE\)=.*/\\1="Pg"/' \
                        -e 's/^\(DB_HOSTNAME\)=.*/\\1="localhost"/' \
                        -e 's/^\(DB_PORT\)=.*/\\1="5432"/' \
                        -e 's/^\(DB_BASE_PASSWD\)=.*/\\1="oar"/' \
                        -e 's/^\(DB_BASE_LOGIN\)=.*/\\1="oar"/' \
                        -e 's/^\(DB_BASE_PASSWD_RO\)=.*/\\1="oar_ro"/' \
                        -e 's/^\(DB_BASE_LOGIN_RO\)=.*/\\1="oar_ro"/' \
                        -e 's/^\(SERVER_HOSTNAME\)=.*/\\1="localhost"/' \
                        -e 's/^\(SERVER_PORT\)=.*/\\1="16666"/' \
                        -e 's/^\(LOG_LEVEL\)\=\"2\"/\\1\=\"3\"/' \
                        -e 's#^\(LOG_FILE\)\=.*#\\1="{result_dir}/oar.log"#' \
                        -e 's/^\(JOB_RESOURCE_MANAGER_PROPERTY_DB_FIELD\=\"cpuset\".*\)/#\\1/' \
                        -e 's/^#\(CPUSET_PATH\=\"\/oar\".*\)/\\1/' \
                        -e 's/^\(FINAUD_FREQUENCY\)\=.*/\\1="0"/' \
                        /etc/oar/oar.conf
                    """.format(result_dir=self.result_dir)
                    configure_oar = Remote(configure_oar_cmd, nodes,
                                           connection_params={'user': '******'})
                    configure_oar.run()
                    logger.info("OAR is configured on all nodes")

                    # Configure server
                    create_db = "oar-database --create --db-is-local"
                    config_oar_sched = ("oarnotify --remove-queue default;"
                                        "oarnotify --add-queue default,1,kamelot")
                    start_oar = "systemctl start oar-server.service"
                    logger.info(
                        "configuring OAR database: {}".format(str(nodes[0])))
                    config_master = SshProcess(create_db + ";" + config_oar_sched + ";" + start_oar,
                                               nodes[0],
                                               connection_params={'user': '******'})
                    config_master.run()

                    # propagate SSH keys
                    logger.info("configuring OAR SSH")
                    oar_key = "/tmp/.ssh"
                    Process('rm -rf ' + oar_key).run()
                    Process('scp -o BatchMode=yes -o PasswordAuthentication=no '
                            '-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null '
                            '-o ConnectTimeout=20 -rp -o User=root ' +
                            nodes[0].address + ":/var/lib/oar/.ssh"
                            ' ' + oar_key).run()
                    # Get(nodes[0], "/var/lib/oar/.ssh", [oar_key], connection_params={'user': '******'}).run()
                    Put(nodes[1:], [oar_key], "/var/lib/oar/", connection_params={'user': '******'}).run()
                    add_resources_cmd = """
                    oarproperty -a cpu || true; \
                    oarproperty -a core || true; \
                    oarproperty -c -a host || true; \
                    oarproperty -a mem || true; \
                    """
                    for node in nodes[1:]:
                        add_resources_cmd += (
                            "oarnodesetting -a -h {node} -p host={node} "
                            "-p cpu=1 -p core=4 -p cpuset=0 -p mem=16; \\\n"
                        ).format(node=node.address)

                    add_resources = SshProcess(add_resources_cmd, nodes[0],
                                               connection_params={'user': '******'})
                    add_resources.run()

                    if add_resources.ok:
                        logger.info("oar is now configured!")
                    else:
                        raise RuntimeError("error in the OAR configuration: Abort!")

                # TODO: back up the OAR configuration

                # Do the replay
                logger.info('beginning the replay')
                while len(self.sweeper.get_remaining()) > 0:
                    combi = self.sweeper.get_next()
                    workload_file = os.path.basename(combi['workload_filename'])
                    oar_replay = SshProcess(script_path + "/oar_replay.py " +
                                            combi['workload_filename'] + " " +
                                            self.result_dir + "  oar_gant_" +
                                            workload_file,
                                            nodes[0])
                    oar_replay.stdout_handlers.append(self.result_dir + '/' +
                                                      workload_file + '.out')
                    logger.info("replaying workload: {}".format(combi))
                    oar_replay.run()
                    if oar_replay.ok:
                        logger.info("Replay workload OK: {}".format(combi))
                        self.sweeper.done(combi)
                    else:
                        logger.info("Replay workload NOT OK: {}".format(combi))
                        self.sweeper.cancel(combi)
                        raise RuntimeError("error in the OAR replay: Abort!")

            except:
                traceback.print_exc()
                ipdb.set_trace()

            finally:
                if is_a_test:
                    ipdb.set_trace()
                if reservation_job_id is None:
                    logger.info("delete job: {}".format(jobs))
                    oardel(jobs)
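The replay loop above is the standard execo_engine sweep pattern: the sweep state is persisted under the result directory, and each combination is marked done or cancelled, which is what lets an interrupted run be resumed (the -c result_dir skip mentioned earlier). A stripped-down sketch of the pattern, with hypothetical parameters and a hypothetical run_one() step:

import os
from execo_engine import ParamSweeper, sweep

result_dir = "/tmp/results"                       # hypothetical
parameters = {'workload_filename': ['w1', 'w2'],  # hypothetical
              'seed': [1, 2, 3]}

# State lives in result_dir/sweeps, so a re-run skips combinations
# that were already marked done.
sweeper = ParamSweeper(os.path.join(result_dir, "sweeps"),
                       sweep(parameters))
while len(sweeper.get_remaining()) > 0:
    combi = sweeper.get_next()
    try:
        run_one(combi)         # hypothetical experiment step
        sweeper.done(combi)    # never offered again
    except Exception:
        sweeper.cancel(combi)  # returned to the remaining set
        raise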
Example #6
    def workflow(self, comb, host, comb_dir):
        """ """
        comb_ok = False
        thread_name = style.Thread(str(host).split('.')[0]) + ': '
        logger.info(thread_name + 'Starting combination ' + slugify(comb))
        if 'parapluie' in str(host):
            nb_proc = 24
        elif 'paranoia' in str(host):
            nb_proc = 20
        elif 'parapide' in str(host):
            nb_proc = 8
        else:
            nb_proc = 16

        try:
            self.export = "source ~/aevol_binary/intel/linux/mkl/bin/mklvars.sh intel64; "

            bucketname = self.working_dir + '/raevol_5_mut_lat/' + slugify(
                comb) + '/'

            logger.info(thread_name + "Killing other RAevol")

            killa = Remote("killall -9 aevol_run", [host])
            for killp in killa.processes:
                killp.ignore_error = True
            killa.run()

            if os.path.isdir(bucketname) and os.path.exists(bucketname +
                                                            '/last_gener.txt'):
                logger.info(thread_name + "Resuming AEVOL from NFS backup")

                with open(bucketname + '/last_gener.txt') as gen_file:
                    last_gen = gen_file.read()

                if int(last_gen) < 300000:
                    logger.info(thread_name + "Resuming AEVOL Run from " +
                                str(int(last_gen)))
                    rem = Remote(
                        self.export + 'cd ' + bucketname +
                        '; /home/jorouzaudcornabas/aevol_binary/aevol/src/aevol_run -p '
                        + str(nb_proc) + ' -e 300000 -r ' + last_gen +
                        ' >> aevol_run.log', [host],
                        process_args={
                            'default_stdout_handler': False,
                            'default_stderr_handler': False
                        }).run()
                    if rem.ok:
                        comb_ok = True
                else:
                    comb_ok = True
            else:
                Remote('mkdir -p ' + bucketname, [host]).run()

                param_file = '/home/jorouzaudcornabas/aevol_binary/aevol/execo/mut_lat/param_tmpl.in'

                logger.info(thread_name + 'Generate config file ' + param_file)

                f_template = open(param_file)
                fd, outfile = mkstemp(dir='/tmp/',
                                      prefix=slugify(comb) + '_param')
                f = os.fdopen(fd, 'w')

                for line in f_template:
                    if 'CONFIGURE_ENVIRONMENT_VALUES' in line:
                        if comb['env'] == 'const':
                            line = line.replace('CONFIGURE_ENVIRONMENT_VALUES',
                                                'NB_ENVIRONMENTS 1')
                            f.write(line)
                            f.write('ENV_ADD_GAUSSIAN  1  0.5   0.2   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  1  0.5   0.4   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  1  0.5   0.6   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  1  0.5   0.8   0.05' +
                                    os.linesep)
                        elif comb['env'] == 'lat_3':
                            line = line.replace('CONFIGURE_ENVIRONMENT_VALUES',
                                                'NB_ENVIRONMENTS 2')
                            f.write(line)
                            f.write('ENV_ADD_GAUSSIAN  1  0.5   0.2   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  1  0.5   0.4   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  1  0.5   0.6   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  1  0.5   0.8   0.05' +
                                    os.linesep)

                            f.write('ENV_ADD_GAUSSIAN  2  0.5   0.2   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  2  0.5   0.4   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  2  0.5   0.65  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  2  0.5   0.8   0.05' +
                                    os.linesep)
                        elif comb['env'] == 'lat_all':
                            line = line.replace('CONFIGURE_ENVIRONMENT_VALUES',
                                                'NB_ENVIRONMENTS 16')
                            f.write(line)

                            #const

                            f.write('ENV_ADD_GAUSSIAN  1  0.5   0.2   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  1  0.5   0.4   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  1  0.5   0.6   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  1  0.5   0.8   0.05' +
                                    os.linesep)

                            # 1

                            f.write('ENV_ADD_GAUSSIAN  2  0.5   0.25  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  2  0.5   0.4   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  2  0.5   0.6   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  2  0.5   0.8   0.05' +
                                    os.linesep)

                            f.write('ENV_ADD_GAUSSIAN  3  0.5   0.2   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  3  0.5   0.45  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  3  0.5   0.6   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  3  0.5   0.8   0.05' +
                                    os.linesep)

                            f.write('ENV_ADD_GAUSSIAN  4  0.5   0.2   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  4  0.5   0.4   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  4  0.5   0.65  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  4  0.5   0.8   0.05' +
                                    os.linesep)

                            f.write('ENV_ADD_GAUSSIAN  5  0.5   0.2   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  5  0.5   0.4   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  5  0.5   0.6   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  5  0.5   0.85  0.05' +
                                    os.linesep)

                            # 2

                            f.write('ENV_ADD_GAUSSIAN  6  0.5   0.25  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  6  0.5   0.45  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  6  0.5   0.6   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  6  0.5   0.8   0.05' +
                                    os.linesep)

                            f.write('ENV_ADD_GAUSSIAN  7  0.5   0.25  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  7  0.5   0.4   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  7  0.5   0.65  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  7  0.5   0.8   0.05' +
                                    os.linesep)

                            f.write('ENV_ADD_GAUSSIAN  8  0.5   0.25  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  8  0.5   0.4   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  8  0.5   0.6   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  8  0.5   0.85  0.05' +
                                    os.linesep)

                            f.write('ENV_ADD_GAUSSIAN  9  0.5   0.2   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  9  0.5   0.45  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  9  0.5   0.65  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  9  0.5   0.8   0.05' +
                                    os.linesep)

                            f.write('ENV_ADD_GAUSSIAN  10  0.5   0.2   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  10  0.5   0.45  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  10  0.5   0.6   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  10  0.5   0.85  0.05' +
                                    os.linesep)

                            f.write('ENV_ADD_GAUSSIAN  11  0.5   0.2   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  11  0.5   0.4   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  11  0.5   0.65  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  11  0.5   0.85  0.05' +
                                    os.linesep)

                            # 3

                            f.write('ENV_ADD_GAUSSIAN  12  0.5   0.25  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  12  0.5   0.45  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  12  0.5   0.65  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  12  0.5   0.8   0.05' +
                                    os.linesep)

                            f.write('ENV_ADD_GAUSSIAN  13  0.5   0.25  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  13  0.5   0.45  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  13  0.5   0.6   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  13  0.5   0.85  0.05' +
                                    os.linesep)

                            f.write('ENV_ADD_GAUSSIAN  14  0.5   0.25  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  14  0.5   0.4   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  14  0.5   0.65  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  14  0.5   0.85  0.05' +
                                    os.linesep)

                            f.write('ENV_ADD_GAUSSIAN  15  0.5   0.2   0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  15  0.5   0.45  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  15  0.5   0.65  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  15  0.5   0.85  0.05' +
                                    os.linesep)

                            # 4

                            f.write('ENV_ADD_GAUSSIAN  16  0.5   0.25  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  16  0.5   0.45  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  16  0.5   0.65  0.05' +
                                    os.linesep)
                            f.write('ENV_ADD_GAUSSIAN  16  0.5   0.85  0.05' +
                                    os.linesep)
                    elif 'CONFIGURE_SIGNAL_VALUES' in line:
                        if comb['env'] == 'const':
                            line = line.replace('CONFIGURE_SIGNAL_VALUES', '')
                            f.write(line)

                        elif comb['env'] == 'lat_3':
                            line = line.replace(
                                'CONFIGURE_SIGNAL_VALUES',
                                'CREATE_SIGNAL h0 h0 h0 w0 m0 m1 m0 h1 h0 m0 h0 m1 h1 w0 h1 h0 m1 h1 m0 w0 w0 m0 w0 h0 h1 m1 w0 m0 m1 m0 w0 h1 h0 m0 h0 m1 h1 w0 h0 w0 m0 m1 m0 w0 h1 h0 w0 w0 h1'
                            )
                            f.write(line)

                            f.write('ENV_ADD_SIGNAL 2 1' + os.linesep)
                        elif comb['env'] == 'lat_all':
                            line = line.replace(
                                'CONFIGURE_SIGNAL_VALUES',
                                'CREATE_SIGNAL h0 w0 h1 m1 w0 h1 m0 h0 h1 w0 h0 m1 h1 h1 m1 m0 h0 w0 h1 m1 w0 h1 m0 h0 h1 w0 h0 m1 h1 h1 m1 m0 h1 m0 m1'
                            )
                            f.write(line)
                            f.write(
                                'CREATE_SIGNAL m0 h0 m1 h1 m1 w0 m0 m1 m0 h0 m1 h1 w0 h0 h0 h1 m1 m0 h1 w0 h1 h0 m1 h1 m0 w0 w0 m0 m1 w0 w0 h1 h0 w0 h1 h0 h0 m0 h0 w0 h0 m1 m0 w0 h1 w0 w0 h1 m0'
                                + os.linesep)
                            f.write(
                                'CREATE_SIGNAL h0 h0 h0 w0 m0 m1 m0 h1 h0 m0 h0 m1 h1 w0 h1 h0 m1 h1 m0 w0 w0 m0 w0 h0 h1 m1 w0 m0 m1 m0 w0 h1 h0 m0 h0 m1 h1 w0 h0 w0 m0 m1 m0 w0 h1 h0 w0 w0 h1'
                                + os.linesep)
                            f.write(
                                'CREATE_SIGNAL h1 h1 m0 w0 w0 h1 m1 h1 h1 m1 m0 w0 m1 m0 m0 w0 m0 h0 m0 h0 w0 h0 m0 h0 h1 m1 h0 h1 w0 h0 h1 m1 h1 m1 m0'
                                + os.linesep)

                            f.write('ENV_ADD_SIGNAL 2 1' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 3 2' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 4 3' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 5 4' + os.linesep)

                            f.write('ENV_ADD_SIGNAL 6 1' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 6 2' + os.linesep)

                            f.write('ENV_ADD_SIGNAL 7 1' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 7 3' + os.linesep)

                            f.write('ENV_ADD_SIGNAL 8 1' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 8 4' + os.linesep)

                            f.write('ENV_ADD_SIGNAL 9 2' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 9 3' + os.linesep)

                            f.write('ENV_ADD_SIGNAL 10 2' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 10 4' + os.linesep)

                            f.write('ENV_ADD_SIGNAL 11 3' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 11 4' + os.linesep)

                            f.write('ENV_ADD_SIGNAL 12 1' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 12 2' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 12 3' + os.linesep)

                            f.write('ENV_ADD_SIGNAL 13 1' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 13 2' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 13 4' + os.linesep)

                            f.write('ENV_ADD_SIGNAL 14 1' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 14 3' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 14 4' + os.linesep)

                            f.write('ENV_ADD_SIGNAL 15 2' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 15 3' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 15 4' + os.linesep)

                            f.write('ENV_ADD_SIGNAL 16 1' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 16 2' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 16 3' + os.linesep)
                            f.write('ENV_ADD_SIGNAL 16 4' + os.linesep)
                    else:
                        line = line.replace('SEED_NUMBER', str(comb['seed']))
                        line = line.replace('MUTATION_RATE_VALUE',
                                            comb['mutation'])
                        line = line.replace('SELECTION_PRESSURE',
                                            str(comb['selection']))
                        f.write(line)

                f_template.close()
                f.close()

                put_file = Put([host], [outfile],
                               remote_location=bucketname).run()
                if not put_file.ok:
                    exit()

                os.remove(outfile)

                Remote(
                    'cd ' + bucketname + '; cp ' + outfile.split('/')[-1] +
                    ' param.in; cp /home/jorouzaudcornabas/aevol_binary/aevol/execo/mut_lat/binding_matrix.rae .',
                    [host]).run()

                logger.info(thread_name + "Launching AEVOL Create")
                Remote(
                    self.export + 'cd ' + bucketname +
                    '; /home/jorouzaudcornabas/aevol_binary/aevol/src/aevol_create > aevol_create.log',
                    [host],
                    process_args={
                        'default_stdout_handler': False,
                        'default_stderr_handler': False
                    }).run()

                logger.info(thread_name + "Launching AEVOL Run")
                rem = Remote(
                    self.export + 'cd ' + bucketname +
                    '; /home/jorouzaudcornabas/aevol_binary/aevol/src/aevol_run -p '
                    + str(nb_proc) + ' -n 300000 > aevol_run.log', [host],
                    process_args={
                        'default_stdout_handler': False,
                        'default_stderr_handler': False
                    }).run()
                if rem.ok:
                    comb_ok = True

            logger.info(thread_name + 'Get results ' + comb_dir + "/" +
                        slugify(comb))

            # try:
            #     os.mkdir(comb_dir + "/" + slugify(comb))
            # except:
            #     logger.warning(thread_name +
            #                    '%s already exists, removing existing files',
            #                    comb_dir + "/" + slugify(comb))
            #     shutil.rmtree(comb_dir + "/" + slugify(comb))
            # try:
            #     os.mkdir(comb_dir + "/" + slugify(comb))
            # except:
            #     logger.warning(thread_name +
            #                    '%s already exists, recreating directory',
            #                    comb_dir + "/" + slugify(comb))
            # get_results = Get([host],
            #                   [bucketname + "/aevol_create.log",
            #                    bucketname + "/aevol_run.log",
            #                    bucketname + '/stats/'],
            #                   local_location=comb_dir + "/" + slugify(comb)).run()
            # for p in get_results.processes:
            #     if not p.ok:
            #         logger.error(thread_name +
            #                      ': Unable to retrieve the files for combination %s',
            #                      slugify(comb))
            #         exit()

        finally:
            if comb_ok:
                self.sweeper.done(comb)
                # shutil.rmtree(bucketname)
                logger.info(thread_name + ': ' + slugify(comb) +
                            ' has been done')
            else:
                self.sweeper.cancel(comb)
                logger.warning(thread_name + ': ' + slugify(comb) +
                               ' has been canceled')
        logger.info(style.step('%s Remaining'),
                    len(self.sweeper.get_remaining()))
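The middle of workflow() also illustrates a reusable pattern: render a parameter file from a template with mkstemp, upload it with Put, then delete the local copy. A stripped-down sketch under the same assumptions (hypothetical template path, host, and combination):

import os
from tempfile import mkstemp
from execo import Put
from execo_engine import slugify

comb = {'seed': 42, 'mutation': '1e-5'}   # hypothetical combination
host = "node-1.example.com"               # hypothetical host

fd, outfile = mkstemp(dir='/tmp/', prefix=slugify(comb) + '_param')
with open('param_tmpl.in') as template, os.fdopen(fd, 'w') as f:
    for line in template:
        # Same placeholder substitution style as in the example above.
        line = line.replace('SEED_NUMBER', str(comb['seed']))
        line = line.replace('MUTATION_RATE_VALUE', comb['mutation'])
        f.write(line)

put = Put([host], [outfile], remote_location='/tmp/bucket').run()
os.remove(outfile)
if not put.ok:
    raise RuntimeError("could not upload the parameter file")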