Example 1
    def finish_minimization(self, result):
        result1 = self.load_result(self.finished_min1_name)
        result2 = result

        atoms1 = io.loadcon(result1['min.con'])
        atoms2 = io.loadcon(result2['min.con'])

        results_dat1 = io.parse_results(result1['results.dat'])
        results_dat2 = io.parse_results(result2['results.dat'])
        self.data['force_calls_minimization'] += results_dat1['total_force_calls']
        self.data['force_calls_minimization'] += results_dat2['total_force_calls']

        is_reactant = lambda a: atoms.match(a, self.reactant,
                                            config.comp_eps_r,
                                            config.comp_neighbor_cutoff, False)

        tc1 = results_dat1['termination_reason']
        tc2 = results_dat2['termination_reason']

        termination_reason1 = self.job_termination_reasons['minimization'][tc1]
        termination_reason2 = self.job_termination_reasons['minimization'][tc2]
        if termination_reason1 == 'max_iterations' or termination_reason2 == 'max_iterations':
            self.data['termination_reason'] = 9
            self.data['potential_energy_saddle'] = 0.0
            self.data['potential_energy_reactant'] = 0.0
            self.data['potential_energy_product'] = 0.0
            self.data['barrier_reactant_to_product'] = 0.0
            self.data['barrier_product_to_reactant'] = 0.0
            return

        # Check the connectivity of the process
        if (not is_reactant(atoms1) and not is_reactant(atoms2)) or \
           (is_reactant(atoms1) and is_reactant(atoms2)):
            # Not connected
            self.data['termination_reason'] = 6
            self.data['potential_energy_saddle'] = 0.0
            self.data['potential_energy_reactant'] = 0.0
            self.data['potential_energy_product'] = 0.0
            self.data['barrier_reactant_to_product'] = 0.0
            self.data['barrier_product_to_reactant'] = 0.0
            return
        elif is_reactant(atoms1):
            reactant_results_dat = results_dat1
            product_results_dat = results_dat2
            self.finished_reactant_name = self.finished_min1_name
            self.finished_product_name = self.finished_min2_name
        elif is_reactant(atoms2):
            reactant_results_dat = results_dat2
            product_results_dat = results_dat1
            self.finished_reactant_name = self.finished_min2_name
            self.finished_product_name = self.finished_min1_name

        self.data['potential_energy_reactant'] = reactant_results_dat['potential_energy']
        self.data['potential_energy_product'] = product_results_dat['potential_energy']

        self.data['barrier_reactant_to_product'] = self.data['potential_energy_saddle'] - \
                self.data['potential_energy_reactant'] 
        self.data['barrier_product_to_reactant'] = self.data['potential_energy_saddle'] - \
                self.data['potential_energy_product'] 
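
The last two assignments encode the usual barrier convention: each barrier is the saddle energy minus the energy of the corresponding endpoint minimum. A tiny worked sketch with made-up energies, only to illustrate the sign convention:

# Hypothetical energies (eV), chosen only to illustrate the arithmetic above.
potential_energy_saddle = -1.20
potential_energy_reactant = -2.00
potential_energy_product = -1.75

barrier_reactant_to_product = potential_energy_saddle - potential_energy_reactant  # 0.80
barrier_product_to_reactant = potential_energy_saddle - potential_energy_product   # 0.55
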
Example 2
 def finish_search(self, result):
     results_dat = io.parse_results(result['results.dat'])
     self.data.update(results_dat)
     reactant_energy = results_dat['potential_energy_reactant']
     barrier = results_dat['potential_energy_saddle'] - reactant_energy
     self.data['potential_energy_reactant'] = reactant_energy
     self.data['barrier_reactant_to_product'] = barrier
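
Every example here funnels results.dat through io.parse_results. For reference, a minimal stand-in compatible with these call sites might look like the sketch below; it assumes the conventional one "value key" pair per line layout of results.dat, with values coerced to int or float where possible (the real parser in eon's io module may handle more cases).

def parse_results_sketch(fileobj):
    # Hypothetical stand-in for io.parse_results; assumes 'value key' lines.
    def to_number(token):
        for cast in (int, float):
            try:
                return cast(token)
            except ValueError:
                pass
        return token

    results = {}
    for line in fileobj:
        fields = line.split()
        if len(fields) >= 2:
            results[fields[1]] = to_number(fields[0])
    return results

Under that layout, a line such as "0 termination_reason" yields results['termination_reason'] == 0.
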
Example 3
def register_results(comm, bhstates):
    logger.info("Registering results")
    if os.path.isdir(config.path_jobs_in):
        shutil.rmtree(config.path_jobs_in)
    os.makedirs(config.path_jobs_in)

    # Function used by communicator to determine whether to discard a result
    def keep_result(name):
        return True

    num_registered = 0

    for result in comm.get_results(config.path_jobs_in, keep_result):
        # The result dictionary contains the following key-value pairs:
        # product.con - an array of strings containing the product
        # results.dat - an array of strings containing the results
        # id - wuid

        #result_id = result['id']
        #del result['id']
        result_info = io.parse_results(result['results.dat'])
        if 'minimum_energy' not in result_info:
            continue
        if result_info['termination_reason'] == 0:
            if bhstates.add_state(result, result_info):
                logger.info("New structure with energy %.8e",
                            result_info['minimum_energy'])

            #logger.info("found new structure with energy %.3e", fe)
            #if bhminima.add_minimum(fe, result['product.con'].getvalue()):
            #    logger.info("found new low energy structure with energy %.3e", fe)

        num_registered += 1

    logger.info("%i (result) searches processed", num_registered)
Example 4
def register_results(comm, current_state, states):
    logger.info("Registering results")
    if os.path.isdir(config.path_jobs_in):
        shutil.rmtree(config.path_jobs_in)
    os.makedirs(config.path_jobs_in)

    # Function used by communicator to determine whether to discard a result
    def keep_result(name):
        return True

    transition = None
    num_registered = 0
    speedup = 0
    for result in comm.get_results(config.path_jobs_in, keep_result):
        # The result dictionary contains the following key-value pairs:
        # reactant.con - an array of strings containing the reactant
        # product.con - an array of strings containing the product
        # results.dat - an array of strings containing the results
        # id - StateNumber_WUID
        #
        # The reactant, product, and mode are passed as lines of the files because
        # the information contained in them is not needed for registering results
        state_num = int(result['name'].split("_")[0])

        state = states.get_state(state_num)

        # read in the results
        result['results'] = io.parse_results(result['results.dat'])
        speedup += result['results']['speedup']
        if result['results']['transition_found'] == 1:
            result['results']['transition_time_s'] += state.get_time()
            time = result['results']['transition_time_s']
            process_id = state.add_process(result)
            logger.info("Found transition with time: %.3e s", time)
            if not transition and current_state.number == state.number:
                transition = {'process_id': process_id, 'time': time}
            state.zero_time()
            num_cancelled = comm.cancel_state(state_num)
            logger.info("Cancelled %i workunits from state %i", num_cancelled,
                        state.number)
            break
        else:
            state.inc_time(result['results']['simulation_time_s'])
        num_registered += 1

    logger.info("Processed results: %i", num_registered)
    if num_registered >= 1:
        logger.info("Average speedup: %f", speedup / num_registered)
    return num_registered, transition, speedup
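
The state object above (and in the near-duplicate example that follows) is driven through only a handful of methods. A minimal hypothetical stub exposing just that surface is enough to exercise the time bookkeeping in isolation:

class StateStub(object):
    # Hypothetical stand-in exposing only what register_results uses.
    def __init__(self, number):
        self.number = number
        self._time = 0.0
        self._processes = []

    def get_time(self):
        return self._time

    def inc_time(self, dt):
        self._time += dt

    def zero_time(self):
        self._time = 0.0

    def add_process(self, result):
        # Return a process id, mirroring how the caller consumes it.
        self._processes.append(result)
        return len(self._processes) - 1
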
Example 5
def register_results(comm, current_state, states):
    logger.info("Registering results")
    if os.path.isdir(config.path_jobs_in):
        shutil.rmtree(config.path_jobs_in)
    os.makedirs(config.path_jobs_in)

    # Function used by communicator to determine whether to discard a result
    def keep_result(name):
        return True

    transition = None
    num_registered = 0
    speedup = 0
    for result in comm.get_results(config.path_jobs_in, keep_result):
        # The result dictionary contains the following key-value pairs:
        # reactant.con - an array of strings containing the reactant
        # product.con - an array of strings containing the product
        # results.dat - an array of strings containing the results
        # id - StateNumber_WUID
        #
        # The reactant, product, and mode are passed as lines of the files because
        # the information contained in them is not needed for registering results
        state_num = int(result['name'].split("_")[0])

        state = states.get_state(state_num)

        # read in the results
        result['results'] = io.parse_results(result['results.dat'])
        speedup += result['results']['speedup']
        if result['results']['transition_found'] == 1:
            result['results']['transition_time_s'] += state.get_time()
            time = result['results']['transition_time_s']
            process_id = state.add_process(result)
            logger.info("Found transition with time: %.3e s", time)
            if not transition and current_state.number == state.number:
                transition = {'process_id': process_id, 'time': time}
            state.zero_time()
            num_cancelled = comm.cancel_state(state_num)
            logger.info("Cancelled %i workunits from state %i", 
                        num_cancelled, state.number)
            break
        else:
            state.inc_time(result['results']['simulation_time_s'])
        num_registered += 1

    logger.info("Processed results: %i", num_registered)
    if num_registered >= 1:
        logger.info("Average speedup: %f", speedup / num_registered)
    return num_registered, transition, speedup
Example 6
    def process_result(self, result):
        results_dat = io.parse_results(result['results.dat'])
        #XXX: can remove this line now
        result['results.dat'].seek(0)
        job_type = results_dat['job_type']
        termination_code = results_dat['termination_reason']

        self.save_result(result)
        self.finished_jobs.append(result['name'])

        if job_type == 'saddle_search':
            self.data['termination_reason'] = termination_code
            logger.info("Search_id: %i saddle search complete" %
                        self.search_id)
            if termination_code == 0:
                self.job_statuses[job_type] = 'complete'
            else:
                self.job_statuses[job_type] = 'error'
            self.finished_saddle_name = result['name']
            self.finish_search(result)

        elif job_type == 'minimization':
            if self.job_statuses['min1'] == 'running':
                min_name = 'min1'
                min_number = 1
                self.finished_min1_name = result['name']
            else:
                min_name = 'min2'
                min_number = 2
                self.finished_min2_name = result['name']

            self.job_statuses[min_name] = 'complete'
            logger.info("Search_id: %i minimization %i complete" %
                        (self.search_id, min_number))

            if min_number == 2:
                self.finish_minimization(result)

        done = False not in [
            s == 'complete' for s in self.job_statuses.values()
        ]
        if not done:
            done = True in [s == 'error' for s in self.job_statuses.values()]

        if done:
            return self.build_result()
Example 7
    def process_result(self, result):
        results_dat = io.parse_results(result['results.dat'])
        #XXX: can remove this line now
        result['results.dat'].seek(0)
        job_type = results_dat['job_type']
        termination_code = results_dat['termination_reason']

        self.save_result(result)
        self.finished_jobs.append(result['name'])

        if job_type == 'saddle_search':
            self.data['termination_reason'] = termination_code
            logger.info("Search_id: %i saddle search complete" % self.search_id)
            if termination_code == 0:
                self.job_statuses[job_type] = 'complete'
            else:
                self.job_statuses[job_type] = 'error'
            self.finished_saddle_name = result['name']
            self.finish_search(result)

        elif job_type == 'minimization':
            if self.job_statuses['min1'] == 'running':
                min_name = 'min1'
                min_number = 1
                self.finished_min1_name = result['name']
            else:
                min_name = 'min2'
                min_number = 2
                self.finished_min2_name = result['name']

            self.job_statuses[min_name] = 'complete'
            logger.info("Search_id: %i minimization %i complete" % (self.search_id, min_number))

            if min_number == 2:
                self.finish_minimization(result)

        done = False not in [s == 'complete' for s in self.job_statuses.values()]
        if not done:
            done = True in [s == 'error' for s in self.job_statuses.values()]

        if done:
            return self.build_result()
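
The "done" test in the last two examples reads more directly with all() and any(); a behavior-equivalent sketch:

def search_done(job_statuses):
    # Equivalent to the membership tests above: done when every job is
    # complete, or as soon as any job has errored.
    statuses = list(job_statuses.values())
    return all(s == 'complete' for s in statuses) or \
           any(s == 'error' for s in statuses)

assert search_done({'saddle_search': 'complete', 'min1': 'complete', 'min2': 'complete'})
assert search_done({'saddle_search': 'error', 'min1': 'running', 'min2': 'running'})
assert not search_done({'min1': 'running', 'min2': 'complete'})
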
Example 8
    def register_results(self):
        logger.info("Registering results")
        t1 = time()
        if os.path.isdir(config.path_jobs_in):
            try:
                shutil.rmtree(config.path_jobs_in)  
            except (OSError, IOError):
                pass
        if not os.path.isdir(config.path_jobs_in):
            os.makedirs(config.path_jobs_in)

        # Function used by communicator to determine whether to discard a result
        def keep_result(name):
            # note that all processes are assigned to the current state
            state_num = int(name.split("_")[0])
            if self.superbasin:
                return (state_num in self.superbasin.state_dict and
                        self.superbasin.get_confidence() < config.akmc_confidence)
            else:
                return (state_num == self.state.number and
                        self.state.get_confidence() < config.akmc_confidence)

        num_registered = 0
        for result in self.comm.get_results(config.path_jobs_in, keep_result):
            # The result dictionary contains the following key-value pairs:
            # reactant - an array of strings containing the reactant
            # saddle - an atoms object containing the saddle
            # product - an array of strings containing the product
            # mode - an array of strings containing the mode
            # results - a dictionary containing the key-value pairs in results.dat
            # id - StateNumber_WUID
            #
            # The reactant, product, and mode are passed as lines of the files because
            # the information contained in them is not needed for registering results
            if config.debug_keep_all_results:
                #XXX: We should only do these checks once to speed things up,
                #     but at the same time debug options don't have to be fast
                # save_path = os.path.join(config.path_root, "old_searches")
                # if not os.path.isdir(save_path):
                #    os.mkdir(save_path)
                # shutil.copytree(result_path, os.path.join(save_path, i))
                # XXX: This is currently broken by the new result passing 
                #      scheme. Should it be done in communicator?
                pass
            if len(result) == 0: continue
            state_num = int(result['name'].split("_")[0])
            id = int(result['name'].split("_")[1]) + result['number']
            searchdata_id = "%d_%d" % (state_num, id)
            # Store information about the search into result_data for the
            # search_results.txt file in the state directory.
            try:
                job_type = self.job_table.get_row('wuid', id)['type']
            except TypeError:
                logger.warning("Could not find job type for search %s" 
                               % searchdata_id)
                continue
            result['type'] = job_type
            if job_type is None:
                logger.warning("Could not find search data for search %s" 
                               % searchdata_id)
            else:
                self.job_table.delete_row('wuid', id)
            result['wuid'] = id

            # If we are doing a search for a superbasin the results
            # could be for a different state.
            if self.superbasin:
                try:
                    state = self.superbasin.state_dict[state_num]
                except KeyError:
                    logger.warning("State of job %s is not part of "
                                   "the superbasin" % result['name'])
                    continue
            else:
                state = self.state

            # read in the results
            result['results'] = io.parse_results(result['results.dat'])
            if result['results']['termination_reason'] == 0:
                state.add_process(result, self.superbasin)
            else:
                state.register_bad_saddle(result, config.debug_keep_bad_saddles, superbasin=self.superbasin)
            num_registered += 1

            if ((self.superbasin and self.superbasin.get_confidence() >= config.akmc_confidence) or
                (not self.superbasin and self.state.get_confidence() >= config.akmc_confidence)):
                if not config.debug_register_extra_results:
                    break

        # Approximate number of searches received
        tot_searches = len(os.listdir(config.path_jobs_in)) * config.comm_job_bundle_size

        t2 = time()
        logger.info("Processed %i results", num_registered)
        if tot_searches != num_registered:
            logger.info("Discarded approximately %i results" % (tot_searches - num_registered))
        logger.debug("Registered %.1f results per second", (num_registered/(t2-t1)))

        self.job_table.write()
        return num_registered
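
The name-to-wuid arithmetic above (and in several later examples) assumes a bundled job reports a base wuid in its name plus an index in result['number']; spelled out with hypothetical values:

# Hypothetical bundled result: name is "StateNumber_WUID", 'number' is the
# index of this result within its bundle.
name, number = "12_340", 2
state_num = int(name.split("_")[0])          # 12
wuid = int(name.split("_")[1]) + number      # 342
searchdata_id = "%d_%d" % (state_num, wuid)  # "12_342"
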
Example 9
def register_results(comm, current_state, states):
    logger.info("Registering results")
    if os.path.isdir(config.path_jobs_in):
        shutil.rmtree(config.path_jobs_in)
    os.makedirs(config.path_jobs_in)

    # Function used by communicator to determine whether to discard a result
    def keep_result(name):
        return True

    transition = None
    num_registered = 0
    speedup = 0
    number_state = []
    numres = 0
    for result in comm.get_results(config.path_jobs_in, keep_result):
        # The result dictionary contains the following key-value pairs:
        # reactant.con - an array of strings containing the reactant
        # product.con - an array of strings containing the product
        # results.dat - an array of strings containing the results
        # id - StateNumber_WUID
        #
        # The reactant, product, and mode are passed as lines of the files because
        # the information contained in them is not needed for registering results

        state_num = int(result['name'].split("_")[0])
        id = int(result['name'].split("_")[1]) + result['number']

        state = states.get_state(state_num)

        # read in the results
        result['results'] = io.parse_results(result['results.dat'])
        speedup += result['results']['speedup']
        if result['results']['transition_found'] == 1:
            result['results']['transition_time_s'] += state.get_time()
            a = result['results']['potential_energy_product']
            f = open("states/0/end_state_table", "a+")
            lines = f.readlines()
            f.close()
            proc = []
            number_state.append(0)
            count = 0
            state_match = 0
            flag = 0
            product = io.loadcon(result['product.con'])

            for i in range(0, numres):
                product2 = io.loadcon("states/0/procdata/product_%i.con" % i)
                if atoms.match(product, product2, config.comp_eps_r,
                               config.comp_neighbor_cutoff, True):
                    if flag == 0:
                        state_match = number_state[i]
                        number_state[numres] = state_match
                        flag = 1
                        break
            count = 0
            time_to_state = 0
            time_check = 0
            for line in lines[1:]:
                l = line.split()
                proc.append({
                    'state': l[0],
                    'views': l[1],
                    'rate': l[2],
                    'time': l[3]
                })
                if float(l[3]) > time_check:
                    time_check = float(l[3])
                if flag == 0:
                    number_state[numres] = int(l[0]) + 1
                else:
                    if state_match == int(l[0]):
                        proc[count]['views'] = str(int(l[1]) + 1)
                        time_to_state = float(
                            l[3]) + result['results']['transition_time_s']
                        proc[count]['time'] = str(time_to_state)
                        proc[count]['rate'] = str(
                            1 / (time_to_state / float(proc[count]['views'])))
                count += 1

            if flag == 0:
                proc.append({
                    'state':
                    number_state[numres],
                    'views':
                    1,
                    'rate':
                    1 / (float(time_check +
                               result['results']['transition_time_s'])),
                    'time':
                    time_check + result['results']['transition_time_s']
                })

            g = open("states/0/end_state_table", "w")
            g.write('state       views         rate        time \n')
            for j in range(0, len(proc)):
                g.write(str(proc[j]['state']))
                g.write("             ")
                g.write(str(proc[j]['views']))
                g.write("             ")
                g.write(str(proc[j]['rate']))
                g.write("             ")
                g.write(str(proc[j]['time']))
                g.write("\n")
            g.close()
            numres += 1
            time = result['results']['transition_time_s']
            process_id = state.add_process(result)
            logger.info("Found transition with time %.3e", time)
            if not transition and current_state.number == state.number:
                transition = {'process_id': process_id, 'time': time}
            state.zero_time()
        else:
            state.inc_time(result['results']['simulation_time_s'])
        num_registered += 1
    logger.info("Processed %i (result) searches", num_registered)
    if num_registered >= 1:
        logger.info("Average speedup is  %f", speedup / num_registered)
    return num_registered, transition, speedup
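
The rate written to end_state_table above is 1/(time/views), i.e. visits per unit simulated time; with hypothetical numbers:

# Hypothetical counts: a state seen 4 times over 2.0 s of simulated time.
views = 4
time_to_state = 2.0
rate = 1 / (time_to_state / float(views))  # 2.0 transitions per second
# ...which is simply views / time_to_state, written the way the code above does.
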
Example 10
    def finish_minimization(self, result):
        result1 = self.load_result(self.finished_min1_name)
        result2 = result

        atoms1 = io.loadcon(result1['min.con'])
        atoms2 = io.loadcon(result2['min.con'])

        results_dat1 = io.parse_results(result1['results.dat'])
        results_dat2 = io.parse_results(result2['results.dat'])
        self.data['force_calls_minimization'] += results_dat1[
            'total_force_calls']
        self.data['force_calls_minimization'] += results_dat2[
            'total_force_calls']

        is_reactant = lambda a: atoms.match(
            a, self.reactant, config.comp_eps_r, config.comp_neighbor_cutoff,
            False)

        tc1 = results_dat1['termination_reason']
        tc2 = results_dat2['termination_reason']

        termination_reason1 = self.job_termination_reasons['minimization'][tc1]
        termination_reason2 = self.job_termination_reasons['minimization'][tc2]
        if termination_reason1 == 'max_iterations' or termination_reason2 == 'max_iterations':
            self.data['termination_reason'] = 9
            self.data['potential_energy_saddle'] = 0.0
            self.data['potential_energy_reactant'] = 0.0
            self.data['potential_energy_product'] = 0.0
            self.data['barrier_reactant_to_product'] = 0.0
            self.data['barrier_product_to_reactant'] = 0.0
            return

        # Check the connectivity of the process
        if (not is_reactant(atoms1) and not is_reactant(atoms2)) or \
           (is_reactant(atoms1) and is_reactant(atoms2)):
            # Not connected
            self.data['termination_reason'] = 6
            self.data['potential_energy_saddle'] = 0.0
            self.data['potential_energy_reactant'] = 0.0
            self.data['potential_energy_product'] = 0.0
            self.data['barrier_reactant_to_product'] = 0.0
            self.data['barrier_product_to_reactant'] = 0.0
            return
        elif is_reactant(atoms1):
            reactant_results_dat = results_dat1
            product_results_dat = results_dat2
            self.finished_reactant_name = self.finished_min1_name
            self.finished_product_name = self.finished_min2_name
        elif is_reactant(atoms2):
            reactant_results_dat = results_dat2
            product_results_dat = results_dat1
            self.finished_reactant_name = self.finished_min2_name
            self.finished_product_name = self.finished_min1_name

        self.data['potential_energy_reactant'] = reactant_results_dat[
            'potential_energy']
        self.data['potential_energy_product'] = product_results_dat[
            'potential_energy']

        self.data['barrier_reactant_to_product'] = self.data['potential_energy_saddle'] - \
                self.data['potential_energy_reactant']
        self.data['barrier_product_to_reactant'] = self.data['potential_energy_saddle'] - \
                self.data['potential_energy_product']
Example 11
class ServerMinModeExplorer(MinModeExplorer):
    def __init__(self, states, previous_state, state, superbasin=None):
        #XXX: need to init somehow
        self.search_id = 0

        self.wuid_to_search_id = {}
        self.process_searches = {}
        self.job_info = {}

        if os.path.isfile("explorer.pickle"):
            f = open("explorer.pickle", "rb")
            tmp_dict = pickle.load(f)
            f.close()
            self.__dict__.update(tmp_dict)

        MinModeExplorer.__init__(self, states, previous_state, state,
                                 superbasin)

    def save(self):
        f = open("explorer.pickle", "w")
        d = self.__dict__.copy()
        del d['states']
        del d['previous_state']
        del d['state']
        del d['comm']
        pickle.dump(d, f, pickle.HIGHEST_PROTOCOL)
        f.close()

        f = open("searches.log", 'w')
        f.write("%9s %13s %11s %10s\n" %
                ("search_id", "job_type", "status", "job_name"))
        for search_id in self.job_info:
            fmt = "%9i %13s %11s %10s\n"
            lines = {'saddle_search': None, 'min1': None, 'min2': None}
            for name, job in self.job_info[search_id].items():
                lines[job['type']] = fmt % (search_id, job['type'],
                                            job['status'], name)

            if lines['saddle_search']:
                f.write(lines['saddle_search'])
            if lines['min1']:
                f.write(lines['min1'])
            else:
                f.write(fmt % (search_id, 'min1', 'not_running', ''))
            if lines['min2']:
                f.write(lines['min2'])
            else:
                f.write(fmt % (search_id, 'min2', 'not_running', ''))
        f.close()

    def explore(self):
        if not os.path.isdir(
                config.path_jobs_in):  #XXX: does this condition ever happen?
            os.makedirs(config.path_jobs_in)
            if self.state.get_confidence(
                    self.superbasin) >= config.akmc_confidence:
                self.process_searches = {}
                self.save()

        MinModeExplorer.explore(self)

    def register_results(self):
        logger.info("Registering results")
        t1 = time()
        if os.path.isdir(config.path_jobs_in):
            try:
                shutil.rmtree(config.path_jobs_in)
            except OSError as msg:
                logger.error("Error cleaning up %s: %s", config.path_jobs_in,
                             msg)
        if not os.path.isdir(config.path_jobs_in):
            os.makedirs(config.path_jobs_in)

        if not os.path.isdir(config.path_incomplete):
            os.makedirs(config.path_incomplete)

        # Function used by communicator to determine whether to keep a result
        def keep_result(name):
            # note that all processes are assigned to the current state
            state_num = int(name.split("_")[0])
            return (state_num == self.state.number and \
                    self.state.get_confidence(self.superbasin) < config.akmc_confidence)

        num_registered = 0
        for result in self.comm.get_results(config.path_jobs_in, keep_result):
            state_num = int(result['name'].split("_")[0])
            # XXX: doesn't this give the wrong id when jobs are bundled?
            id = int(result['name'].split("_")[1]) + result['number']
            searchdata_id = "%d_%d" % (state_num, id)

            search_id = self.wuid_to_search_id[id]
            if search_id not in self.process_searches:
                continue
            self.job_info[search_id][searchdata_id]['status'] = 'complete'
            #logger.info("got result for search_id %i" % search_id)
            final_result = self.process_searches[search_id].process_result(
                result)
            if final_result:
                results_dict = io.parse_results(final_result['results.dat'])
                reason = results_dict['termination_reason']
                if reason == 0:
                    self.state.add_process(final_result)
                else:
                    final_result['wuid'] = id
                    self.state.register_bad_saddle(
                        final_result, config.debug_keep_bad_saddles)
            else:
                ps = self.process_searches[search_id]
                saddle = ps.get_saddle()
                if saddle:
                    barrier = ps.data['barrier_reactant_to_product']
                    if self.state.find_repeat(ps.get_saddle_file(), barrier):
                        self.state.add_process(ps.build_result())
                        del self.process_searches[search_id]
            num_registered += 1
            if self.state.get_confidence(
                    self.superbasin) >= config.akmc_confidence:
                if not config.debug_register_extra_results:
                    break

        # Approximate number of searches received
        tot_searches = len(os.listdir(
            config.path_jobs_in)) * config.comm_job_bundle_size

        t2 = time()
        logger.info("Processed %i results", num_registered)
        if tot_searches != num_registered:
            logger.info("Discarded approximately %i results" %
                        (tot_searches - num_registered))
        logger.debug("Registered %.1f results per second",
                     (num_registered / (t2 - t1)))

        self.save()
        return num_registered
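
ServerMinModeExplorer checkpoints itself by pickling a pruned __dict__ and restoring it with __dict__.update on construction. A self-contained sketch of the same pattern (the class and filename here are hypothetical):

import os
import pickle

class CheckpointedThing(object):
    def __init__(self):
        self.search_id = 0
        # Restore any previously saved attributes, as __init__ above does.
        if os.path.isfile("checkpoint.pickle"):
            with open("checkpoint.pickle", "rb") as f:
                self.__dict__.update(pickle.load(f))

    def save(self):
        d = self.__dict__.copy()
        # Prune unpicklable or rebuilt-on-startup entries here, as save()
        # above drops states, previous_state, state, and comm.
        with open("checkpoint.pickle", "wb") as f:
            pickle.dump(d, f, pickle.HIGHEST_PROTOCOL)
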
Example 12
    def register_results(self):
        logger.info("Registering results")
        t1 = time()
        if os.path.isdir(config.path_jobs_in):
            try:
                shutil.rmtree(config.path_jobs_in)
            except (OSError, IOError):
                pass
        if not os.path.isdir(config.path_jobs_in):
            os.makedirs(config.path_jobs_in)

        # Function used by communicator to determine whether to discard a result
        def keep_result(name):
            # note that all processes are assigned to the current state
            state_num = int(name.split("_")[0])
            if self.superbasin:
                return (state_num in self.superbasin.state_dict
                        and self.superbasin.get_confidence() <
                        config.akmc_confidence)
            else:
                return (state_num == self.state.number and
                        self.state.get_confidence() < config.akmc_confidence)

        num_registered = 0
        for result in self.comm.get_results(config.path_jobs_in, keep_result):
            # The result dictionary contains the following key-value pairs:
            # reactant - an array of strings containing the reactant
            # saddle - an atoms object containing the saddle
            # product - an array of strings containing the product
            # mode - an array of strings containing the mode
            # results - a dictionary containing the key-value pairs in results.dat
            # id - StateNumber_WUID
            #
            # The reactant, product, and mode are passed as lines of the files because
            # the information contained in them is not needed for registering results
            if config.debug_keep_all_results:
                #XXX: We should only do these checks once to speed things up,
                #     but at the same time debug options don't have to be fast
                # save_path = os.path.join(config.path_root, "old_searches")
                # if not os.path.isdir(save_path):
                #    os.mkdir(save_path)
                # shutil.copytree(result_path, os.path.join(save_path, i))
                # XXX: This is currently broken by the new result passing
                #      scheme. Should it be done in communicator?
                pass
            if len(result) == 0: continue
            state_num = int(result['name'].split("_")[0])
            id = int(result['name'].split("_")[1]) + result['number']
            searchdata_id = "%d_%d" % (state_num, id)
            # Store information about the search into result_data for the
            # search_results.txt file in the state directory.
            try:
                job_type = self.job_table.get_row('wuid', id)['type']
            except TypeError:
                logger.warning("Could not find job type for search %s" %
                               searchdata_id)
                continue
            result['type'] = job_type
            if job_type is None:
                logger.warning("Could not find search data for search %s" %
                               searchdata_id)
            else:
                self.job_table.delete_row('wuid', id)
            result['wuid'] = id

            # If we are doing a search for a superbasin the results
            # could be for a different state.
            if self.superbasin:
                try:
                    state = self.superbasin.state_dict[state_num]
                except KeyError:
                    logger.warning("State of job %s is not part of "
                                   "the superbasin" % result['name'])
                    continue
            else:
                state = self.state

            # read in the results
            result['results'] = io.parse_results(result['results.dat'])
            if result['results']['termination_reason'] == 0:
                state.add_process(result, self.superbasin)
            else:
                state.register_bad_saddle(result,
                                          config.debug_keep_bad_saddles,
                                          superbasin=self.superbasin)
            num_registered += 1

            if ((self.superbasin and
                 self.superbasin.get_confidence() >= config.akmc_confidence) or
                (not self.superbasin
                 and self.state.get_confidence() >= config.akmc_confidence)):
                if not config.debug_register_extra_results:
                    break

        # Approximate number of searches received
        tot_searches = len(os.listdir(
            config.path_jobs_in)) * config.comm_job_bundle_size

        t2 = time()
        logger.info("Processed %i results", num_registered)
        if tot_searches != num_registered:
            logger.info("Discarded approximately %i results" %
                        (tot_searches - num_registered))
        logger.debug("Registered %.1f results per second",
                     (num_registered / (t2 - t1)))

        self.job_table.write()
        return num_registered
Example 13
def register_results(comm, current_state, states):
    logger.info("Registering results")
    if os.path.isdir(config.path_jobs_in):
        shutil.rmtree(config.path_jobs_in)
    os.makedirs(config.path_jobs_in)
    # Function used by communicator to determine whether to discard a result
    def keep_result(name):
        return True
    transition = None
    num_registered = 0
    speedup = 0
    number_state = []
    numres = 0
    for result in comm.get_results(config.path_jobs_in, keep_result):
        # The result dictionary contains the following key-value pairs:
        # reactant.con - an array of strings containing the reactant
        # product.con - an array of strings containing the product
        # results.dat - an array of strings containing the results
        # id - StateNumber_WUID
        #
        # The reactant, product, and mode are passed as lines of the files because
        # the information contained in them is not needed for registering results
        
        
        state_num = int(result['name'].split("_")[0])
        id = int(result['name'].split("_")[1]) + result['number']

        state = states.get_state(state_num)

        # read in the results
        result['results'] = io.parse_results(result['results.dat'])
        speedup += result['results']['speedup']
        if result['results']['transition_found'] == 1:
            result['results']['transition_time_s'] += state.get_time()
            a = result['results']['potential_energy_product']
            f = open ("states/0/end_state_table","a+")
            lines = f.readlines()
            f.close()
            proc = []
            number_state.append(0)                                      
            count = 0 
            state_match = 0
            flag = 0
            product = io.loadcon (result['product.con'])                            


            for i in range(0, numres):                                                                        
                product2 = io.loadcon ("states/0/procdata/product_%i.con" % i )                                 
                if atoms.match(product, product2,config.comp_eps_r,config.comp_neighbor_cutoff,True):          
                    if flag == 0:                                                                                
                        state_match = number_state[i]
                        number_state[numres] = state_match
                        flag = 1            
                        break
            count = 0 
            time_to_state = 0
            time_check = 0
            for line in lines[1:]:                                                                         
                l = line.split()                                      
                proc.append({'state': l[0], 'views': l[1], 'rate': l[2], 'time': l[3]})  
                if float(l[3]) > time_check:
                    time_check = float(l[3])
                if flag == 0:
                    number_state[numres] = int(l[0])+1
                else:
                    if state_match == int(l[0]):
                        proc[count]['views'] = str(int(l[1]) + 1)
                        time_to_state = float(l[3]) + result['results']['transition_time_s']
                        proc[count]['time'] = str(time_to_state)
                        proc[count]['rate'] = str(1/(time_to_state/float(proc[count]['views'])))
                count += 1


            if flag == 0:
                proc.append({'state': number_state[numres],  'views': 1, 'rate': 1/(float(time_check+result['results']['transition_time_s'])) , 'time': time_check + result['results']['transition_time_s']}) 


            g = open ("states/0/end_state_table","w")
            g.write('state       views         rate        time \n')
            for j in range(0,len(proc)):
                g.write(str(proc[j]['state']))
                g.write("             ")
                g.write(str(proc[j]['views']))
                g.write("             ")
                g.write(str(proc[j]['rate']))
                g.write("             ")
                g.write(str(proc[j]['time']))
                g.write("\n")
            g.close() 
            numres += 1
            time = result['results']['transition_time_s']
            process_id = state.add_process(result)
            logger.info("Found transition with time %.3e", time)
            if not transition and current_state.number==state.number:
                transition = {'process_id':process_id, 'time':time}
            state.zero_time()
        else:
            state.inc_time(result['results']['simulation_time_s'])
        num_registered += 1
    logger.info("Processed %i (result) searches", num_registered)
    if num_registered >=1:
        logger.info("Average speedup is  %f", speedup/num_registered)
    return num_registered, transition, speedup
Example 14
#!/usr/bin/env python
import os
import sys
sys.path.insert(0, "../../")
from fileio import parse_results

test_path = os.path.split(os.path.realpath(__file__))[0]
test_name = os.path.basename(test_path)

retval = os.system("../../client/client > stdout.dat")
if retval:
    print "%s: problem running eon" % test_name
    sys.exit(1)

run_gmin = parse_results('results.dat')['minimum_energy']

global_min = -44.326801

error = abs(global_min - run_gmin)

# Compare against a small tolerance rather than exact float equality, to
# allow for the rounded reference value.
if error < 1e-6:
    print("%s: passed error of %.6e" % (test_name, error))
    sys.exit(0)
else:
    print("%s: failed error of %.6e" % (test_name, error))
    sys.exit(1)