Example #1
def make_searches(comm, wuid, bhstates):
    num_in_buffer = comm.get_queue_size() * config.comm_job_bundle_size
    logger.info("%i searches in the queue" % num_in_buffer)
    num_to_make = max(config.comm_job_buffer_size - num_in_buffer, 0)
    logger.info("Making %i searches" % num_to_make)

    if num_to_make == 0:
        return wuid

    searches = []

    invariants = {}

    with open(os.path.join(config.path_root, 'pos.con')) as f:
        initial_react = StringIO(f.read())

    #invariants['reactant_passed.con']=reactIO

    ini_changes = [('Main', 'job', 'basin_hopping')]
    #invariants['config_passed.ini'] = io.modify_config(config.config_path, ini_changes)
    #invariants['reactant_passed.con']  = reactIO

    #Merge potential files into invariants
    invariants = dict(invariants, **io.load_potfiles(config.path_pot))

    for i in range(num_to_make):
        search = {}
        search['id'] = "%d" % wuid
        ini_changes = [('Main', 'random_seed',
                        str(int(numpy.random.random() * 2**32)))]

        #if config.bh_random_structure:
        #    reactIO = StringIO()
        #    io.savecon(reactIO, rs.generate())
        #else:
        #    reactIO = initial_react

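        # Pool size 0: always start from the initial structure; a positive
        # pool size draws a random minimum from the pool, falling back to
        # the initial structure if no pooled minimum is available yet.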
        if config.bh_initial_state_pool_size == 0:
            reactIO = initial_react
        elif config.bh_initial_state_pool_size > 0:
            reactIO = bhstates.get_random_minimum()
            if reactIO is None:
                reactIO = initial_react
        else:
            logger.fatal("Initial state pool size negative")
            sys.exit(1)

        search['pos.con'] = reactIO
        search['config.ini'] = io.modify_config(config.config_path,
                                                ini_changes)
        searches.append(search)
        wuid += 1

    comm.submit_jobs(searches, invariants)
    logger.info(str(num_to_make) + " searches created")
    return wuid
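This routine keeps the client queue topped up with basin-hopping work units: each search bundles a starting structure (pos.con) and a per-search config.ini whose only modification is a fresh random seed. A minimal, self-contained sketch of the seed generation used above (numpy only; the variable name is ours):

import numpy

# Draw an independent seed per work unit, uniform over [0, 2**32),
# and stringify it so it can be written into an ini file.
seed = str(int(numpy.random.random() * 2**32))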
Example #2
def main():
    config.init()

    # Should we have some kind of sanity-check module/function somewhere?
    fnames = [
        os.path.basename(f)
        for f in glob.glob(os.path.join(config.path_pot, '*'))
    ]
    if 'pos.con' in fnames:
        print "WARNING: pos.con found in potfiles path. Are you sure you want this? It will overwrite the pos.con in the calculation directory when your jobs are being run."

    job = config.main_job.lower()
    if job == 'akmc':
        akmc.main()
    elif job == 'parallel_replica' or job == 'unbiased_parallel_replica':
        parallelreplica.main()
    elif job == 'basin_hopping':
        basinhopping.main()
    elif job == 'escape_rate':
        escaperate.main()
    else:
        import communicator
        import shutil
        config.path_scratch = config.path_root
        comm = communicator.get_communicator()

        invariants = {}

        # Merge potential files into invariants
        invariants = dict(invariants, **io.load_potfiles(config.path_pot))

        job = {}
        files = [f for f in os.listdir(".") if os.path.isfile(f)]
        for f in files:
            if len(f.split('.')) > 1:
                #f_passed = f.split('.')[0] + "_passed." + f.split('.')[1]
                f_passed = f.split('.')[0] + "." + f.split('.')[1]
                with open(f) as fh:
                    job[f_passed] = StringIO(fh.read())
        job["id"] = "output"
        if os.path.isdir("output_old"):
            shutil.rmtree("output_old")
        if os.path.isdir("output"):
            shutil.move("output", "output_old")
        comm.submit_jobs([job], invariants)
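The fall-through branch packages every file in the current directory into a single ad-hoc job, rotating any previous output directory to output_old first. For reference, a self-contained Python 2 sketch of how a [Main]/job option such as the one config.main_job mirrors could be read with the standard library (the file name and option names are assumptions):

import ConfigParser

parser = ConfigParser.SafeConfigParser()
parser.read('config.ini')                 # a missing file is silently skipped
job = parser.get('Main', 'job').lower()   # raises if section or option is absent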
Example #3
def make_searches(comm, current_state, wuid):
    reactant = current_state.get_reactant()
    #XXX:what if the user changes the bundle size?
    num_in_buffer = comm.get_queue_size() * config.comm_job_bundle_size
    logger.info("Queue contains %i searches" % num_in_buffer)
    num_to_make = max(config.comm_job_buffer_size - num_in_buffer, 0)
    logger.info("Making %i searches" % num_to_make)

    if num_to_make == 0:
        return wuid

    searches = []

    invariants = {}

    reactIO = StringIO()
    io.savecon(reactIO, reactant)

    # Merge potential files into invariants
    #XXX: Should this be in our "science" maybe the communicator should
    #     handle this.
    invariants = dict(invariants, **io.load_potfiles(config.path_pot))

    for i in range(num_to_make):
        search = {}
        search['id'] = "%d_%d" % (current_state.number, wuid)
        search['pos.con'] = reactIO
        ini_changes = [
            ('Main', 'job', 'parallel_replica'),
            ('Main', 'random_seed', str(int(numpy.random.random() * 10**9))),
        ]
        search['config.ini'] = io.modify_config(config.config_path,
                                                ini_changes)
        searches.append(search)
        wuid += 1

    comm.submit_jobs(searches, invariants)
    logger.info("Created " + str(num_to_make) + " searches")
    return wuid
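Note that, unlike the basin-hopping variant, every search dictionary here shares the one reactIO buffer, since all parallel-replica trajectories start from the current reactant. A caveat worth remembering with shared StringIO buffers, in a self-contained Python 2 sketch (contents are made up): reads start from the current position, so the buffer must be rewound before each reuse.

from StringIO import StringIO

buf = StringIO()
buf.write("sample contents")
buf.seek(0)          # rewind; a read without this would return ''
print buf.read()     # -> sample contents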
Example #4
    def make_jobs(self):
        num_unsent = self.comm.get_queue_size() * config.comm_job_bundle_size
        logger.info("Queued %i jobs" % num_unsent)
        num_in_progress = self.comm.get_number_in_progress() * config.comm_job_bundle_size
        logger.info("Running %i jobs" % num_in_progress)
        num_total = num_unsent + num_in_progress
        num_to_make = max(config.comm_job_buffer_size - num_unsent, 0)
        if config.comm_job_max_size != 0:
            if num_total + num_to_make >= config.comm_job_max_size:
                num_to_make = max(0, config.comm_job_max_size - num_total)
                logger.info("Reached max_jobs")
        logger.info("Making %i jobs" % num_to_make)

        if num_to_make == 0:
            return

        jobs = []

        invariants = {}

        # Merge potential files into invariants
        invariants = dict(invariants, **io.load_potfiles(config.path_pot))

        t1 = time()

        # start new searches
        for i in range(num_to_make):
            job = None
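            # First try to draw a follow-up job from an existing process
            # search; only create a brand-new search if none has work left.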
            for ps in self.process_searches.values():
                job, job_type = ps.get_job(self.state.number)
                if job:
                    self.wuid_to_search_id[self.wuid] = ps.search_id
                    sid = ps.search_id
                    break

            if not job:
                displacement, mode, disp_type = self.generate_displacement()
                reactant = self.state.get_reactant()
                process_search = ProcessSearch(reactant, displacement, mode,
                                               disp_type, self.search_id, self.state.number)
                self.process_searches[self.search_id] = process_search
                self.wuid_to_search_id[self.wuid] = self.search_id
                job, job_type = process_search.get_job(self.state.number)
                sid = self.search_id
                self.search_id += 1

            job['id'] = "%i_%i" % (self.state.number, self.wuid)
            job_entry = {'type': job_type, 'status': 'running'}

            if sid not in self.job_info:
                self.job_info[sid] = {}
            self.job_info[sid][job['id']] = job_entry

            self.wuid += 1
            self.save_wuid()
            jobs.append(job)

        self.save()
        if config.recycling_on and self.nrecycled > 0:
            logger.info("Recycled %i saddles" % self.nrecycled)

        try:
            self.comm.submit_jobs(jobs, invariants)
            t2 = time()
            logger.info("Created " + str(len(jobs)) + " searches")
            logger.debug("Created " + str(num_to_make / (t2 - t1)) +
                         " searches per second")
        except Exception:
            logger.exception("Failed to submit searches")
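The sizing logic above throttles on two limits: keep comm_job_buffer_size jobs queued, but never let queued plus running jobs exceed comm_job_max_size (a value of 0 disables the cap). A self-contained restatement with made-up numbers (the function and argument names are ours):

def jobs_to_make(num_unsent, num_in_progress, buffer_size, max_size):
    # Top the queue up to buffer_size, then clamp so that queued plus
    # running jobs never exceed max_size (max_size == 0 means no cap).
    num_total = num_unsent + num_in_progress
    num_to_make = max(buffer_size - num_unsent, 0)
    if max_size != 0 and num_total + num_to_make >= max_size:
        num_to_make = max(0, max_size - num_total)
    return num_to_make

print jobs_to_make(3, 5, 10, 12)   # -> 4, capped by max_size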
Example #5
    def make_jobs(self):
        #XXX:what if the user changes the bundle size?
        num_in_buffer = self.comm.get_queue_size() * config.comm_job_bundle_size
        logger.info("Queue contains %i searches" % num_in_buffer)
        num_to_make = max(config.comm_job_buffer_size - num_in_buffer, 0)
        logger.info("Making %i process searches" % num_to_make)

        if num_to_make == 0:
            return

        searches = []

        invariants = {}

        reactIO = StringIO.StringIO()
        io.savecon(reactIO, self.reactant)
        file_permission = os.stat("pos.con").st_mode
        invariants['pos.con'] = (reactIO, file_permission)

        t1 = time()
        if (config.saddle_method == 'dynamics' and
                config.recycling_on and
                config.disp_moved_only and
                self.state.number != 0):

            moved_atoms = self.recycler.process_atoms
            mass_weights = self.reactant.mass.copy()
            mass_weights *= config.recycling_mass_weight_factor

            for i in range(len(self.reactant)):
                if i in moved_atoms:
                    mass_weights[i] = self.reactant.mass[i]

            weightsIO = StringIO.StringIO()
            numpy.savetxt(weightsIO, mass_weights)
            file_permission = os.stat("masses.dat").st_mode
            invariants['masses.dat'] = (weightsIO, file_permission)

        # Merge potential files into invariants
        invariants = dict(invariants, **io.load_potfiles(config.path_pot))

        for i in range(num_to_make):
            search = {}
            # The search dictionary contains the following key-value pairs:
            #   id           - CurrentState_WUID
            #   displacement - an atoms object containing the point the
            #                  saddle search will start at
            #   mode         - an Nx3 numpy array containing the initial mode
            search['id'] = "%d_%d" % (self.state.number, self.wuid)
            displacement, mode, disp_type = self.generate_displacement()
            self.job_table.add_row({'state': self.state.number,
                                    'wuid': self.wuid,
                                    'type': disp_type})

            ini_changes = [ ('Main', 'job', 'process_search'),
                            ('Main', 'random_seed',
                                str(int(numpy.random.random()*10**9))),
                          ]
            # If we are recycling a saddle but using the "dynamics" saddle
            # search, we need to switch to min_mode searches.
            if config.saddle_method == 'dynamics' and disp_type != 'dynamics':
                ini_changes.append(('Saddle Search', 'method', 'min_mode'))

            search['config.ini'] = io.modify_config(config.config_path, ini_changes)

            if displacement:
                dispIO = StringIO.StringIO()
                io.savecon(dispIO, displacement)
                search['displacement.con'] = dispIO
                modeIO = StringIO.StringIO()
                io.save_mode(modeIO, mode)
                search['direction.dat'] = modeIO

            searches.append(search) 
            self.wuid += 1
            # eager write
            self.save_wuid()

        if config.recycling_on and self.nrecycled > 0:
            logger.info("Recycled %i saddles" % self.nrecycled)

        try:
            self.comm.submit_jobs(searches, invariants)
            t2 = time()
            logger.info("Created " + str(len(searches)) + " searches")
            logger.debug("Created " + str(num_to_make / (t2 - t1)) +
                         " searches per second")
        except Exception:
            logger.exception("Failed to submit searches")
        self.job_table.write()
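When recycling is active with the dynamics saddle method, atoms that did not move in the previous process have their masses damped by recycling_mass_weight_factor, while moved atoms keep their true mass. The per-atom loop above could also be written with numpy fancy indexing; a self-contained sketch with made-up values:

import numpy

mass = numpy.array([1.0, 12.0, 16.0, 1.0])    # stand-in atomic masses
factor = 0.1                                  # stand-in weight factor
moved_atoms = [1, 2]                          # indices of atoms that moved

mass_weights = mass * factor
mass_weights[moved_atoms] = mass[moved_atoms]
print mass_weights   # moved atoms keep 12.0 and 16.0; the rest scale to 0.1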