def finalize(self):
    """Assemble the final service payload and strip internal state.

    Records the lowest-energy optimization at every grid point, re-keys
    the optimization history by JSON-encoded grid id, removes the
    service-internal bookkeeping entries, and returns ``self.data``.
    """
    self.data["success"] = True
    self.data["final_energies"] = {}
    self.data["minimum_positions"] = {}

    # Locate the lowest-energy entry (index 2 is the energy) per grid point.
    for grid_str, entries in self.data["torsiondrive_state"]["grid_status"].items():
        best = int(np.argmin([entry[2] for entry in entries]))
        grid_key = json.dumps(td_api.grid_id_from_string(grid_str))
        self.data["minimum_positions"][grid_key] = best
        self.data["final_energies"][grid_key] = entries[best][2]

    # Re-key history by JSON-encoded grid id for the output document.
    self.data["optimization_history"] = {
        json.dumps(td_api.grid_id_from_string(grid_str)): entries
        for grid_str, entries in self.data["optimization_history"].items()
    }

    # Drop temporaries that were only needed while the service was running.
    for temp_key in ("task_map", "remaining_tasks", "molecule_template",
                     "queue_keys", "torsiondrive_state", "status",
                     "required_tasks"):
        del self.data[temp_key]

    return self.data
def update_output(self):
    """Push the current scan state into the TorsionDriveRecord output.

    Returns
    -------
    bool
        Always ``True``.
    """
    _check_td()
    from torsiondrive import td_api

    # Lowest-energy optimization (index 2 is the energy) per grid point.
    minimum_positions = {}
    final_energies = {}
    for grid_str, entries in self.torsiondrive_state["grid_status"].items():
        best = int(np.argmin([entry[2] for entry in entries]))
        grid_key = json.dumps(td_api.grid_id_from_string(grid_str))
        minimum_positions[grid_key] = best
        final_energies[grid_key] = entries[best][2]

    # History re-keyed by JSON-encoded grid id.
    history = {
        json.dumps(td_api.grid_id_from_string(grid_str)): entries
        for grid_str, entries in self.optimization_history.items()
    }

    self.output = self.output.copy(update={
        "status": self.status,
        "minimum_positions": minimum_positions,
        "final_energy_dict": final_energies,
        "optimization_history": history,
    })

    return True
def submit_optimization_tasks(self, task_dict):
    """Build and submit one constrained-optimization task per geometry.

    Parameters
    ----------
    task_dict : dict
        Maps a grid-id string to a list of starting geometries.
    """
    _check_td()
    from torsiondrive import td_api

    new_tasks = {}
    task_map = {}
    for grid_str, geometries in task_dict.items():
        task_map[grid_str] = []
        for idx, geometry in enumerate(geometries):
            packet = json.loads(self.optimization_template)

            # Fill in the dihedral constraint values for this grid point.
            constraints = json.loads(self.dihedral_template)
            grid_id = td_api.grid_id_from_string(grid_str)
            for pos, angle in enumerate(grid_id):
                constraints[pos]["value"] = angle
            packet["meta"]["keywords"]["constraints"] = {"set": constraints}

            # Attach the starting molecule.
            molecule = json.loads(self.molecule_template)
            molecule["geometry"] = geometry
            packet["data"] = [molecule]

            task_key = "{}-{}".format(grid_str, idx)
            new_tasks[task_key] = packet
            task_map[grid_str].append(task_key)

    self.task_manager.submit_tasks("optimization", new_tasks)
    self.task_map = task_map
def run_torsiondrive_scan(self):
    """Drive a full torsiondrive scan to completion, serially.

    Workflow:
    1. Build the initial torsiondrive state.
    2. Ask td_api for the next batch of constrained optimizations.
    3. When no jobs remain, return the lowest energy at each grid point.
    4. Otherwise run every job through geomeTRIC's JSON interface.
    5. Feed the results back into the state and repeat from step 2.
    """
    # Step 1: initial state.
    td_state = td_api.create_initial_state(
        dihedrals=self.dihedrals,
        grid_spacing=self.grid_spacing,
        elements=self.elements,
        init_coords=self.init_coords,
        dihedral_ranges=self.dihedral_ranges,
        energy_decrease_thresh=self.energy_decrease_thresh,
        energy_upper_limit=self.energy_upper_limit,
    )

    while True:
        # Step 2: request the next batch of jobs.
        next_jobs = td_api.next_jobs_from_state(td_state, verbose=True)

        # Step 3: nothing left to do -> scan is complete.
        if not next_jobs:
            print("torsiondrive Scan Finished")
            return td_api.collect_lowest_energies(td_state)

        # Step 4: run every pending geometry through geomeTRIC.
        job_results = collections.defaultdict(list)
        for grid_id_str, geometries in next_jobs.items():
            dihedral_values = td_api.grid_id_from_string(grid_id_str)
            for start_geo in geometries:
                geometric_input = self.make_geomeTRIC_input(dihedral_values, start_geo)
                geometric_output = geometric.run_json.geometric_run_json(geometric_input)

                # The last trajectory frame holds the optimized result.
                last_frame = geometric_output['trajectory'][-1]
                final_geo = last_frame['molecule']['geometry']
                final_energy = last_frame['properties']['return_energy']

                # Results must be appended in the same order as the inputs;
                # the serial loop guarantees that here.
                job_results[grid_id_str].append((start_geo, final_geo, final_energy))

        # Step 5: merge the batch back into the state.
        td_api.update_state(td_state, job_results)
def get_next_jobs(self, next_jobs):
    """Fetch the next batch of jobs from the torsiondrive state and enqueue them.

    Parameters
    ----------
    next_jobs : queue-like
        Receives ``(geometry_array, first_dihedral_value)`` tuples via ``put``.

    Returns
    -------
    The same queue, with any newly generated jobs added.
    """
    pending = td_api.next_jobs_from_state(self.td_state, verbose=True)

    for grid_id_str, geometries in pending.items():
        dihedral_values = td_api.grid_id_from_string(grid_id_str)
        for geometry in geometries:
            # NOTE(review): only dihedral_values[0] is queued — presumably a
            # 1-D scan; confirm against the consumer of this queue.
            next_jobs.put((np.array(geometry), dihedral_values[0]))

    return next_jobs
def submit_optimization_tasks(self, task_dict):
    """Submit constrained optimizations and record them in the history.

    Parameters
    ----------
    task_dict : dict
        Maps a grid-id string to a list of starting geometries.
    """
    _check_td()
    from torsiondrive import td_api

    new_tasks = {}
    task_map = {}
    for grid_str, geometries in task_dict.items():
        task_map[grid_str] = []
        for idx, geometry in enumerate(geometries):
            packet = json.loads(self.optimization_template)

            # Fill in the dihedral constraint values for this grid point.
            constraints = json.loads(self.dihedral_template)
            grid_id = td_api.grid_id_from_string(grid_str)
            for pos, angle in enumerate(grid_id):
                constraints[pos]["value"] = angle

            # Merge into any pre-existing constraints so the
            # "extra constraints" feature keeps working.
            keywords = packet["meta"]["keywords"]
            keywords.setdefault("constraints", {})
            keywords["constraints"].setdefault("set", [])
            keywords["constraints"]["set"].extend(constraints)

            # Attach the starting molecule.
            molecule = json.loads(self.molecule_template)
            molecule["geometry"] = geometry
            packet["data"] = [molecule]

            task_key = "{}-{}".format(grid_str, idx)
            new_tasks[task_key] = packet
            task_map[grid_str].append(task_key)

    self.task_manager.submit_tasks("optimization", new_tasks)
    self.task_map = task_map

    # Record the submitted tasks in the per-grid-point history.
    for grid_str, task_ids in self.task_map.items():
        self.optimization_history.setdefault(grid_str, [])
        for task_id in task_ids:
            self.optimization_history[grid_str].append(
                self.task_manager.required_tasks[task_id])

    self.update_output()
def run_tdrive(self):
    """Run the scan and collect the optimized energies and structures
    into ``self.results``, ordered by grid id.

    NOTE(review): the original doc claimed geometries are converted to
    angstrom, but no conversion appears in this method — confirm units
    upstream in ``start_server`` / the stored grid state.
    """
    self.results = {}
    final_energies = self.start_server()

    # Energies first, sorted by grid id.
    for grid_id, energy in sorted(final_energies.items()):
        self.results[grid_id] = [energy]

    # Then pull the matching optimized geometry out of the state by
    # matching on the energy (entry[-1]).
    for grid_str, entries in self.td_state['grid_status'].items():
        grid_id = td_api.grid_id_from_string(grid_str)
        for entry in entries:
            if self.results[grid_id][0] == entry[-1]:
                self.results[grid_id].append(entry[1])
def submit_optimization_tasks(self, job_dict):
    """Create, register, and queue one constrained optimization per geometry.

    Parameters
    ----------
    job_dict : dict
        Maps a grid-id string to a list of starting geometries.

    Side effects: adds molecules to storage, submits tasks through the
    queue socket, and stashes bookkeeping (``job_map``, ``remaining_jobs``,
    ``complete_jobs``, ``queue_keys``, ...) in ``self.data`` for the next
    service iteration.
    """
    # Build out all of the new molecules in a flat dictionary keyed by
    # (grid-id string, geometry index).
    flat_map = {}
    initial_molecule = json.dumps(self.data["molecule_template"])
    for v, k in job_dict.items():
        for num, geom in enumerate(k):
            mol = json.loads(initial_molecule)
            mol["geometry"] = geom
            flat_map[(v, str(num))] = mol

    # Add new molecules
    self.storage_socket.add_molecules(flat_map)

    # Prepare optimization metadata template (re-parsed per task below).
    meta_packet = json.dumps({
        "meta": {
            "procedure": "optimization",
            "keywords": self.data["optimization_meta"],
            "program": self.data["optimization_program"],
            "qc_meta": self.data["qc_meta"]
        },
    })

    # Hook template: on task completion, decrement the remaining-job
    # counter and record the finished task id on this service document.
    hook_template = json.dumps({
        "document": ("service_queue", self.data["id"]),
        "updates": [["inc", "remaining_jobs", -1],
                    ["set", "complete_jobs", "$task_id"]]
    })

    job_map = {}
    full_tasks = []
    complete_jobs = {}
    for key, mol in flat_map.items():
        packet = json.loads(meta_packet)

        # Construct constraints: pair each dihedral template entry with its
        # grid value for this grid point.
        # NOTE(review): "containts" is a typo for "constraints"; rename when
        # a behavior-touching change is safe.
        containts = [
            tuple(x) + (str(y), ) for x, y in zip(
                self.data["torsiondrive_meta"]["dihedral_template"],
                td_api.grid_id_from_string(key[0]))
        ]
        packet["meta"]["keywords"]["constraints"] = {"set": containts}
        packet["data"] = [mol]

        # Turn packet into a full task
        task, complete, errors = procedures.get_procedure_input_parser(
            "optimization")(self.storage_socket, packet, duplicate_id="id")

        uid = str(uuid.uuid4())
        if len(complete):
            # Job is already complete
            complete_jobs[uid] = complete[0]
        else:
            # Create a hook which will update the complete jobs uid
            hook = json.loads(hook_template)
            hook["updates"][-1][1] = "complete_jobs." + uid
            task[0]["hooks"].append(hook)
            full_tasks.append(task[0])
            job_map[uid] = key

    # Create data for next round
    self.data["update_structure"] = {
        k: len(v)
        for k, v in job_dict.items()
    }
    self.data["job_map"] = job_map
    self.data["remaining_jobs"] = len(job_map)
    self.data["complete_jobs"] = complete_jobs

    # Add tasks to Nanny
    ret = self.queue_socket.submit_tasks(full_tasks)
    self.data["queue_keys"] = [x[1] for x in ret["data"]]