def delete_data(file_id, collection_id=None):
    """Delete a shard from an Orion collection, or a local file.

    Parameters
    ----------
    file_id : int or str
        Shard id when running in Orion, otherwise a local file path.
    collection_id : int, optional
        Id of the Orion shard collection owning the shard (required in Orion).

    Returns
    -------
    bool
        True on success.
    """
    if not in_orion():
        # Local run: file_id is just a path on disk
        os.remove(file_id)
        return True

    if collection_id is None:
        raise ValueError("The Collection ID is None")

    # NOTE(review): retry_dict semantics are defined by get_session —
    # presumably per-HTTP-status retry settings; confirm against its docs
    retry_codes = {403: 5, 404: 20, 409: 45, 460: 15,
                   500: 2, 502: 45, 503: 45, 504: 45}
    session = OrionSession(
        requests_session=get_session(retry_dict=retry_codes))

    collection = session.get_resource(ShardCollection, collection_id)
    session.delete_resource(Shard(collection=collection, id=file_id))

    return True
def write(self, record, port):
    """Persist *record* as a recovery dataset.

    In Orion the record is written to a tagged Dataset resource;
    locally it is serialized to a binary ``.oedb`` file.

    Raises
    ------
    ValueError
        If the cycle id or system prefix name field is missing.
    """
    if not record.has_value(Fields.cycle_id):
        raise ValueError("Missing the current cycle ID")
    cycle_id = record.get_value(Fields.cycle_id)

    if not record.has_value(Fields.prefix_name_field):
        raise ValueError("System prefix name is missing")
    prefix_name = record.get_value(Fields.prefix_name_field)

    # Dataset is named for the previous cycle (cycle_id - 1)
    name_dataset = f"{prefix_name}_Recovery_Dataset_{cycle_id - 1}"

    if in_orion():
        # Output to database
        stream = Dataset.create(APISession, name_dataset)
        job_id = environ.get('ORION_JOB_ID')
        APISession.tag_resource(stream, "Job " + str(job_id))
        APISession.tag_resource(stream, "Gmx_Dataset")
        stream.write(record)
        stream.finalize()
    else:
        name_dataset += '.oedb'
        ofs = oechem.oeofstream(name_dataset)
        OEWriteRecord(ofs, record, fmt="binary")
        ofs.close()
def upload_data(filename, collection_id=None, shard_name=""):
    """Upload *filename* as a shard into an Orion collection.

    Outside Orion this is a no-op and the filename itself is returned.

    Parameters
    ----------
    filename : str
        Path of the local file to upload.
    collection_id : int, optional
        Id of the target Orion shard collection (required in Orion).
    shard_name : str
        Display name given to the new shard.

    Returns
    -------
    int or str
        The new shard id in Orion, otherwise the input filename.
    """
    if not in_orion():
        return filename

    if collection_id is None:
        raise ValueError("The Collection ID is None")

    # Retry-aware HTTP session for flaky status codes
    retry_codes = {403: 5, 404: 20, 409: 45, 460: 15,
                   500: 2, 502: 45, 503: 45, 504: 45}
    session = OrionSession(
        requests_session=get_session(retry_dict=retry_codes))

    collection = session.get_resource(ShardCollection, collection_id)
    shard = try_hard_to_create_shard(collection, filename, name=shard_name)
    file_id = shard.id
    shard.close()

    return file_id
def download_file(file_id, filename):
    """Download an Orion File resource to *filename*.

    Outside Orion *file_id* is already a local path and is used as-is.

    Raises
    ------
    IOError
        If the resulting local file does not exist.
    """
    if in_orion():
        # Retry-aware HTTP session for flaky status codes
        retry_codes = {403: 5, 404: 20, 409: 45, 460: 15,
                       500: 2, 502: 45, 503: 45, 504: 45}
        session = OrionSession(
            requests_session=get_session(retry_dict=retry_codes))

        resource = session.get_resource(File, file_id)
        resource.download_to_file(filename)
        fn_local = filename
    else:
        fn_local = file_id

    if not os.path.isfile(fn_local):
        raise IOError(
            "The trajectory file has not been found: {}".format(fn_local))

    return fn_local
def delete_file(file_id):
    """Delete an Orion File resource, or a local file when off Orion.

    Parameters
    ----------
    file_id : int or str
        Orion file id, otherwise a local path.

    Returns
    -------
    bool
        True on success.
    """
    if not in_orion():
        os.remove(file_id)
        return True

    # Retry-aware HTTP session for flaky status codes
    retry_codes = {403: 5, 404: 20, 409: 45, 460: 15,
                   500: 2, 502: 45, 503: 45, 504: 45}
    session = OrionSession(
        requests_session=get_session(retry_dict=retry_codes))

    resource = session.get_resource(File, file_id)
    session.delete_resource(resource)

    return True
def upload_file(filename, orion_ui_name='OrionFile', tag='Trajectory'):
    """Upload a local file to Orion, tagging it with *tag* and the job id.

    Outside Orion the filename is returned unchanged.

    Parameters
    ----------
    filename : str
        Path of the local file to upload.
    orion_ui_name : str
        Name shown for the file in the Orion UI.
    tag : str
        Resource tag applied to the uploaded file. Defaults to
        'Trajectory', matching the previously hard-coded value, and
        makes this function's interface consistent with the sibling
        upload_file that already takes a tag parameter.

    Returns
    -------
    int or str
        The Orion file id, otherwise the input filename.
    """
    if not in_orion():
        return filename

    # Retry-aware HTTP session for flaky status codes
    retry_codes = {403: 5, 404: 20, 409: 45, 460: 15,
                   500: 2, 502: 45, 503: 45, 504: 45}
    session = OrionSession(
        requests_session=get_session(retry_dict=retry_codes))

    file_upload = File.upload(session, orion_ui_name, filename)
    session.tag_resource(file_upload, tag)

    job_id = environ.get('ORION_JOB_ID')
    if job_id:
        session.tag_resource(file_upload, "Job {}".format(job_id))

    return file_upload.id
def __init__(self, record, inplace=True):
    """
    The Initialization function used to create the MDDatarecord object

    Parameters
    ----------
    record: OERecord object
        The OERecord used to create the MDDatarecord
    inplace: Bool
        If True the record will be updated in place, otherwise a deep
        copy of the record will be made
    """
    self.rec = record if inplace else copy.deepcopy(record)

    # Track which MD stages have been processed (stage name -> bool)
    if not self.rec.has_field(Fields.md_stages):
        self.processed = {}
    else:
        stages = self.rec.get_value(Fields.md_stages)
        self.processed = {stg.get_value(Fields.stage_name): False
                          for stg in stages}

    # Bug fix: previously collection_id was assigned only inside the
    # in_orion() branch, so accessing it off-Orion raised AttributeError.
    # Always define it; it stays None when no collection is available.
    self.collection_id = None
    if in_orion() and self.rec.has_field(Fields.collection):
        self.collection_id = self.rec.get_value(Fields.collection)

    # Scratch directory used by the record API for temporary files
    self.cwd = tempfile.mkdtemp()
def process(self, record, port):
    """Check the total record size (Orion only) and route the record.

    Records arriving on the 'intake' port are emitted on the success
    port, records from any other port on the failure port. In Orion a
    record whose serialized fields exceed 100 MB raises a ValueError.
    """
    try:
        if in_orion():
            # Sum the serialized size of every field on the record
            tot_size = 0
            for field in record.get_fields():
                tot_size += len(record.get_bytes(field))

            if tot_size > 100 * 1024 * 1024:
                raise ValueError(
                    "The record size exceeds the 100 MB: {}".format(
                        get_human_readable(tot_size)))
            else:
                self.opt['Logger'].info("Record size: {}".format(
                    get_human_readable(tot_size)))

        if port == "intake":
            self.success.emit(record)
        else:  # Fail in port
            self.failure.emit(record)

    except Exception as e:
        print("Failed to complete", str(e), flush=True)
        self.opt['Logger'].info('Exception {} {}'.format(
            str(e), self.title))
        self.log.error(traceback.format_exc())
        # Bug fix: the record was previously dropped on error; emit it
        # on the failure port like the other cubes in this file do.
        self.failure.emit(record)

    return
def get_protein_traj(self):
    """
    This method returns the protein molecule where conformers have been
    set as trajectory frames

    Returns
    -------
    multi_conformer_protein: OEMol
        The multi conformer protein

    Raises
    ------
    ValueError
        If the field is missing or, in Orion, the collection id is unset.
    """
    if not self.rec.has_field(Fields.protein_traj_confs):
        raise ValueError(
            "The protein conformer trajectory is not present on the record"
        )

    protein_conf = self.rec.get_value(Fields.protein_traj_confs)

    if not in_orion():
        return protein_conf

    # In Orion the field holds a shard id: download the shard and
    # read the molecule back from the temporary file
    retry_codes = {403: 5, 404: 20, 409: 45, 460: 15,
                   500: 2, 502: 45, 503: 45, 504: 45}
    session = OrionSession(
        requests_session=get_session(retry_dict=retry_codes))

    if self.collection_id is None:
        raise ValueError("The Collection ID is None")

    collection = session.get_resource(ShardCollection, self.collection_id)
    shard = session.get_resource(Shard(collection=collection), protein_conf)

    with TemporaryDirectory() as output_directory:
        protein_fn = os.path.join(output_directory,
                                  MDFileNames.trajectory_conformers)
        try_hard_to_download_shard(shard, protein_fn)

        mol = oechem.OEMol()
        with oechem.oemolistream(protein_fn) as ifs:
            oechem.OEReadMolecule(ifs, mol)

    shard.close()

    return mol
def end(self):
    """Close the cube's shard collection at teardown (Orion only).

    Nothing happens off Orion, when the cube was configured to keep the
    collection open, or when the collection is absent/already closed.
    """
    if not in_orion():
        return
    if self.opt['open']:
        return

    collection = self.collection
    if collection is not None and collection.state != "close":
        collection.close()
def process(self, record, port):
    """Attach a shard collection to the incoming record (Orion only).

    If the record already references a collection, fetch it once and
    cache it on the cube (optionally re-opening it). Otherwise create
    a new collection named after the job id, tag it, and write its id
    onto the record. The record is then emitted on the success port;
    on any error it goes to the failure port.
    """
    try:
        if in_orion():
            session = APISession

            if record.has_value(Fields.collection):
                # Record already carries a collection id: resolve and
                # cache the collection the first time we see it
                if self.collection is None:
                    collection_id = record.get_value(Fields.collection)
                    collection = session.get_resource(
                        ShardCollection, collection_id)
                    self.collection = collection

                    if self.opt['open']:
                        # Re-open only if not already open
                        if self.collection.state == "open":
                            pass
                        else:
                            self.collection.open()
            else:
                # No collection on the record: create one per cube,
                # named and tagged with the Orion job id
                if self.collection is None:
                    job_id = environ.get('ORION_JOB_ID')
                    self.collection = ShardCollection.create(
                        session, job_id)
                    job_id = environ.get('ORION_JOB_ID')
                    if job_id:
                        session.tag_resource(self.collection,
                                             "Job {}".format(job_id))

                record.set_value(Fields.collection, self.collection.id)

        self.success.emit(record)

    except Exception as e:
        print("Failed to complete", str(e), flush=True)
        self.opt['Logger'].info('Exception {} {}'.format(
            str(e), self.title))
        self.log.error(traceback.format_exc())
        self.failure.emit(record)

    return
def create_collection(self, name):
    """
    This method sets a collection field on the record to be used in Orion

    Parameters
    ----------
    name: String
        A string used to identify in the Orion UI the collection

    Returns
    -------
    boolean : Bool
        True if the collection creation in Orion was successful,
        otherwise False (always False when running outside Orion)
    """
    if not in_orion():
        return False

    if self.rec.has_field(Fields.collection):
        raise ValueError("Collection field already present on the record")

    # Retry-aware HTTP session for flaky status codes
    retry_codes = {403: 5, 404: 20, 409: 45, 460: 15,
                   500: 2, 502: 45, 503: 45, 504: 45}
    session = OrionSession(
        requests_session=get_session(retry_dict=retry_codes))

    collection = ShardCollection.create(session, name)

    self.rec.set_value(Fields.collection, collection.id)
    self.collection_id = collection.id

    return True
def delete_parmed(self):
    """
    This method deletes the Parmed object from the record (and, in
    Orion, the shard backing it).

    Returns
    -------
    boolean : Bool
        True if Parmed object deletion was successful

    Raises
    ------
    ValueError
        If no Parmed structure is on the record, or the collection id
        is unset while running in Orion.
    """
    if not self.has_parmed:
        raise ValueError(
            "The Parmed structure is not present on the record")

    if in_orion():
        if self.collection_id is None:
            raise ValueError("The Collection ID is None")

        # Retry-aware HTTP session for flaky status codes
        retry_codes = {403: 5, 404: 20, 409: 45, 460: 15,
                       500: 2, 502: 45, 503: 45, 504: 45}
        session = OrionSession(
            requests_session=get_session(retry_dict=retry_codes))

        collection = session.get_resource(ShardCollection,
                                          self.collection_id)
        file_id = self.rec.get_value(Fields.pmd_structure)
        session.delete_resource(Shard(collection=collection, id=file_id))

    self.rec.delete_field(Fields.pmd_structure)

    return True
def download_file(file_id, filename):
    """Download an Orion File resource to *filename*.

    Outside Orion *file_id* is already a local path and is used as-is.

    Raises
    ------
    IOError
        If the resulting local file does not exist.
    """
    if in_orion():
        resource = APISession.get_resource(File, file_id)
        resource.download_to_file(filename)
        fn_local = filename
    else:
        fn_local = file_id

    if not os.path.isfile(fn_local):
        raise IOError("The File has not been found: {}".format(fn_local))

    return fn_local
def download_data(file_id, path, collection_id=None):
    """Download the MD-data shard *file_id* into directory *path*.

    Outside Orion *file_id* is already a local path and is used as-is.

    Parameters
    ----------
    file_id : int or str
        Shard id in Orion, otherwise a local file path.
    path : str
        Destination directory for the downloaded file.
    collection_id : int, optional
        Id of the Orion shard collection (required in Orion).

    Returns
    -------
    str
        The local path of the MD data file.

    Raises
    ------
    IOError
        If the resulting local file does not exist.
    """
    if in_orion():
        if collection_id is None:
            raise ValueError("The Collection ID is None")

        # Retry-aware HTTP session for flaky status codes
        retry_codes = {403: 5, 404: 20, 409: 45, 460: 15,
                       500: 2, 502: 45, 503: 45, 504: 45}
        session = OrionSession(
            requests_session=get_session(retry_dict=retry_codes))

        collection = session.get_resource(ShardCollection, collection_id)
        shard = session.get_resource(Shard(collection=collection), file_id)

        # Local import — presumably kept here to avoid a circular
        # import at module load; verify before hoisting to the top
        from MDOrion.Standards import MDFileNames

        fn_local = os.path.join(path, MDFileNames.mddata)
        try_hard_to_download_shard(shard, fn_local)
        shard.close()
    else:
        fn_local = file_id

    if not os.path.isfile(fn_local):
        raise IOError(
            "The MD data file has not been found: {}".format(fn_local))

    return fn_local
def upload_file(filename, orion_ui_name='OrionFile', tag='Trajectory'):
    """Upload *filename* to Orion, tagging it with *tag* and the job id.

    Outside Orion the filename is returned unchanged.

    Parameters
    ----------
    filename : str
        Path of the local file to upload.
    orion_ui_name : str
        Name shown for the file in the Orion UI.
    tag : str
        Resource tag applied to the uploaded file.

    Returns
    -------
    int or str
        The Orion file id, otherwise the input filename.
    """
    if not in_orion():
        return filename

    file_upload = File.upload(APISession, orion_ui_name, filename)
    APISession.tag_resource(file_upload, tag)

    job_id = environ.get('ORION_JOB_ID')
    if job_id:
        APISession.tag_resource(file_upload, "Job {}".format(job_id))

    return file_upload.id
class Fields:
    """Shared OEField definitions for the Gromacs cycle cubes.

    The trajectory/restart field types differ by environment: Types.Int
    in Orion versus Types.String locally (presumably resource ids vs
    file paths — confirm against the cubes that set these fields).
    """

    # Current number of MD steps
    current_iteration_field = OEField("Current_Iterations_OMD", Types.Int)

    # Total number of MD steps
    md_nsteps_field = OEField("MD_nsteps_OMD", Types.Int)

    # Current number of cycles
    cycle_id = OEField("Cycle_ID_OMD", Types.Int)

    # Tpr binary file (hidden from the UI via _metaHidden)
    tpr_field = OEField("TPR_bytes_OMD", Types.Blob, meta=_metaHidden)

    # Prefix name field
    prefix_name_field = OEField("Prefix_OPLMD", Types.String)

    # NOTE: in_orion() is evaluated once at class-definition (import)
    # time, so the chosen field types are fixed for the process lifetime
    if in_orion():
        trajectory = OEField("GMXTrajectory_OMD", Types.Int,
                             meta=_metaHidden)
        gmx_restart = OEField("GMXRestart_OMD", Types.Int,
                              meta=_metaHidden)
    else:
        trajectory = OEField("GMXTrajectory_OMD", Types.String,
                             meta=_metaHidden)
        gmx_restart = OEField("GMXRestart_OMD", Types.String,
                              meta=_metaHidden)
def end(self):
    """Gather the per-conformer records collected for each ligand into
    one multi-conformer top-level record and emit it.

    On any error the most recently saved record is emitted on the
    failure port.
    """
    try:
        for sys_id, list_conf_rec in self.lig_sys_ids.items():
            # Save the first record to emit in failure cases
            # NOTE(review): this actually saves the whole conformer
            # list, not its first record — the failure port will then
            # receive a list; confirm this is intended
            self.record = list_conf_rec

            # catch case where for some reason the conf list list_conf_rec is empty
            if len(list_conf_rec) < 1:
                print('{} does not have any conformer data'.format(sys_id))
                continue
            elif len(list_conf_rec) > 1:
                # Conformers for each ligand are sorted based on their
                # confid in each ligand record
                list_conf_rec.sort(
                    key=lambda x: x.get_value(Fields.confid))

            new_rec = OERecord()
            new_rec.set_value(Fields.Analysis.oetrajconf_rec, list_conf_rec)

            # Get the first conf to move some general ligand data up to the top level
            rec0 = list_conf_rec[0]

            # copy all the initial fields in Fields.ligInit_rec up to the top level
            init_rec = rec0.get_value(Fields.ligInit_rec)

            # TODO METADATA IS NOT COPIED?
            for field in init_rec.get_fields():
                new_rec.set_value(field, init_rec.get_value(field))

            # next, fields that will simply be copied and not further used here
            protein = rec0.get_value(Fields.protein)
            new_rec.set_value(Fields.protein, protein)
            ligid = rec0.get_value(Fields.ligid)
            new_rec.set_value(Fields.ligid, ligid)
            if in_orion():
                collection_id = rec0.get_value(Fields.collection)
                new_rec.set_value(Fields.collection, collection_id)

            # finally, fields that will be copied and also further used here
            lig_multi_conf = oechem.OEMol(rec0.get_value(Fields.ligand))
            protein_name = rec0.get_value(Fields.protein_name)

            # MD Components copied at the ligand top level
            new_rec.set_value(Fields.md_components,
                              rec0.get_value(Fields.md_components))

            # if >1 confs, add their confs to the parent ligand at the top level
            for rec in list_conf_rec[1:]:
                lig_multi_conf.NewConf(rec.get_value(Fields.ligand))

            # get name of initial molecule
            if new_rec.has_value(OEPrimaryMolField()):
                init_mol = new_rec.get_value(OEPrimaryMolField())
            else:
                print(
                    '{} ConformerGatheringData: new_rec cannot find the OEPrimaryMolField'
                    .format(sys_id))
                continue

            lig_title = init_mol.GetTitle()
            lig_multi_conf.SetTitle(lig_title)

            # regenerate protein-ligand title since all titles on
            # conformers include conformer id
            title = 'p' + protein_name + '_l' + lig_title

            # set other fields on the new record
            new_rec.set_value(Fields.title, title)
            new_rec.set_value(Fields.ligand, lig_multi_conf)
            new_rec.set_value(Fields.primary_molecule, lig_multi_conf)
            new_rec.set_value(Fields.protein_name, protein_name)
            new_rec.set_value(Fields.ligand_name, lig_title)

            self.success.emit(new_rec)

    except Exception as e:
        print("Failed to complete", str(e), flush=True)
        self.opt['Logger'].info('Exception {} {}'.format(
            str(e), self.title))
        self.log.error(traceback.format_exc())
        self.failure.emit(self.record)
def wrapper(*args):
    """Run the wrapped MD simulation, scheduling it onto a free local GPU.

    Outside Orion, when OE_VISIBLE_DEVICES is set, the function
    busy-waits for a free (gpu, slot) pair by taking an exclusive
    non-blocking flock on a per-slot file, runs the simulation with
    that GPU id, then releases the lock. Otherwise the simulation is
    run directly with the given arguments.
    """
    mdstate = args[0]        # MD state object
    ff_parameters = args[1]  # force-field parameters
    opt = args[2]            # options dict (Logger, system ids, ...)

    if 'OE_VISIBLE_DEVICES' in os.environ and not in_orion():
        # Comma-separated list of GPU indexes usable by this process
        gpus_available_indexes = os.environ["OE_VISIBLE_DEVICES"].split(
            ',')

        opt['Logger'].info("OE LOCAL FLOE CLUSTER OPTION IN USE")

        # OE_MAX = max concurrent simulations per GPU (default 1)
        if 'OE_MAX' in os.environ:
            opt['OE_MAX'] = int(os.environ["OE_MAX"])
        else:
            opt['OE_MAX'] = 1

        opt['Logger'].info("OE MAX = {}".format(opt['OE_MAX']))

        # Spin until a (gpu, slot) lock file can be acquired
        while True:
            for gpu_id in gpus_available_indexes:
                for p in range(0, opt['OE_MAX']):
                    fn = str(gpu_id) + '_' + str(p) + '.txt'
                    try:
                        with open(fn, 'a') as file:
                            # Non-blocking exclusive lock: raises
                            # BlockingIOError when the slot is busy
                            fcntl.flock(file, fcntl.LOCK_EX | fcntl.LOCK_NB)
                            # opt['Logger'].warn("LOCKED GPU ID = {} - MOL ID = {}".format(gpu_id, opt['system_id']))
                            file.write(
                                "MD - name = {} MOL_ID = {} GPU_IDS = {} GPU_ID = {}\n"
                                .format(opt['system_title'],
                                        opt['system_id'],
                                        gpus_available_indexes,
                                        str(gpu_id)))
                            opt['gpu_id'] = str(gpu_id)

                            new_mdstate = sim(mdstate, ff_parameters, opt)

                            time.sleep(5.0)
                            # opt['Logger'].warn("UNLOCKING GPU ID = {} - MOL ID = {}".format(gpu_id, opt['system_id']))
                            fcntl.flock(file, fcntl.LOCK_UN)

                            return new_mdstate

                    except BlockingIOError:
                        # Slot is locked by another process: try next one
                        time.sleep(0.1)

                    except Exception as e:
                        # If the simulation fails for other reasons
                        # NOTE(review): the inner 'as e' below shadows
                        # (and on exit unbinds) this e — if the unlock
                        # itself raises, the ValueError below would hit
                        # a NameError; confirm and fix separately
                        try:
                            time.sleep(5.0)
                            fcntl.flock(file, fcntl.LOCK_UN)
                        except Exception as e:
                            pass
                        raise ValueError("{} Simulation Failed".format(
                            str(e)))
    else:
        new_mdstate = sim(*args)

    return new_mdstate
def set_protein_traj(self, protein_conf, shard_name=""):
    """
    This method sets the multi conformer protein trajectory on the record.

    In Orion the molecule is written to a temporary file, uploaded as a
    shard into the record's collection (replacing any previous shard),
    and the shard id is stored; locally the molecule itself is stored.

    Parameters
    ----------
    protein_conf: oechem.OEMol
        The multi conformer protein trajectory
    shard_name: String
        In Orion the shard will be named by using the shard_name

    Returns
    -------
    boolean: Bool
        True if the setting was successful
    """
    if not isinstance(protein_conf, oechem.OEMol):
        # Bug fix: the message previously claimed a Parmed Structure
        # was expected (copy-paste from set_parmed); the check is for
        # an OEMol
        raise ValueError(
            "The passed protein object is not a valid OEMol: {}".format(
                type(protein_conf)))

    if not in_orion():
        self.rec.set_value(Fields.protein_traj_confs, protein_conf)
        return True

    with TemporaryDirectory() as output_directory:
        protein_fn = os.path.join(output_directory,
                                  MDFileNames.trajectory_conformers)
        with oechem.oemolostream(protein_fn) as ofs:
            oechem.OEWriteConstMolecule(ofs, protein_conf)

        if self.collection_id is None:
            raise ValueError("The Collection ID is None")

        # Drop any previously uploaded trajectory shard
        if self.rec.has_field(Fields.protein_traj_confs):
            fid = self.rec.get_value(Fields.protein_traj_confs)
            utils.delete_data(fid, collection_id=self.collection_id)

        # Retry-aware HTTP session for flaky status codes
        retry_codes = {403: 5, 404: 20, 409: 45, 460: 15,
                       500: 2, 502: 45, 503: 45, 504: 45}
        session = OrionSession(
            requests_session=get_session(retry_dict=retry_codes))

        collection = session.get_resource(ShardCollection,
                                          self.collection_id)
        shard = try_hard_to_create_shard(collection, protein_fn,
                                         name=shard_name)
        shard.close()

        self.rec.set_value(Fields.protein_traj_confs, shard.id)

    return True
def process(self, record, port):
    """Combine the per-conformer trajectory OEMols into single
    ligand-level trajectory OEMols (ligand, water and protein), store
    them in a new OETraj record on the input record, and emit it.

    On any error the record is emitted on the failure port.
    """
    try:
        # The copy of the dictionary option as local variable
        # is necessary to avoid filename collisions due to
        # the parallel cube processes
        opt = dict(self.opt)

        # Logger string
        opt['Logger'].info(' Beginning ConfTrajsToLigTraj')
        system_title = utl.RequestOEFieldType(record, Fields.title)
        opt['Logger'].info(
            '{} Attempting to combine conf traj OEMols into ligand traj OEMol'
            .format(system_title))

        # Go find the ligand and LigTraj fields in each of the conformer records
        if not record.has_field(Fields.Analysis.oetrajconf_rec):
            raise ValueError(
                '{} could not find the conformer record'.format(
                    system_title))
        else:
            opt['Logger'].info(
                '{} found the conformer record'.format(system_title))

        # set up ligand and LigTraj lists then loop over conformer records
        poseIdVec = []
        ligTrajConfs = []
        protTrajConfs = []
        watTrajConfs = []
        list_conf_rec = record.get_value(Fields.Analysis.oetrajconf_rec)
        for confrec in list_conf_rec:
            confid = utl.RequestOEFieldType(confrec, Fields.confid)
            if not confrec.has_field(Fields.Analysis.oetraj_rec):
                raise ValueError(
                    '{} confID {}: could not find traj record'.format(
                        system_title, confid))
            oetrajRecord = confrec.get_value(Fields.Analysis.oetraj_rec)

            # Extract the ligand traj OEMol from the OETraj record
            ligTraj = utl.RequestOEField(oetrajRecord, 'LigTraj',
                                         Types.Chem.Mol)
            # poseIdVec maps every frame back to its conformer id
            poseIdVec += [confid] * ligTraj.NumConfs()
            ligTrajConfs.append(ligTraj)
            opt['Logger'].info(
                '{} confID {}: adding ligTraj with {} atoms, {} confs'.
                format(system_title, confid, ligTraj.NumAtoms(),
                       ligTraj.NumConfs()))

            # Extract the activeSite water traj OEMol from the OETraj record
            watTraj = utl.RequestOEField(oetrajRecord, 'WatTraj',
                                         Types.Chem.Mol)
            watTrajConfs.append(watTraj)
            opt['Logger'].info(
                '{} confID {}: adding watTraj with {} atoms, {} confs'.
                format(system_title, confid, watTraj.NumAtoms(),
                       watTraj.NumConfs()))

            # Extract the protTraj OEMol from the OETraj record
            mdtrajrecord = MDDataRecord(oetrajRecord)
            # NOTE(review): accessed without parentheses — presumably a
            # property on MDDataRecord; confirm
            protTraj = mdtrajrecord.get_protein_traj
            protTrajConfs.append(protTraj)
            opt['Logger'].info(
                '{} confID {}: adding protTraj with {} atoms, {} confs'.
                format(system_title, confid, protTraj.NumAtoms(),
                       protTraj.NumConfs()))
            del mdtrajrecord

        if len(ligTrajConfs) < 1 or len(protTrajConfs) < 1:
            raise ValueError(
                '{} empty list of lig or protein trajectory OEMols'.format(
                    system_title))

        # Merge all ligand conformer trajectories into one OEMol
        ligTraj = oechem.OEMol(ligTrajConfs[0])
        xyz = oechem.OEFloatArray(3 * ligTraj.GetMaxAtomIdx())
        for trajMol in ligTrajConfs[1:]:
            for conf in trajMol.GetConfs():
                conf.GetCoords(xyz)
                ligTraj.NewConf(xyz)
        opt['Logger'].info(
            '{} composite ligTraj has {} atoms, {} confs'.format(
                system_title, ligTraj.NumAtoms(), ligTraj.NumConfs()))

        # Merge all water conformer trajectories into one OEMol
        watTraj = oechem.OEMol(watTrajConfs[0])
        xyz = oechem.OEFloatArray(3 * watTraj.GetMaxAtomIdx())
        for trajMol in watTrajConfs[1:]:
            for conf in trajMol.GetConfs():
                conf.GetCoords(xyz)
                watTraj.NewConf(xyz)
        opt['Logger'].info(
            '{} composite watTraj has {} atoms, {} confs'.format(
                system_title, watTraj.NumAtoms(), watTraj.NumConfs()))

        # Merge all protein conformer trajectories into one OEMol
        protTraj = protTrajConfs[0]
        xyz = oechem.OEFloatArray(3 * protTraj.GetMaxAtomIdx())
        for trajMol in protTrajConfs[1:]:
            for conf in trajMol.GetConfs():
                conf.GetCoords(xyz)
                protTraj.NewConf(xyz)
        opt['Logger'].info(
            '{} composite protTraj has {} atoms, {} confs'.format(
                system_title, protTraj.NumAtoms(), protTraj.NumConfs()))

        record.set_value(Fields.Analysis.poseIdVec, poseIdVec)

        # Create new record with OETraj results
        oetrajRecord = OERecord()
        oetrajRecord.set_value(OEField('LigTraj', Types.Chem.Mol), ligTraj)
        if watTraj:
            oetrajRecord.set_value(OEField('WatTraj', Types.Chem.Mol),
                                   watTraj)
        if in_orion():
            collection_id = utl.RequestOEFieldType(record,
                                                   Fields.collection)
            oetrajRecord.set_value(Fields.collection, collection_id)

        mdrecord_traj = MDDataRecord(oetrajRecord)
        mdrecord_traj.set_protein_traj(protTraj,
                                       shard_name="ProteinTrajConfs_")

        record.set_value(Fields.Analysis.oetraj_rec, oetrajRecord)

        self.success.emit(record)

    except Exception as e:
        print("Failed to complete", str(e), flush=True)
        opt['Logger'].info(
            'Exception {} in ConfTrajsToLigTraj on {}'.format(
                str(e), system_title))
        self.log.error(traceback.format_exc())
        # Return failed mol
        self.failure.emit(record)

    return
def process(self, record, port):
    """Convert an MD stage trajectory into multi-conformer protein,
    ligand and water OEMols and attach them to the record in a new
    OETraj record, after validating the ligand's isomeric SMILES.

    On any error the record is emitted on the failure port.
    """
    try:
        # The copy of the dictionary option as local variable
        # is necessary to avoid filename collisions due to
        # the parallel cube processes
        opt = dict(self.opt)

        # Create the MD record to use the MD Record API
        mdrecord = MDDataRecord(record)

        # Logger string
        opt['Logger'].info(' ')
        # NOTE(review): accessed without parentheses — presumably a
        # property on MDDataRecord; confirm
        system_title = mdrecord.get_title
        # sys_id = mdrecord.get_flask_id
        opt['Logger'].info(
            '{}: Attempting MD Traj conversion into OEMols'.format(
                system_title))

        traj_fn = mdrecord.get_stage_trajectory()

        opt['Logger'].info('{} Temp Directory: {}'.format(
            system_title, os.path.dirname(traj_fn)))
        opt['Logger'].info('{} Trajectory filename: {}'.format(
            system_title, traj_fn))

        # Generate multi-conformer protein and ligand OEMols from the trajectory
        opt['Logger'].info(
            '{} Generating protein and ligand trajectory OEMols'.format(
                system_title))

        flask = mdrecord.get_flask

        md_components = record.get_value(Fields.md_components)
        # opt['Logger'].info(md_components.get_info)

        # Check Ligand Isomeric Smiles: the component ligand must match
        # the ligand stored on the record
        lig_comp = md_components.get_ligand
        lig_ref = record.get_value(Fields.ligand)

        smi_lig_comp = oechem.OECreateSmiString(lig_comp)
        smi_lig_ref = oechem.OECreateSmiString(lig_ref)

        if smi_lig_ref != smi_lig_comp:
            raise ValueError(
                "Ligand Isomeric Smiles String check failure: {} vs {}".
                format(smi_lig_comp, smi_lig_ref))

        ptraj, ltraj, wtraj = utl.extract_aligned_prot_lig_wat_traj(
            md_components, flask, traj_fn, opt,
            water_cutoff=opt['water_cutoff'])

        ltraj.SetTitle(record.get_value(Fields.ligand_name))
        ptraj.SetTitle(record.get_value(Fields.protein_name))

        opt['Logger'].info(
            '{} #atoms, #confs in protein traj OEMol: {}, {}'.format(
                system_title, ptraj.NumAtoms(), ptraj.NumConfs()))
        opt['Logger'].info(
            '{} #atoms, #confs in ligand traj OEMol: {}, {}'.format(
                system_title, ltraj.NumAtoms(), ltraj.NumConfs()))
        opt['Logger'].info(
            '{} #atoms, #confs in water traj OEMol: {}, {}'.format(
                system_title, wtraj.NumAtoms(), wtraj.NumConfs()))

        # Create new record with OETraj results
        oetrajRecord = OERecord()
        oetrajRecord.set_value(OEField('LigTraj', Types.Chem.Mol), ltraj)
        if wtraj:
            oetrajRecord.set_value(OEField('WatTraj', Types.Chem.Mol),
                                   wtraj)
        if in_orion():
            oetrajRecord.set_value(Fields.collection,
                                   mdrecord.collection_id)

        mdrecord_traj = MDDataRecord(oetrajRecord)
        mdrecord_traj.set_protein_traj(ptraj,
                                       shard_name="ProteinTrajConfs_")

        record.set_value(Fields.Analysis.oetraj_rec, oetrajRecord)

        # update or initiate the list of analyses that have been done
        if record.has_value(Fields.Analysis.analysesDone):
            analysesDone = utl.RequestOEFieldType(
                record, Fields.Analysis.analysesDone)
            analysesDone.append('OETraj')
        else:
            analysesDone = ['OETraj']
        record.set_value(Fields.Analysis.analysesDone, analysesDone)

        opt['Logger'].info(
            '{}: saved protein, ligand and water traj OEMols'.format(
                system_title))

        self.success.emit(record)

        del mdrecord
        del mdrecord_traj

    except Exception as e:
        print("Failed to complete", str(e), flush=True)
        self.log.error(traceback.format_exc())
        # Return failed mol
        self.failure.emit(record)

    return
def set_parmed(self, pmd, sync_stage_name=None, shard_name=""):
    """
    This method sets the Parmed object on the record. Return True if
    the setting was successful. If sync_stage_name is not None the
    parmed structure positions, velocities and box vectors will be
    synchronized with the MD State selected by passing the MD stage name

    Parameters
    ----------
    pmd: Parmed Structure object
        The Parmed Structure object to be set on the record
    sync_stage_name: String or None
        The stage name that is used to synchronize the Parmed structure
    shard_name: String
        In Orion the shard will be named by using the shard_name

    Returns
    -------
    boolean : Bool
        True if the setting was successful
    """
    if not isinstance(pmd, parmed.Structure):
        raise ValueError(
            "The passed Parmed object is not a valid Parmed Structure: {}".
            format(type(pmd)))

    # Optionally synchronize coordinates with the chosen MD stage state
    if sync_stage_name is not None:
        mdstate = self.get_stage_state(stg_name=sync_stage_name)
        pmd.positions = mdstate.get_positions()
        pmd.velocities = mdstate.get_velocities()
        pmd.box_vectors = mdstate.get_box_vectors()

    if not in_orion():
        self.rec.set_value(Fields.pmd_structure, pmd)
        return True

    with TemporaryDirectory() as output_directory:
        parmed_fn = os.path.join(output_directory, 'parmed.pickle')
        with open(parmed_fn, 'wb') as f:
            pickle.dump(pmd.__getstate__(), f)

        if self.collection_id is None:
            raise ValueError("The Collection ID is None")

        # Remove any previously uploaded Parmed shard
        if self.rec.has_field(Fields.pmd_structure):
            fid = self.rec.get_value(Fields.pmd_structure)
            utils.delete_data(fid, collection_id=self.collection_id)

        # Retry-aware HTTP session for flaky status codes
        retry_codes = {403: 5, 404: 20, 409: 45, 460: 15,
                       500: 2, 502: 45, 503: 45, 504: 45}
        session = OrionSession(
            requests_session=get_session(retry_dict=retry_codes))

        collection = session.get_resource(ShardCollection,
                                          self.collection_id)
        shard = try_hard_to_create_shard(collection, parmed_fn,
                                         name=shard_name)
        shard.close()

        self.rec.set_value(Fields.pmd_structure, shard.id)

    return True
def get_parmed(self, sync_stage_name=None):
    """
    This method returns the Parmed object. An exception is raised if
    the Parmed object cannot be found. If sync_stage_name is not None
    the parmed structure positions, velocities and box vectors will be
    synchronized with the MD State selected by passing the MD stage name

    Parameters
    ----------
    sync_stage_name: String or None
        The stage name that is used to synchronize the Parmed structure

    Returns
    -------
    parmed : Parmed Structure
        The Parmed Structure object
    """
    if not self.rec.has_field(Fields.pmd_structure):
        raise ValueError(
            "The Parmed reference is not present on the record")

    pmd_structure = self.rec.get_value(Fields.pmd_structure)

    if in_orion():
        # In Orion the field holds a shard id: download and unpickle
        retry_codes = {403: 5, 404: 20, 409: 45, 460: 15,
                       500: 2, 502: 45, 503: 45, 504: 45}
        session = OrionSession(
            requests_session=get_session(retry_dict=retry_codes))

        if self.collection_id is None:
            raise ValueError("The Collection ID is None")

        collection = session.get_resource(ShardCollection,
                                          self.collection_id)
        shard = session.get_resource(Shard(collection=collection),
                                     pmd_structure)

        with TemporaryDirectory() as output_directory:
            parmed_fn = os.path.join(output_directory, "parmed.pickle")
            try_hard_to_download_shard(shard, parmed_fn)

            with open(parmed_fn, 'rb') as f:
                parm_dic = pickle.load(f)

        pmd_structure = parmed.structure.Structure()
        pmd_structure.__setstate__(parm_dic)

        shard.close()

    # Optionally synchronize coordinates with the chosen MD stage state
    if sync_stage_name is not None:
        mdstate = self.get_stage_state(stg_name=sync_stage_name)
        pmd_structure.positions = mdstate.get_positions()
        pmd_structure.velocities = mdstate.get_velocities()
        pmd_structure.box_vectors = mdstate.get_box_vectors()

    return pmd_structure