ligands (file): dataset of prepared ligands posed in the protein active site.
protein (file): dataset of the prepared protein structure.

Outputs:
--------
out (.oedb file): file of the protein-ligand complexes with parameters.
"""

# Locally the floe can be invoked by running the terminal command:
# python floes/LigReadPrep.py --ligands ligands.oeb --protein protein.oeb --out prod.oeb

job.classification = [['Molecular Dynamics']]
# job.uuid = "372e1890-d053-4027-970a-85b209e4676f"
job.tags = [tag for lists in job.classification for tag in lists]

# Ligand setting
iligs = DatasetReaderCube("LigandReader", title="Ligand Reader")
iligs.promote_parameter("data_in", promoted_name="ligands",
                        title="Ligand Input Dataset",
                        description="Ligand Dataset")

ligset = LigandSetting("LigandSetting", title="Ligand Setting")
ligset.set_parameters(lig_res_name='LIG')

chargelig = ParallelLigandChargeCube("LigCharge", title="Ligand Charge")
chargelig.promote_parameter('charge_ligands', promoted_name='charge_ligands',
                            description="Charge the ligands or not", default=True)

ligid = IDSettingCube("Ligand Ids")
from floe.api import WorkFloe
from orionplatform.cubes import DatasetReaderCube, DatasetWriterCube
from MDOrion.TrjAnalysis.cubes_trajProcessing import (ConformerGatheringData,
                                                      ParallelConfTrajsToLigTraj,
                                                      ParallelConcatenateTrajMMPBSACube)

job = WorkFloe("Testing combining confs Traj OEMols to lig Traj OEMol")

job.description = """
Testing aggregating conf trajs into one ligand traj OEMol.
The input dataset is an .oedb file of the aggregated confs MD results
with Traj OEMols + IntE + PBSA.
"""

# job.uuid = "7cacc2af-cae7-4dc7-8956-fcf539861e3d"

ifs = DatasetReaderCube("ifs")
ifs.promote_parameter("data_in", promoted_name="in",
                      title="System Input OERecord",
                      description="OERecord file name")

confGather = ConformerGatheringData("Gathering Conformer Records")
ligTrajCube = ParallelConfTrajsToLigTraj("ConfTrajsToLigTraj")
ligMMPBSA = ParallelConcatenateTrajMMPBSACube('ConcatenateTrajMMPBSACube')

ofs = DatasetWriterCube('ofs', title='OFS-Success')
ofs.promote_parameter("data_out", promoted_name="out",
                      title="System Output OERecord",
                      description="OERecord file name")
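# A minimal sketch of how these cubes could be added to the floe and wired
# together, assuming the standard intake/success port names of Orion cubes;
# the connections in the original floe may differ.
job.add_cube(ifs)
job.add_cube(confGather)
job.add_cube(ligTrajCube)
job.add_cube(ligMMPBSA)
job.add_cube(ofs)

ifs.success.connect(confGather.intake)
confGather.success.connect(ligTrajCube.intake)
ligTrajCube.success.connect(ligMMPBSA.intake)
ligMMPBSA.success.connect(ofs.intake)

if __name__ == "__main__":
    job.run()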
from floe.api import WorkFloe
from orionplatform.cubes import DatasetReaderCube, DatasetWriterCube
from os import path

job = WorkFloe("Extract Short Trajectory MD Results for Download",
               title="Extract Short Trajectory MD Results for Download")

job.description = open(path.join(path.dirname(__file__), 'MDData_desc.rst'), 'r').read()

job.classification = [['MD Data']]
job.uuid = "6665ca20-6014-4f3b-8d02-4b5d15b75ee3"
job.tags = [tag for lists in job.classification for tag in lists]

ifs = DatasetReaderCube("SystemReader", title="System Reader")
ifs.promote_parameter("data_in", promoted_name="system",
                      title='STMDA Input File',
                      description="The Dataset produced by the Short Trajectory MD with Analysis floe")

data = ExtractMDDataCube("MDData", title="Extract MD Data")
data.promote_parameter('out_file_name', promoted_name='out_file_name',
                       description="Output File name",
                       default="md_data.tar.gz")

fail = DatasetWriterCube('fail', title='Failures')
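# A minimal sketch of the remaining plumbing for this floe, assuming the
# standard intake/success/failure port names; the promoted name 'fail' below
# is illustrative, not taken from the original file.
fail.promote_parameter("data_out", promoted_name="fail",
                       title="Failures", description="Dataset of failed records")

job.add_cube(ifs)
job.add_cube(data)
job.add_cube(fail)

ifs.success.connect(data.intake)
data.failure.connect(fail.intake)

if __name__ == "__main__":
    job.run()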
    ParallelMDTrajAnalysisClusterReport,
    ParallelClusterPopAnalysis,
    ParallelTrajAnalysisReportDataset,
    MDFloeReportCube)

from MDOrion.System.cubes import CollectionSetting

job = WorkFloe('Analyze Protein-Ligand MD', title='Analyze Protein-Ligand MD')

job.description = open(path.join(path.dirname(__file__), 'AnalyzePLMD_desc.rst'), 'r').read()

job.classification = [['Specialized MD']]
job.uuid = "7438db4d-30b1-478c-afc0-e921f0336c78"
job.tags = [tag for lists in job.classification for tag in lists]

# MD input
iMDInput = DatasetReaderCube("MDInputReader", title="MD Input Reader")
iMDInput.promote_parameter("data_in", promoted_name="in",
                           title="MD Input Dataset",
                           description="MD Input Dataset")

# This cube is necessary for collection and shard handling to work correctly
coll_open = CollectionSetting("OpenCollection", title="Open Collection")
coll_open.set_parameters(open=True)

trajCube = ParallelTrajToOEMolCube("TrajToOEMolCube", title="Trajectory To OEMols")
IntECube = ParallelTrajInteractionEnergyCube("TrajInteractionEnergyCube", title="MM Energies")
PBSACube = ParallelTrajPBSACube("TrajPBSACube", title="PBSA Energies")
Optional:
--------
picosec (float): Number of picoseconds to warm up the complex
temperature (decimal): target final temperature in K
pressure (decimal): target final pressure in atm

Outputs:
--------
ofs: Outputs the constant temperature and pressure system
"""

job.classification = [['NPT']]
job.uuid = "4ea1a8ab-5e8b-41c9-9317-3e36b72491fe"
job.tags = [tag for lists in job.classification for tag in lists]

ifs = DatasetReaderCube("SystemReader", title="System Reader")
ifs.promote_parameter("data_in", promoted_name="system",
                      title='System Input File',
                      description="System input file")

npt = ParallelMDNptCube('npt', title='NPT Simulation')
npt.promote_parameter('time', promoted_name='nanoseconds', default=0.01,
                      description='Length of MD run in nanoseconds')
npt.promote_parameter('temperature', promoted_name='temperature', default=300.0,
                      description='Selected temperature in K')
npt.promote_parameter('pressure', promoted_name='pressure', default=1.0,
                      description='Selected pressure in atm')
npt.promote_parameter('md_engine', promoted_name='md_engine', default='OpenMM',
                      description='Select the MD Engine')

# Restraints
npt.set_parameters(restraints="ca_protein or (noh ligand)")
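# A minimal sketch of the output side of this floe, assuming the standard
# intake/success port names; the 'ofs' writer below is reconstructed from the
# docstring above rather than copied from the original file.
ofs = DatasetWriterCube('ofs', title='NPT Output')
ofs.promote_parameter("data_out", promoted_name="out",
                      title="System Output File",
                      description="NPT output dataset")

job.add_cube(ifs)
job.add_cube(npt)
job.add_cube(ofs)

ifs.success.connect(npt.intake)
npt.success.connect(ofs.intake)

if __name__ == "__main__":
    job.run()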
# liable for any damages or liability in connection with the Sample Code
# or its use.

from floe.api import WorkFloe
from orionplatform.cubes import DatasetReaderCube, DatasetWriterCube
from cubes.perses import PersesCube

# Declare and document floe
job = WorkFloe("Perses Floe", title="Perses Floe")
job.description = ("Run a star-map relative free energy calculation")
job.classification = [['Molecular Dynamics']]
job.uuid = "155b90cf-90fd-4068-8558-3eac7c01c615"
job.tags = [tag for lists in job.classification for tag in lists]

# Declare Cubes
protein_input_cube = DatasetReaderCube("protein_input_cube")
reference_ligand_input_cube = DatasetReaderCube("reference_ligand_input_cube")
target_ligands_input_cube = DatasetReaderCube("target_ligands_input_cube")
perses_cube = PersesCube("perses_cube")
success_output_cube = DatasetWriterCube("success_output_cube", title='success')
failure_output_cube = DatasetWriterCube("failure_output_cube", title='failure')

# Add cubes to floe
job.add_cube(protein_input_cube)
job.add_cube(reference_ligand_input_cube)
job.add_cube(target_ligands_input_cube)
job.add_cube(perses_cube)
job.add_cube(success_output_cube)
job.add_cube(failure_output_cube)

# Promote parameters
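# A minimal sketch of the parameter promotions that the comment above
# introduces, assuming the usual data_in/data_out parameters of the dataset
# reader and writer cubes; the promoted names are illustrative only, and the
# PersesCube port connections are omitted because its port names are not shown
# in this excerpt.
protein_input_cube.promote_parameter("data_in", promoted_name="protein",
                                     title="Protein input dataset")
reference_ligand_input_cube.promote_parameter("data_in", promoted_name="reference_ligand",
                                              title="Reference ligand input dataset")
target_ligands_input_cube.promote_parameter("data_in", promoted_name="target_ligands",
                                             title="Target ligands input dataset")
success_output_cube.promote_parameter("data_out", promoted_name="out",
                                       title="Successful records")
failure_output_cube.promote_parameter("data_out", promoted_name="fail",
                                       title="Failed records")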
# PARTICULAR PURPOSE AND NONINFRINGEMENT. In no event shall OpenEye be
# liable for any damages or liability in connection with the Sample Code
# or its use.

from floe.api import WorkFloe
from orionplatform.cubes import DatasetReaderCube, DatasetWriterCube
from am1bcc_charge.am1bcc_charge import AM1BCCCharge

# Declare and document floe
job = WorkFloe("am1bcc_charge", title="am1bcc charge")
job.description = ("AM1BCC Charge")
job.classification = [["Charge"]]
job.tags = ["Charge"]

# Declare Cubes
input_cube = DatasetReaderCube("input_cube")
# The imported AM1BCCCharge cube is assumed here; the original excerpt used the
# undefined name 'MyCube'.
charge_cube = AM1BCCCharge("charge_cube")
output_cube = DatasetWriterCube("output_cube")

# Add cubes to floe
job.add_cube(input_cube)
job.add_cube(charge_cube)
job.add_cube(output_cube)

# Promote parameters
input_cube.promote_parameter("data_in", promoted_name="in",
                             title="Input data set of records")
# The original excerpt is truncated here; completing the call with the promoted
# name 'max_confs' is an assumption (the excerpt promoted it under 'in', which
# collides with the dataset promotion above).
charge_cube.promote_parameter('max_confs', promoted_name='max_confs')