ParallelTrajPBSACube, ConformerGatheringData, ParallelConfTrajsToLigTraj, ParallelConcatenateTrajMMPBSACube) from MDOrion.TrjAnalysis.cubes_clusterAnalysis import ( ParallelClusterOETrajCube, ParallelMakeClusterTrajOEMols, ParallelMDTrajAnalysisClusterReport, ParallelClusterPopAnalysis, ParallelTrajAnalysisReportDataset, MDFloeReportCube) from MDOrion.System.cubes import CollectionSetting job = WorkFloe('Analyze Protein-Ligand MD', title='Analyze Protein-Ligand MD') job.description = open( path.join(path.dirname(__file__), 'AnalyzePLMD_desc.rst'), 'r').read() job.classification = [['Specialized MD']] job.uuid = "7438db4d-30b1-478c-afc0-e921f0336c78" job.tags = [tag for lists in job.classification for tag in lists] # Ligand setting iMDInput = DatasetReaderCube("MDInputReader", title="MD Input Reader") iMDInput.promote_parameter("data_in", promoted_name="in", title="MD Input Dataset", description="MD Input Dataset") # This Cube is necessary for the correct work of collection and shard coll_open = CollectionSetting("OpenCollection", title="Open Collection") coll_open.set_parameters(open=True) trajCube = ParallelTrajToOEMolCube("TrajToOEMolCube",
# Report which host runs this floe and turn on verbose logging
print('host:', socket.gethostname())
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s' + socket.gethostname() + '%(levelname)-8s %(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S')

from floe.api import WorkFloe
from cuberecord.cubes import DatasetReaderCube, DatasetWriterCube
from torsion.cubes import ParallelGenerateFragments

# Declare Floe, add metadata for UI
job = WorkFloe('Generate Torsional Fragments')
job.description = """ Generate Torsional Fragments """
job.classification = [["Torsion"]]

# Declare cubes
ifs = DatasetReaderCube('ifs')
fraggenCube = ParallelGenerateFragments('fraggenCube')
fraggen_failure = DatasetWriterCube('fraggen_failure')
ofs = DatasetWriterCube('ofs')

cubes = [ifs, fraggenCube, ofs, fraggen_failure]

# Promote parameters
ifs.promote_parameter('data_in', promoted_name='data_in')
ofs.promote_parameter('data_out', promoted_name='ofs',
                      description='Floe output', default='output',
                      title='Successes')
Parameters: ----------- complex (file): OEB file of the prepared system Optional: -------- picosec (float): Number of picoseconds to warm up the complex temperature (decimal): target final temperature in K pressure (decimal): target final pressure in atm Outputs: -------- ofs: Outputs the constant temperature and pressure system """ job.classification = [['NPT']] job.uuid = "4ea1a8ab-5e8b-41c9-9317-3e36b72491fe" job.tags = [tag for lists in job.classification for tag in lists] ifs = DatasetReaderCube("SystemReader", title="System Reader") ifs.promote_parameter("data_in", promoted_name="system", title='System Input File', description="System input file") npt = ParallelMDNptCube('npt', title='NPT Simulation') npt.promote_parameter('time', promoted_name='nanoseconds', default=0.01, description='Length of MD run in nanoseconds') npt.promote_parameter('temperature', promoted_name='temperature', default=300.0, description='Selected temperature in K') npt.promote_parameter('pressure', promoted_name='pressure', default=1.0, description='Selected pressure in atm') npt.promote_parameter('md_engine', promoted_name='md_engine', default='OpenMM',
Ex: python floes/openmm_prepMDminimize.py --complex complex.oeb --ofs-data_out min.oeb --steps 1000` Parameters: ----------- complex (file): OEB file of the prepared protein:ligand complex Optional: -------- steps (int): Number of MD steps to minimize the system. If 0 until convergence will be reached Outputs: -------- ofs: Outputs the minimized system """ job.classification = [['Simulation']] job.tags = [tag for lists in job.classification for tag in lists] ifs = OEMolIStreamCube("complex", title="Complex Reader") ifs.promote_parameter("data_in", promoted_name="complex", title='Complex Input File', description="protein:ligand complex input file") minComplex = OpenMMminimizeCube('minComplex') minComplex.promote_parameter('steps', promoted_name='steps') ofs = OEMolOStreamCube('ofs', title='OFS-Success') ofs.set_parameters(backend='s3') fail = OEMolOStreamCube('fail', title='OFS-Failure') fail.set_parameters(backend='s3')
# current license or subscription to the applicable OpenEye offering. # THE SAMPLE CODE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED. OPENEYE DISCLAIMS ALL WARRANTIES, INCLUDING, BUT # NOT LIMITED TO, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A # PARTICULAR PURPOSE AND NONINFRINGEMENT. In no event shall OpenEye be # liable for any damages or liability in connection with the Sample Code # or its use. from floe.api import WorkFloe from orionplatform.cubes import DatasetReaderCube, DatasetWriterCube from am1bcc_charge.am1bcc_charge import AM1BCCCharge # Declare and document floe job = WorkFloe("am1bcc_charge", title="am1bcc charge") job.description = ("AM1BCC Charge") job.classification = [["Charge"]] job.tags = ["Charge"] # Declare Cubes input_cube = DatasetReaderCube("input_cube") charge_cube = MyCube("charge_cube") output_cube = DatasetWriterCube("output_cube") # Add cubes to floe job.add_cube(input_cube) job.add_cube(charge_cube) job.add_cube(output_cube) # Promote parameters input_cube.promote_parameter("data_in", promoted_name="in",
**Use at your own risk!** ## About this software See [http://getyank.org](http://getyank.org) for more information on YANK and the alchemical free energy calculations it supports. YANK is free (libre) open source software licensed under the [MIT License](https://github.com/choderalab/yank/blob/master/LICENSE). All source code is available at: [http://github.com/choderalab/yank](http://github.com/choderalab/yank) YANK is produced by the Chodera lab: [http://choderalab.org](http://choderalab.org). Please help us make it better by contributing code or funds. """ job.classification =[["YANK", "Binding free energies", "OpenMM", "choderalab"]] job.tags = [tag for lists in job.classification for tag in lists] ifs = OEMolIStreamCube("ifs") ifs.promote_parameter("data_in", promoted_name="molecules", description="Input molecules") yank_cube = YankBindingCube('yank_binding', title = 'Yank for binding free energies') for parameter_name in ['receptor', 'solvent', 'temperature', 'pressure', 'nsteps_per_iteration', 'simulation_time', 'timestep', 'minimize', 'verbose']: promoted_name = parameter_name description = yank_cube.parameters()[parameter_name].description yank_cube.promote_parameter(parameter_name, promoted_name=promoted_name, description=description) success_ofs = OEMolOStreamCube("success_ofs") success_ofs.promote_parameter("data_out", promoted_name="success", description="Output molecules") failure_ofs = OEMolOStreamCube("failure_ofs")
from cubes.input_cubes import IndexInputCube, OEMolTriggeredIStreamCube
from cubes.compute_cubes import (AccuMolList, ParallelFastFPRanking,
                                 IndexGenerator, PrepareRanking,
                                 ParallelFastFPInsertKA, AccumulateRankings,
                                 AnalyseRankings)
from cubes.output_cubes import TextRankingOutputCube, PlotResults, ResultsOutputCube
from floe.api import WorkFloe, CubeGroup
from floe.api import OEMolOStreamCube
from floe.api import OEMolIStreamCube

# Declare Floe, add metadata for UI
job = WorkFloe('VS Floe')
job.description = """ Read an index text file and write the indices in an other file """
job.classification = [["Virtual Screening", "Create Ranking"]]

# Declare Cubes
act_reader = OEMolIStreamCube('act_reader')
act_reader.promote_parameter('data_in', promoted_name='act_db')
#index_reader = IndexInputCube('index_reader')
#index_reader.promote_parameter('data_in', promoted_name='index_log')
index_generator = IndexGenerator('index generator')
accu_act = AccuMolList('accumulate actives')
#calc_fp = CalculateFPCube('calculate fingerprints')
prep_sim_calc = PrepareRanking('prepare similarity calculation')
prep_sim_calc.promote_parameter('method', promoted_name='method')
calc_sim = ParallelFastFPRanking('calculate similarity value')
calc_sim.promote_parameter('url', promoted_name='url')
Parameters: ----------- complex (file): OEB file of the prepared protein:ligand complex Optional: -------- picosec (float): Number of picoseconds to warm up the complex temperature (decimal): target final temperature after warming Outputs: -------- ofs: Outputs the constant temperature and volume system """ job.classification = [['NVT']] job.tags = [tag for lists in job.classification for tag in lists] ifs = OEMolIStreamCube("complex", title="Complex Reader") ifs.promote_parameter("data_in", promoted_name="complex", title='Complex Input File', description="protein:ligand complex input file") nvt = OpenMMnvtCube('nvt') nvt.promote_parameter('time', promoted_name='picosec', default=10.0) nvt.promote_parameter('temperature', promoted_name='temperature', default=300.0, description='Selected temperature in K') # Restraints nvt.promote_parameter('restraints', promoted_name='restraints', default='noh (ligand or protein)') nvt.promote_parameter('restraintWt', promoted_name='restraintWt', default=2.0) # Trajectory and logging info frequency intervals nvt.promote_parameter('trajectory_interval', promoted_name='trajectory_interval', default=100,
from os import path
from floe.api import WorkFloe
from MDOrion.MDEngines.Gromacs.cubes import (InputGromacs,
                                             GromacsProxyCube,
                                             GromacsRunCube,
                                             WriterRecordCube)
from orionplatform.cubes import DatasetWriterCube

# Floe declaration and UI metadata
job = WorkFloe('PlainGromacs', title='Plain Gromacs')

job.description = open(path.join(path.dirname(__file__),
                                 'PlainGromacs_desc.rst'), 'r').read()

job.classification = [['General MD']]
job.uuid = "f092b164-7400-403d-8861-b25ff741cab5"
job.tags = [tag for lists in job.classification for tag in lists]

# Gromacs input file reader
ifs = InputGromacs("Input File", title="Input file")
ifs.promote_parameter('tpr', promoted_name='tpr', default=None)
ifs.promote_parameter("prefix_name", promoted_name="Flask prefix", default="Flask")
ifs.promote_parameter("data_in", promoted_name='in')

proxy = GromacsProxyCube("GromacsProxy", title="Gromacs Proxy Cube")

gmx = GromacsRunCube("GromacsRun", title="Gromacs Run")
gmx.promote_parameter("verbose", promoted_name="verbose", default=False)

# Output and failure writers
ofs = WriterRecordCube("OutputRecords", title="Output Records")
fail = DatasetWriterCube('fail', title='Failures')
#!/usr/bin/env python
from cubes.input_cubes import Test
from cubes.compute_cubes import ParallelFastROCSRanking, AccumulateRankings
from cubes.output_cubes import TextRankingOutputCube
from floe.api import WorkFloe, CubeGroup
from floe.api import OEMolIStreamCube
from floe.api import OEMolOStreamCube, FileOutputCube

# Declare Floe, add metadata for UI
job = WorkFloe('FastROCS Test Floe')
job.classification = [['Test']]
# BUG FIX: tags must be a flat list of strings. The nested-list form is
# only used for `classification`; the original assigned [['...']] here,
# which every other floe in this project avoids (they flatten their
# classification into tags).
job.tags = ['yippee ki yay mf']
job.title = 'test FastROCS Server'
job.description = """ Read a molecule query and return the FastROCS Server Results """

# Declare cubes and promote their user-facing parameters
input_cube = Test('input')
request_cube = ParallelFastROCSRanking('request_cube')
request_cube.promote_parameter('url', promoted_name='url')
accu_cube = AccumulateRankings('accu')
accu_cube.promote_parameter('url', promoted_name='url')
output_cube = TextRankingOutputCube('results_output')
output_cube.promote_parameter('name', promoted_name='name')

# Wire the floe
job.add_cubes(input_cube, request_cube, accu_cube, output_cube)
input_cube.success.connect(request_cube.data_input)
from __future__ import unicode_literals
""" Copyright (C) 2016 OpenEye Scientific Software """
from floe.api import WorkFloe, FileOutputCube
from PlatformTestCubes.cubes import BenchmarkCube

# Floe declaration and UI metadata
job = WorkFloe("OpenMM Benchmarking")
job.description = """ Performs Benchmarking upon all available Platforms Based on OpenMM SimTK Benchmarking script """
job.classification = [["OpenMM", "Platforms", "Benchmarking"]]
job.tags = [tag for lists in job.classification for tag in lists]

# Benchmark cube feeding a fixed-name text report
benchmark_cube = BenchmarkCube("benchmark_cube")
ofs = FileOutputCube("ofs")
ofs.set_parameters(name="Orion_OpenMM_Benchmarks.txt")

job.add_cubes(benchmark_cube, ofs)
benchmark_cube.success.connect(ofs.intake)

if __name__ == "__main__":
    job.run()
Ex: python floes/openmm_MDprep.py --ligands ligands.oeb --protein protein.oeb --ofs-data_out prep.oeb Parameters: ----------- ligands (file): oeb file of ligand posed in the protein active site. protein (file): oeb file of the protein structure, assumed to be pre-prepared Optionals: ----------- Outputs: -------- ofs: Outputs a ready system to MD production run """ job.classification = [['BindingFreeEnergy', 'Yank']] job.tags = [tag for lists in job.classification for tag in lists] # Ligand setting iligs = LigandReader("LigandReader", title="Ligand Reader") iligs.promote_parameter("data_in", promoted_name="ligands", title="Ligand Input File", description="Ligand file name") chargelig = LigChargeCube("LigCharge") chargelig.promote_parameter( 'max_conformers', promoted_name='max_conformers', description="Set the max number of conformers per ligand", default=800)
# NOT LIMITED TO, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A # PARTICULAR PURPOSE AND NONINFRINGEMENT. In no event shall OpenEye be # liable for any damages or liability in connection with the Sample Code # or its use. from floe.api import WorkFloe from cuberecord import (DatasetReaderCube, DatasetWriterCube) from {{cookiecutter.module_name}} import MyCube # Declare and document floe my_floe = WorkFloe('my_floe', title="My Floe") my_floe.description = "Outputs the input records unchanged unless the parameter is set to false, in which case nothing " \ "is outputted" my_floe.classification = [["Examples"]] my_floe.tags = ["Examples", "I didn't edit the tags"] # Declare Cubes input_cube = DatasetReaderCube('input_cube') switch_cube = MyCube('switch_cube') output_cube = DatasetWriterCube('output_cube') # Add cubes to floe my_floe.add_cube(input_cube) my_floe.add_cube(switch_cube) my_floe.add_cube(output_cube) # Promote parameters input_cube.promote_parameter('data_in', promoted_name='in', title='Input data set of records') switch_cube.promote_parameter('switch', promoted_name='switch', title="Switch controlling Output")
from floe.api import WorkFloe from MDOrion.TrjAnalysis.cubes_clusterAnalysis import ExtractMDDataCube from orionplatform.cubes import DatasetReaderCube, DatasetWriterCube from os import path job = WorkFloe("Extract Short Trajectory MD Results for Download", title="Extract Short Trajectory MD Results for Download") job.description = open(path.join(path.dirname(__file__), 'MDData_desc.rst'), 'r').read() job.classification = [['MD Data']] job.uuid = "6665ca20-6014-4f3b-8d02-4b5d15b75ee3" job.tags = [tag for lists in job.classification for tag in lists] ifs = DatasetReaderCube("SystemReader", title="System Reader") ifs.promote_parameter( "data_in", promoted_name="system", title='STMDA Input File', description= "The Dataset produced by the Short Trajectory MD with Analysis floe") data = ExtractMDDataCube("MDData", title="Extract MD Data") data.promote_parameter('out_file_name', promoted_name='out_file_name',
'%(levelname)-8s %(message)s', datefmt='%Y-%m-%d %H:%M:%S') from floe.api import WorkFloe from cuberecord import DatasetReaderCube, DatasetWriterCube from torsion.cubes import GenerateFragments, \ GenerateStartingConfs, \ GenerateTorsionalConfs, \ ParallelPsi4EnergyCalculation, \ ProfileAssembler # Declare Floe, add metadata for UI job = WorkFloe('Torsional Strain Energy') job.description = """ Calculate torsional strain energy. """ job.classification = [["Torsional Strain"]] # Declare cubes ifs = DatasetReaderCube('ifs') fraggenCube = GenerateFragments('fragment_generation') fraggen_failure = DatasetWriterCube('fraggen_failure') confgenCube = GenerateStartingConfs('starting_conf_gen') confgen_failure = DatasetWriterCube('confgen_failure') torsgenCube = GenerateTorsionalConfs('torsional_conf_gen') assembler = ProfileAssembler('assembler') # geometry optimization using smaller basis set (e.g. minix) psi4EnergyCube1 = ParallelPsi4EnergyCalculation( 'parallel_psi4_energy_calculation1') psi4EnergyCube1.title = 'Psi4_Cube1' psi4EnergyCube1.set_parameters(opt_method='hf3c')
An MMPBSA analysis is carried out on trajectory OEMols for protein, ligand and possibly waters. Required Input Parameters: -------------------------- .oedb of records containing protein, ligand and possibly water trajectory OEMols from an MD Short Trajectory run. Outputs: -------- out (.oedb file): file of the Analysis results for all ligands. """ job.uuid = "2717cf39-5bdd-4a1e-880e-5208bb232959" job.classification = [['Analysis']] job.tags = [tag for lists in job.classification for tag in lists] ifs = DatasetReaderCube("ifs") ifs.promote_parameter("data_in", promoted_name="in", title="System Input OERecord", description="OERecord file name") ofs = DatasetWriterCube('ofs', title='MD Out') ofs.promote_parameter("data_out", promoted_name="out") fail = DatasetWriterCube('fail', title='Failures') fail.promote_parameter("data_out", promoted_name="fail") IntECube = ParallelTrajInteractionEnergyCube("TrajInteractionEnergyCube")
Ex: python floes/openmm_MDprep.py --ligands ligands.oeb --protein protein.oeb --ofs-data_out prep.oeb Parameters: ----------- ligands (file): oeb file of ligand posed in the protein active site. protein (file): oeb file of the protein structure, assumed to be pre-prepared Optionals: ----------- Outputs: -------- ofs: Outputs a ready system to MD production run """ job.classification = [['Complex Setup', 'FrosstMD']] job.tags = [tag for lists in job.classification for tag in lists] # Ligand setting iligs = LigandReader("LigandReader", title="Ligand Reader") iligs.promote_parameter("data_in", promoted_name="ligands", title="Ligand Input File", description="Ligand file name") chargelig = LigChargeCube("LigCharge") chargelig.promote_parameter( 'max_conformers', promoted_name='max_conformers', description="Set the max number of conformers per ligand", default=800)
the complex is formed with each ligand/conformer separately. Required Input Parameters: -------------------------- ligands (file): dataset of prepared ligands posed in the protein active site. protein (file): dataset of the prepared protein structure. Outputs: -------- out (.oedb file): file of the protein-ligand complexes with parameters. """ # Locally the floe can be invoked by running the terminal command: # python floes/LigReadPrep.py --ligands ligands.oeb --protein protein.oeb --out prod.oeb job.classification = [['Molecular Dynamics']] # job.uuid = "372e1890-d053-4027-970a-85b209e4676f" job.tags = [tag for lists in job.classification for tag in lists] # Ligand setting iligs = DatasetReaderCube("LigandReader", title="Ligand Reader") iligs.promote_parameter("data_in", promoted_name="ligands", title="Ligand Input Dataset", description="Ligand Dataset") ligset = LigandSetting("LigandSetting", title="Ligand Setting") ligset.set_parameters(lig_res_name='LIG') chargelig = ParallelLigandChargeCube("LigCharge", title="Ligand Charge") chargelig.promote_parameter('charge_ligands',
from __future__ import unicode_literals
""" Copyright (C) 2016 OpenEye Scientific Software """
from floe.api import WorkFloe, OEMolIStreamCube, OEMolOStreamCube
from PlatformTestCubes.cubes import PlatformTestCube

# Floe declaration and UI metadata
job = WorkFloe("OpenMMPlatforms")
job.description = """ **Check available OpenMM Platforms** Based on OpenMM SimTK installation check script """
job.classification = [["OpenMM", "Platforms"]]
job.tags = [tag for lists in job.classification for tag in lists]

ifs = OEMolIStreamCube("ifs")
ofs = OEMolOStreamCube("ofs")

# Promotes the parameter to something we can specify from the command line as "--ifs=..."
ifs.promote_parameter("data_in", promoted_name="ifs")
# this is hardwiring the filename to the molecules coming out of ofs
ofs.set_parameters(data_out="openmmPlatformCheck.oeb")

# the name of the object has to match the string: this is the name of myself
platformTester = PlatformTestCube("platformTester")