Exemplo n.º 1
0
# Tail of a Short-Trajectory MD floe: close the shard collection after
# production MD, size-check records, and declare the output datasets.
# NOTE(review): `job`, ifs, sysid, md_comp, solvate, coll_open, ff,
# minComplex, warmup, equil1-3 and prod are defined above this fragment.

# This Cube is necessary for the correct working of collection and shard
coll_close = CollectionSetting("CloseCollection", title="Close Collection")
coll_close.set_parameters(open=False)

# Guards against records that exceed the backend size limit.
rec_check = ParallelRecordSizeCheck("RecordCheck")

# Success writer, exposed as the promoted "out" floe parameter.
ofs = DatasetWriterCube('ofs', title='MD Out')
ofs.promote_parameter("data_out", promoted_name="out",
                      title="MD Out", description="MD Dataset out")

# Failure writer, exposed as the promoted "fail" floe parameter.
fail = DatasetWriterCube('fail', title='Failures')
fail.promote_parameter("data_out", promoted_name="fail", title="Failures",
                       description="MD Dataset Failures out")

job.add_cubes(ifs, sysid, md_comp, solvate, coll_open, ff, minComplex,
              warmup, equil1, equil2, equil3, prod,
              coll_close, rec_check, ofs, fail)

# Linear success pipeline: read -> id -> setup -> solvate -> open collection
# -> force field -> minimize -> warmup -> 3x equilibration -> production.
ifs.success.connect(sysid.intake)
sysid.success.connect(md_comp.intake)
md_comp.success.connect(solvate.intake)
solvate.success.connect(coll_open.intake)
coll_open.success.connect(ff.intake)
ff.success.connect(minComplex.intake)
minComplex.success.connect(warmup.intake)
warmup.success.connect(equil1.intake)
equil1.success.connect(equil2.intake)
equil2.success.connect(equil3.intake)
equil3.success.connect(prod.intake)
prod.success.connect(coll_close.intake)
coll_close.success.connect(rec_check.intake)
# NOTE(review): rec_check -> ofs and the fail connections are presumably
# made below this fragment; not visible here.
Exemplo n.º 2
0
#
Outputs:
--------
ofs (.oedb file): file of the MD results with Interaction Energy results.
"""

# Trajectory interaction-energy floe: read MD records, compute
# protein-ligand interaction energies over the trajectory, write results.
job.uuid = "a73059bd-25e6-47f3-b2d2-56474c439417"

# Dataset reader, promoted as the "in" floe parameter.
ifs = DatasetReaderCube("ifs")

ifs.promote_parameter("data_in",
                      promoted_name="in",
                      title="System Input OERecord",
                      description="OERecord file name")

# Parallel cube computing per-frame interaction energies.
scube = ParallelTrajInteractionEnergyCube("TrajInteractionEnergyCube")

# Dataset writer, promoted as the "out" floe parameter.
ofs = DatasetWriterCube('ofs', title='OFS-Success')
ofs.promote_parameter("data_out",
                      promoted_name="out",
                      title="System Output OERecord",
                      description="OERecord file name")

job.add_cubes(ifs, scube, ofs)

# reader -> interaction energy -> writer
ifs.success.connect(scube.intake)
scube.success.connect(ofs.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 3
0
# Complex-preparation floe tail: set up the protein MD components, merge
# with the charged ligand stream, and write successes/failures.
# NOTE(review): iligs, ligset, ligid, iprot, chargelig, complx and `job`
# are defined above this fragment.

# Protein Setting
protset = MDComponentCube("ProteinSetting", title="Protein Setting")

ofs = DatasetWriterCube('ofs', title='MD Out')
ofs.promote_parameter("data_out",
                      promoted_name="out",
                      title="MD Out",
                      description="MD Dataset out")

fail = DatasetWriterCube('fail', title='Failures')
fail.promote_parameter("data_out",
                       promoted_name="fail",
                       title="Failures",
                       description="MD Dataset Failures out")

job.add_cubes(iligs, ligset, ligid, iprot, protset, chargelig, complx, ofs,
              fail)

# Ligand branch: read -> set -> charge -> id -> complex intake.
iligs.success.connect(ligset.intake)
ligset.success.connect(chargelig.intake)
chargelig.success.connect(ligid.intake)
ligid.success.connect(complx.intake)
# Protein branch feeds the complex cube's dedicated protein port.
iprot.success.connect(protset.intake)
protset.success.connect(complx.protein_port)
complx.success.connect(ofs.intake)
complx.failure.connect(fail.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 4
0
out (.oedb file): file of the Analysis results for all ligands.
"""

# Analysis floe: cluster trajectory data, build per-molecule HTML reports,
# and aggregate them into a floe report.
job.uuid = "43f33e3f-0240-4e34-9b8b-da4d5796052a"

# Dataset reader, promoted as the "in" floe parameter.
ifs = DatasetReaderCube("ifs")
ifs.promote_parameter("data_in",
                      promoted_name="in",
                      title="System Input OERecord",
                      description="OERecord file name")

# Dataset writer, promoted as the "out" floe parameter.
ofs = DatasetWriterCube('ofs', title='OFS-Success')
ofs.promote_parameter("data_out",
                      promoted_name="out",
                      title="System Output OERecord",
                      description="OERecord file name")

clusCube = ParallelClusterOETrajCube("ClusterOETrajCube")
molHtml = ParallelMDTrajAnalysisClusterReport("MolHtmlCube")
floeReport = MDFloeReportCube("FloeReportCube")

job.add_cubes(ifs, clusCube, molHtml, floeReport, ofs)

# read -> cluster -> per-mol HTML -> floe report -> write
ifs.success.connect(clusCube.intake)
clusCube.success.connect(molHtml.intake)
molHtml.success.connect(floeReport.intake)
floeReport.success.connect(ofs.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 5
0
# MD Data extraction floe: read a Short-Trajectory-MD-with-Analysis dataset
# and extract its MD data into a tar.gz archive; failures go to a dataset.
job.classification = [['MD Data']]
job.uuid = "6665ca20-6014-4f3b-8d02-4b5d15b75ee3"
# Flatten the nested classification lists into a flat tag list.
job.tags = [tag for lists in job.classification for tag in lists]

ifs = DatasetReaderCube("SystemReader", title="System Reader")
ifs.promote_parameter(
    "data_in",
    promoted_name="system",
    title='STMDA Input File',
    description=
    "The Dataset produced by the Short Trajectory MD with Analysis floe")

data = ExtractMDDataCube("MDData", title="Extract MD Data")

data.promote_parameter('out_file_name',
                       promoted_name='out_file_name',
                       description="Output File name",
                       default="md_data.tar.gz")

fail = DatasetWriterCube('fail', title='Failures')
fail.promote_parameter("data_out",
                       promoted_name="fail",
                       description="Fail Data Set")

job.add_cubes(ifs, data, fail)
# NOTE(review): only the failure port of `data` is wired to a writer; the
# extracted archive is presumably emitted via out_file_name, not a dataset.
ifs.success.connect(data.intake)
data.failure.connect(fail.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 6
0
# Analysis/report floe: cluster trajectory records, generate per-ligand
# cluster reports, and assemble the final floe report; any stage's
# failures are routed to the "fail" dataset.
# NOTE(review): `job` (WorkFloe) and the cube classes are defined above
# this fragment.

# Dataset reader, promoted as the "in" floe parameter.
ifs = DatasetReaderCube("ifs")
ifs.promote_parameter("data_in",
                      promoted_name="in",
                      title="System Input OERecord",
                      description="OERecord file name")

# Success and failure writers, promoted as "out" / "fail".
ofs = DatasetWriterCube('ofs', title='MD Out')
ofs.promote_parameter("data_out", promoted_name="out")

fail = DatasetWriterCube('fail', title='Failures')
fail.promote_parameter("data_out", promoted_name="fail")

clusCube = ParallelClusterOETrajCube("ClusterOETrajCube")
report_gen = ParallelMDTrajAnalysisClusterReport("MDTrajAnalysisClusterReport")

report = MDFloeReportCube("report", title="Floe Report")

job.add_cubes(ifs, clusCube, report_gen, report, ofs, fail)

# Success path: read -> cluster -> per-ligand report -> floe report -> out.
# Each processing cube also routes failures to the fail writer.
ifs.success.connect(clusCube.intake)
clusCube.success.connect(report_gen.intake)
clusCube.failure.connect(fail.intake)
report_gen.success.connect(report.intake)
report_gen.failure.connect(fail.intake)
# Fix: the original connected report.failure to fail.intake twice
# (identical statement repeated); a single connection suffices.
report.failure.connect(fail.intake)
report.success.connect(ofs.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 7
0
Outputs:
--------
ofs: Outputs the minimized system
"""

# Minimization floe: read a protein:ligand complex, run an OpenMM
# minimization, and stream successes/failures to OEB files.
job.classification = [['Simulation']]
# Flatten the nested classification lists into a flat tag list.
job.tags = [tag for lists in job.classification for tag in lists]

ifs = OEMolIStreamCube("complex", title="Complex Reader")
ifs.promote_parameter("data_in",
                      promoted_name="complex",
                      title='Complex Input File',
                      description="protein:ligand complex input file")

minComplex = OpenMMminimizeCube('minComplex')
minComplex.promote_parameter('steps', promoted_name='steps')

# Success/failure molecule writers backed by S3 storage.
ofs = OEMolOStreamCube('ofs', title='OFS-Success')
ofs.set_parameters(backend='s3')
fail = OEMolOStreamCube('fail', title='OFS-Failure')
fail.set_parameters(backend='s3')
fail.set_parameters(data_out='fail.oeb.gz')

job.add_cubes(ifs, minComplex, ofs, fail)
# read -> minimize -> success writer; minimization failures -> fail writer
ifs.success.connect(minComplex.intake)
minComplex.success.connect(ofs.intake)
minComplex.failure.connect(fail.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 8
0
                          promoted_name="ligands",
                          title="Ligand Input File",
                          description="Ligand file name")

# Yank absolute binding free-energy floe tail: synchronize the solvated
# ligand and complex streams, then run the binding FE calculation.
# NOTE(review): `job` and `iligand` are defined above this fragment.

icomplex = OEMolIStreamCube("ReadingComplex")
icomplex.promote_parameter("data_in",
                           promoted_name="complex",
                           title="Complex Input File",
                           description="complex file name")

# Pairs each solvated ligand with its solvated complex before Yank.
sync = SyncBindingFECube("SyncCube")
yankabfe = YankBindingFECube("YankBindingFE")

# Success/failure molecule writers backed by S3 storage.
ofs = OEMolOStreamCube('ofs', title='OFS-Success')
ofs.set_parameters(backend='s3')

fail = OEMolOStreamCube('fail', title='OFS-Failure')
fail.set_parameters(backend='s3')
fail.set_parameters(data_out='fail.oeb.gz')

job.add_cubes(iligand, icomplex, sync, yankabfe, ofs, fail)

# Complex stream feeds the main intake; ligand stream feeds the dedicated
# solvated-ligand port; the synced pair goes to the FE cube.
icomplex.success.connect(sync.intake)
iligand.success.connect(sync.solvated_ligand_in_port)
sync.solvated_lig_complex_out_port.connect(yankabfe.intake)

yankabfe.success.connect(ofs.intake)
yankabfe.failure.connect(fail.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 9
0
#
Parameters:
-----------
in (.oedb file): file of the MD results with Traj OEMols
#
Outputs:
--------
ofs (.oedb file): file of the MD results with Traj OEMol Clustering on a conformer.
"""

# Clustering floe: cluster trajectory OEMols, then build per-cluster
# trajectory OEMols on a conformer, and write the results.
# job.uuid = "7cacc2af-cae7-4dc7-8956-fcf539861e3d"

# Dataset reader, promoted as the "in" floe parameter.
ifs = DatasetReaderCube("ifs")

ifs.promote_parameter("data_in", promoted_name="in", title="System Input OERecord", description="OERecord file name")

clusCube = ParallelClusterOETrajCube("ClusterOETrajCube")
clusOEMols = ParallelMakeClusterTrajOEMols('MakeClusterTrajOEMols')

# Dataset writer, promoted as the "out" floe parameter.
ofs = DatasetWriterCube('ofs', title='OFS-Success')
ofs.promote_parameter("data_out", promoted_name="out", title="System Output OERecord", description="OERecord file name")

job.add_cubes(ifs, clusCube, clusOEMols, ofs)

# read -> cluster -> cluster traj OEMols -> write
ifs.success.connect(clusCube.intake)
clusCube.success.connect(clusOEMols.intake)
clusOEMols.success.connect(ofs.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 10
0
# Per-ligand analysis floe: gather conformer records, concatenate conformer
# trajectories, compute MMPBSA, cluster, and build cluster traj OEMols.
# NOTE(review): `job` (WorkFloe) is defined above this fragment.

ifs = DatasetReaderCube("ifs")
ifs.promote_parameter("data_in", promoted_name="in", title="System Input OERecord", description="OERecord file name")

confGather = ConformerGatheringData("Gathering Conformer Records")
catLigTraj = ParallelConfTrajsToLigTraj("ConfTrajsToLigTraj")
catLigMMPBSA = ParallelConcatenateTrajMMPBSACube('ConcatenateTrajMMPBSACube')
clusCube = ParallelClusterOETrajCube("ClusterOETrajCube")
clusOEMols = ParallelMakeClusterTrajOEMols('MakeClusterTrajOEMols')

ofs = DatasetWriterCube('ofs', title='OFS-Success')
ofs.promote_parameter("data_out", promoted_name="out", title="System Output OERecord", description="OERecord file name")

fail = DatasetWriterCube('fail', title='Failures')
fail.promote_parameter("data_out", promoted_name="fail")

job.add_cubes(ifs,
              confGather, catLigTraj, catLigMMPBSA, clusCube, clusOEMols,
              ofs, fail)

# read -> gather conformers -> ligand traj -> MMPBSA -> cluster ->
# cluster traj OEMols -> write; only the last stage's failures are caught.
ifs.success.connect(confGather.intake)
confGather.success.connect(catLigTraj.intake)
catLigTraj.success.connect(catLigMMPBSA.intake)
catLigMMPBSA.success.connect(clusCube.intake)
clusCube.success.connect(clusOEMols.intake)
clusOEMols.success.connect(ofs.intake)
clusOEMols.failure.connect(fail.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 11
0
                      description="MD Dataset out")

# Middle of an equilibrium-MD floe: failure writer, collection close, and
# record-size check, plus the start of the success wiring.
# NOTE(review): this fragment is cut at both ends -- `ofs`, `ligid`,
# coll_open, the MD cubes and `job` are defined above; the remaining
# connections continue below.

fail = DatasetWriterCube('fail', title='Failures')
fail.promote_parameter("data_out",
                       promoted_name="fail",
                       title="Failures",
                       description="MD Dataset Failures out")

# This Cube is necessary for the correct working of collection and shard
coll_close = CollectionSetting("CloseCollection", title="Close Collection")
coll_close.set_parameters(open=False)

# Guards against records that exceed the backend size limit.
check_rec = ParallelRecordSizeCheck("Record Check Success")

job.add_cubes(iligs, ligset, iprot, mdcomp, chargelig, complx, solvate,
              coll_open, ff, minComplex, warmup, equil1, equil2, equil3,
              equil4, prod, coll_close, check_rec, ofs, fail)

# Success Connections
iligs.success.connect(ligset.intake)
ligset.success.connect(chargelig.intake)
chargelig.success.connect(ligid.intake)
ligid.success.connect(complx.intake)
iprot.success.connect(mdcomp.intake)
mdcomp.success.connect(complx.protein_port)
complx.success.connect(solvate.intake)
solvate.success.connect(coll_open.intake)
coll_open.success.connect(ff.intake)
ff.success.connect(minComplex.intake)
minComplex.success.connect(warmup.intake)
warmup.success.connect(equil1.intake)
# FastROCS test floe: send a query molecule to a FastROCS server, rank and
# accumulate the results, and write a text ranking.
# NOTE(review): WorkFloe, Test, ParallelFastROCSRanking, AccumulateRankings
# and TextRankingOutputCube are used below but not imported here --
# presumably imported earlier in the original file; the names imported on
# the next line are unused in this fragment. Confirm against the source.
from floe.api import OEMolOStreamCube, FileOutputCube

# Declare Floe, add metadata for UI
job = WorkFloe('FastROCS Test Floe')
job.classification=[['Test']]
job.tags=[['yippee ki yay mf']]
job.title='test FastROCS Server'
job.description = """
Read a molecule query and return the FastROCS Server Results
"""
input_cube = Test('input')

# Query the FastROCS server; the URL is a promoted floe parameter shared
# with the accumulator.
request_cube = ParallelFastROCSRanking('request_cube')
request_cube.promote_parameter('url', promoted_name='url')
accu_cube = AccumulateRankings('accu')
accu_cube.promote_parameter('url', promoted_name='url')

output_cube = TextRankingOutputCube('results_output')
output_cube.promote_parameter('name', promoted_name='name')

job.add_cubes(input_cube, request_cube, accu_cube, output_cube)

# input -> server request -> accumulate rankings -> text output
input_cube.success.connect(request_cube.data_input)
request_cube.success.connect(accu_cube.intake)
accu_cube.success.connect(output_cube.intake)

# If called from command line, run the floe
if __name__ == "__main__":
    job.run()

Exemplo n.º 13
0
# Gromacs floe: read a tpr/flask input, iterate Gromacs runs through a
# proxy cube, and write output records; failures go to a dataset.
# NOTE(review): `job` (WorkFloe) is defined above this fragment.

ifs = InputGromacs("Input File", title="Input file")
ifs.promote_parameter('tpr', promoted_name='tpr', default=None)
ifs.promote_parameter("prefix_name", promoted_name="Flask prefix", default="Flask")
ifs.promote_parameter("data_in", promoted_name='in')

proxy = GromacsProxyCube("GromacsProxy", title="Gromacs Proxy Cube")
gmx = GromacsRunCube("GromacsRun", title="Gromacs Run")
gmx.promote_parameter("verbose", promoted_name="verbose", default=False)

ofs = WriterRecordCube("OutputRecords", title="Output Records")

fail = DatasetWriterCube('fail', title='Failures')
fail.promote_parameter("data_out", promoted_name="fail", title="Failures",
                       description="MD Dataset Failures out")


job.add_cubes(ifs, proxy, gmx, ofs, fail)

# Note the deliberate proxy <-> gmx cycle: the run cube feeds records back
# to the proxy for the next iteration while also emitting them downstream.
ifs.success.connect(proxy.intake)
proxy.success.connect(gmx.intake)
gmx.success.connect(proxy.intake)
gmx.success.connect(ofs.intake)

# Fail Connections
proxy.failure.connect(fail.intake)
gmx.failure.connect(fail.intake)


if __name__ == "__main__":
    job.run()
Exemplo n.º 14
0
Ex. python floes/up.py --in STMD_results.oedb
--out ligands_with_trajOEMol.oedb
#
Parameters:
-----------
in (.oedb file): .oedb file of the MD results
#
Outputs:
--------
ofs (.oedb file): file of the MD results with Traj OEMols
"""

# Trajectory-to-OEMol floe: convert MD trajectory results into Traj OEMols.
job.uuid = "15543564-7170-49da-935a-03310876df61"

# Dataset reader, promoted as the "in" floe parameter.
ifs = DatasetReaderCube("ifs")

ifs.promote_parameter("data_in", promoted_name="in", title="System Input OERecord", description="OERecord file name")

trajCube = ParallelTrajToOEMolCube("TrajToOEMolCube")

# Dataset writer, promoted as the "out" floe parameter.
ofs = DatasetWriterCube('ofs', title='OFS-Success')
ofs.promote_parameter("data_out", promoted_name="out", title="System Output OERecord", description="OERecord file name")

job.add_cubes(ifs, trajCube, ofs)

# read -> convert trajectory -> write
ifs.success.connect(trajCube.intake)
trajCube.success.connect(ofs.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 15
0
# Solvation free-energy floe tail: Yank solvation FE after ligand prep and
# equilibration; successes/failures stream to S3-backed OEB files.
# NOTE(review): iligs, chargelig, solvate, ff, minimize, warmup, equil and
# `job` are defined above this fragment.

solvationfe = YankSolvationFECube("SovationFE")
solvationfe.promote_parameter('iterations',
                              promoted_name='iterations',
                              default=1000)
# nonbondedCutoff default of 10.0 -- units presumably angstroms; confirm
# against the cube's parameter definition.
solvationfe.promote_parameter('nonbondedCutoff',
                              promoted_name='nonbondedCutoff',
                              default=10.0)

ofs = OEMolOStreamCube('ofs', title='OFS-Success')
ofs.set_parameters(backend='s3')

fail = OEMolOStreamCube('fail', title='OFS-Failure')
fail.set_parameters(backend='s3')
fail.set_parameters(data_out='fail.oeb.gz')

job.add_cubes(iligs, chargelig, solvate, ff, minimize, warmup, equil,
              solvationfe, ofs, fail)

# read ligands -> charge -> solvate -> force field -> minimize -> warmup
# -> equilibrate -> solvation FE -> write
iligs.success.connect(chargelig.intake)
chargelig.success.connect(solvate.intake)
solvate.success.connect(ff.intake)
ff.success.connect(minimize.intake)
minimize.success.connect(warmup.intake)
warmup.success.connect(equil.intake)
equil.success.connect(solvationfe.intake)
solvationfe.success.connect(ofs.intake)
solvationfe.failure.connect(fail.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 16
0
# Similarity-ranking floe tail: write and plot ranking results.
# NOTE(review): act_reader, index_generator, accu_act, prep_sim_calc,
# calc_sim, insert_known_actives, accu_rankings, analyse_rankings,
# write_ranking and `job` are defined above this fragment.

results_output = ResultsOutputCube('results output')
results_output.promote_parameter('name', promoted_name='output_dir')
results_output.promote_parameter('fptype', promoted_name='fptype')
plot_results = PlotResults('plot results')
plot_results.promote_parameter('name', promoted_name='output_dir')
plot_results.promote_parameter('fptype', promoted_name='fptype')

# Create Cube group
#group = CubeGroup(cubes=[prep_sim_calc, calc_sim])

# Add Groups to Workfloe
#job.add_group(group)

# Add Cubes to Floe
job.add_cubes(act_reader, index_generator, accu_act, prep_sim_calc, calc_sim,
              insert_known_actives, accu_rankings, analyse_rankings,
              results_output, plot_results, write_ranking)

# Connect ports
act_reader.success.connect(accu_act.intake)
accu_act.success.connect(prep_sim_calc.act_input)
accu_act.success.connect(index_generator.intake)
#calc_fp.success.connect(prep_sim_calc.fp_input)
index_generator.success.connect(prep_sim_calc.baitset_input)

prep_sim_calc.success.connect(calc_sim.data_input)
calc_sim.success.connect(insert_known_actives.data_input)
insert_known_actives.success.connect(accu_rankings.intake)

# NOTE(review): results_output, plot_results and write_ranking are added
# but not connected in this fragment -- wiring presumably continues below.
accu_rankings.success.connect(analyse_rankings.intake)
Exemplo n.º 17
0
                        description="Protein Prefix")

# Complex-preparation floe: split the protein input, solvate, combine with
# charged ligands into a complex, and apply the force field.
# NOTE(review): iprot, iligs, chargelig and `job` are defined above this
# fragment.

splitter = Splitter("Splitter")
solvate = SolvationCube("Solvation")

# Complex Setting
complx = ComplexPrep("Complex")
ff = ForceFieldPrep("ForceField")

# Success/failure molecule writers backed by S3 storage.
ofs = OEMolOStreamCube('ofs', title='OFS-Success')
ofs.set_parameters(backend='s3')

fail = OEMolOStreamCube('fail', title='OFS-Failure')
fail.set_parameters(backend='s3')
fail.set_parameters(data_out='fail.oeb.gz')

job.add_cubes(iprot, splitter, solvate, iligs, chargelig, complx, ff, ofs,
              fail)

# Protein branch: read -> split -> solvate -> complex system port.
iprot.success.connect(splitter.intake)
splitter.success.connect(solvate.intake)
solvate.success.connect(complx.system_port)
# Ligand branch: read -> charge -> complex intake.
iligs.success.connect(chargelig.intake)
chargelig.success.connect(complx.intake)
complx.success.connect(ff.intake)
ff.success.connect(ofs.intake)
ff.failure.connect(fail.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 18
0
                      promoted_name="system",
                      title='System Input File',
                      description="System input file")

# System minimization floe: restrained minimization of the input system.
# NOTE(review): `ifs` and `job` are defined above this fragment; `min`
# shadows the builtin of the same name (kept as-is for compatibility).

min = ParallelMDMinimizeCube('Minimize', title="System Minimization")
# steps=0 -- presumably "minimize to convergence"; confirm against the
# cube's parameter documentation.
min.promote_parameter('steps', promoted_name='steps', default=0)
min.promote_parameter('md_engine',
                      promoted_name='md_engine',
                      default='OpenMM',
                      description='Select the MD Engine')
min.set_parameters(save_md_stage=True)

# Restraints
min.set_parameters(restraints='noh (ligand or protein)')
min.set_parameters(restraintWt=5.0)
min.set_parameters(suffix='min')

ofs = DatasetWriterCube('ofs', title='Out')
ofs.promote_parameter("data_out", promoted_name="out")

fail = DatasetWriterCube('fail', title='Failures')
fail.promote_parameter("data_out", promoted_name="fail")

job.add_cubes(ifs, min, ofs, fail)
# read -> minimize -> write; minimization failures -> fail writer
ifs.success.connect(min.intake)
min.success.connect(ofs.intake)
min.failure.connect(fail.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 19
0
# Complex solvation floe tail: protein MD component setup, complex prep,
# solvation, and force-field parametrization.
# NOTE(review): iligs, chargelig, ligset, ligid, iprot, complx, solvate,
# ff and `job` are defined above this fragment.

mdcomp = MDComponentCube("MDComponentSetting", title="MDComponentSetting")
# mdcomp.promote_parameter("flask_title", promoted_name="flask_title", default='MCL1')

ofs = DatasetWriterCube('ofs', title='MD Out')
ofs.promote_parameter("data_out",
                      promoted_name="out",
                      title="MD Out",
                      description="MD Dataset out")

fail = DatasetWriterCube('fail', title='Failures')
fail.promote_parameter("data_out",
                       promoted_name="fail",
                       title="Failures",
                       description="MD Dataset Failures out")

job.add_cubes(iligs, chargelig, ligset, ligid, iprot, mdcomp, complx, solvate,
              ff, ofs, fail)

# Ligand branch: read -> set -> charge -> id -> complex intake.
iligs.success.connect(ligset.intake)
ligset.success.connect(chargelig.intake)
chargelig.success.connect(ligid.intake)
ligid.success.connect(complx.intake)
# Protein branch feeds the complex cube's dedicated protein port.
iprot.success.connect(mdcomp.intake)
mdcomp.success.connect(complx.protein_port)
complx.success.connect(solvate.intake)
solvate.success.connect(ff.intake)
ff.success.connect(ofs.intake)
ff.failure.connect(fail.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 20
0
                         promoted_name='eq3_reporter_interval',
                         default=10000,
                         description='Reporter saving interval')
# Equilibration floe tail: final (third) equilibration stage settings,
# S3-backed success/failure writers, and the full pipeline wiring.
# NOTE(review): iprot, iligs, chargelig, complx, solvate, ff, minComplex,
# warmup, equil1-3 and `job` are defined above this fragment.

equil3.promote_parameter('outfname',
                         promoted_name='eq3_outfname',
                         default='equil3',
                         description='Equilibration suffix name')

ofs = OEMolOStreamCube('ofs', title='OFS-Success')
ofs.set_parameters(backend='s3')

fail = OEMolOStreamCube('fail', title='OFS-Failure')
fail.set_parameters(backend='s3')
fail.set_parameters(data_out='fail.oeb.gz')

job.add_cubes(iprot, iligs, chargelig, complx, solvate, ff, minComplex, warmup,
              equil1, equil2, equil3, ofs, fail)

# Protein feeds the complex system port; ligands are charged then combined.
iprot.success.connect(complx.system_port)
iligs.success.connect(chargelig.intake)
chargelig.success.connect(complx.intake)
complx.success.connect(solvate.intake)
solvate.success.connect(ff.intake)
ff.success.connect(minComplex.intake)
minComplex.success.connect(warmup.intake)
warmup.success.connect(equil1.intake)
equil1.success.connect(equil2.intake)
equil2.success.connect(equil3.intake)
equil3.success.connect(ofs.intake)
equil3.failure.connect(fail.intake)

if __name__ == "__main__":
    # Fix: the guard body was missing in the source (truncated); every
    # sibling floe script ends by running the WorkFloe.
    job.run()
Exemplo n.º 21
0
                                    title="Record Check Success")

# Full-analysis floe tail: output writers and the success wiring of the
# complete MD analysis chain (traj -> energies -> MMPBSA -> clustering ->
# report).
# NOTE(review): this fragment is cut at both ends -- the analysis cubes,
# check_rec and `job` are defined above; remaining connections continue
# below.

ofs = DatasetWriterCube('ofs', title='MD Out')
ofs.promote_parameter("data_out",
                      promoted_name="out",
                      title="MD Out",
                      description="MD Dataset out")

fail = DatasetWriterCube('fail', title='Failures')
fail.promote_parameter("data_out",
                       promoted_name="fail",
                       title="Failures",
                       description="MD Dataset Failures out")

job.add_cubes(iMDInput, coll_open, trajCube, IntECube, PBSACube, confGather,
              catLigTraj, catLigMMPBSA, clusCube, clusPop, clusOEMols,
              prepDataset, report_gen, report, coll_close, check_rec, ofs,
              fail)

# Success Connections
iMDInput.success.connect(coll_open.intake)
coll_open.success.connect(trajCube.intake)
trajCube.success.connect(IntECube.intake)
IntECube.success.connect(PBSACube.intake)
PBSACube.success.connect(confGather.intake)
confGather.success.connect(catLigTraj.intake)
catLigTraj.success.connect(catLigMMPBSA.intake)
catLigMMPBSA.success.connect(clusCube.intake)
clusCube.success.connect(clusPop.intake)
clusPop.success.connect(clusOEMols.intake)
clusOEMols.success.connect(prepDataset.intake)
prepDataset.success.connect(report_gen.intake)
Exemplo n.º 22
0
# Hydrated complex-preparation floe: read protein and ligands, build the
# complex, solvate it, and apply the force field.
# NOTE(review): iprot, iligs, chargelig and `job` are defined above this
# fragment.

iprot.promote_parameter("data_in", promoted_name="protein", title="Protein Input File", description="Protein file name")
iprot.promote_parameter("protein_prefix", promoted_name="protein_prefix", default='PRT',
                        description="Protein Prefix")

# Complex Setting
complx = ComplexPrep("Complex")
ff = ForceFieldPrep("ForceField")

# solvate the system
solvate = HydrationCube("Hydration")

# Success/failure molecule writers backed by S3 storage.
ofs = OEMolOStreamCube('ofs', title='OFS-Success')
ofs.set_parameters(backend='s3')

fail = OEMolOStreamCube('fail', title='OFS-Failure')
fail.set_parameters(backend='s3')
fail.set_parameters(data_out='fail.oeb.gz')

job.add_cubes(iprot, iligs, chargelig, complx,  solvate, ff, ofs, fail)

# Protein feeds the complex system port; ligands are charged then combined.
iprot.success.connect(complx.system_port)
iligs.success.connect(chargelig.intake)
chargelig.success.connect(complx.intake)
complx.success.connect(solvate.intake)
solvate.success.connect(ff.intake)
ff.success.connect(ofs.intake)
ff.failure.connect(fail.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 23
0
                      promoted_name="in",
                      title="System Input OERecord",
                      description="OERecord file name")

# Full analysis floe body: trajectory conversion, interaction energies,
# PBSA, clustering, per-molecule HTML, and the floe report.
# NOTE(review): `ifs` and `job` are defined above this fragment.

ofs = DatasetWriterCube('ofs', title='OFS-Success')
ofs.promote_parameter("data_out",
                      promoted_name="out",
                      title="System Output OERecord",
                      description="OERecord file name")

trajToOEMol = ParallelTrajToOEMolCube("TrajToOEMolCube")
trajIntE = ParallelTrajInteractionEnergyCube("TrajInteractionEnergyCube")
trajPBSA = ParallelTrajPBSACube("TrajPBSACube")
clusCube = ParallelClusterOETrajCube("ClusterOETrajCube")
molHtml = ParallelMDTrajAnalysisClusterReport("MolHtmlCube")
floeReport = MDFloeReportCube("FloeReportCube")

job.add_cubes(ifs, trajToOEMol, trajIntE, trajPBSA, clusCube, molHtml,
              floeReport, ofs)

# read -> traj OEMols -> interaction energy -> PBSA -> cluster ->
# per-mol HTML -> floe report -> write
ifs.success.connect(trajToOEMol.intake)
trajToOEMol.success.connect(trajIntE.intake)
trajIntE.success.connect(trajPBSA.intake)
trajPBSA.success.connect(clusCube.intake)
clusCube.success.connect(molHtml.intake)
molHtml.success.connect(floeReport.intake)
floeReport.success.connect(ofs.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 24
0
# Conformer analysis floe: gather conformer records, cluster, concatenate
# trajectory MMPBSA, and build per-cluster trajectory OEMols.
# NOTE(review): `job` (WorkFloe) is defined above this fragment.
# job.uuid = "7cacc2af-cae7-4dc7-8956-fcf539861e3d"

ifs = DatasetReaderCube("ifs")

ifs.promote_parameter("data_in",
                      promoted_name="in",
                      title="System Input OERecord",
                      description="OERecord file name")

confGather = ConformerGatheringData("Gathering Conformer Records")
clusCube = ParallelClusterOETrajCube("ClusterOETrajCube")
clusOEMols = ParallelMakeClusterTrajOEMols('MakeClusterTrajOEMols')
trajMMPBSA = ParallelConcatenateTrajMMPBSACube('ConcatenateTrajMMPBSACube')

ofs = DatasetWriterCube('ofs', title='OFS-Success')
ofs.promote_parameter("data_out",
                      promoted_name="out",
                      title="System Output OERecord",
                      description="OERecord file name")

job.add_cubes(ifs, confGather, clusCube, trajMMPBSA, clusOEMols, ofs)

# read -> gather conformers -> cluster -> concat MMPBSA ->
# cluster traj OEMols -> write
ifs.success.connect(confGather.intake)
confGather.success.connect(clusCube.intake)
clusCube.success.connect(trajMMPBSA.intake)
trajMMPBSA.success.connect(clusOEMols.intake)
clusOEMols.success.connect(ofs.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 25
0
# Energy analysis floe: trajectory interaction energies followed by PBSA.
job.classification = [['Analysis']]
# Flatten the nested classification lists into a flat tag list.
job.tags = [tag for lists in job.classification for tag in lists]

ifs = DatasetReaderCube("ifs")
ifs.promote_parameter("data_in",
                      promoted_name="in",
                      title="System Input OERecord",
                      description="OERecord file name")

ofs = DatasetWriterCube('ofs', title='MD Out')
ofs.promote_parameter("data_out", promoted_name="out")

fail = DatasetWriterCube('fail', title='Failures')
fail.promote_parameter("data_out", promoted_name="fail")

IntECube = ParallelTrajInteractionEnergyCube("TrajInteractionEnergyCube")
PBSACube = ParallelTrajPBSACube("TrajPBSACube")

# NOTE(review): this report cube is instantiated but never added to the
# floe nor connected below -- looks like dead code; confirm intent.
report = MDFloeReportCube("report", title="Floe Report")

job.add_cubes(ifs, IntECube, PBSACube, ofs, fail)

# read -> interaction energy -> PBSA -> write; stage failures -> fail
ifs.success.connect(IntECube.intake)
IntECube.success.connect(PBSACube.intake)
IntECube.failure.connect(fail.intake)
PBSACube.success.connect(ofs.intake)
PBSACube.failure.connect(fail.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 26
0
                       default=1000,
                       description='Reporter saving interval')
# Production-MD floe tail: production output settings, S3-backed writers,
# and the pipeline wiring with intermediate stage writers.
# NOTE(review): this fragment is cut at both ends -- the MD cubes, the
# *_ofs intermediate writers and `job` are defined above; prod -> ofs and
# fail connections presumably continue below.

prod.promote_parameter('outfname',
                       promoted_name='prod_outfname',
                       default='prod',
                       description='Equilibration suffix name')

ofs = OEMolOStreamCube('ofs', title='OFS-Success')
ofs.set_parameters(backend='s3')

fail = OEMolOStreamCube('fail', title='OFS-Failure')
fail.set_parameters(backend='s3')
fail.set_parameters(data_out='fail.oeb.gz')

job.add_cubes(iprot, iligs, chargelig, complx, solvate, ff, complex_prep_ofs,
              minComplex, minimization_ofs, warmup, equil1, equil2, equil3,
              equilibration_ofs, prod, ofs, fail)

# Main chain; ff and minComplex additionally fan out to snapshot writers.
iprot.success.connect(complx.system_port)
iligs.success.connect(chargelig.intake)
chargelig.success.connect(complx.intake)
complx.success.connect(solvate.intake)
solvate.success.connect(ff.intake)
ff.success.connect(minComplex.intake)
ff.success.connect(complex_prep_ofs.intake)
minComplex.success.connect(warmup.intake)
minComplex.success.connect(minimization_ofs.intake)
warmup.success.connect(equil1.intake)
equil1.success.connect(equil2.intake)
equil2.success.connect(equil3.intake)
equil3.success.connect(prod.intake)
Exemplo n.º 27
0
# NVT equilibration floe: restrained constant-volume MD with promoted
# time/temperature/reporting parameters.
# NOTE(review): `ifs` and `job` are defined above this fragment.

nvt = OpenMMnvtCube('nvt')
nvt.promote_parameter('time', promoted_name='picosec', default=10.0)
nvt.promote_parameter('temperature', promoted_name='temperature', default=300.0,
                      description='Selected temperature in K')
# Restraints
nvt.promote_parameter('restraints', promoted_name='restraints', default='noh (ligand or protein)')
nvt.promote_parameter('restraintWt', promoted_name='restraintWt', default=2.0)
# Trajectory and logging info frequency intervals
nvt.promote_parameter('trajectory_interval', promoted_name='trajectory_interval', default=100,
                      description='Trajectory saving interval')
nvt.promote_parameter('reporter_interval', promoted_name='reporter_interval', default=1000,
                      description='Reporter saving interval')

nvt.promote_parameter('outfname', promoted_name='suffix', default='nvt',
                      description='Equilibration suffix name')

# Success/failure molecule writers backed by S3 storage.
ofs = OEMolOStreamCube('ofs', title='OFS-Success')
ofs.set_parameters(backend='s3')
fail = OEMolOStreamCube('fail', title='OFS-Failure')
fail.set_parameters(backend='s3')
fail.set_parameters(data_out='fail.oeb.gz')

job.add_cubes(ifs, nvt, ofs, fail)
# read -> NVT -> write; NVT failures -> fail writer
ifs.success.connect(nvt.intake)
nvt.success.connect(ofs.intake)
nvt.failure.connect(fail.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 28
0
# Yank binding free-energy floe: run Yank on input molecules, promoting
# the cube's main parameters under their own names.
job.classification =[["YANK", "Binding free energies", "OpenMM", "choderalab"]]
# Flatten the nested classification lists into a flat tag list.
job.tags = [tag for lists in job.classification for tag in lists]

ifs = OEMolIStreamCube("ifs")
ifs.promote_parameter("data_in", promoted_name="molecules", description="Input molecules")

yank_cube = YankBindingCube('yank_binding', title = 'Yank for binding free energies')
# Bulk-promote the Yank parameters, reusing each cube parameter's own
# name and description for the floe-level parameter.
for parameter_name in ['receptor', 'solvent', 'temperature', 'pressure', 'nsteps_per_iteration', 'simulation_time', 'timestep', 'minimize', 'verbose']:
    promoted_name = parameter_name
    description = yank_cube.parameters()[parameter_name].description
    yank_cube.promote_parameter(parameter_name, promoted_name=promoted_name, description=description)

success_ofs = OEMolOStreamCube("success_ofs")
success_ofs.promote_parameter("data_out", promoted_name="success", description="Output molecules")

failure_ofs = OEMolOStreamCube("failure_ofs")
failure_ofs.promote_parameter("data_out", promoted_name="failure", description="Failed molecules")

cubes = [ifs, yank_cube, success_ofs, failure_ofs]

job.add_cubes(*cubes)

# read -> Yank -> success/failure writers
ifs.success.connect(yank_cube.intake)
yank_cube.success.connect(success_ofs.intake)
yank_cube.failure.connect(failure_ofs.intake)


if __name__ == "__main__":
    job.run()
Exemplo n.º 29
0
# Ligand trajectory MMPBSA floe: gather conformer records, merge conformer
# trajectories per ligand, and concatenate trajectory MMPBSA results.
# NOTE(review): `job` (WorkFloe) is defined above this fragment.

ifs = DatasetReaderCube("ifs")
ifs.promote_parameter("data_in",
                      promoted_name="in",
                      title="System Input OERecord",
                      description="OERecord file name")

confGather = ConformerGatheringData("Gathering Conformer Records")
ligTrajCube = ParallelConfTrajsToLigTraj("ConfTrajsToLigTraj")
ligMMPBSA = ParallelConcatenateTrajMMPBSACube('ConcatenateTrajMMPBSACube')

ofs = DatasetWriterCube('ofs', title='OFS-Success')
ofs.promote_parameter("data_out",
                      promoted_name="out",
                      title="System Output OERecord",
                      description="OERecord file name")

fail = DatasetWriterCube('fail', title='Failures')
fail.promote_parameter("data_out", promoted_name="fail")

job.add_cubes(ifs, confGather, ligTrajCube, ligMMPBSA, ofs, fail)

# read -> gather conformers -> ligand traj -> MMPBSA -> write;
# only the final stage's failures are caught.
ifs.success.connect(confGather.intake)
confGather.success.connect(ligTrajCube.intake)
ligTrajCube.success.connect(ligMMPBSA.intake)
ligMMPBSA.success.connect(ofs.intake)
ligMMPBSA.failure.connect(fail.intake)

if __name__ == "__main__":
    job.run()
Exemplo n.º 30
0
from __future__ import unicode_literals
"""
Copyright (C) 2016 OpenEye Scientific Software
"""
from floe.api import WorkFloe, FileOutputCube
from PlatformTestCubes.cubes import BenchmarkCube

# Benchmarking WorkFloe: runs the OpenMM benchmark cube over every
# available platform and writes its report to a text file.
job = WorkFloe("OpenMM Benchmarking")

job.description = """
Performs Benchmarking upon all available Platforms
Based on OpenMM SimTK Benchmarking script
"""

job.classification = [
    ["OpenMM", "Platforms", "Benchmarking"]
]
# Flatten the nested classification groups into a flat tag list.
flat_tags = []
for group in job.classification:
    flat_tags.extend(group)
job.tags = flat_tags

# One benchmark cube feeding one text-file writer.
bench = BenchmarkCube("benchmark_cube")
out_file = FileOutputCube("ofs")
out_file.set_parameters(name="Orion_OpenMM_Benchmarks.txt")

job.add_cubes(bench, out_file)
bench.success.connect(out_file.intake)

if __name__ == "__main__":
    job.run()