def main():
    """Build a single-node workflow guarded by an if/else scheduling rule and run it."""
    workflow = adage.adageobject()
    workflow.dag.addTask(boolean_task.s(), nodename = 'bool')
    decision = Rule(node_done.s(nodename = 'bool'), schedule_if_else.s(depnode = 'bool'))
    workflow.rules = [decision]
    adage.rundag(workflow, default_trackers = True, trackevery = 5)
def test_simpleexample():
    # Rule object whose predicate always fires; its body bootstraps the DAG.
    x = rl()
    @x.when
    def _(adageobj):
        return True
    @x.do
    def _(adageobj):
        # Schedule the first node, then register a follow-up rule that waits on it.
        dep = adageobj.dag.addTask(task.s(one = 'what'), nodename = 'first')
        newrule = rl()
        adageobj.rules.append(newrule)
        @newrule.when
        def _(adageobj):
            # fire only once the 'first' node has completed successfully
            upstream = adageobj.dag.getNodeByName('first')
            return upstream.state == adage.nodestate.SUCCESS
        @newrule.do
        def _(adageobj):
            # 'dep' is the node captured from the enclosing closure
            deptwo = adageobj.dag.addTask(task.s(one = 'ok...'), depends_on = [dep], nodename = 'what')
            newrule = rl()
            adageobj.rules.append(newrule)
            @newrule.when
            def _(adageobj):
                return True
            @newrule.do
            def _(adageobj):
                adageobj.dag.addTask(task.s(one = 'nested new'), nodename = 'the')
    adageobj = adage.adageobject()
    adageobj.rules.append(x)
    # serialize the empty workflow (smoke check), run it, then round-trip the DAG
    adage.serialize.obj_to_json(adageobj, lambda r: None, lambda n: None)
    adage.rundag(adageobj, default_trackers = True)
    data = adage.serialize.obj_to_json(adageobj, lambda r: None,
                                       lambda n: adage.serialize.node_to_json(n, lambda t: {}, lambda p: {}))
    adage.serialize.dag_from_json(data['dag'], lambda n: adage.node.Node(n['name'], n['task'], n['id']))
def main():
    """Run a one-node DAG whose follow-up nodes are decided by an if/else rule."""
    obj = adage.adageobject()
    obj.dag.addTask(boolean_task.s(), nodename='bool')
    obj.rules = [
        Rule(node_done.s(nodename='bool'), schedule_if_else.s(depnode='bool')),
    ]
    adage.rundag(obj, default_trackers=True, trackevery=5)
def dileptoncli(workdir, logger):
    """Build the dilepton DAG from *workdir* and execute it, logging progress."""
    logging.basicConfig(level = logging.INFO)
    clilog = logging.getLogger(logger)
    workflow_dag, workflow_rules = build_dag(workdir)
    clilog.info('running dilepton from workdir {0}'.format(workdir))
    adage.rundag(workflow_dag, workflow_rules)
    clilog.info('done')
def main():
    """Execute the CAP demo workflow with adage and render provenance graphs.

    Builds the abstract steps graph from capdata/workflow.json, runs it on a
    two-worker RECAST backend, then writes graphviz dot/pdf renderings of both
    the instantiated provenance graph and the abstract steps graph.
    """
    log = logging.getLogger(__name__)
    logging.basicConfig(level = logging.INFO)
    backend = foradage.RECAST_Backend(2)
    g = adage.mk_dag()
    global_context = {
        'workdir': '/Users/lukas/Code/code-snippets/cap-schema-drafts/steer',
        'dataset': 'user15.lheinric.p123/'
    }
    steps_graph = nx.DiGraph()
    # close the workflow file deterministically instead of leaking the handle
    with open('capdata/workflow.json') as workflow_file:
        workflow = json.load(workflow_file)
    for step in workflow:
        steps_graph.add_node(step['name'], step)
        for x in step['dependencies']:
            steps_graph.add_edge(x, step['name'])
    # one RECAST rule per step, created in dependency order
    rules = {}
    for stepname in nx.topological_sort(steps_graph):
        stepinfo = steps_graph.node[stepname]
        rule = foradage.RECAST_Rule(stepinfo, workflow, rules, global_context)
        rules[stepname] = rule
    adage.rundag(g, rules.values(), track = True, backend = backend)
    # build the provenance graph: executed nodes (red) plus output pseudo-nodes (blue boxes)
    provgraph = nx.DiGraph()
    for x in nx.topological_sort(g):
        attr = g.node[x].copy()
        attr.update(color = 'red', label = g.getNode(x).name)
        provgraph.add_node(x, attr)
        nodeinfo = g.getNode(x).task.node
        if 'used_inputs' in nodeinfo:
            # connect to the specific output pseudo-nodes this step consumed
            for k, inputs_from_node in nodeinfo['used_inputs'].iteritems():
                for one in inputs_from_node:
                    depname = 'output_{}_{}_{}'.format(k, one[0], one[1])
                    provgraph.add_edge(depname, x)
        else:
            # no fine-grained input info: fall back to the plain DAG edges
            for pre in g.predecessors(x):
                provgraph.add_edge(pre, x)
        for k, v in g.getNode(x).result_of()['RECAST_metadata']['outputs'].iteritems():
            for i, y in enumerate(v):
                name = 'output_{}_{}_{}'.format(g.getNode(x).task.node['name'], k, i)
                provgraph.add_node(name, {'shape': 'box', 'label': '{}_{}'.format(k, i), 'color': 'blue'})
                provgraph.add_edge(x, name)
    nx.write_dot(provgraph, 'workflow_instance.dot')
    # use context managers so the pdf handles are flushed and closed (was leaked before)
    with open('workflow_instance.pdf', 'w') as pdf:
        subprocess.call(['dot', '-Tpdf', 'workflow_instance.dot'], stdout = pdf)
    nx.write_dot(steps_graph, 'steps.dot')
    with open('steps.pdf', 'w') as pdf:
        subprocess.call(['dot', '-Tpdf', 'steps.dot'], stdout = pdf)
def cli(workdir, logger):
    """Build and run the dilepton DAG on a celery backend; abort the CLI on failure."""
    logging.basicConfig(level=logging.INFO)
    thelog = logging.getLogger(logger)
    thedag, therules = build_dag(workdir)
    thelog.info('running dilepton from workdir {0}'.format(workdir))
    try:
        adage.rundag(thedag, therules, backendsubmit=adage.celerysetup(celery_app))
    except RuntimeError:
        thelog.error('DAG execution failed')
        raise click.Abort
    thelog.info('done')
def fullchaincli(workdir, logger):
    """Run the fullchain workflow from *workdir*, sharing one module-level logger."""
    global log
    log = logging.getLogger(logger)
    # let the task module log through the same logger instance
    adagetasks.log = log
    log.info('running fullchain from workdir {0}'.format(workdir))
    thedag, therules = build_dag(workdir)
    adage.rundag(thedag, therules, loggername=logger, track=True,
                 workdir=workdir, trackevery=60)
    log.info('done')
def cli(workdir, logger):
    """CLI entry point: execute the dilepton DAG via celery, aborting on errors."""
    logging.basicConfig(level = logging.INFO)
    log = logging.getLogger(logger)
    dag, rules = build_dag(workdir)
    log.info('running dilepton from workdir {0}'.format(workdir))
    try:
        submitter = adage.celerysetup(celery_app)
        adage.rundag(dag, rules, backendsubmit = submitter)
    except RuntimeError:
        log.error('DAG execution failed')
        raise click.Abort
    log.info('done')
def main():
    """Wire one fixed node and two variable-output nodes, plus a pdf-scheduling rule."""
    obj = adage.adageobject()
    fixed = obj.dag.addTask(pdfproducer.s(name = 'fixed'), nodename = 'fixed')
    variables = [
        obj.dag.addTask(variableoutput.s(), nodename = 'variable1'),
        obj.dag.addTask(variableoutput.s(), nodename = 'variable2'),
    ]
    obj.rules += [Rule(variable_nodes_done.s(variables), schedule_pdf.s([fixed], variables))]
    adage.rundag(obj, default_trackers = True, workdir = 'bla')
def run_adage(self, backend = 'auto', **adage_kwargs):
    '''
    execute the workflow with adage against the given backend

    :param backend: backend to use for packtivity processing. The default
        'auto' keeps an already-wired controller backend, or falls back to
        a multiproc backend.
    '''
    if backend=='auto':
        #respect if the controller already has a backend wired up
        self.controller.backend = self.controller.backend or setupbackend_fromstring('multiproc:auto')
        # log the backend actually chosen, not the 'auto' sentinel string
        log.info('backend automatically set to %s', self.controller.backend)
    elif backend:
        self.controller.backend = backend
    assert self.controller.backend
    self.adage_argument(**adage_kwargs)
    adage.rundag(controller = self.controller, **self.adage_kwargs)
def main():
    """Run a two-rule workflow (always-add, then add-by-name) on a 2-proc backend."""
    pool = adage.backends.MultiProcBackend(2)
    workflow = adage.adageobject()
    first_rule = Rule(always.s(), addnode.s(name = 'what'))
    second_rule = Rule(byname.s(name = 'what'), addnode.s(name = 'the'))
    workflow.rules = [first_rule, second_rule]
    try:
        adage.rundag(workflow, backend = pool, default_trackers = True,
                     workdir = 'simpleTrack', update_interval = 10, trackevery = 10)
    except RuntimeError:
        log.error('ERROR')
def run_adage(self, backend='auto', **adage_kwargs):
    '''
    execute the workflow with adage against the given backend

    :param backend: backend to use for packtivity processing. The default
        'auto' keeps an already-wired controller backend, or falls back to
        a multiproc backend.
    '''
    if backend == 'auto':
        #respect if the controller already has a backend wired up
        self.controller.backend = self.controller.backend or setupbackend_fromstring(
            'multiproc:auto')
        # log the backend actually chosen, not the 'auto' sentinel string
        log.info('backend automatically set to %s', self.controller.backend)
    elif backend:
        self.controller.backend = backend
    assert self.controller.backend
    self.adage_argument(**adage_kwargs)
    adage.rundag(controller=self.controller, **self.adage_kwargs)
def test_simpleexample():
    # Rule whose predicate is always true; its body seeds the DAG at runtime.
    x = rl()
    @x.when
    def _(adageobj):
        return True
    @x.do
    def _(adageobj):
        # Add the first node and register a rule waiting on its success.
        dep = adageobj.dag.addTask(task.s(one='what'), nodename='first')
        newrule = rl()
        adageobj.rules.append(newrule)
        @newrule.when
        def _(adageobj):
            # fire only once the 'first' node has completed successfully
            upstream = adageobj.dag.getNodeByName('first')
            return upstream.state == adage.nodestate.SUCCESS
        @newrule.do
        def _(adageobj):
            # 'dep' is the node object captured from the enclosing closure
            deptwo = adageobj.dag.addTask(task.s(one='ok...'),
                                          depends_on=[dep], nodename='what')
            newrule = rl()
            adageobj.rules.append(newrule)
            @newrule.when
            def _(adageobj):
                return True
            @newrule.do
            def _(adageobj):
                adageobj.dag.addTask(task.s(one='nested new'), nodename='the')
    adageobj = adage.adageobject()
    adageobj.rules.append(x)
    # serialize the fresh workflow (smoke check), run it, then round-trip the DAG
    adage.serialize.obj_to_json(adageobj)
    adage.rundag(adageobj, default_trackers=True)
    data = adage.serialize.obj_to_json(adageobj)
    adage.serialize.dag_from_json(
        data['dag'],
        lambda n: adage.node.Node(n['name'], n['task'], n['id']),
        lambda p: None, None)
def main(): dag = random_dag(2,1) adageobj = adage.adageobject() numbered = {} for node in dag.nodes(): numbered[node] = adageobj.dag.addTask(hello.s(workdir = 'workdir_{}'.format(node)), nodename = 'demo_node_{}'.format(node)) for i,node in enumerate(dag.nodes()): print 'pre for: {} are: {}'.format(node,dag.predecessors(node)) for parent in dag.predecessors(node): adageobj.dag.addEdge(numbered[parent],numbered[node]) rules = [] rules += [ Rule(nodes_present.s([1]), schedule_after_these.s([1],note = 'depends on one')), Rule(nodes_present.s([4,1]), schedule_after_these.s([4,1],note = 'depends on two')) ] adageobj.rules = rules adage.rundag(adageobj, default_trackers = True, workdir = 'workdirtrack', trackevery = 4)
def main(): dag = random_dag(5,3) adageobj = adage.adageobject() numbered = {} for node in dag.nodes(): numbered[node] = adageobj.dag.addTask(hello.s(workdir = 'workdir_{}'.format(node)), nodename = 'demo_node_{}'.format(node)) for i,node in enumerate(dag.nodes()): print 'pre for: {} are: {}'.format(node,dag.predecessors(node)) for parent in dag.predecessors(node): adageobj.dag.addEdge(numbered[parent],numbered[node]) rules = [] rules += [ Rule(nodes_present.s([1]), schedule_after_these.s([1],note = 'depends on one', tag = 'dyn1')), Rule(nodes_present.s([4,'dynamic_dyn1']), schedule_after_these.s([4,'dynamic_dyn1'],note = 'depends on two', tag = 'dyn2')) ] from adage.backends import IPythonParallelBackend from ipyparallel import Client backend = IPythonParallelBackend(Client(), resolve_like_partial = True) adageobj.rules = rules adage.rundag(adageobj, backend = backend, default_trackers = True, workdir = 'workdirtrack', trackevery = 4)
logging.basicConfig() @adage.adagetask def hello(one, two): print 'calling a task with ', one, two time.sleep(1) return 'a return value' workflow = adage.adageobject() initial = workflow.dag.addTask(hello.s(one='hello', two='there')) another = workflow.dag.addTask(hello.s(one='one', two='two')) @adage.decorators.callbackrule(after={ 'init': initial.identifier, 'another': another.identifier }) def schedule(depnodes, adageobj): results = {k: v.result for k, v in depnodes.items()} parts = results['init'].split() for i, p in enumerate(parts): adageobj.dag.addTask(hello.s(one='part {}'.format(i), two=p), nodename=p, depends_on=depnodes.values()) workflow.rules = [schedule] adage.rundag(workflow, default_trackers=True, workdir='callback')
def test_trivial():
    """An empty adage object runs to completion without any trackers."""
    empty_workflow = adage.adageobject()
    adage.rundag(empty_workflow, default_trackers = False)
import adage import logging import time logging.basicConfig() @adage.adagetask def hello(one, two): print 'calling a task with ', one, two time.sleep(1) return 'a return value' workflow = adage.adageobject() initial = workflow.dag.addTask(hello.s(one = 'hello', two = 'there')) another = workflow.dag.addTask(hello.s(one = 'one', two = 'two')) @adage.decorators.callbackrule(after = {'init': initial.identifier, 'another': another.identifier}) def schedule(depnodes, adageobj): results = {k:v.result for k,v in depnodes.items()} parts = results['init'].split() for i,p in enumerate(parts): adageobj.dag.addTask(hello.s(one = 'part {}'.format(i), two = p), nodename = p, depends_on = depnodes.values()) workflow.rules = [schedule] adage.rundag(workflow, default_trackers = True, workdir = 'callback')
@x.do
def _(adageobj):
    # Body of rule 'x' (defined earlier, outside this chunk): schedule the
    # first node and chain a follow-up rule onto its completion.
    dep = adageobj.dag.addTask(task.s(one = 'what'), nodename = 'first')
    newrule = rl()
    adageobj.rules.append(newrule)
    @newrule.when
    def _(adageobj):
        # fire only once the 'first' node has completed successfully
        upstream = adageobj.dag.getNodeByName('first')
        return upstream.state == adage.nodestate.SUCCESS
    @newrule.do
    def _(adageobj):
        # 'dep' is the node object captured from the enclosing rule body
        deptwo = adageobj.dag.addTask(task.s(one = 'ok...'), depends_on = [dep], nodename = 'what')
        newrule = rl()
        adageobj.rules.append(newrule)
        @newrule.when
        def _(adageobj):
            return True
        @newrule.do
        def _(adageobj):
            adageobj.dag.addTask(task.s(one = 'nested new'), nodename = 'the')

adageobj = adage.adageobject()
adageobj.rules.append(x)
logging.basicConfig(level = logging.INFO)
adage.rundag(adageobj, default_trackers = True)
def test_trivial():
    """Smoke test: adage handles a workflow with no nodes and no rules."""
    obj = adage.adageobject()
    adage.rundag(obj, default_trackers=False)
def main():
    """Drive a single update step of the DAG while a talkative coroutine observes it."""
    workflow = build_initial_dag()
    watcher = talkative_decider()
    # prime the coroutine (Python 2 generator protocol)
    watcher.next()
    adage.rundag(workflow, default_trackers = True, trackevery = 5,
                 maxsteps = 1, update_interval = 5)