def query(param):
    wf = workflow.Workflow()
    # Try each conversion; silently skip the ones that do not apply
    # to the given input.
    try:
        ip = int2ip(int(param), False)
        wf.add_item(title=ip, subtitle='int2ip', arg=ip, valid=True)
    except Exception:
        pass
    try:
        ip = int2ip(int(param))
        wf.add_item(title=ip, subtitle='int2ip', arg=ip, valid=True)
    except Exception:
        pass
    try:
        int_ip = ip2int(param, False)
        wf.add_item(title=int_ip, subtitle='ip2int', arg=int_ip, valid=True)
    except Exception:
        pass
    try:
        int_ip = ip2int(param)
        wf.add_item(title=int_ip, subtitle='ip2int', arg=int_ip, valid=True)
    except Exception:
        pass
    wf.send_feedback()
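# The snippet above assumes int2ip/ip2int helpers defined elsewhere in the
# workflow. A stdlib-only sketch of what they might look like; the boolean
# argument is assumed to toggle byte order (network vs. little-endian), which
# is a guess based on both variants being offered as separate results.
import socket
import struct

def int2ip(n, big_endian=True):
    # Pack the integer into 4 bytes, then render as dotted-quad notation.
    return socket.inet_ntoa(struct.pack('!I' if big_endian else '<I', n))

def ip2int(ip, big_endian=True):
    # Reverse: parse dotted-quad into 4 bytes, then unpack as an integer.
    return struct.unpack('!I' if big_endian else '<I', socket.inet_aton(ip))[0]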
def test1(self):
    w = workflow.Workflow()
    store = dict(files=['file1', 'file2'])
    w.start(store=store)
    w >> Task1() >> Task2()
    if store['status']:
        w >> Task3a()
    else:
        w >> Task3b()
    for file in store['files']:
        w >> Task4(file=file)
    resultcode = w.end()
    assert store['result'] == ['file1', 'file2']
    assert resultcode == 'success'
    assert [task.__class__.__name__ for task in w.task_history] == [
        'Task1', 'Task2', 'Task3a', 'Task4', 'Task4',
    ]
def test_empty_machine_index(self):
    wf = workflow.Workflow()
    # Monkey-patch the data source so the list is rendered from an empty
    # machine index, then restore the original function.
    old_get = vagrantup.get_machine_data
    vagrantup.get_machine_data = lambda: generate_index(0)['machines']
    vagrantup.do_list([], wf)
    vagrantup.get_machine_data = old_get
    self.assertEqual(len(wf._items), 1)
    self.assertEqual(wf._items[0].valid, False)
def wf(): """Return `Workflow` object for this module. Returns: workflow.Workflow: `Workflow` object for current workflow. """ global _wf if _wf is None: _wf = workflow.Workflow() return _wf
def test_workflow_step():
    # Tests the creation and the generic getter function
    W = w.Workflow()
    start_step = W.start_step(a, "start_a")
    step = W.add_step(start_step, b, "step_b")
    assert step.name == 'step_b'
    assert step.function == b
    assert step.step_type is None
    assert W.start_steps() == [start_step]
    assert W.steps() == [start_step, step]
def test_workflow_start_step():
    # Tests the creation and the generic getter function
    W = w.Workflow()
    step = W.start_step(a, "start_a")
    step.name = 'step2'
    assert step.name == 'step2'
    assert step.function == a
    assert step.step_type == 'start'
    assert W.start_steps() == [step]
    assert W.steps() == [step]
def test_workflow_step_merge():
    # Tests the creation and the generic getter function
    W = w.Workflow()
    w_st = W.start_step(a, "start_a").next(b, 'step_b').merge(c)
    step_a = w.Step(W, a, "start_a", step_type='start')
    step_b = w.Step(W, function=b, name="step_b")
    step_c = w.Step(W, function=c, name="workflow_test.c", step_type='sync')
    assert step_c.function == c
    assert W.start_steps() == [step_a]
    assert W.steps() == [step_a, step_b, step_c]
def test_list_machines(self):
    wf = workflow.Workflow()
    machines = generate_index()['machines']
    vagrantup.list_machines(machines, wf)
    for item in wf._items:
        mid, vagrantfile_path = item.arg.split(' ')
        meta = machines[mid]
        self.assertTrue(mid in machines.keys())
        self.assertTrue(item.uid in machines.keys())
        self.assertEqual(item.title, meta['name'])
        self.assertEqual(item.subtitle, meta['vagrantfile_path'])
        self.assertEqual(vagrantfile_path, meta['vagrantfile_path'])
        self.assertEqual(item.valid, True)
        self.assertIsNone(item.icon)
def test_workflow_step():
    # Tests the creation and the generic getter function
    W = w.Workflow()
    w_st = W.start_step(a, "start_a").next(b, 'step_b')
    start_step = w.Step(W, a, "start_a", step_type='start')
    step = w.Step(W, function=b, name="step_b")
    assert step.name == 'step_b'
    assert step.function == b
    assert step.step_type is None
    assert W.start_steps() == [start_step]
    assert W.steps() == [start_step, step]
def query(param):
    wf = workflow.Workflow()
    try:
        dt = timestamp_datetime(float(param))
        wf.add_item(title=dt, subtitle='from unix timestamp', arg=dt, valid=True)
    except Exception:
        pass
    try:
        ts = datetime_timestamp(param)
        wf.add_item(title=ts, subtitle='to unix timestamp', arg=ts, valid=True)
    except Exception:
        pass
    wf.send_feedback()
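# timestamp_datetime/datetime_timestamp are defined elsewhere in the workflow.
# A stdlib-only sketch; the '%Y-%m-%d %H:%M:%S' format and local-time
# semantics are assumptions, not taken from the original source.
import time

def timestamp_datetime(ts):
    # Unix timestamp -> human-readable local-time string.
    return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(ts))

def datetime_timestamp(s):
    # Date string -> Unix timestamp string; raises ValueError on bad input,
    # which the caller above deliberately swallows.
    return str(int(time.mktime(time.strptime(s, '%Y-%m-%d %H:%M:%S'))))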
def getWorkflows(self):
    print(" --- Trying to get the workflows for " + self.policieName + " ---")
    link = self.baseurl + "/workflows/"
    req = requests.request('GET', link, headers=self.headers, verify=False, timeout=50)
    workflowsJson = json.loads(req.text)['workflows']
    for workflowJson in workflowsJson:
        newWorkflow = workflow.Workflow(link, self.headers, workflowJson['name'])
        newWorkflow.getWorkflowStatus()
        self.workflows.append(newWorkflow)
    print(" --- Finished getting workflows --- \n")
def to_uint(param):
    # Treat a lone '-' as zero so the filter does not crash while the
    # user is still typing a negative number.
    if len(param) == 1 and param == '-':
        param = 0
    param = int(param)
    uint_num = str(ctypes.c_uint32(param).value)
    wf = workflow.Workflow()
    try:
        wf.add_item(title=uint_num, subtitle='to uint', arg=uint_num, valid=True)
    except Exception:
        pass
    wf.send_feedback()
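# ctypes.c_uint32 reinterprets the signed value modulo 2**32; a
# dependency-free equivalent is plain bit masking:
#     (-1) & 0xFFFFFFFF == 4294967295 == ctypes.c_uint32(-1).value
def to_uint_masked(param):
    return str(int(param) & 0xFFFFFFFF)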
def search_server(param):
    sys.stderr.write('search param: %s\n' % param)
    server_list = load_servers()
    idc = ''
    ip = ''
    find_items = []
    params = param.split()
    # If the first token starts with a digit, treat it as an IP fragment,
    # otherwise as an IDC name; a second token fills the other role.
    try:
        int(params[0][0])
        ip = params[0]
        if len(params) > 1:
            idc = params[1]
    except (ValueError, IndexError):
        idc = params[0]
        if len(params) > 1:
            ip = params[1]
    for server in server_list:
        if ip in server['detail_ip'] and idc in server['idc']:
            find_items.append(server)
    wf = workflow.Workflow()
    for server in find_items:
        # Append the names of flag fields whose value is 1.
        title = server['idc'] + ' ' + server['pub_ip']
        for k, v in server.items():
            try:
                if int(v) == 1:
                    title += ' ' + k
            except (TypeError, ValueError):
                continue
        try:
            wf.add_item(title=server['pub_ip'], subtitle=title, arg=server['pub_ip'], valid=True)
        except Exception:
            continue
    wf.send_feedback()
def test_workflow_step_thread_merge():
    # Tests the creation and the generic getter function
    W = w.Workflow()
    w_st = W.start_step(a, "start_a").next(b, 'step_b').thread_merge(c)
    step_a = w.Step(W, a, "start_a", step_type='start')
    step_b = w.Step(W, function=b, name="step_b")
    step_c = w.Step(W, function=c, name="workflow_test.c", step_type='thread_sync')
    assert step_c.function == c
    assert W.start_steps() == [step_a]
    assert W.steps() == [step_a, step_b, step_c]
    assert W.step_by_name('step_b') == step_b
    assert W.steps_by_name(['step_b']) == [step_b]
    assert W.steps_by_name(['start_a', 'step_b']) == [step_a, step_b]
    assert W.next_steps(step_b.name) == [step_c]
    assert W.get_step_dependencies(step_a) is None
    assert W.get_step_dependencies(step_c) == sorted([step_a, step_b])
from garcon import activity
from garcon import decider
from threading import Thread
import time

import boto3

import workflow

# Initiate the workflow on the dev domain with the custom_decider name.
client = boto3.client('swf', region_name='us-east-1')
workflow = workflow.Workflow(client, 'dev', 'custom_decider')
deciderworker = decider.DeciderWorker(workflow)

client.start_workflow_execution(
    domain=workflow.domain,
    workflowId='unique-workflow-identifier',
    workflowType=dict(
        name=workflow.name,
        version='1.0'),
    executionStartToCloseTimeout='3600',
    taskStartToCloseTimeout='3600',
    childPolicy='TERMINATE',
    taskList=dict(name=workflow.name))

# Run the activity worker in the background and poll for decisions forever.
Thread(target=activity.ActivityWorker(workflow).run).start()
while True:
    deciderworker.run()
    time.sleep(1)
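# Both garcon examples in this file import a local `workflow` module that is
# not shown here. A minimal sketch of what such a module might contain, based
# on garcon's activity.create/runner.Sync helpers and the boto3-style
# constructor used above; the attribute names and the activity body are
# assumptions for illustration, not the original module.
from garcon import activity
from garcon import runner


class Workflow(object):
    def __init__(self, client, domain, name):
        self.domain = domain
        self.name = name
        create = activity.create(client, domain, name)
        # A single trivial synchronous activity so the decider has
        # something to schedule.
        self.activity_1 = create(
            name='activity_1',
            run=runner.Sync(
                lambda activity, context: {'ran': 'activity_1'}))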
def wf():
    global _wf
    if _wf is None:
        _wf = workflow.Workflow()
    return _wf
def eval_point(self, point):
    #logger.info('Point: {}'.format(point))
    wf = workflow.Workflow(graph=self.substitute_params(point))
    data = wf.run()
    return data[self.optimization_field]
def main(wf):
    query = wf.args[0]
    users = wxc.search_user(query, True)
    if users is False:
        raise Exception('Open wanxin userdata.db failed')
        #wf.add_item("ERROR: Open wanxin userdata.db failed")
    elif not users:
        wf.add_item(u'Not found "%s"' % query)
    else:
        # Field order per record:
        # 0 Chinese name, 1 English name, 2 code, 3 gender, 4 title,
        # 5 phone, 6 mobile, 7 email, 8 address, 9 department
        [wf.add_item("%s (%s) %s %s" % (u[0], u[2], u[4], u[9]),
                     u"📱%s ☎️%s 📧%s" % (pretty_mobile(u[6]), u[5], u[7]),
                     copytext='%s %s %s %s' % (u[2], u[6], u[5], u[7]),
                     arg=u'%s %s\n手机: %s\n电话: %s\n邮箱: %s\n地址: %s' % (u[0], u[2], u[6], u[5], u[7], u[8]),
                     uid=u[2],
                     valid=True,
                     icon="female.png" if u[3] == '0' else 'male.png')
         for u in users[:100]]
    wf.send_feedback()

if __name__ == '__main__':
    wf = workflow.Workflow()
    sys.exit(wf.run(main))
#!/usr/bin/python
# encoding: utf-8

import os
import string
import sys

import workflow


def main(wf):
    log.debug('Started')
    if wf.update_available:
        log.debug("update available, attempting update")
        wf.start_update()


if __name__ == u"__main__":
    wf = workflow.Workflow(update_settings={
        'github_slug': 'plongitudes/SGLinkTransform',
        'frequency': 1,
    })
    log = wf.logger
    sys.exit(wf.run(main))
def run_graph(
    self,
    workflow_file,
    full_result,
    comment,
    main,
    graph_args,
    disable_inmemory_cache,
    disable_file_cache,
    frozens_id,
    create_frozens,
    use_frozens,
    use_frozen_only_if_exists,
    cleanup,
    perfomance_logging,
):
    self.workflow_file = workflow_file
    self.comment = comment
    start_time = time.time()

    with open(workflow_file) as f:
        self.graph = f.read()
    self.db_register()

    # Jobs that ran longer than message_delay log completion at CRITICAL
    # (so it reaches notification channels); quick jobs stay at INFO/ERROR.
    message_delay = 60 * float(config.get('cuber', 'message_delay', fallback=3))
    job_description = '{}; {}'.format(workflow_file, self.comment)

    try:
        cube.Cube.checkpoints_dir = self.checkpoints_dir
        logging.info('Checkpoints dir: {}'.format(cube.Cube.checkpoints_dir))

        wf = workflow.Workflow(
            workflow_file,
            main=main,
            graph_args=graph_args,
            frozens_dir=self.frozens_dir,
            frozens_id=frozens_id,
            create_frozens=create_frozens,
            use_frozens=use_frozens,
            use_frozen_only_if_exists=use_frozen_only_if_exists,
        )
        self.db_update_status('running')
        data = wf.run(
            disable_inmemory_cache=disable_inmemory_cache,
            disable_file_cache=disable_file_cache,
            cleanup=cleanup,
            perfomance_logging=perfomance_logging,
        )
        res = utils.dict_to_string(data, full=full_result)
        if time.time() - start_time >= message_delay:
            logging.critical('Calculation is done: {} (graph id: {})\n{}'.format(
                job_description, self.db_id, res))
        else:
            logging.info('Calculation is done: {} (graph id: {})\n{}'.format(
                job_description, self.db_id, res))
        self.db_save_result(res)
        self.db_update_status('done')
    except KeyboardInterrupt:
        if time.time() - start_time >= message_delay:
            logging.critical('Calculation is cancelled: {} (graph id: {})'.format(
                job_description, self.db_id))
        else:
            logging.error('Calculation is cancelled: {} (graph id: {})'.format(
                job_description, self.db_id))
        self.db_save_result('cancelled')
        self.db_update_status('cancelled')
    except Exception:
        import traceback
        traceback.print_exc()
        if time.time() - start_time >= message_delay:
            logging.critical('Calculation failed: {} (graph id: {})'.format(
                job_description, self.db_id))
        else:
            logging.error('Calculation failed: {} (graph id: {})'.format(
                job_description, self.db_id))
        self.db_update_status('failed')
import sys
import os

sys.path.insert(0, '/home/denest/PERFetc2/')

import matplotlib.pyplot as plt
import numpy as np

import workflow
from perfusion import express

ROOT_FOLDER_LIN = './'

wf = workflow.Workflow(ROOT_FOLDER_LIN)
wf.dir_manager.add_path('FILTERED', 'filtered', add_to='nii')
wf.setup_env(mricron='/home/denest/mricron/dcm2nii')

#wf.make_time_file()
#wf.convert_dcm_to_nii(make_time=True)
#wf.separate_nii()
#wf.filter_vols(intensity_sigma=40, gaussian_sigma=1.5)
wf.update_label()
#wf.registration_start(11)
#wf.make_4dvol()
#wf.add_roi('aorta')
#wf.add_roi('porta')
#wf.add_roi('4d_mask')
#wf.dir_manager.add_path('tumor_roi.nii.gz', 'tumor', add_to='roi', create=False)
#wf.dir_manager.add_path('pancreas_roi.nii.gz', 'pancreas', add_to='roi', create=False)
#for r in ['tumor','pancreas']:
#    wf.add_roi(r)
"""
wf.add_roi('aorta')
wf.dir_manager.add_path('tumor2.nii.gz', 'tumor2', add_to='roi', create=False)
wf.dir_manager.add_path('tumor1.nii.gz', 'tumor1', add_to='roi', create=False)
"""
from garcon import activity
from garcon import decider
from threading import Thread
import time

import boto.swf.layer2 as swf

import workflow

# Initiate the workflow on the dev domain with the custom_decider name.
flow = workflow.Workflow('dev', 'custom_decider')
deciderworker = decider.DeciderWorker(flow)

# swf.WorkflowType(
#     name=flow.name, domain=flow.domain,
#     version='1.0', task_list=flow.name).start()

Thread(target=activity.ActivityWorker(flow).run).start()
while True:
    deciderworker.run()
    time.sleep(1)
def wf(): """Lazy `Workflow` object.""" global _wf if _wf is None: _wf = workflow.Workflow() return _wf
rulers = []
if 1:
    # parse frag/libs out of pipeline.ini out of rundir
    availableRulers = ruler.getSupportedRulers(
        "%s/rulers" % (utils.INITIAL_UTILS), True)
    availableRulers.extend(ruler.getSupportedRulers(os.getcwd(), True))
    availableRulers_dict = dict()
    for rl in availableRulers:
        try:
            availableRulers_dict[rl.step].append(rl)
        except KeyError:
            availableRulers_dict[rl.step] = [rl]

    # finally reload any commands we had
    pip = workflow.Workflow("pipeline", settings.rundir + os.sep)
    pip.read()
    if len(pip.commandList.strip()) > 0:
        try:
            wfopts, wfargs = getopt.getopt(
                pip.commandList.strip().split(), shortOptions, longOptions)
            wfopts.extend(opts)
            wfargs.extend(args)
            opts = wfopts
            args = wfargs
        except getopt.GetoptError, err:
            # print help information and exit; err reads like
            # "option -a not recognized"
            print str(err)
            usage()
            sys.exit(2)