Example 1
 def export_to_gui(self, soma_workflow_dirpath, **Xy):
     '''
     Example
     -------
     See "examples/run_somaworkflow_gui.py" in the epac source tree.
     '''
     try:
         from soma.workflow.client import Job, Workflow
         from soma.workflow.client import Helper, FileTransfer
     except ImportError:
         errmsg = "No soma-workflow is found. "\
             "Please verify your soma-worklow"\
             "on your computer (e.g. PYTHONPATH) \n"
         sys.stderr.write(errmsg)
         sys.stdout.write(errmsg)
         raise NoSomaWFError
     if not os.path.exists(soma_workflow_dirpath):
         os.makedirs(soma_workflow_dirpath)
     tmp_work_dir_path = soma_workflow_dirpath
     cur_work_dir = os.getcwd()
     os.chdir(tmp_work_dir_path)
     ft_working_directory = FileTransfer(is_input=True,
                                         client_path=tmp_work_dir_path,
                                         name="working directory")
     ## Save the database and tree to working directory
     ## ===============================================
     np.savez(
         os.path.join(tmp_work_dir_path,
                      SomaWorkflowEngine.dataset_relative_path), **Xy)
     store = StoreFs(dirpath=os.path.join(
         tmp_work_dir_path, SomaWorkflowEngine.tree_root_relative_path))
     self.tree_root.save_tree(store=store)
     ## Subtree job allocation on disk
     ## ==============================
     node_input = NodesInput(self.tree_root.get_key())
     split_node_input = SplitNodesInput(self.tree_root,
                                        num_processes=self.num_processes)
     nodesinput_list = split_node_input.split(node_input)
     keysfile_list = self._save_job_list(tmp_work_dir_path,
                                         nodesinput_list)
     ## Build soma-workflow
     ## ===================
     jobs = [
         Job(command=[u"epac_mapper",
                      u'--datasets',
                      '"%s"' % SomaWorkflowEngine.dataset_relative_path,
                      u'--keysfile',
                      '"%s"' % nodesfile],
             referenced_input_files=[ft_working_directory],
             referenced_output_files=[ft_working_directory],
             name="epac_job_key=%s" % nodesfile,
             working_directory=ft_working_directory)
         for nodesfile in keysfile_list
     ]
     soma_workflow = Workflow(jobs=jobs)
     if soma_workflow_dirpath:
         out_soma_workflow_file = os.path.join(
             soma_workflow_dirpath,
             SomaWorkflowEngine.open_me_by_soma_workflow_gui)
         Helper.serialize(out_soma_workflow_file, soma_workflow)
     os.chdir(cur_work_dir)
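
A hedged usage sketch for export_to_gui follows. The epac import path, the Methods tree, and the SomaWorkflowEngine constructor arguments are illustrative assumptions, not verified API; only the export_to_gui(dirpath, **Xy) call itself comes from the code above.

# Hypothetical driver for export_to_gui. Import paths and constructor
# arguments below are assumptions for illustration.
import numpy as np
from sklearn.svm import SVC
from epac import Methods                                # assumed epac API
from epac.map_reduce.engine import SomaWorkflowEngine   # assumed module path

X = np.random.randn(20, 5)                 # toy dataset
y = np.array([0, 1] * 10)
tree = Methods(SVC(C=1.0), SVC(C=10.0))    # small workflow tree
engine = SomaWorkflowEngine(tree_root=tree, num_processes=2)
engine.export_to_gui("/tmp/epac_swf_gui", X=X, y=y)
# Open the serialized workflow written in that directory with
# soma_workflow_gui to inspect and launch the jobs.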
Example 2
    def _submit_graph(self, pyfiles, dependencies, nodes):
        jobs = []
        soma_deps = []
        for fname in pyfiles:
            name = os.path.splitext(os.path.split(fname)[1])[0]
            jobs.append(Job(command=[sys.executable, fname], name=name))
        for key, values in list(dependencies.items()):
            for val in values:
                soma_deps.append((jobs[val], jobs[key]))

        wf = Workflow(jobs, soma_deps)
        logger.info('serializing workflow')
        Helper.serialize('workflow', wf)
        controller = WorkflowController()
        logger.info('submitting workflow')
        wf_id = controller.submit_workflow(wf)
        Helper.wait_workflow(wf_id, controller)
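
The same submission pattern as a self-contained sketch, using only the soma-workflow client calls already shown above; the two script paths are placeholders.

# Self-contained sketch of the Job/Workflow/WorkflowController pattern.
import sys
from soma.workflow.client import Job, Workflow, WorkflowController, Helper

pyfiles = ["step_one.py", "step_two.py"]    # placeholder scripts
jobs = [Job(command=[sys.executable, f], name=f) for f in pyfiles]

# Dependencies are (upstream_job, downstream_job) pairs, exactly as
# built in _submit_graph: step_two.py waits for step_one.py.
deps = [(jobs[0], jobs[1])]

wf = Workflow(jobs=jobs, dependencies=deps)
controller = WorkflowController()           # default local configuration
wf_id = controller.submit_workflow(workflow=wf, name="two_step_demo")
Helper.wait_workflow(wf_id, controller)     # block until the jobs finish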
Example 3
  workflows.append(("multiple", simple_wf_examples.multiple_simple_example()))
  workflows.append(("special_command", simple_wf_examples.special_command()))
  workflows.append(("mutiple_transfer", tr_wf_examples.multiple_simple_example()))
  workflows.append(("special_command_transfer", tr_wf_examples.special_command()))
  workflows.append(("special_transfer", tr_wf_examples.special_transfer()))
  workflows.append(("mutiple_crp", srp_wf_examples.multiple_simple_example()))
  workflows.append(("special_command_crp", srp_wf_examples.special_command()))
    
  ret_value = 0

  for workflow_name, workflow in workflows:
    print("--------------------------------------------------------------")
    print(workflow_name)

    file_path = os.path.join(directory, "json_" + workflow_name + ".wf")
    Helper.serialize(file_path, workflow)

    new_workflow = Helper.unserialize(file_path)

    # A workflow must survive the serialize/unserialize round trip unchanged.
    if not new_workflow.attributs_equal(workflow):
      print("FAILED !!")
      ret_value = 1
    else:
      print("OK")

    try:
      os.remove(file_path)
    except OSError:  # os.remove raises OSError, not IOError
      pass

  if ret_value == 0:
    print("All workflows passed the round trip.")
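
The round-trip check reduces to a minimal sketch for a single workflow; the echo command is a placeholder job.

# Minimal serialize/unserialize round trip, mirroring the loop above.
import os
from soma.workflow.client import Job, Workflow, Helper

wf = Workflow(jobs=[Job(command=["echo", "hello"], name="hello")])
path = "json_hello.wf"
Helper.serialize(path, wf)
restored = Helper.unserialize(path)
assert restored.attributs_equal(wf)     # same check as the test above
os.remove(path)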