def test_workflow_info(self):
    """
    This test checks that the workflow info is generated without any
    exceptions.
    :return:
    """
    # Assuming there is only one user
    dbuser = User.search_for_users(email=self.user_email)[0]

    # Creating a simple workflow & storing it
    a = WFTestEmpty()
    a.store()

    # Emulate the workflow list
    for w in get_workflow_list(all_states=True, user=dbuser):
        if not w.is_subworkflow():
            get_workflow_info(w)

    # Create a workflow with sub-workflows and store it
    b = WFTestSimpleWithSubWF()
    b.store()

    # Emulate the workflow list
    for w in get_workflow_list(all_states=True, user=dbuser):
        if not w.is_subworkflow():
            get_workflow_info(w)

    # Start the second workflow and perform a workflow list
    b.start()
    for w in get_workflow_list(all_states=True, user=dbuser):
        if not w.is_subworkflow():
            get_workflow_info(w)
def workflow_list(short, all_states, depth, past_days, workflows):
    """List legacy workflows"""
    from aiida.backends.utils import get_workflow_list
    from aiida.orm.workflow import get_workflow_info
    from aiida.orm.backend import construct_backend  # pylint: disable=no-name-in-module

    tab_size = 2
    backend = construct_backend()
    current_user = backend.users.get_automatic_user()

    wf_list = get_workflow_list([workflow.pk for workflow in workflows],
                                user=current_user,
                                all_states=all_states,
                                n_days_ago=past_days)

    for workflow in wf_list:
        if not workflow.is_subworkflow() or workflow in workflows:
            echo.echo('\n'.join(
                get_workflow_info(workflow, tab_size=tab_size, short=short, depth=depth)))

    if not workflows:
        if all_states:
            echo.echo_info('# No workflows found')
        else:
            echo.echo_info('# No running workflows found')
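# A minimal sketch (an assumption, not the actual AiiDA source) of how the click
# options consumed by ``workflow_list`` above could be wired up. The flag names
# mirror the legacy argparse version further below; the real command presumably
# uses a dedicated parameter type to resolve the positional PKs into workflow
# nodes before calling the body, which is left out here.
import click


@click.command('list')
@click.option('-s', '--short', is_flag=True,
              help='show shorter output (only subworkflows and steps, no calculations)')
@click.option('-a', '--all-states', is_flag=True,
              help='show all existing workflows, not only running ones')
@click.option('-d', '--depth', type=int, default=16,
              help='show only steps down to a depth of this many levels in subworkflows')
@click.option('-p', '--past-days', type=int, default=None,
              help='show only workflows created in the past N days')
@click.argument('workflows', nargs=-1, type=int)
def workflow_list_cli(short, all_states, depth, past_days, workflows):
    """Hypothetical CLI entry point; it would resolve the workflow PKs into
    workflow nodes and then delegate to ``workflow_list`` above."""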
def test_listing_workflows(self):
    """
    Test ensuring that the workflow listing works as expected.
    (Listing initialized & running workflows and not listing finished
    workflows or workflows with errors).
    """
    # Assuming there is only one user
    dbuser = User.search_for_users(email=self.user_email)[0]

    # Creating a workflow & storing it
    a = WorkflowTestEmpty()
    a.store()

    # Setting manually the state to RUNNING.
    a.set_state(wf_states.RUNNING)

    # Getting all the available workflows of the current user
    # and checking if we got the right one.
    wfqs = get_workflow_list(all_states=True, user=dbuser)
    self.assertTrue(len(wfqs) == 1, "We expect one workflow")
    a_prime = wfqs[0].get_aiida_class()
    self.assertEqual(a.uuid, a_prime.uuid, "The uuid is not the expected one")

    # We ask for the workflows in all states. We should get one workflow.
    wfqs = get_workflow_list(all_states=True, user=dbuser)
    self.assertTrue(len(wfqs) == 1, "We expect one workflow")
    a_prime = wfqs[0].get_aiida_class()
    self.assertEqual(a.uuid, a_prime.uuid, "The uuid is not the expected one")

    # We change the state of the workflow to FINISHED.
    a.set_state(wf_states.FINISHED)

    # Getting all the available workflows of the current user
    # and checking if we got the right one.
    wfqs = get_workflow_list(all_states=True, user=dbuser)
    self.assertTrue(len(wfqs) == 1, "We expect one workflow")
    a_prime = wfqs[0].get_aiida_class()
    self.assertEqual(a.uuid, a_prime.uuid, "The uuid is not the expected one")

    # We ask for the running workflows only. We should get zero results.
    wfqs = get_workflow_list(all_states=False, user=dbuser)
    self.assertTrue(len(wfqs) == 0, "We expect zero workflows")

    # We change the state of the workflow to INITIALIZED.
    a.set_state(wf_states.INITIALIZED)

    # We ask for the workflows in all states. We should get one workflow.
    wfqs = get_workflow_list(all_states=True, user=dbuser)
    self.assertTrue(len(wfqs) == 1, "We expect one workflow")
    a_prime = wfqs[0].get_aiida_class()
    self.assertEqual(a.uuid, a_prime.uuid, "The uuid is not the expected one")

    # We change the state of the workflow to ERROR.
    a.set_state(wf_states.ERROR)

    # We ask for the running workflows only. We should get zero results.
    wfqs = get_workflow_list(all_states=False, user=dbuser)
    self.assertTrue(len(wfqs) == 0, "We expect zero workflows")
def workflow_list(self, *args):
    """
    Return a list of workflows on screen
    """
    from aiida.backends.utils import load_dbenv, is_dbenv_loaded
    if not is_dbenv_loaded():
        load_dbenv()

    from aiida.backends.utils import get_workflow_list, get_automatic_user
    from aiida.orm.workflow import get_workflow_info
    from aiida.orm import User

    import argparse

    parser = argparse.ArgumentParser(
        prog=self.get_full_command_name(),
        description='List AiiDA workflows.')
    parser.add_argument(
        '-s', '--short', action='store_true',
        help="show shorter output (only subworkflows and steps, no calculations)")
    parser.add_argument(
        '-a', '--all-states', action='store_true',
        help="show all existing AiiDA workflows, not only running ones")
    parser.add_argument(
        '-d', '--depth', metavar='M', action='store', type=int, default=16,
        help="add a filter to show only steps down to a depth of M levels in "
             "subworkflows (0 means only the parent workflows are shown)")
    parser.add_argument(
        '-p', '--past-days', metavar='N', action='store', type=int,
        help="add a filter to show only workflows created in the past N days")
    parser.add_argument(
        'pks', type=int, nargs='*',
        help="a list of workflows to show. If empty, all running workflows are "
             "shown. If non-empty, automatically sets --all and ignores the -p option.")

    tab_size = 2  # how many spaces to use for indentation of subworkflows

    args = list(args)
    parsed_args = parser.parse_args(args)

    workflows = get_workflow_list(parsed_args.pks,
                                  user=User(dbuser=get_automatic_user()),
                                  all_states=parsed_args.all_states,
                                  n_days_ago=parsed_args.past_days)

    for w in workflows:
        if not w.is_subworkflow() or w.pk in parsed_args.pks:
            print "\n".join(get_workflow_info(w,
                                              tab_size=tab_size,
                                              short=parsed_args.short,
                                              depth=parsed_args.depth))

    if not workflows:
        if parsed_args.all_states:
            print "# No workflows found"
        else:
            print "# No running workflows found"
def test_workflow_fast_kill(self):
    """
    Test that killing the head workflow also stops its subworkflows and
    their running steps.
    """
    from aiida.cmdline.commands.workflow import Workflow as WfCmd
    from aiida.orm.backend import construct_backend

    backend = construct_backend()

    params = dict()
    params['nmachine'] = 2

    # Create a workflow with 2 subworkflows and start it
    head_wf = WFTestSimpleWithSubWF()
    head_wf.start()

    # Get the user
    user = backend.users.find(email=self.user_email)[0]

    wfl = get_workflow_list(user=user)
    running_no = 0
    for w in get_workflow_list(user=user, all_states=True):
        if w.get_aiida_class().get_state() == wf_states.RUNNING:
            running_no += 1
    self.assertEquals(running_no, 3)

    # Killing the head workflow
    wf_cmd = WfCmd()
    wf_cmd.workflow_kill(*[str(head_wf.pk), '-f', '-q'])

    # At this point no running workflow should be found
    running_no = 0
    for w in get_workflow_list(user=user, all_states=True):
        if w.get_aiida_class().get_state() == wf_states.RUNNING:
            running_no += 1
    self.assertEquals(running_no, 0, "No running workflows should be found")
    self.assertNotEquals(get_all_running_steps(), 0,
                         "At this point there will be running steps.")

    # Make the daemon advance. This will automatically set to FINISHED all
    # the running steps that are (directly) under a finished workflow.
    legacy_workflow_stepper()

    self.assertEquals(len(list(get_all_running_steps())), 0,
                      "At this point there should be no running steps.")

    running_no = 0
    for w in get_workflow_list(user=user, all_states=True):
        if w.get_aiida_class().get_state() == wf_states.RUNNING:
            running_no += 1
    self.assertEquals(running_no, 0,
                      "At this point there should be no running workflows.")

    # Make the daemon advance a bit more and make sure that no workflows
    # resurrect.
    for _ in range(5):
        legacy_workflow_stepper()

    self.assertEquals(len(list(get_all_running_steps())), 0,
                      "At this point there should be no running steps.")

    running_no = 0
    for w in get_workflow_list(user=user, all_states=True):
        if w.get_aiida_class().get_state() == wf_states.RUNNING:
            running_no += 1
    self.assertEquals(running_no, 0,
                      "At this point there should be no running workflows.")