Esempio n. 1
0
def test_wfp_check_processor():
    """check_processor() must report True while the WFprocessor is running
    and False after it has been terminated."""

    task = Task()
    task.executable = '/bin/date'

    stage = Stage()
    stage.add_tasks(task)

    pipeline = Pipeline()
    pipeline.add_stages(stage)

    appman = Amgr(hostname=hostname, port=port)
    appman._setup_mqs()

    wfp = WFprocessor(sid=appman._sid, workflow=[pipeline],
                      pending_queue=appman._pending_queue,
                      completed_queue=appman._completed_queue,
                      rmq_conn_params=appman._rmq_conn_params,
                      resubmit_failed=False)

    # alive right after start ...
    wfp.start_processor()
    assert wfp.check_processor()

    # ... and gone after termination
    wfp.terminate_processor()
    assert not wfp.check_processor()
Esempio n. 2
0
def test_amgr_resource_terminate():
    """resource_terminate() must run cleanly on an Amgr that has a resource
    description, message queues and a task manager in place."""

    from radical.entk.execman.rp import TaskManager

    description = {'resource': 'xsede.supermic',
                   'walltime': 30,
                   'cpus'    : 20,
                   'project' : 'TG-MCB090174'}

    appman = Amgr(rts='radical.pilot', hostname=hostname, port=port)
    appman.resource_desc = description
    appman._setup_mqs()

    # make sure the RMQ queues are removed on termination
    appman._rmq_cleanup = True
    appman._task_manager = TaskManager(sid='test',
                                       pending_queue=[],
                                       completed_queue=[],
                                       mq_hostname=appman._mq_hostname,
                                       rmgr=appman._resource_manager,
                                       port=appman._port)

    appman.resource_terminate()
Esempio n. 3
0
def test_amgr_synchronizer():
    """Exercise AppManager._synchronizer(): task-state updates published by
    a helper process must be consumed by the synchronizer thread and applied
    to the workflow, driving all tasks to COMPLETED."""

    amgr = Amgr(hostname=host, port=port, username=username, password=password)
    amgr._setup_mqs()

    p = Pipeline()
    s = Stage()

    # Create and add 10 tasks to the stage
    for cnt in range(10):

        t = Task()
        t.executable = 'some-executable-%s' % cnt

        s.add_tasks(t)

    p.add_stages(s)
    p._validate()

    amgr.workflow = [p]

    # wire up minimal base resource/task managers for the synchronizer
    sid  = 'test.0016'
    rmgr = BaseRmgr({}, sid, None, {})
    tmgr = BaseTmgr(sid=sid,
                    pending_queue=['pending-1'],
                    completed_queue=['completed-1'],
                    rmgr=rmgr,
                    rmq_conn_params=amgr._rmq_conn_params,
                    rts=None)

    amgr._rmgr         = rmgr
    rmgr._task_manager = tmgr

    # everything must start out in INITIAL state
    for t in p.stages[0].tasks:
        assert t.state == states.INITIAL

    assert p.stages[0].state == states.INITIAL
    assert p.state           == states.INITIAL

    # Start the synchronizer method in a thread
    amgr._terminate_sync = mt.Event()
    sync_thread = mt.Thread(target=amgr._synchronizer,
                            name='synchronizer-thread')
    sync_thread.start()

    # Run the state-update publisher in a separate process
    proc = mp.Process(target=func_for_synchronizer_test, name='temp-proc',
                      args=(amgr._sid, p, tmgr))

    proc.start()
    proc.join()


    # Wait for AppManager to finish the message exchange;
    # no need to set *_terminate_sync* -- a timeout is used instead
    # amgr._terminate_sync.set()
    sync_thread.join(15)

    # the synchronizer must have moved every task to COMPLETED
    for t in p.stages[0].tasks:
        assert t.state == states.COMPLETED
Esempio n. 4
0
def test_amgr_setup_mqs():
    """_setup_mqs() must create one pending and one completed queue and
    record every queue name in the session's '.<sid>.txt' file."""

    amgr = Amgr(hostname=hostname, port=port)
    amgr._setup_mqs()

    assert len(amgr._pending_queue)   == 1
    assert len(amgr._completed_queue) == 1

    connection = pika.BlockingConnection(pika.ConnectionParameters(
                                       host=amgr._hostname, port=amgr._port))
    channel = connection.channel()

    expected = ['%s-tmgr-to-sync' % amgr._sid,
                '%s-cb-to-sync'   % amgr._sid,
                '%s-sync-to-tmgr' % amgr._sid,
                '%s-sync-to-cb'   % amgr._sid,
                '%s-pendingq-1'   % amgr._sid,
                '%s-completedq-1' % amgr._sid]

    # clean up the queues this test created
    for name in expected:
        channel.queue_delete(queue=name)

    # the bookkeeping file must list exactly the expected queue names
    with open('.%s.txt' % amgr._sid, 'r') as fp:
        recorded = [line.strip() for line in fp.readlines()]

    assert set(expected) == set(recorded)
Esempio n. 5
0
def test_amgr_read_config():
    """A fresh Amgr must expose the packaged defaults, and _read_config()
    must replace every one of them with the values from config.json."""

    amgr = Amgr(hostname=host, port=port)

    # packaged defaults
    assert amgr._reattempts == 3
    assert amgr._rmq_cleanup
    assert amgr._autoterminate
    assert not amgr._write_workflow
    assert not amgr._resubmit_failed
    assert amgr._rts              == 'radical.pilot'
    assert amgr._num_pending_qs   == 1
    assert amgr._num_completed_qs == 1
    assert amgr._rts_config       == {"sandbox_cleanup": False,
                                      "db_cleanup"     : False}

    cfg = {"hostname"       : "radical.two",
           "port"           : 25672,
           "username"       : user,
           "password"       : passwd,
           "reattempts"     : 5,
           "resubmit_failed": True,
           "autoterminate"  : False,
           "write_workflow" : True,
           "rts"            : "mock",
           "rts_config"     : {"sandbox_cleanup": True,
                               "db_cleanup"     : True},
           "pending_qs"     : 2,
           "completed_qs"   : 3,
           "rmq_cleanup"    : False}

    ru.write_json(cfg, './config.json')

    # all-None arguments force every value to come from the file
    amgr._read_config(config_path='./', hostname=None, port=None,
                      username=None, password=None, reattempts=None,
                      resubmit_failed=None, autoterminate=None,
                      write_workflow=None, rts=None, rmq_cleanup=None,
                      rts_config=None)

    assert amgr._hostname         == cfg['hostname']
    assert amgr._port             == cfg['port']
    assert amgr._reattempts       == cfg['reattempts']
    assert amgr._resubmit_failed  == cfg['resubmit_failed']
    assert amgr._autoterminate    == cfg['autoterminate']
    assert amgr._write_workflow   == cfg['write_workflow']
    assert amgr._rts              == cfg['rts']
    assert amgr._rts_config       == cfg['rts_config']
    assert amgr._num_pending_qs   == cfg['pending_qs']
    assert amgr._num_completed_qs == cfg['completed_qs']
    assert amgr._rmq_cleanup      == cfg['rmq_cleanup']

    os.remove('./config.json')
Esempio n. 6
0
def test_sid_in_mqs():
    """All queues created by _setup_mqs() must carry the session id in their
    name: consuming from each expected queue name must succeed."""

    appman = Amgr(hostname=hostname, port=port)
    appman._setup_mqs()
    sid = appman._sid

    qs = [
        '%s-tmgr-to-sync' % sid,
        '%s-cb-to-sync' % sid,
        '%s-enq-to-sync' % sid,
        '%s-deq-to-sync' % sid,
        '%s-sync-to-tmgr' % sid,
        '%s-sync-to-cb' % sid,
        '%s-sync-to-enq' % sid,
        '%s-sync-to-deq' % sid
    ]

    mq_connection = pika.BlockingConnection(
        pika.ConnectionParameters(host=hostname, port=port))
    mq_channel = mq_connection.channel()

    def callback():
        # fix: `print True` is Python-2-only syntax and a SyntaxError under
        # Python 3 -- use the print() function
        print(True)

    for q in qs:

        try:
            # NOTE(review): positional-callback-first signature matches
            # pika < 1.0; pika >= 1.0 uses basic_consume(queue, callback)
            mq_channel.basic_consume(callback, queue=q, no_ack=True)
        except Exception as ex:
            raise EnTKError(ex)
Esempio n. 7
0
def test_amgr_cleanup_mqs():
    """After _cleanup_mqs() no session queue may remain: purging any of
    them must raise ChannelClosed."""

    amgr = Amgr(hostname=hostname, port=port)
    sid = amgr._sid

    amgr._setup_mqs()
    amgr._cleanup_mqs()

    connection = pika.BlockingConnection(
        pika.ConnectionParameters(host=hostname, port=port))

    queue_names = ['%s-tmgr-to-sync' % sid,
                   '%s-cb-to-sync' % sid,
                   '%s-enq-to-sync' % sid,
                   '%s-deq-to-sync' % sid,
                   '%s-sync-to-tmgr' % sid,
                   '%s-sync-to-cb' % sid,
                   '%s-sync-to-enq' % sid,
                   '%s-sync-to-deq' % sid,
                   '%s-pendingq-1' % sid,
                   '%s-completedq-1' % sid]

    for name in queue_names:
        # a fresh channel is needed each round: the failed purge closes it
        with pytest.raises(pika.exceptions.ChannelClosed):
            connection.channel().queue_purge(name)
Esempio n. 8
0
def test_sid_in_mqs():
    """Consuming from each sync queue named after the session id must
    succeed.  (FIXME carried over from the original: the test has no
    explicit assertion -- it only fails if basic_consume raises.)"""

    amgr = Amgr(hostname=host, port=port)
    sid  = amgr._sid
    amgr._setup_mqs()

    queue_names = ['%s-tmgr-to-sync' % sid,
                   '%s-cb-to-sync'   % sid,
                   '%s-sync-to-tmgr' % sid,
                   '%s-sync-to-cb'   % sid]

    connection = pika.BlockingConnection(pika.ConnectionParameters(
                                                      host=host, port=port))
    channel = connection.channel()

    def callback():
        pass

    for name in queue_names:
        try:
            channel.basic_consume(callback, queue=name, no_ack=True)

        except Exception as ex:
            raise EnTKError(ex)
Esempio n. 9
0
def test_amgr_setup_mqs():
    """_setup_mqs() must succeed, create one pending and one completed
    queue, and record the sync-queue names in the session's '.<sid>.txt'
    bookkeeping file."""

    amgr = Amgr(hostname=hostname, port=port)

    # fix: assert truthiness instead of `== True` (PEP 8 / E712); this also
    # keeps the assert valid if _setup_mqs() returns a truthy non-bool
    assert amgr._setup_mqs()

    assert len(amgr._pending_queue) == 1
    assert len(amgr._completed_queue) == 1

    mq_connection = pika.BlockingConnection(pika.ConnectionParameters(host=amgr._mq_hostname, port=amgr._port))
    mq_channel = mq_connection.channel()

    qs = [
        '%s-tmgr-to-sync' % amgr._sid,
        '%s-cb-to-sync' % amgr._sid,
        '%s-enq-to-sync' % amgr._sid,
        '%s-deq-to-sync' % amgr._sid,
        '%s-sync-to-tmgr' % amgr._sid,
        '%s-sync-to-cb' % amgr._sid,
        '%s-sync-to-enq' % amgr._sid,
        '%s-sync-to-deq' % amgr._sid
    ]

    # clean up the queues this test created
    for q in qs:
        mq_channel.queue_delete(queue=q)

    with open('.%s.txt' % amgr._sid, 'r') as fp:
        # fix: replace the C-style index loop with a comprehension
        lines = [line.strip() for line in fp.readlines()]

    # NOTE(review): '<' asserts a *proper* subset -- the file also lists
    # queues (pendingq/completedq) that are not in `qs`
    assert set(qs) < set(lines)
Esempio n. 10
0
def test_wfp_start_processor():
    """start_processor() must spawn the wfp process without creating the
    enqueue/dequeue threads and without setting any terminate event."""

    task = Task()
    task.executable = ['/bin/date']

    stage = Stage()
    stage.add_tasks(task)

    pipeline = Pipeline()
    pipeline.add_stages(stage)

    amgr = Amgr(hostname=hostname, port=port)
    amgr._setup_mqs()

    wfp = WFprocessor(sid=amgr._sid, workflow=[pipeline],
                      pending_queue=amgr._pending_queue,
                      completed_queue=amgr._completed_queue,
                      mq_hostname=amgr._mq_hostname, port=amgr._port,
                      resubmit_failed=False)

    assert wfp.start_processor()

    # worker threads live inside the wfp process, not here
    assert not wfp._enqueue_thread
    assert not wfp._dequeue_thread

    # nothing has asked for termination yet
    assert not wfp._enqueue_thread_terminate.is_set()
    assert not wfp._dequeue_thread_terminate.is_set()
    assert not wfp._wfp_terminate.is_set()
    assert wfp._wfp_process.is_alive()

    # shut the process down again
    wfp._wfp_terminate.set()
    wfp._wfp_process.join()
Esempio n. 11
0
def test_state_order():
    """Pipeline, Stage and Task must pass through their states in the
    documented order when a workflow runs to completion."""

    def create_single_task():
        # minimal task with no data staging
        task = Task()
        task.name = 'simulation'
        task.executable = ['/bin/date']
        task.copy_input_data = []
        task.copy_output_data = []
        return task

    p1 = Pipeline()
    p1.name = 'p1'

    s = Stage()
    s.name = 's1'
    s.tasks = create_single_task()
    s.add_tasks(create_single_task())

    p1.add_stages(s)

    res_dict = {'resource': 'local.localhost',
                'walltime': 5,
                'cpus': 1,
                'project': ''}

    os.environ['RADICAL_PILOT_DBURL'] = MLAB
    os.environ['RP_ENABLE_OLD_DEFINES'] = 'True'

    appman = Amgr(hostname=hostname, port=port)
    appman.resource_desc = res_dict
    appman.workflow = [p1]
    appman.run()

    assert p1.state_history == ['DESCRIBED', 'SCHEDULING', 'DONE']

    assert p1.stages[0].state_history == ['DESCRIBED', 'SCHEDULING',
                                          'SCHEDULED', 'DONE']

    expected = ['DESCRIBED', 'SCHEDULING', 'SCHEDULED', 'SUBMITTING',
                'SUBMITTED', 'EXECUTED', 'DEQUEUEING', 'DEQUEUED', 'DONE']

    for t in p1.stages[0].tasks:
        assert t.state_history == expected
Esempio n. 12
0
def test_wfp_workflow_incomplete():
    """workflow_incomplete() must be True for a freshly initialized
    workflow and False after all its tasks have been dequeued COMPLETED."""

    p = Pipeline()
    s = Stage()
    t = Task()
    t.executable = ['/bin/date']
    s.add_tasks(t)
    p.add_stages(s)

    amgr = Amgr(hostname=hostname, port=port)
    amgr._setup_mqs()

    wfp = WFprocessor(sid=amgr._sid,
                      workflow=[p],
                      pending_queue=amgr._pending_queue,
                      completed_queue=amgr._completed_queue,
                      mq_hostname=amgr._mq_hostname,
                      port=amgr._port,
                      resubmit_failed=False)

    wfp._initialize_workflow()

    # nothing has run yet
    assert wfp.workflow_incomplete()

    amgr.workflow = [p]
    # NOTE(review): 'profiler' is never used below -- possibly dead code
    profiler = ru.Profiler(name='radical.entk.temp')

    # fix: the original used `==` on the next two lines, which compares and
    # discards the result -- these statements are meant to advance the states
    p.stages[0].state = states.SCHEDULING
    p.state = states.SCHEDULED
    for t in p.stages[0].tasks:
        t.state = states.COMPLETED

    import json
    import pika

    # publish the completed task so the dequeue helper can pick it up
    task_as_dict = json.dumps(t.to_dict())
    mq_connection = pika.BlockingConnection(
        pika.ConnectionParameters(host=amgr._mq_hostname, port=amgr._port))
    mq_channel = mq_connection.channel()
    mq_channel.basic_publish(exchange='',
                             routing_key='%s-completedq-1' % amgr._sid,
                             body=task_as_dict)

    # run the synchronizer so state updates are applied
    amgr._terminate_sync = Event()
    sync_thread = Thread(target=amgr._synchronizer, name='synchronizer-thread')
    sync_thread.start()

    # drive the dequeue in a separate process
    proc = Process(target=func_for_dequeue_test,
                   name='temp-proc',
                   args=(wfp, ))
    proc.start()
    proc.join()

    amgr._terminate_sync.set()
    sync_thread.join()

    assert not wfp.workflow_incomplete()
Esempio n. 13
0
def test_wfp_dequeue():
    """A completed task published on the completed queue must be dequeued
    by the WFprocessor, driving task, stage and pipeline to DONE."""

    p = Pipeline()
    s = Stage()
    t = Task()

    t.executable = '/bin/date'
    s.add_tasks(t)
    p.add_stages(s)

    amgr = Amgr(hostname=hostname, port=port)
    amgr._setup_mqs()

    wfp = WFprocessor(sid=amgr._sid,
                      workflow=[p],
                      pending_queue=amgr._pending_queue,
                      completed_queue=amgr._completed_queue,
                      mq_hostname=amgr._hostname,
                      port=amgr._port,
                      resubmit_failed=False)

    wfp.initialize_workflow()

    # everything starts out INITIAL
    assert p.state == states.INITIAL
    assert p.stages[0].state == states.INITIAL

    for t in p.stages[0].tasks:
        assert t.state == states.INITIAL

    # fix: the original used `==` on the next two lines -- a no-op
    # comparison; these statements are meant to advance the states
    p.state = states.SCHEDULED
    p.stages[0].state = states.SCHEDULING

    for t in p.stages[0].tasks:
        t.state = states.COMPLETED

    # publish the completed task for the dequeue thread to consume
    task_as_dict = json.dumps(t.to_dict())
    mq_connection = pika.BlockingConnection(
        pika.ConnectionParameters(host=amgr._hostname, port=amgr._port))
    mq_channel = mq_connection.channel()

    mq_channel.basic_publish(exchange='',
                             routing_key='%s' % amgr._completed_queue[0],
                             body=task_as_dict)

    wfp.start_processor()

    # helper waits for the dequeue to take effect
    th = mt.Thread(target=func_for_dequeue_test, name='temp-proc', args=(p, ))
    th.start()
    th.join()

    wfp.terminate_processor()

    assert p.state == states.DONE
    assert p.stages[0].state == states.DONE

    for t in p.stages[0].tasks:
        assert t.state == states.DONE
Esempio n. 14
0
def test_amgr_resource_description_assignment():
    """Assigning resource_desc must instantiate the ResourceManager that
    matches the configured RTS."""

    description = {'resource': 'xsede.supermic',
                   'walltime': 30,
                   'cpus'    : 1000,
                   'project' : 'TG-MCB090174'}

    from radical.entk.execman.rp import ResourceManager as RM_RP
    from radical.entk.execman.mock import ResourceManager as RM_MOCK

    # radical.pilot RTS -> RP resource manager
    amgr = Amgr(rts='radical.pilot')
    amgr.resource_desc = description
    assert isinstance(amgr._rmgr, RM_RP)

    # mock RTS -> mock resource manager
    amgr = Amgr(rts='mock')
    amgr.resource_desc = description
    assert isinstance(amgr._rmgr, RM_MOCK)
Esempio n. 15
0
def test_amgr_rmq_auth():
    """RabbitMQ credentials passed to Amgr must end up in the pika
    connection parameters."""

    amgr_name = ru.generate_id('test.amgr.%(item_counter)04d', ru.ID_CUSTOM)
    amgr      = Amgr(hostname=host, port=port, username=user, password=passwd,
                     name=amgr_name)

    creds = amgr._rmq_conn_params.credentials
    assert creds
    assert creds.username == user
    assert creds.password == passwd
Esempio n. 16
0
def test_amgr_synchronizer():
    """Exercise AppManager._synchronizer(): state updates published by a
    helper process must be consumed by the synchronizer thread and applied
    to the workflow, moving all tasks (and stage/pipeline) to SCHEDULING."""

    # logger/profiler are handed to the publisher process below
    logger = ru.get_logger('radical.entk.temp_logger')
    profiler = ru.Profiler(name='radical.entk.temp')
    amgr = Amgr(hostname=hostname, port=port)

    # NOTE(review): mq_connection/mq_channel are never used below --
    # possibly dead code
    mq_connection = pika.BlockingConnection(pika.ConnectionParameters(host=hostname, port=port))
    mq_channel = mq_connection.channel()

    amgr._setup_mqs()

    p = Pipeline()
    s = Stage()

    # Create and add 100 tasks to the stage
    for cnt in range(100):

        t = Task()
        t.executable = ['some-executable-%s' % cnt]

        s.add_tasks(t)

    p.add_stages(s)
    p._assign_uid(amgr._sid)
    p._validate()

    amgr.workflow = [p]

    # everything must start out in INITIAL state
    for t in p.stages[0].tasks:
        assert t.state == states.INITIAL

    assert p.stages[0].state == states.INITIAL
    assert p.state == states.INITIAL

    # Start the synchronizer method in a thread
    amgr._terminate_sync = Event()
    sync_thread = Thread(target=amgr._synchronizer, name='synchronizer-thread')
    sync_thread.start()

    # Run the state-update publisher in a separate process
    proc = Process(target=func_for_synchronizer_test, name='temp-proc',
                   args=(amgr._sid, p, logger, profiler))

    proc.start()
    proc.join()

    # the synchronizer must have advanced all states to SCHEDULING
    for t in p.stages[0].tasks:
        assert t.state == states.SCHEDULING

    assert p.stages[0].state == states.SCHEDULING
    assert p.state == states.SCHEDULING

    amgr._terminate_sync.set()
    sync_thread.join()
Esempio n. 17
0
    def test_amgr_read_config(self, mocked_init, mocked_PlainCredentials,
                              mocked_ConnectionParameters, d):
        """_read_config() must adopt every value from config.json when all
        override arguments are None, and must raise ValueError for an
        unknown RTS name."""

        amgr = Amgr(hostname='host',
                    port='port',
                    username='******',
                    password='******')

        # 'mock' is a valid RTS; the fixture dict 'd' supplies the rest
        d["rts"] = "mock"
        d["rts_config"] = {"sandbox_cleanup": True, "db_cleanup": True}

        ru.write_json(d, './config.json')
        # all-None arguments force every value to come from the file
        amgr._read_config(config_path='./',
                          hostname=None,
                          port=None,
                          username=None,
                          password=None,
                          reattempts=None,
                          resubmit_failed=None,
                          autoterminate=None,
                          write_workflow=None,
                          rts=None,
                          rmq_cleanup=None,
                          rts_config=None)

        self.assertEqual(amgr._hostname, d['hostname'])
        self.assertEqual(amgr._port, d['port'])
        self.assertEqual(amgr._reattempts, d['reattempts'])
        self.assertEqual(amgr._resubmit_failed, d['resubmit_failed'])
        self.assertEqual(amgr._autoterminate, d['autoterminate'])
        self.assertEqual(amgr._write_workflow, d['write_workflow'])
        self.assertEqual(amgr._rts, d['rts'])
        self.assertEqual(amgr._rts_config, d['rts_config'])
        self.assertEqual(amgr._num_pending_qs, d['pending_qs'])
        self.assertEqual(amgr._num_completed_qs, d['completed_qs'])
        self.assertEqual(amgr._rmq_cleanup, d['rmq_cleanup'])

        # an unsupported RTS name must be rejected
        d['rts'] = 'another'
        ru.write_json(d, './config.json')
        print(d)
        with self.assertRaises(ValueError):
            amgr._read_config(config_path='./',
                              hostname=None,
                              port=None,
                              username=None,
                              password=None,
                              reattempts=None,
                              resubmit_failed=None,
                              autoterminate=None,
                              write_workflow=None,
                              rts=None,
                              rmq_cleanup=None,
                              rts_config=None)
Esempio n. 18
0
def test_amgr_run():
    """run() without a resource description must raise MissingError, and so
    must assigning a workflow of uninitialized pipelines."""

    amgr = Amgr(hostname=host, port=port)

    # no resource description was set
    with pytest.raises(MissingError):
        amgr.run()

    pipelines = [Pipeline(), Pipeline(), Pipeline()]

    # the workflow setter validates the pipelines and must reject these
    with pytest.raises(MissingError):
        amgr.workflow = pipelines
Esempio n. 19
0
def test_amgr_assign_shared_data():
    """shared_data assigned on the Amgr must be forwarded to its resource
    manager."""

    amgr = Amgr(rts='radical.pilot', hostname=host, port=port)

    amgr.resource_desc = {'resource': 'xsede.supermic',
                          'walltime': 30,
                          'cpus'    : 20,
                          'project' : 'TG-MCB090174'}

    amgr.shared_data = ['file1.txt','file2.txt']

    assert amgr._rmgr.shared_data == ['file1.txt','file2.txt']
Esempio n. 20
0
def test_wfp_enqueue():
    """The enqueue path must move tasks of an initialized workflow from
    INITIAL to SCHEDULED (stage SCHEDULED, pipeline SCHEDULING)."""

    p = Pipeline()
    s = Stage()
    t = Task()
    t.executable = ['/bin/date']
    s.add_tasks(t)
    p.add_stages(s)

    amgr = Amgr(hostname=hostname, port=port)
    amgr._setup_mqs()

    wfp = WFprocessor(sid=amgr._sid,
                      workflow=[p],
                      pending_queue=amgr._pending_queue,
                      completed_queue=amgr._completed_queue,
                      mq_hostname=amgr._mq_hostname,
                      port=amgr._port,
                      resubmit_failed=False)

    wfp._initialize_workflow()

    amgr.workflow = [p]
    # NOTE(review): 'profiler' is never used below -- possibly dead code
    profiler = ru.Profiler(name='radical.entk.temp')

    # everything must start out in INITIAL state
    for t in p.stages[0].tasks:
        assert t.state == states.INITIAL

    assert p.stages[0].state == states.INITIAL
    assert p.state == states.INITIAL

    # run the synchronizer so state updates are applied
    amgr._terminate_sync = Event()
    sync_thread = Thread(target=amgr._synchronizer, name='synchronizer-thread')
    sync_thread.start()

    # drive the enqueue in a separate process
    proc = Process(target=func_for_enqueue_test,
                   name='temp-proc',
                   args=(wfp, ))
    proc.start()
    proc.join()

    amgr._terminate_sync.set()
    sync_thread.join()

    # the enqueue must have scheduled every task
    for t in p.stages[0].tasks:
        assert t.state == states.SCHEDULED

    assert p.stages[0].state == states.SCHEDULED
    assert p.state == states.SCHEDULING
Esempio n. 21
0
def test_amgr_run():
    """run() must raise MissingError when no resource manager has been set,
    both with and without an assigned workflow."""

    amgr = Amgr()

    # nothing configured at all
    with pytest.raises(MissingError):
        amgr.run()

    # a workflow alone is not enough either
    amgr._workflow = [Pipeline(), Pipeline(), Pipeline()]

    with pytest.raises(MissingError):
        amgr.run()
Esempio n. 22
0
def test_amgr_assign_workflow():
    """The workflow setter must reject containers of non-Pipeline members;
    the private attribute accepts lists and sets of pipelines."""

    amgr = Amgr()

    # invalid members must be rejected regardless of container type
    for bad in ([1, 2, 3], set([1, 2, 3])):
        with pytest.raises(TypeError):
            amgr.workflow = bad

    pipelines = [Pipeline(), Pipeline(), Pipeline()]

    amgr._workflow = pipelines
    amgr._workflow = set(pipelines)
Esempio n. 23
0
def test_wfp_enqueue():
    """The WFprocessor enqueue path must move tasks of a fresh workflow
    from INITIAL to SCHEDULED (stage SCHEDULED, pipeline SCHEDULING)."""

    p = Pipeline()
    s = Stage()
    t = Task()

    t.executable = '/bin/date'
    s.add_tasks(t)
    p.add_stages(s)

    amgr = Amgr(hostname=hostname, port=port, username=username,
            password=password)
    amgr._setup_mqs()

    wfp = WFprocessor(sid=amgr._sid,
                      workflow=[p],
                      pending_queue=amgr._pending_queue,
                      completed_queue=amgr._completed_queue,
                      rmq_conn_params=amgr._rmq_conn_params,
                      resubmit_failed=False)

    # the WFprocessor must have assigned uids on construction
    assert p.uid           is not None
    assert p.stages[0].uid is not None

    for t in p.stages[0].tasks:
        assert t.uid is not None

    # everything must start out in INITIAL state
    assert p.state           == states.INITIAL
    assert p.stages[0].state == states.INITIAL

    for t in p.stages[0].tasks:
        assert t.state == states.INITIAL

    wfp.start_processor()

    # helper waits for the enqueue to take effect
    th = mt.Thread(target=func_for_enqueue_test, name='temp-proc', args=(p,))
    th.start()
    th.join()

    wfp.terminate_processor()

    # the enqueue must have scheduled every task
    assert p.state           == states.SCHEDULING
    assert p.stages[0].state == states.SCHEDULED

    for t in p.stages[0].tasks:
        assert t.state == states.SCHEDULED
Esempio n. 24
0
def test_amgr_assign_workflow():
    """The workflow setter must reject containers of non-Pipeline members;
    the private attribute accepts lists and sets of pipelines."""

    amgr = Amgr(hostname=host, port=port, username=username,
            password=password)

    # invalid members must be rejected regardless of container type
    for bad in ([1, 2, 3], set([1, 2, 3])):
        with pytest.raises(TypeError):
            amgr.workflow = bad

    pipelines = [Pipeline(), Pipeline(), Pipeline()]

    amgr._workflow = pipelines
    amgr._workflow = set(pipelines)
Esempio n. 25
0
def test_wfp_workflow_incomplete():
    """After all tasks are COMPLETED and dequeued through the completed
    queue, workflow_incomplete() must report False."""

    p = Pipeline()
    s = Stage()
    t = Task()

    t.executable = '/bin/date'
    s.add_tasks(t)
    p.add_stages(s)

    amgr = Amgr(hostname=hostname, port=port, username=username,
            password=password)
    amgr._setup_mqs()

    wfp = WFprocessor(sid=amgr._sid,
                      workflow=[p],
                      pending_queue=amgr._pending_queue,
                      completed_queue=amgr._completed_queue,
                      rmq_conn_params=amgr._rmq_conn_params,
                      resubmit_failed=False)

    # mark every task completed up front
    for t in p.stages[0].tasks:
        t.state = states.COMPLETED

    # publish the completed task for the dequeue thread to consume
    task_as_dict  = json.dumps(t.to_dict())
    credentials = pika.PlainCredentials(amgr._username, amgr._password)
    mq_connection = pika.BlockingConnection(pika.ConnectionParameters(
                                          host=amgr._hostname, port=amgr._port,
                                          credentials=credentials))
    mq_channel    = mq_connection.channel()

    mq_channel.basic_publish(exchange    = '',
                             routing_key = '%s' % amgr._completed_queue[0],
                             body        = task_as_dict)

    wfp.start_processor()

    # helper waits for the dequeue to take effect
    th = mt.Thread(target=func_for_dequeue_test, name='temp-proc', args=(p,))
    th.start()
    th.join()

    wfp.terminate_processor()

    assert not wfp.workflow_incomplete()
Esempio n. 26
0
def test_amgr_assign_shared_data(s, i, b, se):
    """shared_data must reject non-list values (string, int, bool, set
    fixtures) and forward a valid list to the resource manager."""

    amgr = Amgr(rts='radical.pilot', hostname=hostname, port=port)

    amgr.resource_desc = {
        'resource': 'xsede.supermic',
        'walltime': 30,
        'cpus': 20,
        'project': 'TG-MCB090174'
    }

    # every non-list fixture value must be rejected
    for bad in (s, i, b, se):
        with pytest.raises(TypeError):
            amgr.shared_data = bad

    amgr.shared_data = ['file1.txt', 'file2.txt']
    assert amgr._resource_manager.shared_data == ['file1.txt', 'file2.txt']
Esempio n. 27
0
def test_amgr_setup_mqs():
    """_setup_mqs() must create exactly one pending and one completed
    queue."""

    amgr = Amgr(hostname=host, port=port)
    amgr._setup_mqs()

    assert len(amgr._pending_queue)   == 1
    assert len(amgr._completed_queue) == 1

    connection = pika.BlockingConnection(pika.ConnectionParameters(
                                       host=amgr._hostname, port=amgr._port))
    channel = connection.channel()

    # clean up the queues created above
    for name in ['%s-tmgr-to-sync' % amgr._sid,
                 '%s-cb-to-sync'   % amgr._sid,
                 '%s-sync-to-tmgr' % amgr._sid,
                 '%s-sync-to-cb'   % amgr._sid,
                 '%s-pendingq-1'   % amgr._sid,
                 '%s-completedq-1' % amgr._sid]:
        channel.queue_delete(queue=name)
Esempio n. 28
0
def test_amgr_run_mock():
    """A one-task workflow must run to completion on the mock RTS."""

    task = Task()
    task.name       = 'simulation'
    task.executable = '/bin/date'

    stage = Stage()
    stage.tasks = task

    pipeline = Pipeline()
    pipeline.add_stages(stage)

    appman = Amgr(hostname=host, port=port, rts="mock")
    appman.resource_desc = {'resource': 'local.localhost',
                            'walltime': 5,
                            'cpus'    : 1,
                            'project' : ''}

    appman.workflow = [pipeline]
    appman.run()
Esempio n. 29
0
    def test_run_workflow(self, mocked_init, mocked_ResourceManager,
                          mocked_WFprocessor, mocked_TaskManager,
                          mocked_Profiler):
        """_run_workflow() must raise EnTKError when the task manager's
        heartbeat keeps failing and all reattempts are exhausted."""

        # heartbeat always fails -> forces the reattempt/restart path
        mocked_TaskManager.check_heartbeat = mock.MagicMock(return_value=False)
        mocked_TaskManager.terminate_heartbeat = mock.MagicMock(
            return_value=True)
        mocked_TaskManager.terminate_manager = mock.MagicMock(
            return_value=True)
        mocked_TaskManager.start_manager = mock.MagicMock(return_value=True)
        mocked_TaskManager.start_heartbeat = mock.MagicMock(return_value=True)
        mocked_ResourceManager.get_resource_allocation_state = mock.MagicMock(
            return_value='RUNNING')
        mocked_ResourceManager.get_completed_states = mock.MagicMock(
            return_value=['DONE'])
        # workflow never completes, so the loop keeps spinning
        mocked_WFprocessor.workflow_incomplete = mock.MagicMock(
            return_value=True)
        mocked_WFprocessor.check_processor = mock.MagicMock(return_value=True)

        # assemble an Amgr by hand (init is mocked away)
        appman = Amgr()
        appman._uid = 'appman.0000'
        appman._logger = ru.Logger(name='radical.entk.taskmanager',
                                   ns='radical.entk')
        appman._prof = mocked_Profiler
        pipe = mock.Mock()
        pipe.lock = mt.Lock()
        pipe.completed = False
        pipe.uid = 'pipe.0000'
        appman._workflow = set([pipe])
        appman._cur_attempt = 0
        appman._reattempts = 3
        appman._rmgr = mocked_ResourceManager
        appman._wfp = mocked_WFprocessor
        appman._task_manager = mocked_TaskManager
        appman._sync_thread = mock.Mock()
        appman._sync_thread.is_alive = mock.MagicMock(return_value=True)

        with self.assertRaises(ree.EnTKError):
            appman._run_workflow()
Esempio n. 30
0
def test_amgr_cleanup_mqs():
    """After _cleanup_mqs() no session queue may survive: purging any of
    them must raise ChannelClosed."""

    amgr = Amgr(hostname=host, port=port, username=username, password=password)
    sid  = amgr._sid

    amgr._setup_mqs()
    amgr._cleanup_mqs()

    creds = pika.PlainCredentials(username, password)
    connection = pika.BlockingConnection(pika.ConnectionParameters(
                                                      host=host, port=port,
                                                      credentials=creds))

    queue_names = ['%s-tmgr-to-sync' % sid,
                   '%s-cb-to-sync'   % sid,
                   '%s-sync-to-tmgr' % sid,
                   '%s-sync-to-cb'   % sid,
                   '%s-pendingq-1'   % sid,
                   '%s-completedq-1' % sid]

    for name in queue_names:
        # a fresh channel is needed each round: the failed purge closes it
        with pytest.raises(pika.exceptions.ChannelClosed):
            connection.channel().queue_purge(name)