Example #1
from radical.entk import Pipeline, Stage, Task


def get_pipeline(shared_fs=False, size=1):

    p = Pipeline()
    p.name = 'p'

    n = 4

    s1 = Stage()
    s1.name = 's1'
    for x in range(n):
        t = Task()
        t.name = 't%s'%x

        # dd if=/dev/urandom bs=<bytes per chunk> count=<number of chunks> of=<output file>

        t.executable = ['dd']

        if not shared_fs:
            t.arguments = ['if=/dev/urandom','bs=%sM'%size, 'count=1', 'of=$NODE_LFS_PATH/s1_t%s.txt'%x]
        else:
            t.arguments = ['if=/dev/urandom','bs=%sM'%size, 'count=1', 'of=/home/vivek91/s1_t%s.txt'%x]

        t.cpu_reqs['processes'] = 1
        t.cpu_reqs['threads_per_process'] = 24
        t.cpu_reqs['thread_type'] = ''
        t.cpu_reqs['process_type'] = ''
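        # reserve space on the node-local filesystem (LFS) for this task (in MB)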
        t.lfs_per_process = 1024

        s1.add_tasks(t)

    p.add_stages(s1)

    s2 = Stage()
    s2.name = 's2'
    for x in range(n):
        t = Task()
        t.executable = ['dd']

        if not shared_fs:
            t.arguments = ['if=$NODE_LFS_PATH/s1_t%s.txt'%x,'bs=%sM'%size, 'count=1', 'of=$NODE_LFS_PATH/s2_t%s.txt'%x]
        else:
            t.arguments = ['if=/home/vivek91/s1_t%s.txt'%x,'bs=%sM'%size, 'count=1', 'of=/home/vivek91/s2_t%s.txt'%x]

        t.cpu_reqs['processes'] = 1
        t.cpu_reqs['threads_per_process'] = 24
        t.cpu_reqs['thread_type'] = ''
        t.cpu_reqs['process_type'] = ''
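        # tag this task with the name of the corresponding s1 task so the
        # data-aware scheduler places it on the same node, where s1's output
        # lives under $NODE_LFS_PATH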
        t.tag = 't%s'%x

        s2.add_tasks(t)


    p.add_stages(s2)

    return p
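
A minimal sketch of how such a pipeline is typically submitted, assuming a reachable RabbitMQ endpoint and the AppManager API shown in the later examples; the resource values below are placeholders:

from radical.entk import AppManager

appman = AppManager(hostname='localhost', port=5672)
appman.resource_desc = {
    'resource': 'local.localhost',   # assumed standard resource label
    'walltime': 10,
    'cpus': 24
}
appman.workflow = [get_pipeline(shared_fs=True)]
appman.run()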
Example #2
from radical.entk import Pipeline, Stage, Task


def get_pipeline(shared_fs=False, size=1):

    p = Pipeline()
    p.name = 'p'

    n = 4

    s1 = Stage()
    s1.name = 's1'
    for x in range(n):
        t = Task()
        t.name = 't%s'%x

        # dd if=/dev/urandom bs=<bytes per chunk> count=<number of chunks> of=<output file>

        t.executable = 'dd'

        if not shared_fs:
            t.arguments = ['if=/dev/urandom','bs=%sM'%size, 'count=1', 'of=$NODE_LFS_PATH/s1_t%s.txt'%x]
        else:
            t.arguments = ['if=/dev/urandom','bs=%sM'%size, 'count=1', 'of=/home/vivek91/s1_t%s.txt'%x]

        t.cpu_reqs['processes'] = 1
        t.cpu_reqs['threads_per_process'] = 24
        t.cpu_reqs['thread_type'] = ''
        t.cpu_reqs['process_type'] = ''
        t.lfs_per_process = 1024

        s1.add_tasks(t)

    p.add_stages(s1)

    s2 = Stage()
    s2.name = 's2'
    for x in range(n):
        t = Task()
        t.executable = 'dd'

        if not shared_fs:
            t.arguments = ['if=$NODE_LFS_PATH/s1_t%s.txt'%x,'bs=%sM'%size, 'count=1', 'of=$NODE_LFS_PATH/s2_t%s.txt'%x]
        else:
            t.arguments = ['if=/home/vivek91/s1_t%s.txt'%x,'bs=%sM'%size, 'count=1', 'of=/home/vivek91/s2_t%s.txt'%x]

        t.cpu_reqs['processes'] = 1
        t.cpu_reqs['threads_per_process'] = 24
        t.cpu_reqs['thread_type'] = ''
        t.cpu_reqs['process_type'] = ''
        t.tag = 't%s'%x

        s2.add_tasks(t)


    p.add_stages(s2)

    return p
Example #3
from radical.entk import Task, states


def test_task_to_dict():
    """
    **Purpose**: Test that the 'to_dict' method of the Task class converts all expected attributes of a Task into a
    dictionary.
    """

    t = Task()
    d = t.to_dict()

    assert d == {
        'uid': None,
        'name': None,
        'state': states.INITIAL,
        'state_history': [states.INITIAL],
        'pre_exec': [],
        'executable': [],
        'arguments': [],
        'post_exec': [],
        'cpu_reqs': {
            'processes': 1,
            'process_type': None,
            'threads_per_process': 1,
            'thread_type': None
        },
        'gpu_reqs': {
            'processes': 0,
            'process_type': None,
            'threads_per_process': 0,
            'thread_type': None
        },
        'lfs_per_process': 0,
        'upload_input_data': [],
        'copy_input_data': [],
        'link_input_data': [],
        'move_input_data': [],
        'copy_output_data': [],
        'move_output_data': [],
        'download_output_data': [],
        'stdout': None,
        'stderr': None,
        'exit_code': None,
        'path': None,
        'tag': None,
        'parent_stage': {
            'uid': None,
            'name': None
        },
        'parent_pipeline': {
            'uid': None,
            'name': None
        }
    }

    t = Task()
    t.uid = 'test.0000'
    t.name = 'new'
    t.pre_exec = ['module load abc']
    t.executable = ['sleep']
    t.arguments = ['10']
    t.cpu_reqs['processes'] = 10
    t.cpu_reqs['threads_per_process'] = 2
    t.gpu_reqs['processes'] = 5
    t.gpu_reqs['threads_per_process'] = 3
    t.lfs_per_process = 1024
    t.upload_input_data = ['test1']
    t.copy_input_data = ['test2']
    t.link_input_data = ['test3']
    t.move_input_data = ['test4']
    t.copy_output_data = ['test5']
    t.move_output_data = ['test6']
    t.download_output_data = ['test7']
    t.stdout = 'out'
    t.stderr = 'err'
    t.exit_code = 1
    t.path = 'a/b/c'
    t.tag = 'task.0010'
    t.parent_stage = {'uid': 's1', 'name': 'stage1'}
    t.parent_pipeline = {'uid': 'p1', 'name': 'pipeline1'}

    d = t.to_dict()

    assert d == {
        'uid': 'test.0000',
        'name': 'new',
        'state': states.INITIAL,
        'state_history': [states.INITIAL],
        'pre_exec': ['module load abc'],
        'executable': ['sleep'],
        'arguments': ['10'],
        'post_exec': [],
        'cpu_reqs': {
            'processes': 10,
            'process_type': None,
            'threads_per_process': 2,
            'thread_type': None
        },
        'gpu_reqs': {
            'processes': 5,
            'process_type': None,
            'threads_per_process': 3,
            'thread_type': None
        },
        'lfs_per_process': 1024,
        'upload_input_data': ['test1'],
        'copy_input_data': ['test2'],
        'link_input_data': ['test3'],
        'move_input_data': ['test4'],
        'copy_output_data': ['test5'],
        'move_output_data': ['test6'],
        'download_output_data': ['test7'],
        'stdout': 'out',
        'stderr': 'err',
        'exit_code': 1,
        'path': 'a/b/c',
        'tag': 'task.0010',
        'parent_stage': {
            'uid': 's1',
            'name': 'stage1'
        },
        'parent_pipeline': {
            'uid': 'p1',
            'name': 'pipeline1'
        }
    }
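
A minimal round-trip sketch, assuming Task also exposes a 'from_dict' counterpart to 'to_dict' (an assumption; it is not exercised in these examples):

t2 = Task()
t2.from_dict(d)           # assumed inverse of to_dict
assert t2.to_dict() == d  # the dict should survive the round trip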
Example #4
import os

from glob import glob

from radical.entk import Pipeline, Stage, Task, AppManager

# RabbitMQ endpoint and MongoDB URL are read from the environment, as in the
# EnTK test suite; the variable names below are assumptions
hostname = os.environ.get('RMQ_HOSTNAME', 'localhost')
port = int(os.environ.get('RMQ_PORT', 5672))
MLAB = os.environ.get('RADICAL_PILOT_DBURL')


def test_rp_da_scheduler_bw():
    """
    **Purpose**: Run an EnTK application on NCSA Blue Waters ('ncsa.bw_aprun') and verify that each tagged task in
    stage s2 runs on the same node as the s1 task with the same name
    """

    p1 = Pipeline()
    p1.name = 'p1'

    n = 10

    s1 = Stage()
    s1.name = 's1'
    for x in range(n):
        t = Task()
        t.name = 't%s' % x
        t.executable = ['/bin/hostname']
        t.arguments = ['>', 'hostname.txt']
        t.cpu_reqs['processes'] = 1
        t.cpu_reqs['threads_per_process'] = 16
        t.cpu_reqs['thread_type'] = ''
        t.cpu_reqs['process_type'] = ''
        t.lfs_per_process = 10
        t.download_output_data = ['hostname.txt > s1_t%s_hostname.txt' % (x)]

        s1.add_tasks(t)

    p1.add_stages(s1)

    s2 = Stage()
    s2.name = 's2'
    for x in range(n):
        t = Task()
        t.executable = ['/bin/hostname']
        t.arguments = ['>', 'hostname.txt']
        t.cpu_reqs['processes'] = 1
        t.cpu_reqs['threads_per_process'] = 16
        t.cpu_reqs['thread_type'] = ''
        t.cpu_reqs['process_type'] = ''
        t.download_output_data = ['hostname.txt > s2_t%s_hostname.txt' % (x)]
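        # pin this task to the node used by the s1 task with the same name,
        # so both stages report the same hostname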
        t.tag = 't%s' % x

        s2.add_tasks(t)

    p1.add_stages(s2)

    res_dict = {
        'resource': 'ncsa.bw_aprun',
        'walltime': 10,
        'cpus': 128,
        'project': 'gk4',
        'queue': 'high'
    }

    os.environ['RADICAL_PILOT_DBURL'] = MLAB

    appman = AppManager(hostname=hostname, port=port)
    appman.resource_desc = res_dict
    appman.workflow = [p1]
    appman.run()

    for i in range(n):
        assert open('s1_t%s_hostname.txt' % i,
                    'r').readline().strip() == open('s2_t%s_hostname.txt' % i,
                                                    'r').readline().strip()

    txts = glob('%s/*.txt' % os.getcwd())
    for f in txts:
        os.remove(f)
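
The same workflow can be retargeted by swapping the resource description; a minimal sketch for a local run, assuming the standard 'local.localhost' resource label:

res_dict = {
    'resource': 'local.localhost',
    'walltime': 10,
    'cpus': 4
}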
Example #5
from radical.entk import Task, states


def test_task_to_dict():

    """
    **Purpose**: Test that the 'to_dict' method of the Task class converts all expected attributes of a Task into a
    dictionary.
    """

    t = Task()
    d = t.to_dict()

    assert d == {   'uid': None,
                    'name': None,
                    'state': states.INITIAL,
                    'state_history': [states.INITIAL],
                    'pre_exec': [],
                    'executable': str(),
                    'arguments': [],
                    'post_exec': [],
                    'cpu_reqs': { 'processes': 1,
                                'process_type': None,
                                'threads_per_process': 1,
                                'thread_type': None
                                },
                    'gpu_reqs': { 'processes': 0,
                                'process_type': None,
                                'threads_per_process': 0,
                                'thread_type': None
                                },
                    'lfs_per_process': 0,
                    'upload_input_data': [],
                    'copy_input_data': [],
                    'link_input_data': [],
                    'move_input_data': [],
                    'copy_output_data': [],
                    'move_output_data': [],
                    'download_output_data': [],
                    'stdout': None,
                    'stderr': None,
                    'exit_code': None,
                    'path': None,
                    'tag': None,
                    'parent_stage': {'uid':None, 'name': None},
                    'parent_pipeline': {'uid':None, 'name': None}}


    t = Task()
    t.uid = 'test.0000'
    t.name = 'new'
    t.pre_exec = ['module load abc']
    t.executable = ['sleep']
    t.arguments = ['10']
    t.cpu_reqs['processes'] = 10
    t.cpu_reqs['threads_per_process'] = 2
    t.gpu_reqs['processes'] = 5
    t.gpu_reqs['threads_per_process'] = 3
    t.lfs_per_process = 1024
    t.upload_input_data = ['test1']
    t.copy_input_data = ['test2']
    t.link_input_data = ['test3']
    t.move_input_data = ['test4']
    t.copy_output_data = ['test5']
    t.move_output_data = ['test6']
    t.download_output_data = ['test7']
    t.stdout = 'out'
    t.stderr = 'err'
    t.exit_code = 1
    t.path = 'a/b/c'
    t.tag = 'task.0010'
    t.parent_stage = {'uid': 's1', 'name': 'stage1'}
    t.parent_pipeline = {'uid': 'p1', 'name': 'pipeline1'}

    d = t.to_dict()

    assert d == {   'uid': 'test.0000',
                    'name': 'new',
                    'state': states.INITIAL,
                    'state_history': [states.INITIAL],
                    'pre_exec': ['module load abc'],
                    'executable': 'sleep',
                    'arguments': ['10'],
                    'post_exec': [],
                    'cpu_reqs': { 'processes': 10,
                                'process_type': None,
                                'threads_per_process': 2,
                                'thread_type': None
                                },
                    'gpu_reqs': { 'processes': 5,
                                'process_type': None,
                                'threads_per_process': 3,
                                'thread_type': None
                                },
                    'lfs_per_process': 1024,
                    'upload_input_data': ['test1'],
                    'copy_input_data': ['test2'],
                    'link_input_data': ['test3'],
                    'move_input_data': ['test4'],
                    'copy_output_data': ['test5'],
                    'move_output_data': ['test6'],
                    'download_output_data': ['test7'],
                    'stdout': 'out',
                    'stderr': 'err',
                    'exit_code': 1,
                    'path': 'a/b/c',
                    'tag': 'task.0010',
                    'parent_stage': {'uid': 's1', 'name': 'stage1'},
                    'parent_pipeline': {'uid': 'p1', 'name': 'pipeline1'}}


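    # re-assigning the executable as a plain string should produce the same dict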
    t.executable = 'sleep'
    d = t.to_dict()

    assert d == {   'uid': 'test.0000',
                    'name': 'new',
                    'state': states.INITIAL,
                    'state_history': [states.INITIAL],
                    'pre_exec': ['module load abc'],
                    'executable': 'sleep',
                    'arguments': ['10'],
                    'post_exec': [],
                    'cpu_reqs': { 'processes': 10,
                                'process_type': None,
                                'threads_per_process': 2,
                                'thread_type': None
                                },
                    'gpu_reqs': { 'processes': 5,
                                'process_type': None,
                                'threads_per_process': 3,
                                'thread_type': None
                                },
                    'lfs_per_process': 1024,
                    'upload_input_data': ['test1'],
                    'copy_input_data': ['test2'],
                    'link_input_data': ['test3'],
                    'move_input_data': ['test4'],
                    'copy_output_data': ['test5'],
                    'move_output_data': ['test6'],
                    'download_output_data': ['test7'],
                    'stdout': 'out',
                    'stderr': 'err',
                    'exit_code': 1,
                    'path': 'a/b/c',
                    'tag': 'task.0010',
                    'parent_stage': {'uid': 's1', 'name': 'stage1'},
                    'parent_pipeline': {'uid': 'p1', 'name': 'pipeline1'}}
Example #6
import os

from glob import glob

from radical.entk import Pipeline, Stage, Task, AppManager

# RabbitMQ endpoint and MongoDB URL are read from the environment, as in the
# EnTK test suite; the variable names below are assumptions
hostname = os.environ.get('RMQ_HOSTNAME', 'localhost')
port = int(os.environ.get('RMQ_PORT', 5672))
MLAB = os.environ.get('RADICAL_PILOT_DBURL')


def test_rp_da_scheduler_bw():

    """
    **Purpose**: Run an EnTK application on NCSA Blue Waters ('ncsa.bw_aprun') and verify that each tagged task in
    stage s2 runs on the same node as the s1 task with the same name
    """

    p1 = Pipeline()
    p1.name = 'p1'

    n = 10

    s1 = Stage()
    s1.name = 's1'
    for x in range(n):
        t = Task()
        t.name = 't%s'%x
        t.executable = ['/bin/hostname']
        t.arguments = ['>','hostname.txt']
        t.cpu_reqs['processes'] = 1
        t.cpu_reqs['threads_per_process'] = 16
        t.cpu_reqs['thread_type'] = ''
        t.cpu_reqs['process_type'] = ''
        t.lfs_per_process = 10
        t.download_output_data = ['hostname.txt > s1_t%s_hostname.txt'%(x)]

        s1.add_tasks(t)

    p1.add_stages(s1)

    s2 = Stage()
    s2.name = 's2'
    for x in range(n):
        t = Task()
        t.executable = ['/bin/hostname']
        t.arguments = ['>','hostname.txt']
        t.cpu_reqs['processes'] = 1
        t.cpu_reqs['threads_per_process'] = 16
        t.cpu_reqs['thread_type'] = ''
        t.cpu_reqs['process_type'] = ''
        t.download_output_data = ['hostname.txt > s2_t%s_hostname.txt'%(x)]
        t.tag = 't%s'%x

        s2.add_tasks(t)


    p1.add_stages(s2)

    res_dict = {
                'resource'      : 'ncsa.bw_aprun',
                'walltime'      : 10,
                'cpus'          : 128,
                'project'       : 'gk4',
                'queue'         : 'high'
            }

    os.environ['RADICAL_PILOT_DBURL'] = MLAB

    appman = AppManager(hostname=hostname, port=port)
    appman.resource_desc = res_dict
    appman.workflow = [p1]
    appman.run()

    for i in range(n):
        assert open('s1_t%s_hostname.txt' % i,
                    'r').readline().strip() == open('s2_t%s_hostname.txt' % i,
                                                    'r').readline().strip()


    txts = glob('%s/*.txt' % os.getcwd())
    for f in txts:
        os.remove(f)