Example #1
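From the listing below, run_function appears to be the task dispatcher of Vivaldi's main manager: it wraps each argument in a Data_package, specializes the function name with the argument dtypes, chooses a decomposition strategy (in-and-out split, input split, or output split), and registers one Function_package task per split before returning a placeholder package for the result. The code is an excerpt and relies on module-level state such as data_package_list, retain_list, SPLIT_BASE, and the parsed Vivaldi functions.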
def run_function(return_name=None,
                 func_name='',
                 execid=[],
                 work_range=None,
                 args=[],
                 dtype_dict={},
                 output_halo=0,
                 halo_dict={},
                 split_dict={},
                 merge_func='',
                 merge_order=''):
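    # Parameters as used below: return_name names the output data package,
    # func_name selects the Vivaldi function to run, execid lists execution
    # unit ids, work_range bounds the output, args lists input data names or
    # values, halo_dict and split_dict give per-data halos and split shapes,
    # and merge_func/merge_order control how split results are merged.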
    def input_argument_check():

        if type(func_name) != str:
            print "Function_name error"
            print "Func_name: ", func_name

        if type(dtype_dict) != dict:
            print "Dtype_dict error"
            print "Dtype_dict: ", dtype_dict
            assert (False)

        if type(split_dict) != dict:
            print "Split_dict error"
            print "Split_dict: ", split_dict
            assert (False)

        if type(halo_dict) != dict:
            print "Halo_dict error"
            print "Halo_dict: ", halo_dict
            assert (False)

        if type(merge_func) != str:
            print "Merge function_name error"
            print "Merge_function name: ", merge_func

    # compatibility
    ############################################################
    function_name = func_name
    args_list = args

    modifier_dict = {}

    # input argument error check
    input_argument_check()

    # initialization
    ##############################################################
    global mmtx, inv_mmtx
    global unique_id
    function_package = Function_package()
    fp = function_package

    if Vivaldi_viewer.v != None:
        mmtx = Vivaldi_viewer.mmtx
        inv_mmtx = Vivaldi_viewer.inv_mmtx

    Debug = False

    # arguments
    ##################################################################################
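    # Wrap every argument in a Data_package: names already registered reuse
    # their package, new numpy volumes are registered and handed to the reader,
    # and constants or literals become packages with unique_id -1; the function
    # name is specialized with the dtype of each volume argument.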
    new_args = []
    for data_name in args_list:
        if data_name in data_package_list:
            # data_name is already managed as a data package by the main manager
            dp = data_package_list[data_name]
            dtype = str(dp.data_contents_dtype)
            dtype = dtype.replace('_volume', '')
            function_name += dtype
            # the reader must be given access to the data before the function runs;
            # check whether it is already accessible from the reader

            data = globals()[data_name]

            u = dp.unique_id

            flag = False
            if dp.out_of_core and u not in retain_list:
                flag = True  # out of core and not yet reported to the memory manager
            if not dp.out_of_core and u not in retain_list:
                flag = True  # in core and not yet reported (the function output is already reported)
            if flag:
                manage_as_data_package(data_name)
                dp = data_package_list[data_name]

                u = dp.unique_id
                if u not in retain_list: retain_list[u] = []
                retain_list[u].append(dp.copy())

                reader_give_access_to_data(data_name)

            dp = dp.copy()
            dp.data = None
            dp.devptr = None

        elif data_name in globals():
            # There are two kinds of data here
            # 1. volume
            # 2. values

            data = globals()[data_name]

            if type(data) == numpy.ndarray:  # this is volume
                # now the data is also managed as data package
                manage_as_data_package(data_name)
                # now we have a data package corresponding to the data
                dp = data_package_list[data_name]

                u = dp.unique_id
                if u not in retain_list: retain_list[u] = []
                retain_list[u].append(dp.copy())

                # the Vivaldi reader now has access to this data
                reader_give_access_to_data(data_name)

                # then build a new function name from the existing name and the data dtype

                dtype = str(dp.data_contents_dtype)
                dtype = dtype.replace('_volume', '')

                function_name += str(dp.data_contents_dtype)
                dp = dp.copy()
                dp.data = None
                dp.devptr = None
            else:  # this is a constant
                dp = Data_package()

                dtype = type(data_name)

                dp.data_name = data_name
                dp.unique_id = -1
                dp.data_dtype = dtype
                dp.data_contents_dtype = dtype
                dp.data_contents_memory_dtype = dtype
                dp.data = data
        else:
            # data_name is not in globals()
            # it is usually an AXIS value or a constant like x or y that was not defined earlier
            if isinstance(data_name, Data_package):
                dp = data_name
            else:
                data = None
                dp = Data_package()

                dtype = type(data_name)

                dp.data_name = data_name
                dp.unique_id = -1
                dp.data_dtype = dtype
                dp.data_contents_dtype = dtype
                dp.data_contents_memory_dtype = dtype
                dp.data = data_name

        new_args.append(dp)
    args_list = new_args

    # get Vivaldi functions
    ######################################################################################
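    # Look up the return dtype of the dtype-specialized function and fill in
    # the output package metadata (unique id, model matrices, dtype, name).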
    global parsed_Vivaldi_functions
    func_args = args_list

    return_dtype = parsed_Vivaldi_functions.get_return_dtype(function_name)
    fp.set_function_name(function_name)
    fp.output.unique_id = unique_id
    fp.mmtx = mmtx
    fp.inv_mmtx = inv_mmtx
    fp.output.data_dtype = numpy.ndarray
    fp.output.data_name = return_name

    if return_dtype == '':
        print "======================================================="
        print "VIVALDI ERROR, can not find return dtype"
        print "function_name:", function_name
        print "return name:", return_name
        print "return dtype:", return_dtype
        print "======================================================="
        assert (False)
    fp.output.set_data_contents_dtype(return_dtype)

    v = Vivaldi_viewer.v
    trans_on = Vivaldi_viewer.trans_on
    transN = Vivaldi_viewer.transN

    if trans_on == True:
        if v.getIsTFupdated() == 1:
            fp.trans_tex = v.getTFF()
            fp.update_tf = 1
            fp.update_tf2 = 0
            v.TFF.widget.updated = 0
        elif v.getIsTFupdated2() == 1:
            fp.trans_tex = v.getTFF2()
            fp.update_tf = 0
            fp.update_tf2 = 1
            v.TFF2.widget.updated = 0

        fp.TF_bandwidth = v.getTFBW()
        fp.CRG = v.window.CRG

    output_halo = 0
    if type(work_range) == dict:
        if 'work_range' in work_range:
            work_range = work_range['work_range']

    if return_name != None:
        # merge_func
        ###############################################################
        func_args = ['front', 'back']
        func_dtypes = {}
        for elem in func_args:
            func_dtypes[elem] = return_dtype

        new_name = make_func_name_with_dtypes(merge_func, func_args,
                                              func_dtypes)
        merge_func = new_name
        # execid
        ###################################################################################
        if isinstance(execid, Data_package): execid = execid.data
        if type(execid) != list: execid = [execid]
        execid_list = execid
        fp.execid_list = execid

        # work range
        ##################################################################################
        if type(work_range) == dict and work_range == {}:
            # default the work range to the full range of the first real data argument
            for dp in args_list:
                if isinstance(dp, Data_package):
                    if dp.unique_id == -1: continue
                    data_name = dp.data_name
                    dp = data_package_list[data_name]
                    work_range = dp.full_data_range
                    break

        work_range = to_range(work_range)

    if return_name == '':
        return_name = None
        work_range = {'work_range': work_range}

        args = [
            return_name, function_name, execid, work_range, args_list,
            dtype_dict, output_halo, halo_dict, split_dict, merge_func,
            merge_order
        ]

        return None, run_function, args
        #return None

    # local functions
    ############################################################################
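    # make_tasks2 walks args_list recursively, building one combination of
    # argument packages per split; once i reaches the end of the list it
    # finalizes the output package for the current decomposition mode and
    # registers the task with register_function.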
    def make_tasks2(arg_packages, i):
        global unique_id
        if i == len(args_list):

            # common variables
            global unique_id
            fp.function_args = arg_packages

            modifier = modifier_dict['output']

            if decom == 'in_and_out_split1':
                num = modifier['num']
                work_range = modifier['range_list'][num - 1]
                fp.work_range = work_range

                split = modifier['split']
                data_halo = modifier['data_halo']
                buffer_halo = modifier['buffer_halo']
                full_data_range = modifier['data_range']

                fp.output.data_halo = data_halo
                split_position = make_split_position(split, num)
                fp.output.split_position = str(split_position)
                data_range = apply_halo(work_range, data_halo)

                fp.output.set_data_range(str(data_range))
                fp.output.set_full_data_range(str(full_data_range))
                fp.output.set_buffer_range(buffer_halo)

                modifier['num'] += 1
            elif decom == 'in_and_out_split2':
                num = modifier['num']
                work_range = modifier['range_list'][num - 1]
                fp.work_range = work_range

                split = modifier['split']
                data_halo = modifier['data_halo']
                buffer_halo = modifier['buffer_halo']
                full_data_range = modifier['data_range']

                fp.output.data_halo = data_halo
                split_position = make_split_position(split, num)
                fp.output.split_position = str(split_position)
                data_range = apply_halo(work_range, data_halo)

                fp.output.set_data_range(str(data_range))
                fp.output.set_full_data_range(str(full_data_range))
                fp.output.set_buffer_range(buffer_halo)

                modifier['num'] += 1
            elif decom == 'in':
                fp.output.unique_id = unique_id

                output_range = apply_halo(output_range_list[0], output_halo)
                fp.output.set_data_range(output_range)
                fp.output.split_shape = str(SPLIT_BASE)
                fp.output.split_position = str(SPLIT_BASE)
                fp.work_range = output_range

                # buffer
                modifier = modifier_dict['output']
                buffer_halo = modifier['buffer_halo']
                fp.output.set_buffer_range(buffer_halo)
            elif decom == 'out':
                num = modifier['num']
                work_range = modifier['range_list'][num - 1]
                fp.work_range = work_range

                split = modifier['split']
                data_halo = modifier['data_halo']
                buffer_halo = modifier['buffer_halo']
                full_data_range = modifier['data_range']

                fp.output.data_halo = data_halo
                split_position = make_split_position(split, num)
                fp.output.split_position = str(split_position)
                data_range = apply_halo(work_range, data_halo)

                fp.output.set_data_range(str(data_range))
                fp.output.set_full_data_range(str(full_data_range))
                fp.output.set_buffer_range(buffer_halo)

                modifier['num'] += 1

            u = fp.output.unique_id
            unique_id += 1
            mem_retain(fp.output)
            if u not in retain_list: retain_list[u] = []
            retain_list[u].append(fp.output.copy())
            register_function(execid, fp)

            return

        dp = args_list[i]
        data_name = dp.data_name
        dp.memory_type = 'memory'

        # normal variables
        if dp.unique_id != -1:
            # settings for the full data
            dp.split_shape = str(SPLIT_BASE)
            buf = dp.data
            # replace with a copy of the original
            dp = dp.copy()

            if decom == 'in_and_out_split1':
                """
					input output decomposition
				"""
                global in_and_out_n

                u = dp.unique_id
                data_name = dp.data_name
                modifier = modifier_dict[
                    data_name] if data_name in modifier_dict else {}

                # split shape
                split_shape = modifier['split']
                dp.split_shape = str(split_shape)
                range_list = data_range_list_dict[data_name]
                data_halo = modifier['data_halo']
                buffer_halo = modifier['buffer_halo']

                cnt = modifier['cnt']

                split_position = make_split_position(split_shape, in_and_out_n)
                dp.split_position = str(split_position)

                # set data_range
                data_range = apply_halo(range_list[in_and_out_n - 1],
                                        data_halo, dp.full_data_range)
                dp.set_data_range(data_range)
                dp.data_halo = data_halo

                # set buffer halo
                buffer_range = apply_halo(range_list[in_and_out_n - 1],
                                          buffer_halo)
                dp.set_buffer_range(buffer_range)
                dp.buffer_halo = buffer_halo

                if Debug:
                    print "In and out DP", dp
                make_tasks2(arg_packages + [dp], i + 1)
            elif decom == 'in_and_out_split2':
                u = dp.unique_id
                data_name = dp.data_name
                modifier = modifier_dict[
                    data_name] if data_name in modifier_dict else {}
                # split shape

                split_shape = modifier['split']
                dp.split_shape = str(split_shape)
                # make split data and go to the next argument

                data_name = dp.data_name
                data_halo = modifier['data_halo']
                data_range_list = data_range_list_dict[data_name]

                buffer_halo = modifier['buffer_halo']
                n = 1
                dp.data_halo = data_halo
                for data_range in data_range_list:
                    data_range = apply_halo(data_range, data_halo)

                    dp.data_dtype = numpy.ndarray
                    dp.set_data_range(data_range)
                    dp.data_halo = data_halo

                    memory_shape = dp.data_memory_shape
                    shape = dp.data_shape
                    bytes = dp.data_bytes

                    # make depth
                    depth = make_depth(data_range, mmtx)
                    dp.depth = depth
                    fp.output.depth = depth
                    mem_depth(u, str(SPLIT_BASE), str(SPLIT_BASE), depth)

                    split_position = make_split_position(split_shape, n)
                    n += 1
                    dp.split_position = str(split_position)
                    mem_depth(data_list[data_name], str(split_shape),
                              dp.split_position, depth)

                    dp.set_buffer_range(buffer_halo)
                    make_tasks2(arg_packages + [dp], i + 1)
            elif decom == 'in':
                """
					input decomposition
					data range is same
				"""
                u = dp.unique_id
                data_name = dp.data_name
                modifier = modifier_dict[
                    data_name] if data_name in modifier_dict else {}
                # split shape

                split_shape = modifier['split']
                dp.split_shape = str(split_shape)
                # make split data and go to the next argument

                data_name = dp.data_name
                data_halo = modifier['data_halo']
                data_range_list = data_range_list_dict[data_name]

                buffer_halo = modifier['buffer_halo']
                n = 1
                dp.data_halo = data_halo
                for data_range in data_range_list:
                    data_range = apply_halo(data_range, data_halo)

                    dp.data_dtype = numpy.ndarray
                    dp.set_data_range(data_range)
                    dp.data_halo = data_halo

                    memory_shape = dp.data_memory_shape
                    shape = dp.data_shape
                    bytes = dp.data_bytes

                    # make depth
                    depth = make_depth(data_range, mmtx)
                    dp.depth = depth
                    fp.output.depth = depth
                    mem_depth(u, str(SPLIT_BASE), str(SPLIT_BASE), depth)

                    split_position = make_split_position(split_shape, n)
                    n += 1
                    dp.split_position = str(split_position)
                    mem_depth(data_list[data_name], str(split_shape),
                              dp.split_position, depth)

                    dp.set_buffer_range(buffer_halo)
                    if Debug:
                        print "DP", dp
                    make_tasks2(arg_packages + [dp], i + 1)
            elif decom == 'out':
                u = dp.unique_id
                # basic package setting
                dp.split_shape = str(SPLIT_BASE)
                dp.split_position = str(SPLIT_BASE)

                data_name = dp.data_name
                modifier = modifier_dict[
                    data_name] if data_name in modifier_dict else {}

                range_list = data_range_list_dict[data_name]

                data_halo = modifier['data_halo']
                buffer_halo = modifier['buffer_halo']

                # data_range
                data_range = apply_halo(range_list[0], data_halo)
                dp.set_data_range(data_range)
                dp.data_halo = data_halo

                dp.set_full_data_range(data_range)

                # buffer range
                buffer_range = apply_halo(range_list[0], buffer_halo)
                dp.set_buffer_range(buffer_range)
                dp.buffer_halo = buffer_halo

                make_tasks2(arg_packages + [dp], i + 1)
        else:
            make_tasks2(arg_packages + [dp], i + 1)

    def check_in_and_out(modifier_dict, input_cnt, output_cnt):
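        # Returns 'in_and_out_split1' when every registered split shape is
        # identical, 'in_and_out_split2' when the input and output split counts
        # match, and False otherwise.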
        flag = False
        in_and_out_split = True

        # in_and_out split version 1 test

        # all split shapes are identical
        for data_name in modifier_dict:
            modifier = modifier_dict[data_name]
            if flag:
                if split == modifier['split']:
                    pass
                else:
                    in_and_out_split = False
                    break
            else:
                # skip the first one
                split = modifier['split']
                flag = True

        if in_and_out_split:
            return 'in_and_out_split1'

        # in_and_out split version 2 test

        # input and output split counts are equal
        if output_cnt == input_cnt:
            return 'in_and_out_split2'

        return False

    ############################################################################

    # make argument name list
    args_name_list = []
    for elem in args_list:
        if isinstance(elem, Data_package):
            args_name_list.append(elem.data_name)

    if return_name == None:
        return_name = 'output'

    # set output information
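    # Output modifier: split shape, data range, halo, split count, the fixed
    # buffer_halo of 10 used below, and the per-split range list consumed by
    # make_tasks2.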
    modifier_dict['output'] = {}
    output_split = split_dict[
        return_name] if return_name in split_dict else SPLIT_BASE

    output_data_range = work_range
    output_data_halo = 0

    buffer_halo = 10

    output_dtype = return_dtype
    output_range = to_range(output_data_range)
    output_split = to_split(output_split)
    output_range_list = make_range_list(output_range, output_split)

    cnt = shape_to_count(output_split)

    modifier_dict['output']['split'] = output_split
    modifier_dict['output']['data_range'] = output_range
    modifier_dict['output']['data_halo'] = output_halo
    modifier_dict['output']['cnt'] = cnt
    modifier_dict['output']['buffer_halo'] = buffer_halo
    modifier_dict['output']['num'] = 1
    modifier_dict['output']['range_list'] = output_range_list
    output_cnt = cnt

    # temp data package
    temp = Data_package()
    temp.data_name = return_name
    temp.unique_id = unique_id
    temp.data_dtype = numpy.ndarray
    temp.data_halo = output_halo
    temp.set_data_contents_dtype(return_dtype)

    # modifier information about input
    input_cnt = 1

    data_range_list_dict = {}

    # make modifiers_list for each argument
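    # Each input modifier records the split shape (defaulting to SPLIT_BASE),
    # the halo-stripped data range, the per-name halo from halo_dict, the shared
    # buffer_halo, and the split count; input_cnt is the product of these counts.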
    for args in args_list:
        name = args.data_name
        if args.unique_id != -1:
            modifier = {}

            modifier['data_range'] = args.data_range
            modifier['dtype'] = args.data_contents_dtype

            data_range = args.data_range
            data_halo = args.data_halo
            data_range = apply_halo(data_range, -data_halo)

            split = split_dict[name] if name in split_dict else SPLIT_BASE

            for axis in AXIS:
                if axis not in split:
                    split[axis] = 1

            data_range_list = make_range_list(data_range, split)

            data_range_list_dict[name] = data_range_list
            cnt = shape_to_count(split)

            modifier_dict[name] = {}
            modifier_dict[name]['split'] = split
            modifier_dict[name]['data_range'] = data_range
            modifier_dict[name][
                'data_halo'] = halo_dict[name] if name in halo_dict else 0
            modifier_dict[name]['buffer_halo'] = buffer_halo

            modifier_dict[name]['cnt'] = cnt
            input_cnt *= cnt

    in_and_out_split = check_in_and_out(modifier_dict, input_cnt, output_cnt)
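    # Dispatch on the decomposition mode: in&out split 1 (identical split
    # shapes), in&out split 2 (matching split counts), input decomposition
    # (partial results merged via merge_func), or output decomposition
    # (one task per output block).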

    if in_and_out_split == 'in_and_out_split1':
        decom = 'in_and_out_split1'
        # this is the special case called in&out split
        fp.output.split_shape = str(output_split)

        global in_and_out_n
        in_and_out_n = 1
        for work_range in output_range_list:
            make_tasks2([], 0)
            in_and_out_n += 1

        modifier = modifier_dict['output']
        data_halo = modifier['data_halo']
        full_data_range = apply_halo(output_range, data_halo)

        temp.set_data_range(str(full_data_range))
        temp.set_full_data_range(str(full_data_range))
        temp.set_buffer_range(str(full_data_range))
        temp.data_halo = data_halo

        unique_id += 1
        #	print "TEMP", temp
        return temp
    elif in_and_out_split == 'in_and_out_split2':
        decom = 'in_and_out_split2'

        fp.output.split_shape = str(output_split)

        make_tasks2([], 0)

        modifier = modifier_dict['output']
        data_halo = modifier['data_halo']
        full_data_range = apply_halo(output_range, data_halo)

        temp.set_data_range(str(full_data_range))
        temp.set_full_data_range(str(full_data_range))
        temp.set_buffer_range(str(full_data_range))
        temp.data_halo = data_halo

        unique_id += 1
        return temp
    elif input_cnt > 1:
        """
			input decomposition
		"""
        decom = 'in'
        count = input_cnt
        # set function package output
        full_data_range = apply_halo(output_range, output_halo)

        fp.output.set_data_range(dict(full_data_range))
        fp.output.set_full_data_range(dict(full_data_range))
        fp.output.data_halo = output_halo

        # set output package
        temp.set_data_range(str(full_data_range))
        temp.set_full_data_range(str(full_data_range))
        temp.set_buffer_range(str(full_data_range))
        # register intermediate merge function
        u = unique_id
        inter = range(unique_id + 1, unique_id + count - 1)

        for inter_id in inter:
            temp.unique_id = inter_id
            mem_retain(temp)

        unique_id += count - 1
        temp.unique_id = u

        # make input functions
        make_tasks2([], 0)

        out_range = range(unique_id, unique_id + count)

        # intermediate merge functions
        dimension = len(output_range)
        scheduler_request_merge(temp, out_range, merge_func, merge_order,
                                dimension)

        #mem_inform(temp)
        return temp
    elif output_cnt > 1:
        """
			output decomposition
		"""
        decom = 'out'
        fp.output.split_shape = str(output_split)
        fp.output.data_halo = output_halo

        full_data_range = apply_halo(output_range, output_halo)

        n = 1
        for work_range in output_range_list:
            split_position = make_split_position(output_split, n)
            n += 1
            fp.output.split_position = str(split_position)
            data_range = apply_halo(work_range, output_halo)
            fp.output.set_data_range(str(data_range))
            fp.output.set_full_data_range(str(full_data_range))
            fp.output.set_buffer_range(buffer_halo)

            make_tasks2([], 0)

        temp.set_data_range(str(full_data_range))
        temp.set_full_data_range(str(full_data_range))
        temp.set_buffer_range(str(full_data_range))

        unique_id += 1
        return temp
    else:
        # both input and output were split, but not as an in&out split

        print "==============================="
        print "VIVALDI ERROR"
        print "tried to split input and output together but number of input split and output split is different"
        print "input_cnt: ", input_cnt
        print "output_cnt: ", output_cnt
        print "==============================="
        assert (False)

    assert (False)
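A minimal usage sketch follows, assuming a running Vivaldi main-manager session in which 'volume' is a registered numpy volume; the function name 'render', the execution id, the ranges, the halo, and the merge settings are illustrative assumptions rather than values taken from the listing above.

# hypothetical call; every concrete name and value below is illustrative
result = run_function(return_name='result',
                      func_name='render',
                      execid=[1],
                      work_range={'x': (0, 512), 'y': (0, 512)},
                      args=['volume'],
                      halo_dict={'volume': 1},
                      split_dict={'result': {'x': 2}},
                      merge_func='composite',
                      merge_order='front-to-back')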