Exemplo n.º 1
0
	def get_argument_package_list(args, arg_names, split_dict, halo_dict):
		"""Wrap every function argument in a Data_package.

		args       -- positional argument values, aligned with arg_names
		arg_names  -- parsed parameter names of the target function
		split_dict -- per-argument split modifiers (name -> split spec)
		halo_dict  -- per-argument halo sizes (name -> int)

		Returns a list of Data_package objects, one per argument.
		"""
		def _lookup_or_new_unique_id(arg):
			# Reuse the id of an already-registered object, otherwise
			# mint a fresh one from the global counter.
			aid = id(arg)
			if aid in data_package_list:
				return data_package_list[aid].get_unique_id()
			global unique_id
			unique_id += 1
			return unique_id

		argument_package_list = []
		for data_name, arg in zip(arg_names, args):
			if data_name in AXIS:
				# axis symbols (x, y, ...) become lightweight packages
				argument_package = Data_package(data_name)
				argument_package.shared = False
			else:
				# modifier split and halo for this argument, if any
				split = split_dict.get(data_name, {})
				halo = halo_dict.get(data_name, 0)
				if isinstance(arg, Data_package):
					# already a package: just attach the modifiers
					argument_package = arg
					argument_package.split = split
					argument_package.halo = halo
					if argument_package.unique_id is None:
						argument_package.unique_id = _lookup_or_new_unique_id(arg)
						argument_package.shared = False
				else:
					argument_package = Data_package(arg, split=split, halo=halo)
					if type(arg) == numpy.ndarray:
						argument_package.unique_id = _lookup_or_new_unique_id(arg)
						# remember the array so later calls reuse its id
						data_package_list[id(arg)] = argument_package
					else:
						# plain constants are not tracked
						argument_package.unique_id = -1
					argument_package.shared = False
			argument_package_list.append(argument_package)
		return argument_package_list
Exemplo n.º 2
0
		def get_merge_package_args(input_package, merge_func):
			def get_argument_list(function_name):
				function_code = function_code_dict[merge_func]
				def get_args(name, code):
					idx_start = code.find(name) + len(name) + 1
					idx_end = code.find(')', idx_start)
					args = code[idx_start:idx_end]
					return args.strip()
				function_args = get_args(function_name, function_code)
				if function_args == '':
					print "Vivaldi warning"
					print "================================="
					print "There are no function argument"
					print "================================="
					assert(False)
				argument_list = []
				for arg in function_args.split(','):
					argument_list.append(arg.strip())
				return argument_list
			argument_list = get_argument_list(merge_func)
			argument_package_list = []
			for arg in argument_list:
				argument_package = None
				if arg in AXIS:
					argument_package = Data_package(arg)
				else:
					argument_package = input_package.copy()
				argument_package_list.append(argument_package)
			return argument_package_list
Exemplo n.º 3
0
def load_data_3d(file_full_name, dtype=None, out_of_core=False):
    dtype = python_dtype_to_Vivaldi_dtype(dtype)

    file_name, extension = split_file_name_and_extension(file_full_name)
    el = extension.lower()

    if out_of_core:
        if el == 'dat':
            # read dat file
            # dat file have FileName and
            # It may have AbsolutePath or RelativePath
            file_info = matrixio.read_dat(file_full_name, out_of_core=True)
            file_name = file_info['FileName']
            assert (file_name != None)

            if 'Path' in file_info:
                # user defined path
                path = file_info['Path']
            else:
                # default is find same folder
                idx = file_full_name.rfind('/')
                path = file_full_name[:idx + 1]

            shape = file_info['Resolution'].split()
            shape[0], shape[1], shape[2] = int(shape[2]), int(shape[1]), int(
                shape[0])
            contents_dtype = file_info['Format'].lower()
            chan = file_info['Channel'] if 'Channel' in file_info else 1

            temp = Data_package()
            temp.out_of_core = True

            temp.data_dtype = numpy.ndarray
            temp.file_name = path + file_name
            temp.file_dtype = python_dtype_to_Vivaldi_dtype(contents_dtype)

            temp.buffer_dtype = numpy.ndarray
            if dtype != None: dtype = dtype
            else: dtype = contents_dtype
            if chan != 1: dtype += str(chan)

            temp.set_data_contents_dtype(dtype)

            data_range = shape_to_range(shape)

            temp.set_data_range(data_range)
            temp.set_buffer_range(data_range)
            temp.set_full_data_range(data_range)
            return temp

        else:
            print "NOT PROVIDE DATA TYPE"
            assert (False)
    else:
        st = time.time()
        data = matrixio.read_3d_data(file_full_name)

        if dtype != None:
            dtype = python_dtype_to_Vivaldi_dtype(dtype)
            dtype = Vivaldi_dtype_to_python_dtype(dtype)
            data = data.astype(dtype)

        if data == None:
            print "======================================"
            print "VIVALDI ERROR, data load is failed"
            print "Are you sure, exension is correct?"
            print "Extension: %s" % (extension)
            print "======================================"
            assert (False)

        print 'load_data_3d:', file_full_name, 'dtype:', data.dtype, 'shape:', data.shape, 'loading time:', 1000 * (
            time.time() - st), 'ms'
        return data

    return None
Exemplo n.º 4
0
def load_data_2d(file_full_name, dtype=None, out_of_core=False):
    dtype = python_dtype_to_Vivaldi_dtype(dtype)
    file_name, extension = split_file_name_and_extension(file_full_name)
    el = extension.lower()
    if out_of_core:
        if extension == 'dat':
            file_info = matrixio.read_dat(filename, out_of_core=True)
            file_name = file_info['FileName']
            assert (file_name != None)

            if 'Path' in file_info:
                # user defined path
                path = file_info['Path']
            else:
                # default is find same folder
                idx = file_full_name.rfind('/')
                path = file_full_name[:idx + 1]

            shape = file_info['Resolution'].split()
            shape[0], shape[1] = int(shape[1]), int(shape[0])
            contents_dtype = file_info['Format'].lower()
            chan = 1

            temp = Data_package()
            temp.out_of_core = True

            temp.data_dtype = numpy.ndarray
            temp.file_name = path + file_name
            temp.file_dtype = python_dtype_to_Vivaldi_dtype(contents_dtype)

            temp.buffer_dtype = numpy.ndarray
            if dtype != None: dtype = dtype
            else: dtype = contents_dtype
            if chan != 1: dtype += str(chan)

            temp.set_buffer_contents_dtype(dtype)

            data_range = shape_to_range(shape)
            temp.set_data_range(data_range)
            temp.set_buffer_range(data_range)
            temp.set_full_buffer_range(data_range)

            return temp

    else:
        st = time.time()
        data = matrixio.read_2d_data(file_full_name)

        dtype = Vivaldi_dtype_to_python_dtype(dtype)
        if dtype != None:
            data = data.astype(dtype)
        else:
            dtype = data.dtype
        print 'load_data_2d_rgb:', file_full_name, 'dtype:', data.dtype, 'shape:', data.shape, 'loading time:', 1000 * (
            time.time() - st), 'ms'

        return data
    return None
Exemplo n.º 5
0
def run_function(return_name=None,
                 func_name='',
                 execid=[],
                 work_range=None,
                 args=[],
                 dtype_dict={},
                 output_halo=0,
                 halo_dict={},
                 split_dict={},
                 merge_func='',
                 merge_order=''):
    """Build and register Function_package tasks for one Vivaldi call.

    Wraps every argument in a Data_package, decides a decomposition
    strategy from the split modifiers (in-and-out split v1/v2, input
    split, or output split) and recursively emits one Function_package
    per task via make_tasks2.  Returns a Data_package describing the
    (future) output, or a (None, run_function, args) triple when called
    with return_name == '' (deferred re-invocation).

    NOTE(review): the mutable default arguments (execid=[], args=[],
    dtype_dict={}, ...) are shared across calls; they do not appear to
    be mutated here, but confirm before relying on that.
    """
    def input_argument_check():
        # Sanity-check caller-supplied argument types.  Only the dict
        # arguments abort; bad name types merely print a warning.

        if type(func_name) != str:
            print "Function_name error"
            print "Func_name: ", func_name

        if type(dtype_dict) != dict:
            print "Dtype_dict error"
            print "Dtype_dict: ", dtype_dict
            assert (False)

        if type(split_dict) != dict:
            print "Split_dict error"
            print "Split_dict: ", split_dict
            assert (False)

        if type(halo_dict) != dict:
            print "Halo_dict error"
            print "Halo_dict: ", halo_dict
            assert (False)

        if type(merge_func) != str:
            print "Merge function_name error"
            print "Merge_function name: ", merge_func

    # compatibility
    ############################################################
    function_name = func_name
    args_list = args

    modifier_dict = {}

    # input argument error check
    input_argument_check()

    # initialization
    ##############################################################
    global mmtx, inv_mmtx
    global unique_id
    function_package = Function_package()
    fp = function_package

    # pick up the viewer's current model matrices when a viewer exists
    if Vivaldi_viewer.v != None:
        mmtx = Vivaldi_viewer.mmtx
        inv_mmtx = Vivaldi_viewer.inv_mmtx

    Debug = False

    # arguments
    ##################################################################################
    # Replace every entry of args_list with a Data_package; the data
    # contents dtype of array arguments is appended to function_name to
    # select the dtype-specialized compiled function.
    new_args = []
    for data_name in args_list:
        if data_name in data_package_list:
            # data_name, is managed as data package in the main manager
            dp = data_package_list[data_name]
            dtype = str(dp.data_contents_dtype)
            dtype = dtype.replace('_volume', '')
            function_name += dtype
            # we should give access to data to reader, before running function
            # check it is already available from reader or not

            data = globals()[data_name]

            u = dp.unique_id

            # NOTE(review): the two conditions below differ only in the
            # dp.out_of_core test and set the same flag, so together they
            # reduce to "u not in retain_list" — confirm that is intended.
            flag = False
            if dp.out_of_core and u not in retain_list:
                flag = True  # out of core and Not informed to memory manager
            if not dp.out_of_core and u not in retain_list:
                flag = True  # in core and Didn't informed ( function output already informed)
            if flag:
                manage_as_data_package(data_name)
                dp = data_package_list[data_name]

                u = dp.unique_id
                if u not in retain_list: retain_list[u] = []
                retain_list[u].append(dp.copy())

                reader_give_access_to_data(data_name)

            # ship a lightweight copy: the worker gets metadata only
            dp = dp.copy()
            dp.data = None
            dp.devptr = None

        elif data_name in globals():
            # There are two kinds of data here
            # 1. volume
            # 2. values

            data = globals()[data_name]

            if type(data) == numpy.ndarray:  # this is volume
                # now the data is also managed as data package
                manage_as_data_package(data_name)
                # now we have data_package correspond to the data
                dp = data_package_list[data_name]

                u = dp.unique_id
                if u not in retain_list: retain_list[u] = []
                retain_list[u].append(dp.copy())

                # Vivaldi reader have access to this data
                reader_give_access_to_data(data_name)

                # than make a new function name using the existing function name, the data_name and data dtype

                dtype = str(dp.data_contents_dtype)
                dtype = dtype.replace('_volume', '')

                function_name += str(dp.data_contents_dtype)
                dp = dp.copy()
                dp.data = None
                dp.devptr = None
            else:  # this is constant
                dp = Data_package()

                dtype = type(data_name)

                dp.data_name = data_name
                dp.unique_id = -1
                dp.data_dtype = dtype
                dp.data_contents_dtype = dtype
                dp.data_contents_memory_dtype = dtype
                dp.data = data
        else:
            # data_name not in the globals list
            # it is usually AXIS or constant like x,y not previously defined
            if isinstance(data_name, Data_package):
                dp = data_name
            else:
                data = None
                dp = Data_package()

                dtype = type(data_name)

                dp.data_name = data_name
                dp.unique_id = -1
                dp.data_dtype = dtype
                dp.data_contents_dtype = dtype
                dp.data_contents_memory_dtype = dtype
                dp.data = data_name

        new_args.append(dp)
    args_list = new_args

    # get Vivaldi functions
    ######################################################################################
    global parsed_Vivaldi_functions
    func_args = args_list

    return_dtype = parsed_Vivaldi_functions.get_return_dtype(function_name)
    fp.set_function_name(function_name)
    fp.output.unique_id = unique_id
    fp.mmtx = mmtx
    fp.inv_mmtx = inv_mmtx
    fp.output.data_dtype = numpy.ndarray
    fp.output.data_name = return_name

    if return_dtype == '':
        print "======================================================="
        print "VIVALDI ERROR, can not find return dtype"
        print "function_name:", function_name
        print "return name:", return_name
        print "return dtype:", return_dtype
        print "======================================================="
        assert (False)
    fp.output.set_data_contents_dtype(return_dtype)

    # transfer-function / viewer state forwarded to the workers
    v = Vivaldi_viewer.v
    trans_on = Vivaldi_viewer.trans_on
    transN = Vivaldi_viewer.transN

    if trans_on == True:
        if v.getIsTFupdated() == 1:
            fp.trans_tex = v.getTFF()
            fp.update_tf = 1
            fp.update_tf2 = 0
            v.TFF.widget.updated = 0
        elif v.getIsTFupdated2() == 1:
            fp.trans_tex = v.getTFF2()
            fp.update_tf = 0
            fp.update_tf2 = 1
            v.TFF2.widget.updated = 0

        fp.TF_bandwidth = v.getTFBW()
        fp.CRG = v.window.CRG

    # NOTE(review): this unconditionally clobbers the caller-supplied
    # output_halo parameter — confirm it is intentional.
    output_halo = 0
    if type(work_range) == dict:
        if 'work_range' in work_range:
            work_range = work_range['work_range']

    if return_name != None:
        # merge_func
        ###############################################################
        func_args = ['front', 'back']
        func_dtypes = {}
        for elem in func_args:
            func_dtypes[elem] = return_dtype

        new_name = make_func_name_with_dtypes(merge_func, func_args,
                                              func_dtypes)
        merge_func = new_name
        # execid
        ###################################################################################
        if isinstance(execid, Data_package): execid = execid.data
        if type(execid) != list: execid = [execid]
        execid_list = execid
        fp.execid_list = execid

        # work range
        ##################################################################################
        if type(work_range) == dict and work_range == {}:
            for data_name in args_list:
                if isinstance(data_name, Data_package):
                    # NOTE(review): 'dp' is left over from the argument
                    # loop above — this probably intended to use
                    # data_name.unique_id / data_name.data_name; verify.
                    if dp.unique_id == -1: continue
                    data_name = dp.data_name
                    dp = data_package_list[data_name]
                    work_range = dp.full_data_range
                    break

        work_range = to_range(work_range)

    if return_name == '':
        # deferred execution: hand back the argument list so the caller
        # can re-invoke run_function later
        return_name = None
        work_range = {'work_range': work_range}

        args = [
            return_name, function_name, execid, work_range, args_list,
            dtype_dict, output_halo, halo_dict, split_dict, merge_func,
            merge_order
        ]

        return None, run_function, args
        #return None

    # local functions
    ############################################################################
    # Recursively walk args_list; at each depth one argument is split
    # (or passed through) and at i == len(args_list) one complete
    # Function_package task is registered.  'decom' (set below, before
    # the first call) selects the decomposition strategy.
    def make_tasks2(arg_packages, i):
        global unique_id
        if i == len(args_list):

            # common variables
            # NOTE(review): duplicate 'global unique_id' (already declared
            # above); harmless but redundant.
            global unique_id
            fp.function_args = arg_packages

            modifier = modifier_dict['output']

            if decom == 'in_and_out_split1':
                num = modifier['num']
                work_range = modifier['range_list'][num - 1]
                fp.work_range = work_range

                split = modifier['split']
                data_halo = modifier['data_halo']
                buffer_halo = modifier['buffer_halo']
                full_data_range = modifier['data_range']

                fp.output.data_halo = data_halo
                split_position = make_split_position(split, num)
                fp.output.split_position = str(split_position)
                data_range = apply_halo(work_range, data_halo)

                fp.output.set_data_range(str(data_range))
                fp.output.set_full_data_range(str(full_data_range))
                fp.output.set_buffer_range(buffer_halo)

                modifier['num'] += 1
            elif decom == 'in_and_out_split2':
                # same output bookkeeping as in_and_out_split1
                num = modifier['num']
                work_range = modifier['range_list'][num - 1]
                fp.work_range = work_range

                split = modifier['split']
                data_halo = modifier['data_halo']
                buffer_halo = modifier['buffer_halo']
                full_data_range = modifier['data_range']

                fp.output.data_halo = data_halo
                split_position = make_split_position(split, num)
                fp.output.split_position = str(split_position)
                data_range = apply_halo(work_range, data_halo)

                fp.output.set_data_range(str(data_range))
                fp.output.set_full_data_range(str(full_data_range))
                fp.output.set_buffer_range(buffer_halo)

                modifier['num'] += 1
            elif decom == 'in':
                fp.output.unique_id = unique_id

                output_range = apply_halo(output_range_list[0], output_halo)
                fp.output.set_data_range(output_range)
                fp.output.split_shape = str(SPLIT_BASE)
                fp.output.split_position = str(SPLIT_BASE)
                fp.work_range = output_range

                # buffer
                modifier = modifier_dict['output']
                buffer_halo = modifier['buffer_halo']
                fp.output.set_buffer_range(buffer_halo)
            elif decom == 'out':
                num = modifier['num']
                work_range = modifier['range_list'][num - 1]
                fp.work_range = work_range

                split = modifier['split']
                data_halo = modifier['data_halo']
                buffer_halo = modifier['buffer_halo']
                full_data_range = modifier['data_range']

                fp.output.data_halo = data_halo
                split_position = make_split_position(split, num)
                fp.output.split_position = str(split_position)
                data_range = apply_halo(work_range, data_halo)

                fp.output.set_data_range(str(data_range))
                fp.output.set_full_data_range(str(full_data_range))
                fp.output.set_buffer_range(buffer_halo)

                modifier['num'] += 1

            # retain the output package and hand the task to the scheduler
            u = fp.output.unique_id
            unique_id += 1
            mem_retain(fp.output)
            if u not in retain_list: retain_list[u] = []
            retain_list[u].append(fp.output.copy())
            register_function(execid, fp)

            return

        dp = args_list[i]
        data_name = dp.data_name
        dp.memory_type = 'memory'

        # normal variables
        if dp.unique_id != -1:
            # setting about full data
            dp.split_shape = str(SPLIT_BASE)
            buf = dp.data
            # replace copy of original
            dp = dp.copy()

            if decom == 'in_and_out_split1':
                """
					input output decomposition
				"""
                global in_and_out_n

                u = dp.unique_id
                data_name = dp.data_name
                modifier = modifier_dict[
                    data_name] if data_name in modifier_dict else {}

                # split shape
                split_shape = modifier['split']
                dp.split_shape = str(split_shape)
                range_list = data_range_list_dict[data_name]
                data_halo = modifier['data_halo']
                buffer_halo = modifier['buffer_halo']

                cnt = modifier['cnt']

                split_position = make_split_position(split_shape, in_and_out_n)
                dp.split_position = str(split_position)

                # set data_range
                data_range = apply_halo(range_list[in_and_out_n - 1],
                                        data_halo, dp.full_data_range)
                dp.set_data_range(data_range)
                dp.data_halo = data_halo

                # set buffer halo
                buffer_range = apply_halo(range_list[in_and_out_n - 1],
                                          buffer_halo)
                dp.set_buffer_range(buffer_range)
                dp.buffer_halo = buffer_halo

                if Debug:
                    print "In and out DP", dp
                make_tasks2(arg_packages + [dp], i + 1)
            elif decom == 'in_and_out_split2':
                u = dp.unique_id
                data_name = dp.data_name
                modifier = modifier_dict[
                    data_name] if data_name in modifier_dict else {}
                # split shape

                split_shape = modifier['split']
                dp.split_shape = str(split_shape)
                # make splited data and go to next argument

                data_name = dp.data_name
                data_halo = modifier['data_halo']
                data_range_list = data_range_list_dict[data_name]

                buffer_halo = modifier['buffer_halo']
                n = 1
                dp.data_halo = data_halo
                for data_range in data_range_list:
                    data_range = apply_halo(data_range, data_halo)

                    dp.data_dtype = numpy.ndarray
                    dp.set_data_range(data_range)
                    dp.data_halo = data_halo

                    memory_shape = dp.data_memory_shape
                    shape = dp.data_shape
                    bytes = dp.data_bytes

                    # make depth
                    depth = make_depth(data_range, mmtx)
                    dp.depth = depth
                    fp.output.depth = depth
                    mem_depth(u, str(SPLIT_BASE), str(SPLIT_BASE), depth)

                    split_position = make_split_position(split_shape, n)
                    n += 1
                    dp.split_position = str(split_position)
                    mem_depth(data_list[data_name], str(split_shape),
                              dp.split_position, depth)

                    dp.set_buffer_range(buffer_halo)
                    make_tasks2(arg_packages + [dp], i + 1)
            elif decom == 'in':
                """
					input decomposition
					data range is same
				"""
                u = dp.unique_id
                data_name = dp.data_name
                modifier = modifier_dict[
                    data_name] if data_name in modifier_dict else {}
                # split shape

                split_shape = modifier['split']
                dp.split_shape = str(split_shape)
                # make splited data and go to next argument

                data_name = dp.data_name
                data_halo = modifier['data_halo']
                data_range_list = data_range_list_dict[data_name]

                buffer_halo = modifier['buffer_halo']
                n = 1
                dp.data_halo = data_halo
                for data_range in data_range_list:
                    data_range = apply_halo(data_range, data_halo)

                    dp.data_dtype = numpy.ndarray
                    dp.set_data_range(data_range)
                    dp.data_halo = data_halo

                    memory_shape = dp.data_memory_shape
                    shape = dp.data_shape
                    bytes = dp.data_bytes

                    # make depth
                    depth = make_depth(data_range, mmtx)
                    dp.depth = depth
                    fp.output.depth = depth
                    mem_depth(u, str(SPLIT_BASE), str(SPLIT_BASE), depth)

                    split_position = make_split_position(split_shape, n)
                    n += 1
                    dp.split_position = str(split_position)
                    mem_depth(data_list[data_name], str(split_shape),
                              dp.split_position, depth)

                    dp.set_buffer_range(buffer_halo)
                    if Debug:
                        print "DP", dp
                    make_tasks2(arg_packages + [dp], i + 1)
            elif decom == 'out':
                u = dp.unique_id
                # basic package setting
                dp.split_shape = str(SPLIT_BASE)
                dp.split_position = str(SPLIT_BASE)

                data_name = dp.data_name
                modifier = modifier_dict[
                    data_name] if data_name in modifier_dict else {}

                range_list = data_range_list_dict[data_name]

                data_halo = modifier['data_halo']
                buffer_halo = modifier['buffer_halo']

                # data_range
                data_range = apply_halo(range_list[0], data_halo)
                dp.set_data_range(data_range)
                dp.data_halo = data_halo

                dp.set_full_data_range(data_range)

                # buffer range
                buffer_range = apply_halo(range_list[0], buffer_halo)
                dp.set_buffer_range(buffer_range)
                dp.buffer_halo = buffer_halo

                make_tasks2(arg_packages + [dp], i + 1)
        else:
            # constants (unique_id == -1) pass through unchanged
            make_tasks2(arg_packages + [dp], i + 1)

    # Decide which in-and-out strategy applies: v1 when all split shapes
    # are identical, v2 when input and output split counts match,
    # otherwise False (fall back to pure input or output decomposition).
    def check_in_and_out(modifier_dict, input_cnt, output_cnt):
        flag = False
        in_and_out_split = True

        # in_and_out spilt version1 test

        # all split shape is identical
        for data_name in modifier_dict:
            modifier = modifier_dict[data_name]
            if flag:
                if split == modifier['split']:
                    pass
                else:
                    in_and_out_split = False
                    break
            else:
                # first one skip
                split = modifier['split']
                flag = True

        if in_and_out_split:
            return 'in_and_out_split1'

        # in_and_out split version2 test

        # same number of input and output count
        if output_cnt == input_cnt:
            return 'in_and_out_split2'

        return False

    ############################################################################

    # make argument name list
    args_name_list = []
    for elem in args_list:
        if isinstance(elem, Data_package):
            args_name_list.append(elem.data_name)

    if return_name == None:
        return_name = 'output'

    # set output information
    modifier_dict['output'] = {}
    output_split = split_dict[
        return_name] if return_name in split_dict else SPLIT_BASE

    output_data_range = work_range
    output_data_halo = 0

    # NOTE(review): buffer halo is a fixed magic constant here
    buffer_halo = 10

    output_dtype = return_dtype
    output_range = to_range(output_data_range)
    output_split = to_split(output_split)
    output_range_list = make_range_list(output_range, output_split)

    cnt = shape_to_count(output_split)

    modifier_dict['output']['split'] = output_split
    modifier_dict['output']['data_range'] = output_range
    modifier_dict['output']['data_halo'] = output_halo
    modifier_dict['output']['cnt'] = cnt
    modifier_dict['output']['buffer_halo'] = buffer_halo
    modifier_dict['output']['num'] = 1
    modifier_dict['output']['range_list'] = output_range_list
    output_cnt = cnt

    # temp data package
    # 'temp' describes the overall output returned to the caller
    temp = Data_package()
    temp.data_name = return_name
    temp.unique_id = unique_id
    temp.data_dtype = numpy.ndarray
    temp.data_halo = output_halo
    temp.set_data_contents_dtype(return_dtype)

    # modifier information about input
    input_cnt = 1

    data_range_list_dict = {}

    # make modifiers_list for each argument
    for args in args_list:
        name = args.data_name
        if args.unique_id != -1:
            modifier = {}

            modifier['data_range'] = args.data_range
            modifier['dtype'] = args.data_contents_dtype

            # strip the halo before computing the split ranges
            data_range = args.data_range
            data_halo = args.data_halo
            data_range = apply_halo(data_range, -data_halo)

            split = split_dict[name] if name in split_dict else SPLIT_BASE

            for axis in AXIS:
                if axis not in split:
                    split[axis] = 1

            data_range_list = make_range_list(data_range, split)

            data_range_list_dict[name] = data_range_list
            cnt = shape_to_count(split)

            modifier_dict[name] = {}
            modifier_dict[name]['split'] = split
            modifier_dict[name]['data_range'] = data_range
            modifier_dict[name][
                'data_halo'] = halo_dict[name] if name in halo_dict else 0
            modifier_dict[name]['buffer_halo'] = buffer_halo

            modifier_dict[name]['cnt'] = cnt
            input_cnt *= cnt

    in_and_out_split = check_in_and_out(modifier_dict, input_cnt, output_cnt)

    if in_and_out_split == 'in_and_out_split1':
        decom = 'in_and_out_split1'
        # this is special case called in&out split
        fp.output.split_shape = str(output_split)

        global in_and_out_n
        in_and_out_n = 1
        for work_range in output_range_list:
            make_tasks2([], 0)
            in_and_out_n += 1

        modifier = modifier_dict['output']
        data_halo = modifier['data_halo']
        full_data_range = apply_halo(output_range, data_halo)

        temp.set_data_range(str(full_data_range))
        temp.set_full_data_range(str(full_data_range))
        temp.set_buffer_range(str(full_data_range))
        temp.data_halo = data_halo

        unique_id += 1
        #	print "TEMP", temp
        return temp
    elif in_and_out_split == 'in_and_out_split2':
        decom = 'in_and_out_split2'

        fp.output.split_shape = str(output_split)

        make_tasks2([], 0)

        modifier = modifier_dict['output']
        data_halo = modifier['data_halo']
        full_data_range = apply_halo(output_range, data_halo)

        temp.set_data_range(str(full_data_range))
        temp.set_full_data_range(str(full_data_range))
        temp.set_buffer_range(str(full_data_range))
        temp.data_halo = data_halo

        unique_id += 1
        return temp
    elif input_cnt > 1:
        """
			input decomposition
		"""
        decom = 'in'
        count = input_cnt
        # set function package output
        full_data_range = apply_halo(output_range, output_halo)

        # NOTE(review): every other branch passes str(full_data_range);
        # this one passes dict(full_data_range) — confirm the asymmetry.
        fp.output.set_data_range(dict(full_data_range))
        fp.output.set_full_data_range(dict(full_data_range))
        fp.output.data_halo = output_halo

        # set output package
        temp.set_data_range(str(full_data_range))
        temp.set_full_data_range(str(full_data_range))
        temp.set_buffer_range(str(full_data_range))
        # register intermediate merge function
        u = unique_id
        inter = range(unique_id + 1, unique_id + count - 1)

        for inter_id in inter:
            temp.unique_id = inter_id
            mem_retain(temp)

        unique_id += count - 1
        temp.unique_id = u

        # make input functions
        make_tasks2([], 0)

        out_range = range(unique_id, unique_id + count)

        # intermediate merge functions
        dimension = len(output_range)
        scheduler_request_merge(temp, out_range, merge_func, merge_order,
                                dimension)

        #mem_inform(temp)
        return temp
    elif output_cnt > 1:
        """
			output decomposition
		"""
        decom = 'out'
        fp.output.split_shape = str(output_split)
        fp.output.data_halo = output_halo

        full_data_range = apply_halo(output_range, output_halo)

        n = 1
        for work_range in output_range_list:
            split_position = make_split_position(output_split, n)
            n += 1
            fp.output.split_position = str(split_position)
            data_range = apply_halo(work_range, output_halo)
            fp.output.set_data_range(str(data_range))
            fp.output.set_full_data_range(str(full_data_range))
            fp.output.set_buffer_range(buffer_halo)

            make_tasks2([], 0)

        temp.set_data_range(str(full_data_range))
        temp.set_full_data_range(str(full_data_range))
        temp.set_buffer_range(str(full_data_range))

        unique_id += 1
        return temp
    else:
        # split input and output both, but not in&out split

        print "==============================="
        print "VIVALDI ERROR"
        print "tried to split input and output together but number of input split and output split is different"
        print "input_cnt: ", input_cnt
        print "output_cnt: ", output_cnt
        print "==============================="
        assert (False)

    assert (False)
Exemplo n.º 6
0
def manage_as_data_package(data_name):
    """Wrap the module-global variable named *data_name* in a Data_package.

    Assigns the package a fresh module-wide unique id and registers it in
    both ``data_list`` (name -> id) and ``data_package_list`` (name -> package).
    """
    global unique_id

    # fetch the actual object out of the module globals
    value = globals()[data_name]

    package = Data_package()

    if type(value) == numpy.ndarray:
        # raw ndarray: derive the Vivaldi type name and data ranges from it
        shape = list(value.shape)
        last = shape[len(shape) - 1]
        type_name, shape = make_type_name(value.dtype, shape, last)

        package.data_dtype = numpy.ndarray
        package.set_data_contents_dtype(type_name)

        rng = shape_to_range(shape)
        package.set_data_range(rng)
        package.set_full_data_range(rng)

        # keep a shallow copy of the array object
        package.data = copy.copy(value)
    elif isinstance(value, Data_package):
        # already packaged: continue with a copy of it
        package = value.copy()

    package.data_name = data_name

    # hand out the next unique id
    fresh_id = unique_id
    unique_id += 1
    package.unique_id = fresh_id

    data_list[data_name] = fresh_id
    data_package_list[data_name] = package
Exemplo n.º 7
0
def run_function(return_name=None, func_name='', execid=[], work_range=None, args=[], dtype_dict={}, output_halo=0, halo_dict={}, split_dict={}, merge_func='', merge_order=''):
	"""Register a Vivaldi function execution and return a Data_package for its output.

	Arguments are converted to Data_packages, the data decomposition mode is
	decided (input split, output split, or combined in&out split), and one
	Function_package per task is registered with the scheduler via
	make_tasks2.  Returns the output Data_package, or a deferred
	(None, run_function, args) triple when return_name == ''.

	NOTE(review): the mutable default arguments (execid=[], args=[], and the
	dict defaults) are shared across calls -- left untouched because callers
	may rely on the existing behaviour.
	"""

	def input_argument_check():
		# fail fast on obviously mistyped keyword arguments

		if type(func_name) != str:
			print "Function_name error"
			print "Func_name: ", func_name

		if type(dtype_dict) != dict:
			print "Dtype_dict error"
			print "Dtype_dict: ", dtype_dict
			assert(False)

		if type(split_dict) != dict:
			print "Split_dict error"
			print "Split_dict: ", split_dict
			assert(False)

		if type(halo_dict) != dict:
			print "Halo_dict error"
			print "Halo_dict: ", halo_dict
			assert(False)

		if type(merge_func) != str:
			print "Merge function_name error"
			print "Merge_function name: ", merge_func

	# compatibility
	############################################################
	function_name = func_name
	args_list = args

	modifier_dict={}

	# input argument error check
	input_argument_check()

	# initialization
	##############################################################
	global mmtx, inv_mmtx
	global unique_id
	function_package = Function_package()
	fp = function_package

	if Vivaldi_viewer.v != None:
		mmtx = Vivaldi_viewer.mmtx
		inv_mmtx = Vivaldi_viewer.inv_mmtx

	Debug = False

	# arguments
	##################################################################################
	# turn every entry in args_list into a Data_package (new_args)
	new_args = []
	for data_name in args_list:
		if data_name in data_package_list:
			# data_name, is managed as data package in the main manager
			dp = data_package_list[data_name]
			dtype = str(dp.data_contents_dtype)
			dtype = dtype.replace('_volume','')
			# specialize the function name by the argument dtype
			function_name += dtype
			# we should give access to data to reader, before running function
			# check it is already available from reader or not

			data = globals()[data_name]

			u = dp.unique_id

			flag = False
			if dp.out_of_core and u not in retain_list: flag = True # out of core and Not informed to memory manager
			if not dp.out_of_core and u not in retain_list: flag = True # in core and Didn't informed ( function output already informed)
			if flag:
				manage_as_data_package(data_name)
				dp = data_package_list[data_name]

				u = dp.unique_id
				if u not in retain_list: retain_list[u] = []
				retain_list[u].append(dp.copy())

				reader_give_access_to_data(data_name)

			# send only metadata; the reader holds the actual bytes
			dp = dp.copy()
			dp.data = None
			dp.devptr = None

		elif data_name in globals():
			# There are two kinds of data here
			# 1. volume
			# 2. values

			data = globals()[data_name]

			if type(data) == numpy.ndarray: # this is volume
				# now the data is also managed as data package 
				manage_as_data_package(data_name)
				# now we have data_package correspond to the data
				dp = data_package_list[data_name]

				u = dp.unique_id
				if u not in retain_list: retain_list[u] = []
				retain_list[u].append(dp.copy())

				# Vivaldi reader have access to this data
				reader_give_access_to_data(data_name)

				# than make a new function name using the existing function name, the data_name and data dtype

				dtype = str(dp.data_contents_dtype)
				dtype = dtype.replace('_volume','')

				function_name += str(dp.data_contents_dtype)
				dp = dp.copy()
				dp.data = None
				dp.devptr = None
			else: # this is constant
				# constants travel inline in the package (unique_id == -1)
				dp = Data_package()

				dtype = type(data_name)

				dp.data_name = data_name
				dp.unique_id = -1
				dp.data_dtype = dtype
				dp.data_contents_dtype = dtype
				dp.data_contents_memory_dtype = dtype
				dp.data = data
		else:
			# data_name not in the globals list
			# it is usually AXIS or constant like x,y not previously defined
			if isinstance(data_name, Data_package):
				dp = data_name
			else:
				data = None
				dp = Data_package()

				dtype = type(data_name)

				dp.data_name = data_name
				dp.unique_id = -1
				dp.data_dtype = dtype
				dp.data_contents_dtype = dtype
				dp.data_contents_memory_dtype = dtype
				dp.data = data_name

		new_args.append(dp)
	args_list = new_args

	# get Vivaldi functions
	######################################################################################
	global parsed_Vivaldi_functions
	func_args = args_list

	return_dtype = parsed_Vivaldi_functions.get_return_dtype(function_name)
	fp.set_function_name(function_name)
	fp.output.unique_id					= unique_id
	fp.mmtx								= mmtx
	fp.inv_mmtx							= inv_mmtx
	fp.output.data_dtype				= numpy.ndarray
	fp.output.data_name					= return_name

	if return_dtype == '':
		print "======================================================="
		print "VIVALDI ERROR, can not find return dtype"
		print "function_name:", function_name
		print "return name:", return_name
		print "return dtype:", return_dtype
		print "======================================================="
		assert(False)
	fp.output.set_data_contents_dtype(return_dtype)

	# viewer / transfer-function state
	v = Vivaldi_viewer.v
	trans_on = Vivaldi_viewer.trans_on
	transN = Vivaldi_viewer.transN

	if trans_on == True:
		# forward updated transfer-function textures to the function package
		if v.getIsTFupdated() == 1:
			fp.trans_tex 		  = v.getTFF()
			fp.update_tf = 1
			fp.update_tf2 = 0
			v.TFF.widget.updated = 0
		elif v.getIsTFupdated2() == 1:
			fp.trans_tex 		  = v.getTFF2()
			fp.update_tf = 0
			fp.update_tf2 = 1
			v.TFF2.widget.updated = 0

		fp.TF_bandwidth		  = v.getTFBW()
		fp.CRG = v.window.CRG

	output_halo = 0
	if type(work_range) == dict:
		if 'work_range' in work_range:
			work_range = work_range['work_range']

	if return_name != None:
		# merge_func
		###############################################################
		# merge functions always take two arguments named front and back
		func_args = ['front','back']
		func_dtypes = {}
		for elem in func_args:
			func_dtypes[ elem ] = return_dtype

		new_name = make_func_name_with_dtypes(merge_func, func_args, func_dtypes)
		merge_func = new_name
		# execid
		###################################################################################
		if isinstance(execid, Data_package): execid = execid.data
		if type(execid) != list: execid = [execid]
		execid_list = execid
		fp.execid_list                    = execid

		# work range
		##################################################################################
		if type(work_range) == dict and work_range == {}:
			# no explicit range: borrow the full range of the first real argument
			for data_name in args_list:
				if isinstance(data_name, Data_package):
					# NOTE(review): 'dp' here is left over from the argument
					# loop above -- presumably this should inspect 'data_name'
					# instead; confirm before changing
					if dp.unique_id == -1:continue
					data_name = dp.data_name
					dp = data_package_list[data_name]
					work_range = dp.full_data_range
					break


		work_range = to_range(work_range) 

	if return_name == '':
		# deferred execution: hand back the callable and its arguments
		return_name = None
		work_range = {'work_range':work_range}

		args = [return_name, function_name, execid, work_range, args_list, dtype_dict, output_halo, halo_dict, split_dict, merge_func, merge_order]

		return None, run_function, args
		#return None

	# local functions
	############################################################################
	def make_tasks2(arg_packages, i):
		"""Recursively expand args_list into task registrations.

		Walks argument i; when every argument has been expanded it fills in
		fp.output according to the current decomposition mode ('decom' from
		the enclosing scope) and registers the function package.
		"""
		global unique_id
		if i == len(args_list):

			# common variables
			global unique_id
			fp.function_args					= arg_packages

			modifier = modifier_dict['output']

			if decom == 'in_and_out_split1':
				num = modifier['num']
				work_range = modifier['range_list'][num-1]
				fp.work_range = work_range

				split = modifier['split']
				data_halo = modifier['data_halo']
				buffer_halo = modifier['buffer_halo']
				full_data_range = modifier['data_range']

				fp.output.data_halo = data_halo
				split_position = make_split_position(split, num)
				fp.output.split_position = str(split_position)
				data_range = apply_halo(work_range, data_halo)

				fp.output.set_data_range(str(data_range))
				fp.output.set_full_data_range(str(full_data_range))
				fp.output.set_buffer_range(buffer_halo)

				modifier['num'] += 1
			elif decom == 'in_and_out_split2':
				num = modifier['num']
				work_range = modifier['range_list'][num-1]
				fp.work_range = work_range

				split = modifier['split']
				data_halo = modifier['data_halo']
				buffer_halo = modifier['buffer_halo']
				full_data_range = modifier['data_range']

				fp.output.data_halo = data_halo
				split_position = make_split_position(split, num)
				fp.output.split_position = str(split_position)
				data_range = apply_halo(work_range, data_halo)

				fp.output.set_data_range(str(data_range))
				fp.output.set_full_data_range(str(full_data_range))
				fp.output.set_buffer_range(buffer_halo)

				modifier['num'] += 1
			elif decom == 'in':
				# input split: every task writes the whole (unsplit) output
				fp.output.unique_id = unique_id

				output_range = apply_halo(output_range_list[0], output_halo)
				fp.output.set_data_range(output_range)
				fp.output.split_shape = str(SPLIT_BASE)
				fp.output.split_position = str(SPLIT_BASE)
				fp.work_range = output_range

				# buffer
				modifier = modifier_dict['output']
				buffer_halo = modifier['buffer_halo']
				fp.output.set_buffer_range(buffer_halo)
			elif decom == 'out':
				num = modifier['num']
				work_range = modifier['range_list'][num-1]
				fp.work_range = work_range

				split = modifier['split']
				data_halo = modifier['data_halo']
				buffer_halo = modifier['buffer_halo']
				full_data_range = modifier['data_range']

				fp.output.data_halo = data_halo
				split_position = make_split_position(split, num)
				fp.output.split_position = str(split_position)
				data_range = apply_halo(work_range, data_halo)

				fp.output.set_data_range(str(data_range))
				fp.output.set_full_data_range(str(full_data_range))
				fp.output.set_buffer_range(buffer_halo)

				modifier['num'] += 1

			# retain the output and hand the task to the scheduler
			u = fp.output.unique_id
			unique_id += 1
			mem_retain(fp.output)
			if u not in retain_list: retain_list[u] = []
			retain_list[u].append(fp.output.copy())
			register_function(execid, fp)

			return

		dp = args_list[i]
		data_name = dp.data_name
		dp.memory_type = 'memory'

		# normal variables
		if dp.unique_id != -1:
			# setting about full data
			dp.split_shape = str(SPLIT_BASE)
			buf = dp.data
			# replace copy of original
			dp = dp.copy()

			if decom == 'in_and_out_split1':
				"""
					input output decomposition
				"""
				global in_and_out_n

				u = dp.unique_id
				data_name = dp.data_name
				modifier = modifier_dict[data_name] if data_name in modifier_dict else {}


				# split shape
				split_shape = modifier['split']
				dp.split_shape = str(split_shape)
				range_list = data_range_list_dict[data_name]
				data_halo = modifier['data_halo']
				buffer_halo = modifier['buffer_halo']

				cnt = modifier['cnt']

				split_position = make_split_position(split_shape, in_and_out_n)
				dp.split_position = str(split_position)

				# set data_range
				data_range = apply_halo(range_list[in_and_out_n-1], data_halo, dp.full_data_range)
				dp.set_data_range(data_range)
				dp.data_halo = data_halo

				# set buffer halo
				buffer_range = apply_halo(range_list[in_and_out_n-1], buffer_halo)
				dp.set_buffer_range(buffer_range)
				dp.buffer_halo = buffer_halo

				if Debug:
					print "In and out DP", dp
				make_tasks2(arg_packages + [dp], i + 1)
			elif decom == 'in_and_out_split2':
				u = dp.unique_id
				data_name = dp.data_name
				modifier = modifier_dict[data_name] if data_name in modifier_dict else {}
				# split shape

				split_shape = modifier['split']
				dp.split_shape = str(split_shape)
				# make splited data and go to next argument

				data_name = dp.data_name
				data_halo = modifier['data_halo']
				data_range_list = data_range_list_dict[data_name]

				buffer_halo = modifier['buffer_halo']
				n = 1
				dp.data_halo = data_halo
				for data_range in data_range_list:
					data_range = apply_halo(data_range, data_halo)

					dp.data_dtype = numpy.ndarray
					dp.set_data_range(data_range)
					dp.data_halo = data_halo

					memory_shape = dp.data_memory_shape
					shape = dp.data_shape
					bytes = dp.data_bytes

					# make depth
					depth = make_depth(data_range, mmtx)
					dp.depth = depth
					fp.output.depth = depth
					mem_depth(u, str(SPLIT_BASE), str(SPLIT_BASE), depth)

					split_position = make_split_position(split_shape, n)
					n += 1
					dp.split_position = str(split_position)
					mem_depth(data_list[data_name], str(split_shape), dp.split_position, depth)

					dp.set_buffer_range(buffer_halo)
					make_tasks2(arg_packages + [dp], i + 1)
			elif decom == 'in':
				"""
					input decomposition
					data range is same
				"""
				u = dp.unique_id
				data_name = dp.data_name
				modifier = modifier_dict[data_name] if data_name in modifier_dict else {}
				# split shape

				split_shape = modifier['split']
				dp.split_shape = str(split_shape)
				# make splited data and go to next argument

				data_name = dp.data_name
				data_halo = modifier['data_halo']
				data_range_list = data_range_list_dict[data_name]

				buffer_halo = modifier['buffer_halo']
				n = 1
				dp.data_halo = data_halo
				for data_range in data_range_list:
					data_range = apply_halo(data_range, data_halo)

					dp.data_dtype = numpy.ndarray
					dp.set_data_range(data_range)
					dp.data_halo = data_halo

					memory_shape = dp.data_memory_shape
					shape = dp.data_shape
					bytes = dp.data_bytes

					# make depth
					depth = make_depth(data_range, mmtx)
					dp.depth = depth
					fp.output.depth = depth
					mem_depth(u, str(SPLIT_BASE), str(SPLIT_BASE), depth)

					split_position = make_split_position(split_shape, n)
					n += 1
					dp.split_position = str(split_position)
					mem_depth(data_list[data_name], str(split_shape), dp.split_position, depth)

					dp.set_buffer_range(buffer_halo)
					if Debug:
						print "DP", dp
					make_tasks2(arg_packages + [dp], i + 1)
			elif decom == 'out':
				u = dp.unique_id
				# basic package setting
				dp.split_shape = str(SPLIT_BASE)
				dp.split_position = str(SPLIT_BASE)

				data_name = dp.data_name
				modifier = modifier_dict[data_name] if data_name in modifier_dict else {}

				range_list = data_range_list_dict[data_name]

				data_halo = modifier['data_halo']
				buffer_halo = modifier['buffer_halo']

				# data_range
				data_range = apply_halo(range_list[0], data_halo)
				dp.set_data_range(data_range)
				dp.data_halo = data_halo

				dp.set_full_data_range(data_range)

				# buffer range
				buffer_range = apply_halo(range_list[0], buffer_halo)
				dp.set_buffer_range(buffer_range)
				dp.buffer_halo = buffer_halo

				make_tasks2(arg_packages + [dp], i + 1)
		else:
			# constant argument: pass through unchanged
			make_tasks2(arg_packages + [dp], i + 1)
	def check_in_and_out(modifier_dict, input_cnt, output_cnt):
		"""Decide which combined in&out split applies, if any.

		Returns 'in_and_out_split1' when every modifier shares one split
		shape, 'in_and_out_split2' when input and output counts match, and
		False otherwise.
		"""
		flag = False
		in_and_out_split = True

		# in_and_out spilt version1 test

		# all split shape is identical
		for data_name in modifier_dict:
			modifier = modifier_dict[data_name]
			if flag:
				if split == modifier['split']:
					pass
				else:
					in_and_out_split = False
					break
			else:
				# first one skip
				split = modifier['split']
				flag = True

		if in_and_out_split:
			return 'in_and_out_split1'

		# in_and_out split version2 test

		# same number of input and output count
		if output_cnt == input_cnt:
			return 'in_and_out_split2'

		return False
	############################################################################

	# make argument name list
	args_name_list = []
	for elem in args_list:
		if isinstance(elem, Data_package):
			args_name_list.append(elem.data_name)

	if return_name == None:
		return_name = 'output'

	# set output information
	modifier_dict['output'] = {}
	output_split = split_dict[return_name] if return_name in split_dict else SPLIT_BASE

	output_data_range = work_range
	output_data_halo = 0

	# fixed buffer padding around every piece
	buffer_halo = 10

	output_dtype = return_dtype
	output_range = to_range(output_data_range)
	output_split = to_split(output_split)
	output_range_list = make_range_list(output_range, output_split)

	cnt = shape_to_count(output_split)

	modifier_dict['output']['split'] = output_split
	modifier_dict['output']['data_range'] = output_range
	modifier_dict['output']['data_halo'] = output_halo
	modifier_dict['output']['cnt'] = cnt
	modifier_dict['output']['buffer_halo'] = buffer_halo
	modifier_dict['output']['num'] = 1
	modifier_dict['output']['range_list'] = output_range_list
	output_cnt = cnt


	# temp data package
	# 'temp' is the Data_package this call eventually returns
	temp = Data_package()
	temp.data_name = return_name
	temp.unique_id = unique_id
	temp.data_dtype = numpy.ndarray
	temp.data_halo = output_halo
	temp.set_data_contents_dtype(return_dtype)

	# modifier information about input
	input_cnt = 1

	data_range_list_dict = {}

	# make modifiers_list for each argument
	for args in args_list:
		name = args.data_name
		if args.unique_id != -1:
			modifier = {}

			modifier['data_range'] = args.data_range
			modifier['dtype'] = args.data_contents_dtype

			# strip the halo back off to recover the pure data range
			data_range = args.data_range
			data_halo = args.data_halo
			data_range = apply_halo(data_range, -data_halo)

			split = split_dict[name] if name in split_dict else SPLIT_BASE

			for axis in AXIS:
				if axis not in split:
					split[axis] = 1

			data_range_list = make_range_list(data_range, split)

			data_range_list_dict[name] = data_range_list
			cnt = shape_to_count(split)

			modifier_dict[name] = {}
			modifier_dict[name]['split'] = split
			modifier_dict[name]['data_range'] = data_range
			modifier_dict[name]['data_halo'] = halo_dict[name] if name in halo_dict else 0
			modifier_dict[name]['buffer_halo'] = buffer_halo

			modifier_dict[name]['cnt'] = cnt
			input_cnt *= cnt

	in_and_out_split = check_in_and_out(modifier_dict, input_cnt, output_cnt)

	if in_and_out_split == 'in_and_out_split1':
		decom = 'in_and_out_split1'
		# this is special case called in&out split
		fp.output.split_shape = str(output_split)

		global in_and_out_n
		in_and_out_n = 1
		for work_range in output_range_list:
			make_tasks2([], 0)
			in_and_out_n += 1

		modifier = modifier_dict['output']
		data_halo = modifier['data_halo']
		full_data_range = apply_halo(output_range, data_halo)

		temp.set_data_range(str(full_data_range))
		temp.set_full_data_range( str(full_data_range))
		temp.set_buffer_range(str(full_data_range))
		temp.data_halo = data_halo

		unique_id += 1
	#	print "TEMP", temp
		return temp
	elif in_and_out_split == 'in_and_out_split2':
		decom = 'in_and_out_split2'

		fp.output.split_shape = str(output_split)

		make_tasks2([], 0)

		modifier = modifier_dict['output']
		data_halo = modifier['data_halo']
		full_data_range = apply_halo(output_range, data_halo)

		temp.set_data_range(str(full_data_range))
		temp.set_full_data_range( str(full_data_range))
		temp.set_buffer_range(str(full_data_range))
		temp.data_halo = data_halo

		unique_id += 1
		return temp
	elif input_cnt > 1:
		"""
			input decomposition
		"""
		decom = 'in'
		count = input_cnt
		# set function package output
		full_data_range = apply_halo(output_range, output_halo)

		fp.output.set_data_range( dict(full_data_range))
		fp.output.set_full_data_range( dict(full_data_range))
		fp.output.data_halo = output_halo

		# set output package
		temp.set_data_range( str(full_data_range))
		temp.set_full_data_range( str(full_data_range))
		temp.set_buffer_range(str(full_data_range))
		# register intermediate merge function
		u = unique_id
		inter = range(unique_id+1, unique_id+count-1)

		for inter_id in inter:
			temp.unique_id = inter_id
			mem_retain(temp)

		unique_id += count-1
		temp.unique_id = u

		# make input functions 
		make_tasks2([], 0)

		out_range = range(unique_id, unique_id+count)

		# intermediate merge functions 
		dimension = len(output_range)
		scheduler_request_merge(temp, out_range, merge_func, merge_order, dimension)

		#mem_inform(temp)
		return temp
	elif output_cnt > 1:
		"""
			output decomposition
		"""
		decom = 'out'
		fp.output.split_shape = str(output_split)
		fp.output.data_halo = output_halo

		full_data_range = apply_halo(output_range, output_halo)

		n = 1
		for work_range in output_range_list:
			split_position = make_split_position(output_split, n)
			n += 1
			fp.output.split_position = str(split_position)
			data_range = apply_halo( work_range, output_halo)
			fp.output.set_data_range( str(data_range))
			fp.output.set_full_data_range( str(full_data_range))
			fp.output.set_buffer_range(buffer_halo)

			make_tasks2([], 0)

		temp.set_data_range(str(full_data_range))
		temp.set_full_data_range( str(full_data_range))
		temp.set_buffer_range(str(full_data_range))

		unique_id += 1
		return temp
	else:
		# split input and output both, but not in&out split
		print "==============================="
		print "VIVALDI ERROR"
		print "tried to split input and output together but number of input split and output split is different"
		print "input_cnt: ", input_cnt
		print "output_cnt: ", output_cnt
		print "==============================="
		assert(False)

	assert(False)
Exemplo n.º 8
0
def manage_as_data_package(data_name):
	"""Wrap the module-global variable named *data_name* in a Data_package.

	Assigns the package a fresh module-wide unique id and registers it in
	both ``data_list`` (name -> id) and ``data_package_list``
	(name -> package).
	"""
	# bring data from global variable dictionary
	data = globals()[data_name]

	# make data package for manage
	temp = Data_package()

	if type(data) == numpy.ndarray:
		# case of data is numpy.ndarray
		dtype = data.dtype
		shape = list(data.shape)
		n = len(shape)
		last = shape[n-1]
		# for make Vivaldi data type name
		type_name,shape = make_type_name(dtype, shape, last)
		temp.data_dtype = numpy.ndarray
		temp.set_data_contents_dtype(type_name)
		data_range = shape_to_range(shape)

		temp.set_data_range(data_range)
		temp.set_full_data_range(data_range)

		# keep a shallow copy of the array object
		temp.data = copy.copy(data)

	elif isinstance(data, Data_package):
		# already packaged: continue with a copy of it
		temp = data.copy()

	temp.data_name = data_name
	# hand out the next module-wide unique id
	global unique_id
	u = unique_id
	unique_id += 1

	temp.unique_id = u

	data_list[data_name] = u
	data_package_list[data_name] = temp
def load_data_3d(file_full_name, dtype=None, out_of_core=False):
	dtype = python_dtype_to_Vivaldi_dtype(dtype)

	file_name, extension = split_file_name_and_extension(file_full_name)
	el = extension.lower()

	if out_of_core:
		if el == 'dat':
			# read dat file
			# dat file have FileName and 
			# It may have AbsolutePath or RelativePath
			file_info = matrixio.read_dat(file_full_name, out_of_core=True)
			file_name = file_info['FileName']
			assert(file_name != None)
			
			if 'Path' in file_info:
				# user defined path
				path = file_info['Path']
			else:
				# default is find same folder
				idx = file_full_name.rfind('/')
				path = file_full_name[:idx+1] 
				

			shape = file_info['Resolution'].split()
			shape[0],shape[1],shape[2] = int(shape[2]),int(shape[1]),int(shape[0])
			contents_dtype = file_info['Format'].lower()
			chan = file_info['Channel'] if 'Channel' in file_info else 1

			temp = Data_package()
			temp.out_of_core = True

			temp.file_name = path + file_name
			temp.file_dtype = python_dtype_to_Vivaldi_dtype(contents_dtype)

			temp.buffer_dtype = numpy.ndarray
			if dtype != None: dtype = dtype
			else: dtype = contents_dtype
			if chan != 1: dtype += str(chan)

			temp.set_data_contents_dtype(dtype)
	
			data_range = shape_to_range(shape)

			temp.set_data_range(data_range)
			temp.set_buffer_range(data_range)
			temp.set_full_data_range(data_range)
			return temp

		else:
			print "NOT PROVIDE DATA TYPE"
			assert(False)
	else:
		st = time.time()
		data = matrixio.read_3d_data(file_full_name)

		if dtype != None:
			dtype = python_dtype_to_Vivaldi_dtype(dtype)
			dtype = Vivaldi_dtype_to_python_dtype(dtype)
			data = data.astype(dtype)

		if data == None:
			print "======================================"
			print "VIVALDI ERROR, data load is failed"
			print "Are you sure, exension is correct?"
			print "Extension: %s"%(extension)
			print "======================================"
			assert(False)



		print 'load_data_3d:', file_full_name, 'dtype:', data.dtype, 'shape:', data.shape,'loading time:',1000*(time.time()-st),'ms'
		return data

	return None
Exemplo n.º 10
0
def load_data_2d(file_full_name, dtype=None, out_of_core=False):
	dtype = python_dtype_to_Vivaldi_dtype(dtype)
	file_name, extension = split_file_name_and_extension(file_full_name)
	el = extension.lower()
	if out_of_core:
		if extension == 'dat':
			file_info = matrixio.read_dat(filename, out_of_core=True)
			file_name = file_info['FileName']
			assert(file_name != None)
			
			if 'Path' in file_info:
				# user defined path
				path = file_info['Path']
			else:
				# default is find same folder
				idx = file_full_name.rfind('/')
				path = file_full_name[:idx+1] 

			shape = file_info['Resolution'].split()
			shape[0],shape[1] = int(shape[1]),int(shape[0])
			contents_dtype = file_info['Format'].lower()
			chan = 1

			temp = Data_package()
			temp.out_of_core = True

			temp.file_name = path + file_name
			temp.file_dtype = python_dtype_to_Vivaldi_dtype(contents_dtype)

			temp.buffer_dtype = numpy.ndarray
			if dtype != None: dtype = dtype
			else: dtype = contents_dtype
			if chan != 1: dtype += str(chan)
	
			temp.set_buffer_contents_dtype(dtype)
	
			data_range = shape_to_range(shape)
			temp.set_data_range(data_range)
			temp.set_buffer_range(data_range)
			temp.set_full_buffer_range(data_range)

			return temp

	else:
		st = time.time()
		data = matrixio.read_2d_data(file_full_name)

		dtype = Vivaldi_dtype_to_python_dtype(dtype)
		if dtype != None:
			data = data.astype(dtype)
		else: dtype = data.dtype
		print 'load_data_2d_rgb:', file_full_name, 'dtype:', data.dtype, 'shape:', data.shape,'loading time:',1000*(time.time()-st),'ms'
	
		return data
	return None
Exemplo n.º 11
0
	def get_return_package(function_name, argument_package_list, work_range, output_halo):
		"""Build the Data_package that describes this call's output.

		Reads ``function_code_dict`` and ``output_split`` from the enclosing
		scope and bumps the module-global ``unique_id``.
		"""
		data_package = Data_package()
		def get_unique_id():
			# hand out the next module-wide unique id
			global unique_id
			unique_id += 1
			return unique_id
		data_package.unique_id = get_unique_id()
		data_package.data_dtype = numpy.ndarray
		data_package.data_halo = output_halo

		def get_return_dtype(function_name, argument_package_list):
			# ask the translator layer to infer the return dtype from the code
			from Vivaldi_translator_layer import get_return_dtype
			function_code = function_code_dict[function_name]
			return_dtype = get_return_dtype(function_name, argument_package_list, function_code)
			if return_dtype.endswith('_volume'):
				# returning a whole volume is flagged as a likely user mistake
				print "Vivaldi_warning"
				print "---------------------------------"
				print "Check your function"
				print "you are trying to return a volume"
				print "return_dtype: ", return_dtype
				print "---------------------------------"
			return return_dtype
		return_dtype = get_return_dtype(function_name, argument_package_list)

		data_package.set_data_contents_dtype(return_dtype)
		data_package.set_full_data_range(work_range)
		data_package.set_data_range(work_range)
		data_package.halo = output_halo
		data_package.split = output_split
		data_package.shared = True
		return data_package
Exemplo n.º 12
0
	def get_return_package(function_name, argument_package_list, work_range, output_halo, merge_func=''):
		"""Build the Data_package that describes this call's output.

		Streaming (FREYJA) variant: when an argument comes from "hdfs" or
		"local" and no merge function is given, the output inherits that
		argument's dtype and gets data_source "local".  Reads
		``function_code_dict`` and ``output_split`` from the enclosing scope
		and bumps the module-global ``unique_id``.
		"""
		data_package = Data_package()
		def get_unique_id():
			# hand out the next module-wide unique id
			global unique_id
			unique_id += 1
			return unique_id
		data_package.unique_id = get_unique_id()
		data_package.data_dtype = numpy.ndarray
		data_package.data_halo = output_halo

		# Find return type of worker function
		def get_return_dtype(function_name, argument_package_list):
			from Vivaldi_translator_layer import get_return_dtype
			function_code = function_code_dict[function_name]
			return_dtype = get_return_dtype(function_name, argument_package_list, function_code)
			# streamed inputs without a merge step keep their own dtype
			for elem in argument_package_list:
				if isinstance(elem, Data_package):
					if elem.data_source in ["hdfs", "local"] and merge_func == '':
						return elem.data_contents_dtype
			if return_dtype.endswith('_volume'):
				# returning a whole volume is flagged as a likely user mistake
				print "Vivaldi_warning"
				print "---------------------------------"
				print "Check your function"
				print "you are trying to return a volume"
				print "return_dtype: ", return_dtype
				print "---------------------------------"
			return return_dtype

		def get_return_source(argument_package_list):
			# output lives locally when any input is streamed and unmerged
			return_source = None

			for elem in argument_package_list:
				if elem.data_source in ["hdfs", "local"]  and merge_func== '':
					return_source = "local"

			return return_source

		return_dtype = get_return_dtype(function_name, argument_package_list)


		data_package.set_data_contents_dtype(return_dtype)
		data_package.set_full_data_range(work_range)
		data_package.set_data_range(work_range)
		data_package.halo = output_halo
		data_package.split = output_split
		data_package.shared = True

		# FREYJA STREAMING
		data_package.set_data_source(get_return_source(argument_package_list))
		return data_package
Exemplo n.º 13
0
	def get_argument_package_list(args, arg_names, split_dict, halo_dict):
		"""Convert raw call arguments into a list of Data_packages.

		AXIS names become bare axis packages; other arguments get split/halo
		modifiers applied and a unique id assigned.  Streaming (FREYJA)
		inputs may also inject a z-split into ``split_dict`` as a side
		effect.  Reads ``work_range`` and ``return_name`` from the enclosing
		scope and may bump the module-global ``unique_id``.
		"""
		i = 0
		argument_package_list = []
		for data_name in arg_names:
			arg = args[i]
			if data_name in AXIS:
				# axis placeholder (e.g. x, y, z): no data attached
				argument_package = Data_package(data_name)
				argument_package.shared = False
				argument_package_list.append(argument_package)
			else:
				argument_package = None
				# get modifier split and halo
				split = split_dict[data_name] if data_name in split_dict else {}

				#FREYJA STREAMING
				# for a streamed Data_package with no explicit split, derive
				# a z-split from its stream count and record it in split_dict
				if isinstance(arg, Data_package):
					if split_dict != {}:
						pass
					elif isinstance(work_range, Data_package):
						if arg.stream and (arg.data_shape == work_range.data_shape):
							split = {'z':arg.stream_count}
							split_dict[data_name] = split
							split_dict[return_name] = split
					elif isinstance(work_range, dict):
						if arg.stream:
							split = {'z':arg.stream_count}
							split_dict[data_name] = split

				halo = halo_dict[data_name] if data_name in halo_dict else 0
				# apply to data_package
				if isinstance(arg, Data_package):
					argument_package = arg
					argument_package.split = split
					argument_package.halo = halo


					if argument_package.unique_id == None:
						def get_unique_id(arg):
							# reuse a known package's id, otherwise mint one
							aid = id(arg)
							if aid in data_package_list:
								return data_package_list[aid].get_unique_id()
							else:
								global unique_id
								unique_id += 1
							return unique_id
						argument_package.unique_id = get_unique_id(arg)
						argument_package.shared = False
				else:
					# raw value (ndarray or constant): wrap it in a package
					argument_package = Data_package(arg,split=split,halo=halo)
					def get_unique_id(arg):
						# constants get -1; ndarrays reuse or mint an id
						if type(arg) != numpy.ndarray:
							return -1
						aid = id(arg)
						if aid in data_package_list:
							return data_package_list[aid].get_unique_id()
						else:
							global unique_id
							unique_id += 1
						return unique_id
					argument_package.unique_id = get_unique_id(arg)
					def add_to_data_package_list(data_package, data):
						# remember ndarray packages keyed by the array's id()
						if type(data) == numpy.ndarray:
							key = id(data)
							data_package_list[key] = data_package
					add_to_data_package_list(argument_package, arg)
					argument_package.shared = False
				argument_package_list.append(argument_package)
			i += 1
		return argument_package_list
Exemplo n.º 14
0
	def get_return_package(function_name, argument_package_list, work_range, output_halo):
		data_package = Data_package()
		def get_unique_id():
			global unique_id
			unique_id += 1
			return unique_id
		data_package.unique_id = get_unique_id()
		data_package.data_dtype = numpy.ndarray
		data_package.data_halo = output_halo
		
		def get_return_dtype(function_name, argument_package_list):
			from Vivaldi_translator_layer import get_return_dtype
			function_code = function_code_dict[function_name]
			return_dtype = get_return_dtype(function_name, argument_package_list, function_code)
			if return_dtype.endswith('_volume'):
				print "Vivaldi_warning"
				print "---------------------------------"
				print "Check your function"
				print "you are trying to return a volume"
				print "return_dtype: ", return_dtype
				print "---------------------------------"
			return return_dtype
		return_dtype = get_return_dtype(function_name, argument_package_list)

		data_package.set_data_contents_dtype(return_dtype)
		data_package.set_full_data_range(work_range)
		data_package.set_data_range(work_range)
		data_package.halo = output_halo
		data_package.split = output_split
		data_package.shared = True
		return data_package
Exemplo n.º 15
0
	def get_argument_package_list(args, arg_names, split_dict, halo_dict):
		# Wrap each raw argument in a Data_package with its split/halo
		# modifiers applied.
		#
		# args       -- raw argument values, parallel to arg_names
		# arg_names  -- argument names from the user function's signature
		# split_dict -- per-name split modifiers (name -> split dict)
		# halo_dict  -- per-name halo sizes (name -> int)
		# returns    -- list of Data_package, one per argument
		#
		# Fixes over the previous version: `is None` instead of `== None`,
		# helper hoisted out of the loop instead of re-defined per
		# iteration, zip() instead of a manual parallel index, and
		# dict.get() instead of conditional-expression lookups.
		def _registered_or_new_id(obj):
			# Reuse the unique_id already registered for this object,
			# otherwise mint a fresh one from the module-level counter.
			aid = id(obj)
			if aid in data_package_list:
				return data_package_list[aid].get_unique_id()
			global unique_id
			unique_id += 1
			return unique_id

		argument_package_list = []
		for arg, data_name in zip(args, arg_names):
			if data_name in AXIS:
				# Axis names (e.g. coordinate variables) become
				# lightweight packages carrying only the name.
				argument_package = Data_package(data_name)
				argument_package.shared = False
			else:
				# Modifiers default to no split and zero halo.
				split = split_dict.get(data_name, {})
				halo = halo_dict.get(data_name, 0)
				if isinstance(arg, Data_package):
					# Already packaged: update modifiers in place and
					# only assign an id (and clear shared) when missing.
					argument_package = arg
					argument_package.split = split
					argument_package.halo = halo
					if argument_package.unique_id is None:
						argument_package.unique_id = _registered_or_new_id(arg)
						argument_package.shared = False
				else:
					argument_package = Data_package(arg, split=split, halo=halo)
					# Exact-type check on purpose: ndarray subclasses were
					# excluded by the original logic as well.
					if type(arg) is numpy.ndarray:
						argument_package.unique_id = _registered_or_new_id(arg)
						# Remember the array so later calls with the same
						# array reuse its unique_id.
						data_package_list[id(arg)] = argument_package
					else:
						# Non-array constants share the sentinel id -1.
						argument_package.unique_id = -1
					argument_package.shared = False
			argument_package_list.append(argument_package)
		return argument_package_list