def testDelayLoadAndPostCloning(ext):
    """Verify cloning behaviour of delay-loaded buffers.

    A clone taken before any access must stay non-resident; a read
    through either buffer must leave both sharing one resident store;
    and a write through the clone must not force the original resident.
    """
    # Write some test data to disk first.
    filepath = "/tmp/clpost." + ext
    sz = 1000
    src = n.IntBuffer(sz)
    src[60] = 600
    assert src.clientSize() == sz
    n.save(src, filepath)

    # test1: cloning must not force any data resident, and a read via
    # either buffer must give both the same resident data.
    b = n.load(filepath)
    assert b.clientSize() == 0
    b2 = b.clone()
    assert b.clientSize() == 0
    assert b2.clientSize() == 0
    assert b[60] == 600  # read forces the buffer into memory
    assert b.clientSize() == sz
    assert b2.clientSize() == sz
    assert b.hasSharedStore(b2)

    # test2: writing through a clone of a non-resident buffer must NOT
    # make the original buffer's data resident.
    c = n.load(filepath)
    assert c.uniqueStore()
    assert c.clientSize() == 0
    c2 = c.clone()
    assert c2.clientSize() == 0
    c2[6] = 66  # loads data for c2, but still not for c
    assert c.clientSize() == 0
    assert c2.clientSize() == sz
def testDelayLoadAndPostCloning(ext):
    """Verify cloning behaviour of delay-loaded buffers.

    A clone taken before any access must stay non-resident; a read
    through either buffer must leave both sharing one resident store;
    and a write through the clone must not force the original resident.
    """
    # Write some test data to disk first.
    filepath = "/tmp/clpost." + ext
    sz = 1000
    src = n.IntBuffer(sz)
    src[60] = 600
    assert src.clientSize() == sz
    n.save(src, filepath)

    # test1: cloning must not force any data resident, and a read via
    # either buffer must give both the same resident data.
    b = n.load(filepath)
    assert b.clientSize() == 0
    b2 = b.clone()
    assert b.clientSize() == 0
    assert b2.clientSize() == 0
    assert b[60] == 600  # read forces the buffer into memory
    assert b.clientSize() == sz
    assert b2.clientSize() == sz
    assert b.hasSharedStore(b2)

    # test2: writing through a clone of a non-resident buffer must NOT
    # make the original buffer's data resident.
    c = n.load(filepath)
    assert c.uniqueStore()
    assert c.clientSize() == 0
    c2 = c.clone()
    assert c2.clientSize() == 0
    c2[6] = 66  # loads data for c2, but still not for c
    assert c.clientSize() == 0
    assert c2.clientSize() == sz
def build_node_tree(nap_file_name, map_file_name=None):
    """Collect "node.attribute" name strings from a napalm map file.

    :param nap_file_name: Filepath for reading napalm data
    :type nap_file_name: string
    :param map_file_name: Filepath for reading napalm channel data;
        derived from *nap_file_name* when omitted
    :type map_file_name: string
    :return: list of node.attribute names
    :rtype: list
    """
    if not map_file_name:
        map_file_name = build_map_file_name(nap_file_name)
    node_table = nap_core.load(map_file_name)
    node_attribute_tree = []
    for node_name in node_table.keys():
        # The "header" entry carries metadata, not node attributes.
        if node_name == "header":
            continue
        for attr_name in node_table[node_name].keys():
            # "eye_val" is a per-object marker, not an attribute.
            if attr_name != "eye_val":
                node_attribute_tree.append("%s.%s" % (node_name, attr_name))
    return node_attribute_tree
def getChannel(self, nap_file_name, channel_number):
    """Return the contents of one channel from a napalm file.

    :param nap_file_name: Filepath to read napalm file.
    :type nap_file_name: string
    :param channel_number: Channel number for getting data from the
        channel file.
    :type channel_number: int
    :return: Napalm Object
    :rtype: NapalmBuffer

    Example

    >>> import kip.kip_napalm_class as knc
    >>> kncw = knc.Napalm()
    >>> nap_tab = kncw.getChannel(nap_file_name,50)
    """
    if not os.path.exists(nap_file_name):
        # NOTE(review): silently returns None for a missing file —
        # confirm callers expect that rather than an exception.
        return None
    chan_table = nap_core.load(nap_file_name)
    return chan_table[channel_number].contents
def getAllChannles(self, nap_file_name):
    """Load and return the whole channel table from a napalm file.

    :param nap_file_name: Filepath to read napalm file.
    :type nap_file_name: string
    :return: Napalm Object
    :rtype: NapalmBuffer

    Example

    >>> import kip.kip_napalm_class as knc
    >>> kncw = knc.Napalm()
    >>> nap_tab = kncw.getAllChannles(nap_file_name)
    >>> nap_tab["key_value"].contents
    """
    # Implicitly returns None when the file is missing.
    if os.path.exists(nap_file_name):
        return nap_core.load(nap_file_name)
def getAllChannles(self, nap_file_name):
    """Load and return the whole channel table from a napalm file.

    :param nap_file_name: Filepath to read napalm file.
    :type nap_file_name: string
    :return: Napalm Object
    :rtype: NapalmBuffer

    Example

    >>> import kip.kip_napalm_class as knc
    >>> kncw = knc.Napalm()
    >>> nap_tab = kncw.getAllChannles(nap_file_name)
    >>> nap_tab["key_value"].contents
    """
    # Implicitly returns None when the file is missing.
    if os.path.exists(nap_file_name):
        return nap_core.load(nap_file_name)
def testTableSerialize(ext):
    """Round-trip an uber-table through disk, with and without delay-load."""
    filepath = "/tmp/tbl1." + ext
    table = createUberTable()
    n.save(table, filepath)
    for delayload in (True, False):
        reloaded = n.load(filepath, delayload)
        assert n.areEqual(table, reloaded)
def testTableSerialize(ext):
    """Round-trip an uber-table through disk, with and without delay-load."""
    filepath = "/tmp/tbl1." + ext
    table = createUberTable()
    n.save(table, filepath)
    for delayload in (True, False):
        reloaded = n.load(filepath, delayload)
        assert n.areEqual(table, reloaded)
def testFile():
    """Exercise the error paths of n.load on bad input files."""
    # test1: a missing file must raise NapalmFileError.
    raised = False
    try:
        n.load("some_nonexistant_file.foo")
    except n.NapalmFileError:
        raised = True
    assert raised

    # test2: an existing file that is not napalm data must raise
    # NapalmSerializeError.
    with open("/tmp/notnapalm.txt", "w") as f:
        f.write("this is not a napalm file.")
    raised = False
    try:
        n.load("/tmp/notnapalm.txt")
    except n.NapalmSerializeError:
        raised = True
    assert raised
def testFile():
    """Exercise the error paths of n.load on bad input files."""
    # test1: a missing file must raise NapalmFileError.
    raised = False
    try:
        n.load("some_nonexistant_file.foo")
    except n.NapalmFileError:
        raised = True
    assert raised

    # test2: an existing file that is not napalm data must raise
    # NapalmSerializeError.
    with open("/tmp/notnapalm.txt", "w") as f:
        f.write("this is not a napalm file.")
    raised = False
    try:
        n.load("/tmp/notnapalm.txt")
    except n.NapalmSerializeError:
        raised = True
    assert raised
def testV3fBufSerialize(ext):
    """Round-trip a V3fBuffer through disk and check the stored vector."""
    filepath = "/tmp/v3fbuf." + ext
    buf = n.V3fBuffer(100)
    buf[50] = p.V3f(3.3, 4.4, 5.5)
    n.save(buf, filepath)
    reloaded = n.load(filepath)
    assert type(buf) == type(reloaded)
    assert len(buf) == len(reloaded)
    # Compare the vectors with a small tolerance rather than exactly.
    assert (buf[50] - reloaded[50]).length() < 0.001
def testV3fBufSerialize(ext):
    """Round-trip a V3fBuffer through disk and check the stored vector."""
    filepath = "/tmp/v3fbuf." + ext
    buf = n.V3fBuffer(100)
    buf[50] = p.V3f(3.3, 4.4, 5.5)
    n.save(buf, filepath)
    reloaded = n.load(filepath)
    assert type(buf) == type(reloaded)
    assert len(buf) == len(reloaded)
    # Compare the vectors with a small tolerance rather than exactly.
    assert (buf[50] - reloaded[50]).length() < 0.001
def header(map_file_name):
    """Return the header details stored in a napalm map file.

    :param map_file_name: Filepath of napalm channel data
    :type map_file_name: string
    :return: header details
    :rtype: dict
    """
    return nap_core.load(map_file_name)["header"]
def testSerializeBruteForce():
    """Save an uber-table for every file type / compression combination,
    then reload each file (delay-loaded and not) and check equality."""
    fileprefix = "/tmp/tblbrute."
    table = createUberTable()

    # Save one file per (extension, compression level) pair.
    saved = []
    for ext in fileTypes:
        for compression in (0, 1, 2):
            filepath = fileprefix + str(compression) + '.' + ext
            n.save(table, filepath, compression)
            saved.append(filepath)

    # Every saved file must load back equal, delay-loaded or not.
    for delayload in (True, False):
        for filepath in saved:
            assert n.areEqual(table, n.load(filepath, delayload))
def read(self, filename):
    """Load and return the napalm table stored in *filename*.

    :param filename: Filepath to read napalm file.
    :type filename: string
    :return: Napalm Table Structure
    :rtype: ObjectTable
    """
    # Implicitly returns None for an empty/None filename.
    if not filename:
        return None
    return nap_core.load(filename)
def testSerializeBruteForce():
    """Save an uber-table for every file type / compression combination,
    then reload each file (delay-loaded and not) and check equality."""
    fileprefix = "/tmp/tblbrute."
    table = createUberTable()

    # Save one file per (extension, compression level) pair.
    saved = []
    for ext in fileTypes:
        for compression in (0, 1, 2):
            filepath = fileprefix + str(compression) + '.' + ext
            n.save(table, filepath, compression)
            saved.append(filepath)

    # Every saved file must load back equal, delay-loaded or not.
    for delayload in (True, False):
        for filepath in saved:
            assert n.areEqual(table, n.load(filepath, delayload))
def testDelayLoadAndPreCloning(ext):
    """Check that clone relationships survive a save/load round trip."""
    # Build a table holding a buffer and its clone, then save it.
    filepath = "/tmp/clpre." + ext
    sz = 13
    table = n.ObjectTable()
    buf = n.IntBuffer(sz)
    table[1] = buf
    table[2] = buf.clone()
    n.save(table, filepath)

    # test1: after loading, the buffers must still share one store.
    loaded = n.load(filepath)
    assert loaded.keys() == table.keys()
    assert loaded[1].hasSharedStore(loaded[2])
    loaded[2][0]  # force resident via zeroeth element read
    assert loaded[1].clientSize() == sz
    assert loaded[1].hasSharedStore(loaded[2])
def testDelayLoadAndPreCloning(ext):
    """Check that clone relationships survive a save/load round trip."""
    # Build a table holding a buffer and its clone, then save it.
    filepath = "/tmp/clpre." + ext
    sz = 13
    table = n.ObjectTable()
    buf = n.IntBuffer(sz)
    table[1] = buf
    table[2] = buf.clone()
    n.save(table, filepath)

    # test1: after loading, the buffers must still share one store.
    loaded = n.load(filepath)
    assert loaded.keys() == table.keys()
    assert loaded[1].hasSharedStore(loaded[2])
    loaded[2][0]  # force resident via zeroeth element read
    assert loaded[1].clientSize() == sz
    assert loaded[1].hasSharedStore(loaded[2])
def testDelayLoad(ext):
    """Check buffers only become resident once they are actually read."""
    # Save a table of ten int buffers to disk.
    filepath = "/tmp/dl1." + ext
    sz = 100
    table = n.ObjectTable()
    for i in range(10):
        table[i] = n.IntBuffer(sz)
    n.save(table, filepath)

    # test1: each element read should make exactly one more buffer
    # resident, so total resident size grows by sz per iteration.
    loaded = n.load(filepath)
    expected_count = sz
    for key, buf in loaded.iteritems():
        buf[0]  # force resident via zeroeth element read
        total = sum(item[1].clientSize() for item in loaded.iteritems())
        assert total == expected_count
        expected_count += sz
def testDelayLoad(ext):
    """Check buffers only become resident once they are actually read."""
    # Save a table of ten int buffers to disk.
    filepath = "/tmp/dl1." + ext
    sz = 100
    table = n.ObjectTable()
    for i in range(10):
        table[i] = n.IntBuffer(sz)
    n.save(table, filepath)

    # test1: each element read should make exactly one more buffer
    # resident, so total resident size grows by sz per iteration.
    loaded = n.load(filepath)
    expected_count = sz
    for key, buf in loaded.iteritems():
        buf[0]  # force resident via zeroeth element read
        total = sum(item[1].clientSize() for item in loaded.iteritems())
        assert total == expected_count
        expected_count += sz
def testIntBufSerialize(ext):
    """Round-trip an IntBuffer and verify delay-load residency behaviour."""
    filepath = "/tmp/intbuf." + ext
    sz = 50
    buf = n.IntBuffer(sz)
    buf[5] = 5
    n.save(buf, filepath)

    reloaded = n.load(filepath)
    if ext in delayLoadableFileTypes:
        # Delay-loadable formats start with nothing resident.
        assert reloaded.clientSize() == 0
    else:
        assert reloaded.clientSize() == sz
    assert type(buf) == type(reloaded)
    assert len(buf) == len(reloaded)
    assert reloaded[5] == buf[5]
    if ext in delayLoadableFileTypes:
        # The element read above must have forced the data resident.
        assert reloaded.clientSize() == sz
def testIntBufSerialize(ext):
    """Round-trip an IntBuffer and verify delay-load residency behaviour."""
    filepath = "/tmp/intbuf." + ext
    sz = 50
    buf = n.IntBuffer(sz)
    buf[5] = 5
    n.save(buf, filepath)

    reloaded = n.load(filepath)
    if ext in delayLoadableFileTypes:
        # Delay-loadable formats start with nothing resident.
        assert reloaded.clientSize() == 0
    else:
        assert reloaded.clientSize() == sz
    assert type(buf) == type(reloaded)
    assert len(buf) == len(reloaded)
    assert reloaded[5] == buf[5]
    if ext in delayLoadableFileTypes:
        # The element read above must have forced the data resident.
        assert reloaded.clientSize() == sz
def read(self, nap_file_name=None, map_file_name=None, offset_value=0):
    """
    Return curve objects built from a napalm channel file and its map
    file (kept for backward compatibility).

    :param nap_file_name: Filepath for reading napalm data
    :type nap_file_name: string
    :param map_file_name: Filepath for reading napalm channel data;
        derived from *nap_file_name* when omitted
    :type map_file_name: string
    :param offset_value: Animation key offset value added to every key time
    :type offset_value: int

    .. warning::
        This function is not supposed to be called from any API call;
        use :func:`getCurves` for creating curves.

    .. seealso::
        * :func:`getCurves`

    :return: Curve Object list (None when *nap_file_name* is not given
        or the map file is missing)
    :rtype: list
    """
    curve_objects = []
    if nap_file_name:
        if not map_file_name:
            map_file_name = build_map_file_name(nap_file_name)
        if os.path.exists(map_file_name):
            map_table = nap_core.load(map_file_name)
            # Load the channel table ONCE up front; the previous version
            # re-read the napalm file for every attribute of every object
            # (and also made a redundant, unused nap_core.load here).
            get_all_channels = napalm_func.getAllChannles(nap_file_name)
            for obj_name in map_table.keys():
                if obj_name == "header":
                    # Header entry holds metadata, not an animated object.
                    continue
                kipCurveClassObject = ClassObject()
                kipCurveObject = CurveClass()
                get_obj_table = map_table[obj_name]
                eye_value = get_obj_table["eye_val"]
                for attr_name in get_obj_table.keys():
                    if attr_name == "eye_val":
                        continue
                    curve_dict = get_obj_table[attr_name]
                    curve_type = attr_name[:-1]
                    curve_attr = attr_name
                    time_keys = get_all_channels[curve_dict["time"]].contents
                    key_value = get_all_channels[curve_dict["key_value"]].contents
                    in_angle = get_all_channels[curve_dict["in_angle"]].contents
                    out_angle = get_all_channels[curve_dict["out_angle"]].contents
                    in_weight = get_all_channels[curve_dict["in_weight"]].contents
                    out_weight = get_all_channels[curve_dict["out_weight"]].contents
                    in_tan_type = get_all_channels[curve_dict["in_tan_type"]].contents
                    out_tan_type = get_all_channels[curve_dict["out_tan_type"]].contents
                    in_slope = get_all_channels[curve_dict["in_slope"]].contents
                    out_slope = get_all_channels[curve_dict["out_slope"]].contents
                    key = CurveKeyClass()
                    # enumerate() replaces time_keys.index(), which was
                    # O(n^2) and picked the wrong key whenever two keys
                    # shared the same time value.
                    for key_index, time_fr in enumerate(time_keys):
                        key.time.append(time_fr + offset_value)
                        key.value.append(key_value[key_index])
                        key.in_angle.append(in_angle[key_index])
                        key.out_angle.append(out_angle[key_index])
                        key.in_weight.append(in_weight[key_index])
                        key.out_weight.append(out_weight[key_index])
                        key.in_tan_type.append(in_tan_type[key_index])
                        key.out_tan_type.append(out_tan_type[key_index])
                        key.in_slope.append(in_slope[key_index])
                        key.out_slope.append(out_slope[key_index])
                    kipCurveObject.keys.append(key)
                    kipCurveObject.parm.append(curve_type)
                    kipCurveObject.parm_attr.append(curve_attr)
                kipCurveClassObject.name = obj_name
                kipCurveClassObject.type = eye_value
                kipCurveClassObject.animation.append(kipCurveObject)
                curve_objects.append(kipCurveClassObject.output())
            return curve_objects
        else:
            rodin_logger.error("%s does not exist in filesystem" % map_file_name)
def read(self, nap_file_name=None, map_file_name=None, offset_value=0):
    """
    Return curve objects built from a napalm channel file and its map
    file (kept for backward compatibility).

    :param nap_file_name: Filepath for reading napalm data
    :type nap_file_name: string
    :param map_file_name: Filepath for reading napalm channel data;
        derived from *nap_file_name* when omitted
    :type map_file_name: string
    :param offset_value: Animation key offset value added to every key time
    :type offset_value: int

    .. warning::
        This function is not supposed to be called from any API call;
        use :func:`getCurves` for creating curves.

    .. seealso::
        * :func:`getCurves`

    :return: Curve Object list (None when *nap_file_name* is not given
        or the map file is missing)
    :rtype: list
    """
    curve_objects = []
    if nap_file_name:
        if not map_file_name:
            map_file_name = build_map_file_name(nap_file_name)
        if os.path.exists(map_file_name):
            map_table = nap_core.load(map_file_name)
            # Load the channel table ONCE up front; the previous version
            # re-read the napalm file for every attribute of every object
            # (and also made a redundant, unused nap_core.load here).
            get_all_channels = napalm_func.getAllChannles(nap_file_name)
            for obj_name in map_table.keys():
                if obj_name == "header":
                    # Header entry holds metadata, not an animated object.
                    continue
                kipCurveClassObject = ClassObject()
                kipCurveObject = CurveClass()
                get_obj_table = map_table[obj_name]
                eye_value = get_obj_table["eye_val"]
                for attr_name in get_obj_table.keys():
                    if attr_name == "eye_val":
                        continue
                    curve_dict = get_obj_table[attr_name]
                    curve_type = attr_name[:-1]
                    curve_attr = attr_name
                    time_keys = get_all_channels[curve_dict["time"]].contents
                    key_value = get_all_channels[curve_dict["key_value"]].contents
                    in_angle = get_all_channels[curve_dict["in_angle"]].contents
                    out_angle = get_all_channels[curve_dict["out_angle"]].contents
                    in_weight = get_all_channels[curve_dict["in_weight"]].contents
                    out_weight = get_all_channels[curve_dict["out_weight"]].contents
                    in_tan_type = get_all_channels[curve_dict["in_tan_type"]].contents
                    out_tan_type = get_all_channels[curve_dict["out_tan_type"]].contents
                    in_slope = get_all_channels[curve_dict["in_slope"]].contents
                    out_slope = get_all_channels[curve_dict["out_slope"]].contents
                    key = CurveKeyClass()
                    # enumerate() replaces time_keys.index(), which was
                    # O(n^2) and picked the wrong key whenever two keys
                    # shared the same time value.
                    for key_index, time_fr in enumerate(time_keys):
                        key.time.append(time_fr + offset_value)
                        key.value.append(key_value[key_index])
                        key.in_angle.append(in_angle[key_index])
                        key.out_angle.append(out_angle[key_index])
                        key.in_weight.append(in_weight[key_index])
                        key.out_weight.append(out_weight[key_index])
                        key.in_tan_type.append(in_tan_type[key_index])
                        key.out_tan_type.append(out_tan_type[key_index])
                        key.in_slope.append(in_slope[key_index])
                        key.out_slope.append(out_slope[key_index])
                    kipCurveObject.keys.append(key)
                    kipCurveObject.parm.append(curve_type)
                    kipCurveObject.parm_attr.append(curve_attr)
                kipCurveClassObject.name = obj_name
                kipCurveClassObject.type = eye_value
                kipCurveClassObject.animation.append(kipCurveObject)
                curve_objects.append(kipCurveClassObject.output())
            return curve_objects
        else:
            rodin_logger.error("%s does not exist in filesystem" % map_file_name)