def testDelayLoadAndPostCloning(ext):
    # create test data on disk
    filepath = "/tmp/clpost." + ext
    sz = 1000
    b_ = n.IntBuffer(sz)
    b_[60] = 600
    assert b_.clientSize() == sz
    n.save(b_, filepath)

    # test1: Make sure that cloning a buffer doesn't force any data resident. Also make
    # sure that if either buffer forces data resident via a read, both buffers get the
    # same resident data.
    b = n.load(filepath)
    assert b.clientSize() == 0
    b2 = b.clone()
    assert b.clientSize() == 0
    assert b2.clientSize() == 0
    assert b[60] == 600  # will cause buffer to load into mem
    assert b.clientSize() == sz
    assert b2.clientSize() == sz
    assert b.hasSharedStore(b2)

    # test2: Make sure that if a buffer's data is non-resident and it is then cloned,
    # accessing the clone for writing does NOT make the original buffer's data resident.
    c = n.load(filepath)
    assert c.uniqueStore()
    assert c.clientSize() == 0
    c2 = c.clone()
    assert c2.clientSize() == 0
    c2[6] = 66  # will cause data to load for c2, but still not for c...!
    assert c.clientSize() == 0
    assert c2.clientSize() == sz
def testTableSerialize(ext):
    # create test data on disk
    filepath = "/tmp/tbl1." + ext
    t = createUberTable()
    n.save(t, filepath)

    for delayload in [True, False]:
        t2 = n.load(filepath, delayload)
        assert n.areEqual(t, t2)
def testV3fBufSerialize(ext):
    # create test data on disk
    filepath = "/tmp/v3fbuf." + ext
    b = n.V3fBuffer(100)
    b[50] = p.V3f(3.3, 4.4, 5.5)
    n.save(b, filepath)

    b2 = n.load(filepath)
    assert type(b) == type(b2)
    assert len(b) == len(b2)
    vdiff = b[50] - b2[50]
    assert vdiff.length() < 0.001
def testSerializeBruteForce():
    fileprefix = "/tmp/tblbrute."
    t = createUberTable()

    # save
    files = []
    for ext in fileTypes:
        for compression in [0, 1, 2]:
            filepath = fileprefix + str(compression) + '.' + ext
            n.save(t, filepath, compression)
            files.append(filepath)

    # load
    for delayload in [True, False]:
        for f in files:
            t2 = n.load(f, delayload)
            assert n.areEqual(t, t2)
def testDelayLoadAndPreCloning(ext):
    # create test data on disk
    filepath = "/tmp/clpre." + ext
    sz = 13
    t_ = n.ObjectTable()
    b_ = n.IntBuffer(sz)
    b2_ = b_.clone()
    t_[1] = b_
    t_[2] = b2_
    n.save(t_, filepath)

    # test1: Make sure that when buffers are loaded, their cloned relationships are kept intact
    t = n.load(filepath)
    assert t.keys() == t_.keys()
    assert t[1].hasSharedStore(t[2])
    t[2][0]  # force resident via zeroeth element read
    assert t[1].clientSize() == sz
    assert t[1].hasSharedStore(t[2])
def testDelayLoad(ext):
    # create test data on disk
    filepath = "/tmp/dl1." + ext
    sz = 100
    t_ = n.ObjectTable()
    for i in range(10):
        t_[i] = n.IntBuffer(sz)
    n.save(t_, filepath)

    # test1: make sure a buffer's data isn't made resident until it's accessed
    t = n.load(filepath)
    expected_count = sz
    for i in t.iteritems():
        i[1][0]  # force resident via zeroeth element read
        count = 0
        for j in t.iteritems():
            count += j[1].clientSize()
        assert count == expected_count
        expected_count += sz
def testIntBufSerialize(ext):
    # create test data on disk
    filepath = "/tmp/intbuf." + ext
    sz = 50
    b = n.IntBuffer(sz)
    b[5] = 5
    n.save(b, filepath)

    b2 = n.load(filepath)
    if ext in delayLoadableFileTypes:
        assert b2.clientSize() == 0
    else:
        assert b2.clientSize() == sz
    assert type(b) == type(b2)
    assert len(b) == len(b2)
    assert b2[5] == b[5]
    if ext in delayLoadableFileTypes:
        assert b2.clientSize() == sz
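# Hypothetical driver, sketched only: fileTypes and delayLoadableFileTypes are
# module-level lists referenced by the tests above but not shown in this excerpt,
# and the split of tests between the two lists is an assumption.
def runSerializationTests():
    for ext in fileTypes:
        testIntBufSerialize(ext)
        testV3fBufSerialize(ext)
        testTableSerialize(ext)
    for ext in delayLoadableFileTypes:
        testDelayLoad(ext)
        testDelayLoadAndPreCloning(ext)
        testDelayLoadAndPostCloning(ext)
    testSerializeBruteForce()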
def writeNapalm(self, nap_file_name, curve_object, debug=False,
                map_file_name=None, software=None, app_version=None):
    """
    This function writes a curve object out to a napalm file and a napalm
    channel (mapping) file.

    :param nap_file_name: Filepath for writing the napalm data
    :type nap_file_name: string

    :param curve_object: Curve object to convert to a napalm file
    :type curve_object: curve class object

    :param debug: This option will turn on the debug output
    :type debug: bool

    :param map_file_name: Filepath for writing the napalm channel data
    :type map_file_name: string

    :param software: Client software name and version, comma separated
        (passed through to :func:`writeMappingTable`)
    :type software: string

    :param app_version: Application version recorded in the map file header
    :type app_version: string

    .. note::
        This function supports only a few tangent types:

            * flat
            * linear
            * spline
            * fixed
            * clamped

    .. warning::
        This function will end up with an error if you pass a wrongly
        structured curve object.

    .. seealso::
        * :func:`writeMappingTable`

    .. versionchanged:: 0.0.5
        Fixed the map_file_name handling.

    .. todo::
        More in-line comments should be added.

    :return: Write Status, Map File Path, Nap File Path
    :rtype: bool, string, string

    Example

        >>> <ObjectTable @ 0x241f730>
             0: <FloatBuffer at 0x23826e0 (FloatCpuStore[4] at 0x2417190)>
             1: <StringBuffer at 0x24a48f0 (StringCpuStore[4] at 0x239c330)>
             2: <FloatBuffer at 0x2411040 (FloatCpuStore[4] at 0x24bed30)>
             3: <StringBuffer at 0x2410c80 (StringCpuStore[4] at 0x2410cc0)>
             4: <FloatBuffer at 0x239a630 (FloatCpuStore[4] at 0x239a670)>
             5: <FloatBuffer at 0x2398120 (FloatCpuStore[4] at 0x2398160)>
             6: <FloatBuffer at 0x239bb80 (FloatCpuStore[4] at 0x239bbc0)>
             7: <FloatBuffer at 0x2388c20 (FloatCpuStore[4] at 0x2388c60)>
             8: <FloatBuffer at 0x2388e90 (FloatCpuStore[4] at 0x238af50)>
             9: <FloatBuffer at 0x238b150 (FloatCpuStore[4] at 0x238b190)>
            10: <FloatBuffer at 0x22405c0 (FloatCpuStore[4] at 0x2240600)>
    """
    self.napalm_data = curve_object
    nap_main_table = nap_core.ObjectTable()
    nap_status = False
    nap_file = None
    map_file = None
    counter_index = 0

    for each_node in self.napalm_data:
        curve_class = each_node[2]
        object_node = []
        object_dict = {}
        for eachCurve in curve_class:
            # Keys holding float data; everything else is stored as strings.
            flot_attr_list = ['time', 'key_value', 'in_weight', 'out_weight',
                              'in_angle', 'out_angle', 'in_slope', 'out_slope']
            curve_attr = str(eachCurve[1])
            map_data = {}
            for key in eachCurve[-1].keys():
                dict_key_val = eachCurve[-1][key]
                if key in flot_attr_list:
                    nap_main_table[counter_index] = nap_core.FloatBuffer(len(dict_key_val))
                else:
                    nap_main_table[counter_index] = nap_core.StringBuffer(len(dict_key_val))
                nap_main_table[counter_index].contents = dict_key_val
                map_data.update({key: counter_index})
                counter_index += 1
            object_node.append([curve_attr, map_data])
        object_dict.update({each_node[0]: [each_node[1], object_node]})
        self.mapping_data.append(object_dict)

    if debug:
        nap_core.dump(nap_main_table)

    try:
        nap_core.save(nap_main_table, nap_file_name)
        map_file = self.writeMappingTable(nap_file_name, map_file_name,
                                          software=software, app_version=app_version)
        nap_file = nap_file_name
        nap_status = True
        self.napalm_data = []
        self.mapping_data = []
    except Exception:
        traceback.print_exc()
        nap_status = False

    return (nap_status, map_file, nap_file)
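# A sketch of the curve_object layout implied by the indexing in writeNapalm
# above; the object/attribute names and values are only illustrative (borrowed
# from the writeMappingTable docstring example), and the first element of each
# curve entry is a placeholder since writeNapalm never reads it:
#   each_node[0] -> object name, each_node[1] -> eye value, each_node[2] -> curve list
#   eachCurve[1] -> attribute name, eachCurve[-1] -> dict of per-key value lists
_example_curve_object = [
    ["pSphere2",              # each_node[0]: object name
     "right",                 # each_node[1]: eye value ("eye_val" in the map file)
     [                        # each_node[2]: curves for this object
         [None,               # eachCurve[0]: not read by writeNapalm
          "rotateX",          # eachCurve[1]: attribute name
          {                   # eachCurve[-1]: per-key arrays, one buffer each
              "time": [1.0, 5.0, 10.0, 15.0],
              "key_value": [0.0, 10.0, 20.0, 30.0],
              "in_tan_type": ["spline", "spline", "spline", "spline"],
              "out_tan_type": ["spline", "spline", "spline", "spline"],
          }],
     ]],
]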
def writeMappingTable(self, nap_file_name, map_file_name=None, software=None, app_version=None):
    """
    This function writes out the napalm channel (mapping) data for a napalm file.

    :param nap_file_name: Filepath of the napalm data file the mapping refers to
    :type nap_file_name: string

    :param map_file_name: Filepath for writing the napalm channel data; derived
        from nap_file_name if not given
    :type map_file_name: string

    :param software: Client software name and version, comma separated
    :type software: string

    :param app_version: Application version recorded in the file header
    :type app_version: string

    .. note::
        This function will write out mapping data for the channel.

    Example

        >>> "pSphere2": <ObjectTable @ 0x1cfe910>
             "eye_val": "right"
             "rotateX": <ObjectTable @ 0x1cfea90>
                 "in_angle": 99
                 "in_slope": 98
                 "in_tan_type": 93
                 "in_weight": 92
                 "key_value": 90
                 "out_angle": 94
                 "out_slope": 96
                 "out_tan_type": 91
                 "out_weight": 95
                 "time": 97

    .. seealso::
        * :func:`writeNapalm`

    .. versionchanged:: 0.0.5
        Fixed the map_file_name handling.

    .. todo::
        More in-line comments should be added.

    :return: Map File Path
    :rtype: string
    """
    # Derive the map file name from the napalm file name if none was given,
    # e.g. "/tmp/foo.nap" -> "/tmp/foo_map.nap".
    if not map_file_name:
        ext_spliter = os.path.splitext(os.path.basename(nap_file_name))
        get_file_ext = ext_spliter[-1]
        set_file_base = "%s_map%s" % (ext_spliter[0], get_file_ext)
        map_file_name = "%s/%s" % (os.path.dirname(nap_file_name), set_file_base)

    # Build one ObjectTable per animated object, keyed by attribute name, with
    # each attribute mapping its per-key arrays to buffer indices in the nap file.
    map_main_table = nap_core.ObjectTable()
    for each_map_obj in self.mapping_data:
        object_keys = each_map_obj.keys()
        map_obj_table = nap_core.ObjectTable()
        for each_key in object_keys:
            obj_key_val = each_map_obj[each_key][-1]
            eye_value = each_map_obj[each_key][0]
            map_obj_table["eye_val"] = eye_value
            for each_curve in obj_key_val:
                nap_map_table = nap_core.ObjectTable()
                dic_val = each_curve[-1].keys()
                for each_dict_key in dic_val:
                    nap_map_table[each_dict_key] = each_curve[-1][each_dict_key]
                map_obj_table[each_curve[0]] = nap_map_table
            map_main_table[each_key] = map_obj_table

    # Header with provenance information for the mapping file.
    header_table = nap_core.ObjectTable()
    software_arg = str(software).split(",")[0]
    version = str(software).split(",")[1]
    date_time = time.strftime("%m/%d/%y-%H-%M-%S", time.localtime())
    header_table["nap_file"] = nap_file_name
    header_table["date_time"] = date_time
    header_table["kip_version"] = os.getenv("DRD_KIP_VERSION")
    header_table["app_version"] = app_version
    header_table["client_software"] = software_arg
    header_table["client_version"] = version
    header_table["user"] = os.getenv("USER")
    map_main_table["header"] = header_table

    try:
        nap_core.save(map_main_table, map_file_name)
    except Exception:
        traceback.print_exc()
        map_file_name = None

    return map_file_name
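# Hypothetical usage (the enclosing writer class is not part of this excerpt, so
# "CurveWriter" and the argument values below are illustrative only):
#
#     writer = CurveWriter()
#     status, map_file, nap_file = writer.writeNapalm(
#         "/tmp/pSphere2_anim.nap",      # napalm curve data file
#         _example_curve_object,         # structure sketched above
#         debug=True,                    # dump the ObjectTable before saving
#         software="maya,2014",          # split into client_software / client_version
#         app_version="1.0.0",           # recorded in the map file header
#     )
#
# writeNapalm returns (status, map_file_path, nap_file_path); when map_file_name
# is omitted, the channel map is written next to the napalm file, here as
# "/tmp/pSphere2_anim_map.nap".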