def generate(self, include_node=False):
    """Render and write the package build files (CMakeLists.txt, package.xml).

    :param include_node: when True, the raw MAVLink message file is added
        to the CMake message list so the translation node can be built.
    """
    # Collect the dependency snippets for every included MAVLink package.
    msg_parts = []
    build_parts = []
    run_parts = []
    for include in self.mav_generator.parser.includes:
        pkg = include.package_name
        msg_parts.append(pkg + os.linesep)
        build_parts.append("\t" + "<build_depend>" + pkg + "</build_depend>" + os.linesep)
        run_parts.append("\t" + "<run_depend>" + pkg + "</run_depend>" + os.linesep)
    messages_deps = "".join(msg_parts)
    build_deps = "".join(build_parts)
    run_deps = "".join(run_parts)
    raw_message = MAV_RAW_DATA_MSG + ".msg" if include_node else ""
    # Fill the CMakeLists template placeholder by placeholder.
    cmakelists = (cmakelists_template
                  .replace(CMAKE_PKG_ADD_MESSAGE_PLACEHOLDER, self.__gen_cmake_add_messages())
                  .replace(CMAKE_PKG_ADD_RAW_MESSAGE_PLACEHOLDER, raw_message)
                  .replace(CMAKE_PKG_ADD_NODE_PLACEHOLDER, self.__gen_cmake_node(include_node))
                  .replace(CMAKE_PKG_GENERATE_MESSAGES_PLACEHOLDER, messages_deps)
                  .replace(CMAKE_PKG_DEPS_PLACEHOLDER, messages_deps)
                  .replace(PKG_NAME_PLACEHOLDER, self.mav_generator.package_name))
    generate_file(
        self.mav_generator.package_dir + os.sep + "CMakeLists.txt", cmakelists)
    # Same treatment for the package manifest.
    package_xml = (package_xml_template
                   .replace(PKG_XML_RUN_DEPS_PLACEHOLDER, run_deps)
                   .replace(PKG_XML_BUILD_DEPS_PLACEHOLDER, build_deps)
                   .replace(PKG_NAME_PLACEHOLDER, self.mav_generator.package_name))
    generate_file(self.mav_generator.package_dir + os.sep + "package.xml", package_xml)
def generate(self, include_node=False):
    """Write CMakeLists.txt and package.xml for the generated ROS package.

    :param include_node: if True, register the raw MAVLink message so the
        generated node is part of the build.
    """
    messages_deps = ""
    run_deps = ""
    build_deps = ""
    # One dependency entry per included MAVLink package.
    for include in self.mav_generator.parser.includes:
        name = include.package_name
        messages_deps += name + os.linesep
        build_deps += "\t<build_depend>%s</build_depend>%s" % (name, os.linesep)
        run_deps += "\t<run_depend>%s</run_depend>%s" % (name, os.linesep)
    if include_node:
        raw_message = MAV_RAW_DATA_MSG + ".msg"
    else:
        raw_message = ""
    pkg_name = self.mav_generator.package_name
    # Substitute each placeholder one step at a time for readability.
    cmakelists = cmakelists_template.replace(
        CMAKE_PKG_ADD_MESSAGE_PLACEHOLDER, self.__gen_cmake_add_messages())
    cmakelists = cmakelists.replace(CMAKE_PKG_ADD_RAW_MESSAGE_PLACEHOLDER, raw_message)
    cmakelists = cmakelists.replace(CMAKE_PKG_ADD_NODE_PLACEHOLDER,
                                    self.__gen_cmake_node(include_node))
    cmakelists = cmakelists.replace(CMAKE_PKG_GENERATE_MESSAGES_PLACEHOLDER, messages_deps)
    cmakelists = cmakelists.replace(CMAKE_PKG_DEPS_PLACEHOLDER, messages_deps)
    cmakelists = cmakelists.replace(PKG_NAME_PLACEHOLDER, pkg_name)
    generate_file(self.mav_generator.package_dir + os.sep + "CMakeLists.txt", cmakelists)
    package_xml = package_xml_template.replace(PKG_XML_RUN_DEPS_PLACEHOLDER, run_deps)
    package_xml = package_xml.replace(PKG_XML_BUILD_DEPS_PLACEHOLDER, build_deps)
    package_xml = package_xml.replace(PKG_NAME_PLACEHOLDER, pkg_name)
    generate_file(self.mav_generator.package_dir + os.sep + "package.xml", package_xml)
def __init__(self, definitions, output_dir):
    """Set up the ROS package directory layout for a MAVLink definitions file.

    :param definitions: path to the MAVLink XML definitions file.
    :param output_dir: directory under which the package is created.
    :raises ValueError: if definitions or output_dir is None.
    """
    # BUGFIX: the original raised plain strings ("string exceptions"),
    # which raises TypeError instead of the intended message. Raise a
    # proper exception type with the same message text.
    if definitions is None:
        raise ValueError("Invalid definitions file: cannot be NoneType")
    if output_dir is None:
        raise ValueError("Invalid Output dir: cannot be NoneType")
    self.output_dir = output_dir
    filename, _ = os.path.splitext(os.path.basename(definitions))
    # setup ROS package name
    self.package_name = generate_pkg_name(filename)
    # setup ROS package paths
    self.package_dir = output_dir + os.sep + self.package_name
    self.include_dir = self.package_dir + os.sep + "include" + os.sep + self.package_name
    self.source_dir = self.package_dir + os.sep + "src"
    self.msg_dir = self.package_dir + os.sep + "msg"
    # force path creation
    mk_dirs(self.package_dir)
    mk_dirs(self.include_dir)
    # to avoid ROS catkin compilation error if the include folder is empty
    generate_file(self.include_dir + os.sep + ".keep", "")
    mk_dirs(self.source_dir)
    mk_dirs(self.msg_dir)
    # create parser instance
    self.parser = MAVParser(self.package_name, definitions)
def predict():
    """Run the ten trained HybridCNNSS checkpoints over the test set and
    write the averaged (ensemble) predictions to the output file.
    """
    source_data, target_data, test_data, word2id = utils.load_data()
    embeddings = utils.load_embeddings(word2id)
    print("测试集大小 %d" % len(test_data))
    results = []
    # The ten HybridCNNSS checkpoints only differ by their numeric suffix,
    # so build and run them in a loop instead of ten copy-pasted blocks.
    for i in range(1, 11):
        graph = Graph('HybridCNNSS', 'HybridCNNSS%d' % i, embeddings)
        results.append(graph.run(test_data))
    predicts = []
    # Average the per-sample predictions across the ensemble members.
    # (renamed loop variable: the original shadowed the function name)
    for sample in np.stack(results, axis=1):
        predicts.append(1.0 * sum(sample) / len(sample))
    utils.generate_file(predicts)
def test2(self):
    """A stored file must remain retrievable after k-1 minions go offline."""
    print("Test 2 running.............")
    self.webservice.start_all_services()
    # Precondition: upload one file through the proxy.
    client_service = client(self.webservice.proxy_port)
    file_path = './test2.txt'
    payload = "test2 data"
    namespace = 'test2'
    generate_file(file_path, payload)
    status = client_service.put(file_path, namespace)
    assert status is True, "Put failed"
    # End of precondition: take down all but one replica holder.
    self.webservice.kill_random_minions(replication_factor - 1)
    retrieved = client_service.get(namespace)
    # The surviving replica must still serve the original content.
    assert payload == retrieved, ("Data corrupted!")
    print("[Test 2 passed] k - 1 minion offline successful!")
    removefiles([file_path])
    self.webservice.cleanup()
def __init__(self, definitions, output_dir):
    """Create the ROS package skeleton (include/src/msg dirs) for a
    MAVLink definitions file and attach a parser for it.

    :param definitions: path to the MAVLink XML definitions file.
    :param output_dir: directory where the generated package is created.
    :raises ValueError: if either argument is None.
    """
    # BUGFIX: `raise "some string"` raises TypeError (string exceptions
    # are not supported); use ValueError with the same message instead.
    if definitions is None:
        raise ValueError("Invalid definitions file: cannot be NoneType")
    if output_dir is None:
        raise ValueError("Invalid Output dir: cannot be NoneType")
    self.output_dir = output_dir
    filename, _ = os.path.splitext(os.path.basename(definitions))
    # setup ROS package name
    self.package_name = generate_pkg_name(filename)
    # setup ROS package paths
    self.package_dir = output_dir + os.sep + self.package_name
    self.include_dir = self.package_dir + os.sep + "include" + os.sep + self.package_name
    self.source_dir = self.package_dir + os.sep + "src"
    self.msg_dir = self.package_dir + os.sep + "msg"
    # force path creation
    mk_dirs(self.package_dir)
    mk_dirs(self.include_dir)
    # to avoid ROS catkin compilation error if the include folder is empty
    generate_file(self.include_dir + os.sep + ".keep", "")
    mk_dirs(self.source_dir)
    mk_dirs(self.msg_dir)
    # create parser instance
    self.parser = MAVParser(self.package_name, definitions)
def generate(self):
    """Emit the C++ header and the ROS node source file for the package."""
    header_name = "mavlink2ros"
    header_path = self.mav_generator.include_dir + os.sep + header_name + ".h"
    generate_file(header_path, self.__gen_header_file(header_name))
    node_path = (self.mav_generator.source_dir + os.sep
                 + self.mav_generator.package_name + "_node.cpp")
    generate_file(node_path, self.__gen_source_file())
def main():
    """Parse every event source folder and emit the combined output file."""
    # Keep the original folder order: it determines the event sequence.
    for folder in ("Files/Trading/", "Files/Deposits/", "Files/Buy/",
                   "Files/Staking/", "Files/Conversions/", "Files/Airdrops/"):
        events.extend(parse_file(folder))
    generate_file(events)
def test_compress_decompress(teardown_env, filesize):
    """Round-trip: compressing then decompressing must reproduce the input."""
    generate_file(filesize=filesize)
    compress('_test')
    decompress('_test_decomp')
    original = get_content('_test')
    restored = get_content('_test_decomp')
    assert original == restored
def main():
    """Benchmark 100 compress/decompress round-trips on a 100 KiB file."""
    generate_file(1024 * 100)
    elapsed_time: float = timeit.timeit(
        stmt='compress_decompress()',
        setup='from __main__ import compress_decompress',
        number=100,
    )
    print(f'time = {elapsed_time} sec')
    # Remove every artefact the benchmark produced.
    for leftover in ('_test', '_test_decomp', 'test.huf'):
        remove_file(leftover)
def generate_xdpd(fields):
    """Generate the xDPd packet-classifier and pipeline source files.

    :param fields: list of field descriptors driving the code generators.
    """
    add_fields_properties(fields)
    header = fields[0]['header']  # unused here; kept for parity with the other generators
    classifiers = XDPD_GNU_LINUX_DIR + '/io/packet_classifiers/'
    generate_file(classifiers + 'packetclassifier.h', generate_packet_classifier_h(fields))
    generate_file(classifiers + 'static_pktclassifier.h', generate_static_pktclassifier_h(fields))
    generate_file(classifiers + 'static_pktclassifier.cc', generate_static_pktclassifier_c(fields))
    pipeline_imp = XDPD_GNU_LINUX_DIR + '/pipeline-imp/'
    generate_file(pipeline_imp + 'packet.cc', generate_packet_c(fields))
    openflow12 = XDPD_OPENFLOW + '/openflow12/'
    generate_file(openflow12 + 'of12_translation_utils.cc', generate_translation_utils_c(fields))
def test6(self):
    """k-way replication repair after k-1 minions go offline."""
    print("Test 6 running.............")
    self.webservice.start_all_services()
    # Precondition: upload two files through the proxy.
    client_service = client(self.webservice.proxy_port)
    second_path, second_text, second_name = './test2.txt', "test2 data", 'test2'
    generate_file(second_path, second_text)
    first_path, first_text, first_name = './test1.txt', "test1 data", 'test1'
    generate_file(first_path, first_text)
    client_service.put(second_path, second_name)
    client_service.put(first_path, first_name)
    # Randomly kill k - 1 nodes, then inspect cluster and file state.
    self.webservice.kill_random_minions(replication_factor - 1)
    self.webservice.minion_status_report()
    print("Before fixing.")
    self.webservice.file_status_report()
    # Re-replicate so every file has k copies again, then re-inspect.
    self.webservice.fix_k_way_replication()
    print("After fixing.")
    self.webservice.file_status_report()
    removefiles([first_path, second_path])
    self.webservice.cleanup()
def test4(self):
    """put/get must keep working when k-1 minions are killed beforehand."""
    print("Test 4 running.............")
    self.webservice.start_all_services()
    # Take down all but one replica holder BEFORE any upload happens.
    self.webservice.kill_random_minions(replication_factor - 1)
    self.webservice.minion_status_report()
    file_path = './test1.txt'
    payload = "this is test1"
    namespace = 'test1'
    client_service = client(self.webservice.proxy_port)
    generate_file(file_path, payload)
    client_service.put(file_path, namespace)
    fetched = client_service.get(namespace)
    assert fetched == payload, "Get or put not working after killing some minions"
    print("[Test 4 passed] After killing some minions, put and get continue to work!")
    removefiles([file_path])
    self.webservice.cleanup()
def generate_rofl_frames(fields):
    """Generate the ROFL common-protocol frame class and its Makefile.

    :param fields: field descriptors; only entries carrying a 'field'
        attribute are used (filtered by approve_fields_with_attribute).
    """
    fields = approve_fields_with_attribute(fields, 'field')
    add_fields_properties(fields)
    header = fields[0]['header']
    protocols_dir = ROFL_DIR + '/common/protocols/'
    generate_file(protocols_dir + 'f%sframe.h' % header, generate_common_protocol_frame_h(fields))
    generate_file(protocols_dir + 'f%sframe.cc' % header, generate_common_protocol_frame_c(fields))
    generate_file(protocols_dir + 'Makefile.am', generate_common_protocol_makefile(fields))
def test5(self):
    """Killing the main master must not break put/get (failover works)."""
    print("Test 5 running.............")
    self.webservice.start_all_services()
    # Precondition test: one file uploaded through the proxy.
    client_service = client(self.webservice.proxy_port)
    file_path = './test2.txt'
    payload = "test2 data"
    dest_name = 'test2'
    generate_file(file_path, payload)
    # perform user operations, then take the main master down.
    client_service.put(file_path, dest_name)
    self.webservice.kill_main_master()
    fetched = client_service.get(dest_name)
    assert fetched == payload, "Get or put not working after killing main master"
    print("[Test 5 passed]: Main master down! Get and put continue to work!")
    removefiles([file_path])
    self.webservice.cleanup()
def generate(self):
    """Write the raw-data .msg file, the C++ header and the node source."""
    msg_path = self.mav_generator.msg_dir + os.sep + MAV_RAW_DATA_MSG + ".msg"
    generate_file(msg_path, mav_raw_message_template)
    header_name = "mavlink2ros"
    generate_file(self.mav_generator.include_dir + os.sep + header_name + ".h",
                  self.__gen_header_file(header_name))
    generate_file(self.mav_generator.source_dir + os.sep
                  + self.mav_generator.package_name + "_node.cpp",
                  self.__gen_source_file())
def generate(self):
    """Generate the raw .msg definition, the C++ header and the node source."""
    raw_msg_file = self.mav_generator.msg_dir + os.sep + MAV_RAW_DATA_MSG + ".msg"
    generate_file(raw_msg_file, mav_raw_message_template)
    header_name = "mavlink2ros"
    header_file = self.mav_generator.include_dir + os.sep + header_name + ".h"
    generate_file(header_file, self.__gen_header_file(header_name))
    node_file = (self.mav_generator.source_dir + os.sep +
                 self.mav_generator.package_name + "_node.cpp")
    generate_file(node_file, self.__gen_source_file())
def test1(self):
    """Basic client put/get/delete plus namespace-overwrite behaviour."""
    print("Test 1 running.............")
    self.webservice.start_all_services()
    client_service = client(self.webservice.proxy_port)
    # Three local fixture files; the third reuses namespace2 in the
    # overwrite check below.
    paths = ['./test1.txt', './test2.txt', './test3.txt']
    texts = ["this is test1", "this is test2", "this is test3"]
    namespace1 = 'test1'
    namespace2 = 'test2'
    for file_path, content in zip(paths, texts):
        generate_file(file_path, content)
    # Upload the first two files under their namespaces.
    client_service.put(paths[0], namespace1)
    client_service.put(paths[1], namespace2)
    # Retrieve and verify.
    result1 = client_service.get(namespace1)
    assert result1 == texts[0], "Get or put not working! File content not same"
    self.testNameSpaceOverwrite(texts[2], namespace2, client_service, 0, paths[2])
    client_service.delete(namespace2)
    result2 = client_service.get(namespace2)
    assert result2 == "", "Delete not working"
    result1 = client_service.get(namespace1)
    assert result1 == texts[0], "Other file got affected after deleting a file"
    print("[Test 1 passed]. Basic client put, get, delete working!")
    # remove generated files
    removefiles(paths)
    self.webservice.cleanup()
def generate(self):
    """Write one .msg file per parsed message into the package msg dir."""
    msg_dir = self.pkg_dir + os.sep + "msg"
    mk_dirs(msg_dir)
    for message in self.messages:
        target = msg_dir + os.sep + message.name + ".msg"
        generate_file(target, message.to_msg())
def generate_rofl_actions(fields):
    """Generate the ROFL action headers/sources and collect experimenter ids.

    :param fields: list of field/action descriptors; entries with a 'field'
        key are plain match fields and are skipped for the id collection.
    :returns: dict mapping header name -> list of
        {'action': full_action_name, 'experimental_id': id} entries.
    """
    add_fields_properties(fields)
    header = fields[0]['header']
    location = ROFL_DIR + '/datapath/pipeline/openflow/openflow1x/pipeline/'
    generate_file(location + 'of1x_action.h', generate_openflow_pipeline_action_h(fields))
    generate_file(location + 'of1x_action.c', generate_openflow_pipeline_action_c(fields))
    location = ROFL_DIR + '/common/openflow/experimental/actions/'
    generate_file(location + '%s_actions.h' % header, generate_experimental_action_h(fields))
    generate_file(location + '%s_actions.cc' % header, generate_experimental_action_c(fields))
    generate_file(location + 'Makefile.am', generate_experimental_makefile(fields))
    generate_file(ROFL_DIR + '/datapath/pipeline/platform/packet_actions_autogenerated.h',
                  generate_datapath_pipeline_platform_actions_h(fields))
    experimental_ids = {}
    for field in fields:
        if 'field' in field:
            continue
        # BUGFIX: the original computed full_action_name only inside the
        # 'experimental_id' branch and never created the per-header list in
        # the fallback branch, so actions without an explicit
        # experimental_id raised NameError/KeyError (or reused a stale name
        # from a previous iteration). Compute both unconditionally.
        full_action_name = "%s_%s" % (field['action'], field['header'])
        entries = experimental_ids.setdefault(field['header'], [])
        entries.append({'action': full_action_name,
                        'experimental_id': field.get('experimental_id', 4)})  # TODO FIX default id
    return experimental_ids
def __copy_modified_files():
    """Copy the hand-patched template files into the ROFL source tree."""
    # (destination under ROFL_DIR, template under MODIFIED_DIR)
    copies = (
        ("/common/endianess_other.h", "/endianess_other.h"),
        ("/common/Makefile.am", "/rofl_common_Makefile.am"),
        ("/common/openflow/cofaction.h", "/cofaction.h"),
        ("/common/openflow/cofaction.cc", "/cofaction.cc"),
        ("/datapath/pipeline/openflow/openflow1x/pipeline/Makefile.am", "/rofl_datapath_pipeline_Makefile.am"),
        ("/datapath/pipeline/openflow/openflow1x/pipeline/of1x_match.h", "/of1x_match.h"),
        ("/datapath/pipeline/openflow/openflow1x/pipeline/of1x_match.c", "/of1x_match.c"),
        ("/datapath/pipeline/platform/Makefile.am", "/rofl_datapath_platform_Makefile.am"),
        ("/datapath/pipeline/platform/packet.h", "/packet.h"),
    )
    for destination, template in copies:
        generate_file(ROFL_DIR + destination, read_template(MODIFIED_DIR + template))
def test_compress(teardown_env, content, result):
    """Compressing known content must produce the expected .huf bytes."""
    generate_file(content=content, random=False)
    compress('_test')
    compressed: bytes = get_content('test.huf')
    assert compressed == result
def generate(self):
    """Create the msg directory and emit every message definition file."""
    msg_dir = self.pkg_dir + os.sep + "msg"
    mk_dirs(msg_dir)
    for msg in self.messages:
        generate_file(msg_dir + os.sep + msg.name + ".msg", msg.to_msg())