def logger(env_key='LOGGING_TYPE'):
    """Configure the root logger with info/error file handlers and, for
    local runs, a console handler.

    :param env_key: environment variable selecting the logging type;
                    treated as 'local' when unset.
    """
    logging_type = os.getenv(env_key, 'local')
    log_formatter = logging.Formatter(
        '%(asctime)s [%(levelname)-8s] [%(filename)s:%(lineno)d] %(message)s',
        datefmt='%d/%m/%Y %H:%M:%S')
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.INFO)

    # Fix: every call used to append a fresh set of handlers to the root
    # logger, so calling logger() twice duplicated every log line.  Bail
    # out if handlers are already installed.
    if root_logger.handlers:
        return

    info_file_handler = logging.FileHandler(Config().log_file("info.log"))
    info_file_handler.setFormatter(log_formatter)
    info_file_handler.setLevel(logging.INFO)
    root_logger.addHandler(info_file_handler)

    error_file_handler = logging.FileHandler(Config().log_file("error.log"))
    error_file_handler.setFormatter(log_formatter)
    error_file_handler.setLevel(logging.ERROR)
    root_logger.addHandler(error_file_handler)

    # Console output only for local runs.
    if logging_type == 'local':
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(log_formatter)
        console_handler.setLevel(logging.INFO)
        root_logger.addHandler(console_handler)
def testMerging(self):
    """Merging one config into another overrides scalars and unions groups."""
    base = Config.Config()
    overlay = Config.Config()
    base.load(mergeConf1)
    overlay.load(mergeConf2)
    # Before merging, the base keeps its own scalar value.
    assert base.item == "value1"
    base.merge(overlay)
    # After merging: scalar overridden, group contents combined.
    assert base.item == "value2"
    for key in ("item1", "item2"):
        assert key in base.group
    for key in ("subitem1", "subitem2"):
        assert key in base.group.subgroup
    assert "group2" in base
def setUp(self):
    """Build a minimal config, a two-function test library, and the platform."""
    self.config = Config.Config()
    self.config["library"] = "testlib"

    self.library = Library.Library("test")
    void_type = Library.Type("void")
    int_type = Library.Type("int")

    # func1(int parameter) -> void
    func1 = Library.Function("func1", void_type)
    param = Library.Parameter("parameter", int_type)
    func1.parameters[param.name] = param
    self.library.functions[func1.name] = func1

    # func2(int p1, int p2) -> int
    func2 = Library.Function("func2", int_type)
    for param_name in ("p1", "p2"):
        par = Library.Parameter(param_name, int_type)
        func2.parameters[par.name] = par
    self.library.functions[func2.name] = func2

    # Register the primitive type mappings.
    self.library.typeMap["int"] = "int"
    self.library.typeMap["void"] = "void"

    # Platform under test.
    self.platform = SymbianPlatform(self.config)
def testDuplicateItems(self):
    """Loading a group with duplicate keys must raise ValueError."""
    c = Config.Config()
    # Idiom fix: assertRaises replaces the fragile try/raise-RuntimeError/
    # except-ValueError sentinel pattern and documents intent directly.
    self.assertRaises(ValueError, c.load, "foo: { dup dup }")
def test_section_web(self):
    """The web section round-trips through serialization."""
    config = Config()
    config.web.port = 8080
    serialized = json.loads(SerializeConfig.config(config))
    self.assertIn("web", serialized)
    self.assertEqual(8080, serialized["web"]["port"])
def setUp(self):
    """Wire a stdout logger plus mocked context/controller for AutoQueue tests."""
    self.logger = logging.getLogger(TestAutoQueue.__name__)
    stream_handler = logging.StreamHandler(sys.stdout)
    self.logger.addHandler(stream_handler)
    self.logger.setLevel(logging.DEBUG)
    stream_handler.setFormatter(logging.Formatter(
        "%(asctime)s - %(levelname)s - %(name)s - %(message)s"))

    # Mocked application context with autoqueue fully enabled.
    self.context = MagicMock()
    self.context.config = Config()
    self.context.config.autoqueue.enabled = True
    self.context.config.autoqueue.patterns_only = True
    self.context.config.autoqueue.auto_extract = True
    self.context.logger = self.logger

    # Mocked controller; model starts empty and the listener is captured
    # so tests can feed model updates later.
    self.controller = MagicMock()
    self.controller.get_model_files_and_add_listener = MagicMock()
    self.controller.queue_command = MagicMock()
    self.model_listener = None
    self.initial_model = []

    def supply_model():
        return self.initial_model

    def supply_model_and_capture(listener: IModelListener):
        self.model_listener = listener
        return supply_model()

    self.controller.get_model_files.side_effect = supply_model
    self.controller.get_model_files_and_add_listener.side_effect = supply_model_and_capture
def writefile(self, filename, sha1v):
    # Write a downloaded file to disk, verify its SHA-1 against the expected
    # value, and roll back to the previous content on mismatch.
    # NOTE: Python 2 syntax (print statements); `sha1v` is the expected hash.
    file = self.getfile(filename)
    path = self.dir + filename
    # Keep a copy of any existing content so we can recover on failure.
    if os.path.isfile(path):
        input = FileUtil.open(path, "r")
        oldfile = input.read()
        input.close()
    else:
        oldfile = None
    output = FileUtil.open(path, "wb")
    output.write(file)
    print 'Update ' + filename + ' OK!'
    output.close()
    # Re-read what was actually written and compare checksums.
    input = FileUtil.open(path, "rb")
    sha1vv = FileUtil.get_file_sha1(input)
    #print sha1v.strip()
    #print sha1vv.strip()
    input.close()
    if sha1v.strip() == sha1vv.strip():
        print 'Verify ' + filename + ' OK!'
    else:
        print 'Verify ' + filename + ' Fail!'
        if oldfile:
            # Verification failed: restore the previous content.
            output = FileUtil.open(path, "wb")
            output.write(oldfile)
            output.close()
            print 'Recover ' + filename + ' OK!'
    # Special case: when the updater's own config was replaced, rewrite the
    # server entry and reload it so the new settings take effect.
    if filename.strip() == '/autoupdate.ini'.strip():
        newconfig = Config(__config__)
        newconfig.writeconfig('autoupdate', 'server', common.AUTOUPDATE_SERVER_STR)
        print 'ReWrite /autoupdate.ini OK!'
        common.reloadini()
        print 'ReLoad /autoupdate.ini OK!'
def __init__(self):
    # Gateway ROS node: bridges robot-side services to a remote server.
    rospy.init_node('Gateway', log_level=rospy.DEBUG)
    server_config_file = rospy.get_param("~server_config_file")
    self.config = Config(server_config_file)
    self.pf = ProtocolFactory(self.config)
    self.run_id = rospy.get_param("run_id")
    print("runid = ", self.run_id)
    # Snapshot of the currently running ROS nodes, used by the monitor.
    self.node_list = rosnode.get_node_names()
    self.timer = Timer()
    self.monitor = Monitor(self.node_list, self.timer)
    self._server_request = {}  # stores server_request
    self._event_bus = Queue()
    # Periodic job handles; None until actually scheduled.
    self._heartbeat_timeout_job = None
    self._tele_report_job = None
    self._report_car_job = None
    self._report_task_job = None
    self._service = DrivingTaskService(self._event_bus)
    # Connection to the remote server; inbound events land on the event bus.
    self.__client = Connector(self._event_bus, self.config.host, self.config.port)
    # Dispatch tables filled by the two _add_*_handler() calls below.
    self._handler_map = {}
    self._event_handler_map = {}
    self._add_command_handler()
    self._add_event_handler()
    self._web_server = WebServer(self.monitor, self._service, self.config)
    self._tele_control_service = TeleControlService()
def __init__(self, dataset_name, cls_type="duck"):
    """LineMOD dataset loader.

    :param dataset_name: 'train' or a test split name.
    :param cls_type: LineMOD object class (e.g. 'duck').
    """
    self.config = Config(dataset_name='linemod', cls_type=cls_type)
    self.bs_utils = Basic_Utils(self.config)
    self.dataset_name = dataset_name
    # Pixel-coordinate maps for a 640x480 image (per-pixel row/col indices).
    self.xmap = np.array([[j for i in range(640)] for j in range(480)])
    self.ymap = np.array([[i for i in range(640)] for j in range(480)])
    self.trancolor = transforms.ColorJitter(0.2, 0.2, 0.2, 0.05)
    # NOTE(review): ImageNet std is normally [0.229, 0.224, 0.225]; the
    # original used 0.224 twice — kept as-is to preserve trained behavior.
    self.norm = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.224])
    self.obj_dict = self.config.lm_obj_dict
    self.cls_type = cls_type
    self.cls_id = self.obj_dict[cls_type]
    print("cls_id in lm_dataset.py", self.cls_id)
    self.root = os.path.join(self.config.lm_root, 'Linemod_preprocessed')
    self.cls_root = os.path.join(self.root, "data/%02d/" % self.cls_id)
    self.rng = np.random
    # Fix: close the meta file (previously leaked) and pin the YAML loader
    # (yaml.load without an explicit Loader is deprecated/unsafe).
    with open(os.path.join(self.cls_root, 'gt.yml'), "r") as meta_file:
        self.meta_lst = yaml.load(meta_file, Loader=yaml.SafeLoader)
    if dataset_name == 'train':
        self.add_noise = True
        real_img_pth = os.path.join(self.cls_root, "train.txt")
        self.real_lst = self.bs_utils.read_lines(real_img_pth)
        rnd_img_pth = os.path.join(
            self.root, "renders/{}/file_list.txt".format(cls_type))
        # Fix: catch only I/O errors instead of a bare except, which also
        # hid programming errors inside read_lines().
        try:
            self.rnd_lst = self.bs_utils.read_lines(rnd_img_pth)
        except (IOError, OSError):
            # No synthetic rendered data.
            print(
                "Train without rendered data from https://github.com/ethnhe/raster_triangle"
            )
            self.rnd_lst = []
        fuse_img_pth = os.path.join(
            self.root, "fuse/{}/file_list.txt".format(cls_type))
        try:
            self.fuse_lst = self.bs_utils.read_lines(fuse_img_pth)
        except (IOError, OSError):
            # No fuse dataset.
            print(
                "Train without fuse data from https://github.com/ethnhe/raster_triangle"
            )
            self.fuse_lst = []
        self.all_lst = self.real_lst + self.rnd_lst + self.fuse_lst
    else:
        self.add_noise = False
        self.pp_data = None
        if os.path.exists(self.config.preprocessed_testset_pth) and self.config.use_preprocess:
            print('Loading valtestset.')
            with open(self.config.preprocessed_testset_pth, 'rb') as f:
                self.pp_data = pkl.load(f)
            self.all_lst = [i for i in range(len(self.pp_data))]
            print('Finish loading valtestset.')
        else:
            tst_img_pth = os.path.join(self.cls_root, "test.txt")
            self.tst_lst = self.bs_utils.read_lines(tst_img_pth)
            self.all_lst = self.tst_lst
    print("{}_dataset_size: ".format(dataset_name), len(self.all_lst))
def start_aircast(hostname, port):
    # Wire up the AirPlay -> Chromecast pipeline and run the IO loop:
    # Shairport (AirPlay receiver) feeds sample_queue, Broadcaster serves the
    # samples over HTTP, and the Chromecast (Caster) pulls from STREAM_ROUTE.
    sample_queue = Queue()
    io_loop = tornado.ioloop.IOLoop.current()
    stream_url = "http://{}:{}{}".format(hostname, port, STREAM_ROUTE)
    caster = Caster(stream_url)
    # CD-quality PCM: 44.1 kHz, stereo, 16-bit samples.
    config = Config(sample_rate=44100, channels=2, bits_per_sample=16)
    broadcaster = Broadcaster(config, sample_queue, io_loop)
    shairport = Shairport(caster.device_name, config, sample_queue)
    app = make_app(broadcaster)

    def shairport_status_cb(event, _):
        # Start casting only once AirPlay playback actually begins.
        if event == 'playing':
            caster.start_stream()

    shairport.add_callback(shairport_status_cb)
    broadcaster.start()
    shairport.start()
    app.listen(port)
    logger.info("AirCast ready. Advertising as '%s'", caster.device_name)
    try:
        io_loop.start()
    except KeyboardInterrupt:
        pass
    finally:
        # Orderly shutdown; bounded joins so Ctrl-C cannot hang forever.
        io_loop.stop()
        shairport.stop()
        broadcaster.stop()
        shairport.join(5)
        broadcaster.join(5)
def test_section_general(self):
    """The general section round-trips through serialization."""
    config = Config()
    config.general.debug = True
    serialized = json.loads(SerializeConfig.config(config))
    self.assertIn("general", serialized)
    self.assertEqual(True, serialized["general"]["debug"])
def test_section_lftp(self):
    """Every lftp field survives a serialize -> json round trip."""
    config = Config()
    # Fix: remote_username was assigned the redacted placeholder "******"
    # while the assertion expected "user-on-remote-server", so this test
    # could never pass.  The expected mapping below is used for both the
    # assignments and the assertions, so they cannot drift apart again.
    expected = {
        "remote_address": "server.remote.com",
        "remote_username": "user-on-remote-server",
        "remote_port": 3456,
        "remote_path": "/remote/server/path",
        "local_path": "/local/server/path",
        "remote_path_to_scan_script": "/remote/server/path/to/script",
        "num_max_parallel_downloads": 6,
        "num_max_parallel_files_per_download": 7,
        "num_max_connections_per_root_file": 2,
        "num_max_connections_per_dir_file": 3,
        "num_max_total_connections": 4,
    }
    for name, value in expected.items():
        setattr(config.lftp, name, value)
    out = SerializeConfig.config(config)
    out_dict = json.loads(out)
    self.assertIn("lftp", out_dict)
    for name, value in expected.items():
        self.assertEqual(value, out_dict["lftp"][name])
def _create_default_config() -> Config:
    """
    Create a config with default values
    :return: a fully-populated Config with built-in defaults
    """
    config = Config()
    # General
    config.general.debug = False
    # Lftp connection settings: the dummy value marks fields the user must
    # fill in before first use.
    config.lftp.remote_address = Seedsync.__CONFIG_DUMMY_VALUE
    config.lftp.remote_username = Seedsync.__CONFIG_DUMMY_VALUE
    config.lftp.remote_port = 22
    config.lftp.remote_path = Seedsync.__CONFIG_DUMMY_VALUE
    config.lftp.local_path = Seedsync.__CONFIG_DUMMY_VALUE
    config.lftp.remote_path_to_scan_script = "/tmp/scanfs"
    # Lftp transfer concurrency limits.
    config.lftp.num_max_parallel_downloads = 2
    config.lftp.num_max_parallel_files_per_download = 4
    config.lftp.num_max_connections_per_root_file = 4
    config.lftp.num_max_connections_per_dir_file = 4
    config.lftp.num_max_total_connections = 16
    # Controller scan intervals (milliseconds).
    config.controller.interval_ms_remote_scan = 30000
    config.controller.interval_ms_local_scan = 10000
    config.controller.interval_ms_downloading_scan = 1000
    config.controller.extract_path = "/tmp"
    config.controller.use_local_path_as_extract_path = True
    # Web UI.
    config.web.port = 8800
    # Autoqueue defaults: queue everything, extract automatically.
    config.autoqueue.enabled = True
    config.autoqueue.patterns_only = False
    config.autoqueue.auto_extract = True
    return config
def create_blueprint(communicator):
    # Build the 'robot_status' Flask blueprint: a status page plus a
    # Socket.IO background task that pushes per-robot status messages.
    robot_status = Blueprint('robot_status', __name__)
    zyre_communicator = communicator
    config = Config()

    @robot_status.route('/')
    @robot_status.route('/robot_status')
    def robot_info():
        # Each browser session gets its own uid.
        session['uid'] = uuid.uuid4()
        return render_template('robot_status.html')

    @socketio.on('connect', namespace='/robot_status')
    def on_connect():
        robots = config.get_robots()
        emit('deployed_robots', json.dumps(robots))
        global status_thread
        # Start the status broadcaster exactly once, guarded by the
        # module-level lock so concurrent connects don't spawn duplicates.
        with status_thread_lock:
            if status_thread is None:
                status_thread = socketio.start_background_task(
                    target=get_robot_status, robot_ids=robots)

    def get_robot_status(robot_ids):
        # Poll every robot's status forever, broadcasting each message to
        # all connected clients in the namespace.
        while True:
            for robot in robot_ids:
                status_msg = zyre_communicator.get_status(robot)
                socketio.emit('status_msg', status_msg, namespace='/robot_status')
                socketio.sleep(0.1)

    return robot_status
def parseConfig(config, template=None):
    """
    Parses the given tracer configuration and returns a tree of configuration
    symbols.

    @param config   Config file text
    @param template Optional configuration to use a template.

    @return: A dictionary tree of configuration values
    """
    # Start from the template when given, otherwise from an empty config.
    items = template if template else Config.Config()
    for itemName, item in configFile.parseString(config):
        if not item:
            Log.warn("Empty top-level item '%s'." % itemName)
        exists = itemName in items
        if exists and isinstance(item, Config.Group):
            # Merge group contents into the existing group.
            items[itemName].update(item)
        elif exists and isinstance(item, Config.List):
            # Extend the existing list with the new items.
            items[itemName] += item
        else:
            items[itemName] = item
    return items
def testGroups(self):
    """A parsed group exposes its named sublists."""
    cfg = Config.Config()
    cfg.load(groupConf)
    assert "group" in cfg
    assert len(cfg.group) == 2
    for list_name in ("list1", "list2"):
        assert cfg.group[list_name] == ["sublist1", "sublist2"]
def testEmptyGroupAndList(self):
    """Empty groups and lists parse to present-but-empty containers."""
    cfg = Config.Config()
    cfg.load(emptyGroupAndList)
    assert "emptygroup" in cfg
    assert "emptylist" in cfg
    assert not len(cfg.emptylist)
    assert not len(cfg.emptygroup)
def log_true_graph(self):
    """Draw the ground-truth graph in a shell layout and save it as a PNG."""
    # plt.rcParams['figure.figsize'] = [15, 15]
    # Fix: build the graph once instead of calling get_nx_graph() twice —
    # the second call recomputed (and could return a distinct) graph object.
    graph = self.get_nx_graph()
    pos = nx.shell_layout(graph, self.shells)
    nx.draw(graph, cmap=plt.get_cmap('jet'), with_labels=True, pos=pos)
    plt.savefig(Config().data_file('ground_truth_graph.png'))
    # Clear the figure so the next plot starts clean.
    plt.clf()
def test_section_autoqueue(self):
    """The autoqueue section round-trips through serialization."""
    config = Config()
    config.autoqueue.enabled = True
    config.autoqueue.patterns_only = False
    serialized = json.loads(SerializeConfig.config(config))
    self.assertIn("autoqueue", serialized)
    self.assertEqual(True, serialized["autoqueue"]["enabled"])
    self.assertEqual(False, serialized["autoqueue"]["patterns_only"])
def __init__(self, options=None, fileName=None):
    """Create an empty project; load its contents from fileName when given."""
    self.config = Config.Config()
    self.options = options
    # No platform/library/targets until a project file is loaded.
    self.platform = None
    self.library = None
    self.targets = {}
    if fileName:
        self.load(fileName)
def create_blueprint(communicator):
    # Build the 'central_operator_console' Flask blueprint: the console page,
    # the query list endpoint, and the query-forwarding endpoint.
    central_operator_console = Blueprint('central_operator_console', __name__)
    zyre_communicator = communicator
    config = Config()

    @central_operator_console.route('/central_operator_console')
    def index():
        # Each browser session gets its own uid (used as senderId below).
        session['uid'] = uuid.uuid4()
        return render_template('central_operator_console.html')

    @central_operator_console.route('/central_operator_console/get_query_list',
                                    methods=['GET'])
    def get_query_list():
        # Return the configured query types; on failure, an empty feedback
        # message is replaced with an error description.
        feedback_msg = ''
        try:
            queries = config.get_queries()
            print(queries)
        except Exception as exc:
            print('[get_experiment_list] %s' % str(exc))
            feedback_msg = 'An error occurred while retrieving the experiment list'
        return jsonify(queries=queries, message=feedback_msg)

    @central_operator_console.route(
        '/central_operator_console/send_query_request', methods=['GET', 'POST'])
    def send_query_request():
        '''send query request to fms query interface via ZyreWebCommunicator
        through pyre message
        '''
        # Normalize e.g. "task_query" -> "TASK-QUERY" for the message header.
        query_type = request.args.get('query_id', '', type=str)
        query_type = query_type.upper().replace("_", "-")
        print(query_type)
        robot_id = request.args.get('robot_id', '', type=str)
        task_id = request.args.get('task_id', '', type=str)
        # Assemble a ropod-msg-schema request message.
        query_msg = {'header': {}, 'payload': {}}
        query_msg['header']['type'] = query_type
        query_msg['header']['timestamp'] = datetime.now().timestamp()
        query_msg['header']['metamodel'] = 'ropod-msg-schema.json'
        query_msg['header']['msgId'] = uuid.uuid4()
        query_msg['payload']['senderId'] = str(session['uid'])
        query_msg['payload']['robotId'] = robot_id
        query_msg['payload']['taskId'] = task_id
        print(query_msg)
        # query_msg = json.dumps(query_msg, indent=2, default=str)
        query_result = zyre_communicator.get_query_data(query_msg)
        if query_result is None:
            return jsonify(response=" ",
                           message="Received no response from query interface")
        print("received results")
        return jsonify(response=query_result['payload'], message="")

    return central_operator_console
def test_has_section(self):
    """has_section recognizes exactly the real config sections."""
    config = Config()
    for section in ("general", "lftp", "controller", "web", "autoqueue"):
        self.assertTrue(config.has_section(section))
    for non_section in ("nope", "from_file", "__init__"):
        self.assertFalse(config.has_section(non_section))
def __init__(self, weights_path):
    """Load configs/utilities and the network; clear per-frame buffers."""
    # initialize configs and model object
    self.config = Config(dataset_name='ycb')
    self.bs_utils = Basic_Utils(self.config)
    self.model = self.define_network(weights_path)
    # Per-frame inputs/outputs, populated at inference time.
    for attr in ("rgb", "cld", "cld_rgb_nrm", "choose", "cls_id_lst"):
        setattr(self, attr, None)
def main(unused_argv): config = Config(FLAGS.sz, FLAGS.map, -1) # 进行参数的设置 os.makedirs('weights/' + config.full_id(), exist_ok=True) cfg_path = 'weights/%s/config.json' % config.full_id() # 保存参数的位置 config.build(cfg_path) # 建立和设置参数 if FLAGS.human: human() else: agent(config)
def RunEpguide():
    """Main application loop: read the configuration and run the operations."""
    config = Config()
    config.ParseCommandLine(sys.argv)
    # An optional config file supplements the command-line options.
    if config.options.use_config:
        config.ReadConfigFile()
    EpGuide(config).Execute()
def testLists(self):
    """A parsed list keeps order and supports nested values and attributes."""
    cfg = Config.Config()
    cfg.load(listConf)
    assert "list" in cfg
    assert len(cfg.list) == 5
    # Plain items, a mapping, and a nested list, in declaration order.
    expected_prefix = ["item1", "item2", {"foo": "bar"}, ["sublist1", "sublist2"]]
    for index, value in enumerate(expected_prefix):
        assert cfg.list[index] == value
    # The final item carries an attribute.
    assert cfg.list[4].attrs["attr"] == "value"
def __init__(self, dataset_name, cls_type="duck", DEBUG=False):
    """LineMOD dataset loader (debug-capable variant).

    :param dataset_name: 'train' or a test split name.
    :param cls_type: LineMOD object class (e.g. 'duck').
    :param DEBUG: stored on the instance for downstream debug output.
    """
    self.DEBUG = DEBUG
    self.config = Config(ds_name='linemod', cls_type=cls_type)
    self.bs_utils = Basic_Utils(self.config)
    self.dataset_name = dataset_name
    # Pixel-coordinate maps for a 640x480 image (per-pixel row/col indices).
    self.xmap = np.array([[j for i in range(640)] for j in range(480)])
    self.ymap = np.array([[i for i in range(640)] for j in range(480)])
    self.trancolor = transforms.ColorJitter(0.2, 0.2, 0.2, 0.05)
    # NOTE(review): ImageNet std is normally [0.229, 0.224, 0.225]; the
    # original used 0.224 twice — kept as-is to preserve trained behavior.
    self.norm = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.224])
    self.obj_dict = self.config.lm_obj_dict
    self.cls_type = cls_type
    self.cls_id = self.obj_dict[cls_type]
    print("cls_id in lm_dataset.py", self.cls_id)
    self.root = os.path.join(self.config.lm_root, 'Linemod_preprocessed')
    self.cls_root = os.path.join(self.root, "data/%02d/" % self.cls_id)
    self.rng = np.random
    # Fix: close the meta file (previously leaked) and pin the YAML loader
    # (yaml.load without an explicit Loader is deprecated/unsafe).
    with open(os.path.join(self.cls_root, 'gt.yml'), "r") as meta_file:
        self.meta_lst = yaml.load(meta_file, Loader=yaml.SafeLoader)
    if dataset_name == 'train':
        self.add_noise = True
        real_img_pth = os.path.join(self.cls_root, "train.txt")
        self.real_lst = self.bs_utils.read_lines(real_img_pth)
        rnd_img_ptn = os.path.join(self.root, 'renders/%s/*.pkl' % cls_type)
        self.rnd_lst = glob(rnd_img_ptn)
        print("render data length: ", len(self.rnd_lst))
        if len(self.rnd_lst) == 0:
            # Fix: "Trainnig" -> "Training" in the user-facing warnings.
            warning = "Warning: "
            warning += "Training without rendered data will hurt model performance \n"
            warning += "Please generate rendered data from https://github.com/ethnhe/raster_triangle.\n"
            print(colored(warning, "red", attrs=['bold']))
        fuse_img_ptn = os.path.join(self.root, 'fuse/%s/*.pkl' % cls_type)
        self.fuse_lst = glob(fuse_img_ptn)
        print("fused data length: ", len(self.fuse_lst))
        if len(self.fuse_lst) == 0:
            warning = "Warning: "
            warning += "Training without fused data will hurt model performance \n"
            warning += "Please generate fused data from https://github.com/ethnhe/raster_triangle.\n"
            print(colored(warning, "red", attrs=['bold']))
        self.all_lst = self.real_lst + self.rnd_lst + self.fuse_lst
        self.minibatch_per_epoch = len(self.all_lst) // self.config.mini_batch_size
    else:
        self.add_noise = False
        tst_img_pth = os.path.join(self.cls_root, "test.txt")
        self.tst_lst = self.bs_utils.read_lines(tst_img_pth)
        self.all_lst = self.tst_lst
    print("{}_dataset_size: ".format(dataset_name), len(self.all_lst))
def testShadowing(self):
    """Later-loaded configs shadow scalars and extend groups/lists."""
    c = Config.Config()
    c.load(conf1)
    c.load(conf2)
    assert "item1" in c
    assert "item2" in c
    assert "subitem1" in c.group
    assert "subitem2" in c.group
    assert c.list[0] == "listitem1"
    assert c.list[1] == "listitem2"
    assert "group.subitem1" in c
    # Fix: assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(c.common, "bar")
def testMergeConflict(self):
    """Merging a list item into a same-named group must raise ValueError."""
    listSrc = """ item: [ subitem ] """
    groupSrc = """ item: { subitem } """
    c1 = Config.Config()
    c2 = Config.Config()
    c1.load(listSrc)
    c2.load(groupSrc)
    # Idiom fix: assertRaises replaces the fragile try/raise-RuntimeError/
    # except-ValueError sentinel pattern.
    self.assertRaises(ValueError, c1.merge, c2)
def test_section_controller(self):
    """Controller scan intervals survive a serialize -> json round trip."""
    config = Config()
    intervals = {
        "interval_ms_remote_scan": 1234,
        "interval_ms_local_scan": 5678,
        "interval_ms_downloading_scan": 9012,
    }
    for name, value in intervals.items():
        setattr(config.controller, name, value)
    out_dict = json.loads(SerializeConfig.config(config))
    self.assertIn("controller", out_dict)
    for name, value in intervals.items():
        self.assertEqual(value, out_dict["controller"][name])