def startup(self):
    """Register this vehicle with the backend and bring up hardware interfaces.

    Fetches the vehicle record from the API, attaches the local device id,
    persists it, then starts the CAN and GPS handlers.

    Returns:
        bool: always True.  NOTE(review): the CAN/GPS startup() results are
        not checked here (a sibling variant of this method does check them)
        -- confirm whether failures should propagate.
    """
    # (removed unused `result = True` local)
    vehicle = API_Engine().get_vehicle()
    vehicle["did"] = ConfigStore().get_did()
    ConfigStore().set_vehicle(vehicle)

    # Check for PiCAN connection
    self.can = can_interface.CANHandler()
    self.can.startup()

    # Check for GPS connection
    self.gps = gps_interface.GPSHandler()
    self.gps.startup()

    return True
def test_config_get_new_pool_id(mock_get_config):
    """get_new_pool_id() returns the highest free pool id, bounded by the
    smallest max COS id among the requested allocation technologies
    (MBA caps at 9, CAT at 31 in this mock)."""

    def get_max_cos_id(alloc_type):
        # MBA exposes fewer classes of service than CAT.
        if 'mba' in alloc_type:
            return 9
        else:
            return 31

    with mock.patch('common.PQOS_API.get_max_cos_id', new=get_max_cos_id):
        config_store = ConfigStore()

        # No conflicting pools defined: the full id range is available.
        mock_get_config.return_value = CONFIG
        assert 9 == config_store.get_new_pool_id({"mba": 10})
        assert 9 == config_store.get_new_pool_id({"mba": 20, "cbm": "0xf0"})
        assert 31 == config_store.get_new_pool_id({"cbm": "0xff"})
        # (removed an exact duplicate of the following assertion)
        assert 31 == config_store.get_new_pool_id({"l2cbm": "0xff"})
        assert 31 == config_store.get_new_pool_id({"l2cbm": "0xff", "cbm": "0xf0"})

        # Pools already defined: the top ids are taken, next free id is lower.
        mock_get_config.return_value = CONFIG_POOLS
        assert 8 == config_store.get_new_pool_id({"mba": 10})
        assert 8 == config_store.get_new_pool_id({"mba": 20, "cbm": "0xf0"})
        assert 30 == config_store.get_new_pool_id({"cbm": "0xff"})
        assert 30 == config_store.get_new_pool_id({"l2cbm": "0xff"})
        assert 30 == config_store.get_new_pool_id({"l2cbm": "0xff", "cbm": "0xf0"})
def test_get_global_attr_power_profiles_verify(mock_get_config, cfg, default, result):
    """get_global_attr('power_profiles_verify', default) follows the config
    value when present, otherwise the supplied default."""
    mock_get_config.return_value = cfg
    store = ConfigStore()
    assert result == store.get_global_attr('power_profiles_verify', default)
def __init__(self):
    # NOTE(review): this constructor only *defines* a local `startup`
    # function and never calls or stores it, so __init__ is effectively a
    # no-op.  The nesting looks like a paste error (a standalone startup()
    # with nearly identical code exists elsewhere in this project) --
    # confirm the intended structure before relying on it.
    def startup(self):
        """Register the vehicle, bring up CAN/GPS, then start the API engine.

        Returns:
            bool: False if any interface fails to initialise, True otherwise.
        """
        # (removed unused `result = True` local)
        vehicle = API_Engine().get_vehicle()
        vehicle["did"] = ConfigStore().get_did()
        ConfigStore().set_vehicle(vehicle)

        # Check for PiCAN connection
        try:
            self.can = can_interface.CanHandler()
            if not self.can.startup():
                return False
        except Exception:
            # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
            # are not swallowed.
            return False

        # Check for GPS connection
        try:
            self.gps = gps_interface.GPSHandler()
            if not self.gps.setup():
                return False
        except Exception:
            # Was a bare `except:` (see above).
            return False

        API_Engine().start()
        # Explicit success result; previously fell through returning None,
        # which is falsy and indistinguishable from failure to callers.
        return True
def test_config_default_pool(mock_get_num_cores):
    """add_default_pool() creates pool 0 covering every core not already
    assigned to another pool; removal works symmetrically."""
    mock_get_num_cores.return_value = 16
    config_store = ConfigStore()
    # Bug fix: CONFIG.copy() is a *shallow* copy, so the pool removals below
    # mutated the shared module-level CONFIG and leaked into other tests
    # (sibling tests already use deepcopy).
    config = deepcopy(CONFIG)

    # just in case, remove default pool from config
    for pool in config['pools']:
        if pool['id'] == 0:
            config['pools'].remove(pool)
            break

    # no default pool in config
    assert not config_store.is_default_pool_defined(config)

    # add default pool to config
    config_store.add_default_pool(config)
    assert config_store.is_default_pool_defined(config)

    # test that config now contains all cores
    # (cores configured + default pool cores)
    all_cores = range(common.PQOS_API.get_num_cores())
    for pool in config['pools']:
        all_cores = [core for core in all_cores if core not in pool['cores']]
    assert not all_cores

    # remove default pool from config
    for pool in config['pools']:
        if pool['id'] == 0:
            config['pools'].remove(pool)
            break

    # no default pool in config
    assert not config_store.is_default_pool_defined(config)
def test_config_default_pool_mba_bw():
    """The generated default pool (id 0) carries an unrestricted mba_bw and
    no 'mba'/'cbm' keys."""
    store = ConfigStore()
    cfg = deepcopy(CONFIG)

    # Strip any pre-existing default pool from the working copy.
    for pool in cfg['pools']:
        if pool['id'] == 0:
            cfg['pools'].remove(pool)
            break

    # No default pool left in the config now.
    assert not store.is_default_pool_defined(cfg)

    # Recreate it and verify it is detected.
    store.add_default_pool(cfg)
    assert store.is_default_pool_defined(cfg)

    default_mba_bw = None
    for pool in cfg['pools']:
        if pool['id'] == 0:
            assert 'mba' not in pool
            assert 'cbm' not in pool
            default_mba_bw = pool['mba_bw']
            break

    # "No limit" sentinel: max unsigned 32-bit value.
    assert default_mba_bw == 2**32 - 1
def run(self):
    """Background uploader loop.

    Every 5 seconds, if buffered PID samples or a GPS fix exist, PUT them
    to the server's /vehicle/pid/<vid> endpoint, then clear the buffers.
    Runs forever; intended to be a thread body.
    """
    while True:
        time.sleep(5)
        if self.instance.pid_data or self.instance.pos_data:
            pids = [{"pid": name, "data": value}
                    for name, value in self.instance.pid_data.items()]
            pid_body = {'did': ConfigStore().get_did(), 'pids': pids}
            if self.instance.pos_data:
                # Bug fix: key was misspelled 'latititude'.
                # NOTE(review): confirm the server expects 'latitude' and
                # was not built around the typo.
                pid_body['latitude'] = self.instance.pos_data['latitude']
                pid_body['longitude'] = self.instance.pos_data['longitude']
            self.do_request(
                "put",
                url=ConfigStore().get_server_uri() + '/vehicle/pid/' + ConfigStore().get_vid(),
                body=pid_body)
            # Reset buffers so each upload carries only fresh samples.
            self.instance.pid_data = {}
            self.instance.pos_data = {}
def test_config_reset():
    # Verifies that ConfigStore.reset() re-reads the config file and
    # recreates the default pool to match a *changed* environment:
    # more cores and MBA no longer supported on the second pass.
    from copy import deepcopy
    with mock.patch('common.PQOS_API.get_cores') as mock_get_cores,\
        mock.patch('config.ConfigStore.load') as mock_load,\
        mock.patch('caps.mba_supported', return_value = True) as mock_mba,\
        mock.patch('caps.cat_l3_supported', return_value = True),\
        mock.patch('caps.cat_l2_supported', return_value = True),\
        mock.patch('common.PQOS_API.get_max_l3_cat_cbm', return_value = 0xFFF),\
        mock.patch('common.PQOS_API.get_max_l2_cat_cbm', return_value = 0xFF),\
        mock.patch('common.PQOS_API.check_core', return_value = True),\
        mock.patch('pid_ops.is_pid_valid', return_value = True):

        # First pass: 8 cores, MBA supported, baseline CONFIG.
        # deepcopy so process_config() cannot mutate the shared fixture.
        mock_load.return_value = deepcopy(CONFIG)
        mock_get_cores.return_value = range(8)

        config_store = ConfigStore()
        config_store.from_file("/tmp/appqos_test.config")
        config_store.process_config()

        assert len(config_store.get_pool_attr('cores', None)) == 8
        assert config_store.get_pool_attr('cbm', 0) == 0xFFF
        assert config_store.get_pool_attr('l2cbm', 0) == 0xFF
        assert config_store.get_pool_attr('mba', 0) == 100

        # test get_pool_attr
        assert config_store.get_pool_attr('non_exisiting_key', None) == None

        # reset mock and change return values
        # more cores this time (8 vs. 16)
        mock_get_cores.return_value = range(16)
        mock_get_cores.reset_mock()
        # use CONFIG_NO_MBA this time, as MBA is reported as not supported
        mock_load.return_value = deepcopy(CONFIG_NO_MBA)
        mock_load.reset_mock()
        mock_mba.return_value = False

        # verify that reset reloads config from file and Default pool is
        # recreated with different set of cores
        # (get_num_cores mocked to return different values)
        config_store.reset()

        # reset_mock() above ensures these count only the reset() pass.
        mock_load.assert_called_once_with("/tmp/appqos_test.config")
        mock_get_cores.assert_called_once()

        assert len(config_store.get_pool_attr('cores', None)) == 16
        assert config_store.get_pool_attr('cbm', 0) == 0xFFF
        # MBA unsupported now, so the attribute is absent.
        assert config_store.get_pool_attr('mba', 0) is None
def test_config_recreate_default_pool(def_pool_def):
    """recreate_default_pool() removes an existing default pool first (and
    only then), and always adds a fresh one."""
    store = ConfigStore()

    with mock.patch('config.ConfigStore.is_default_pool_defined',
                    mock.MagicMock(return_value=def_pool_def)) as mock_is_def_pool_def,\
        mock.patch('config.ConfigStore.remove_default_pool') as mock_rm_def_pool,\
        mock.patch('config.ConfigStore.add_default_pool') as mock_add_def_pool:

        store.recreate_default_pool()

        # Removal happens only when a default pool was already defined.
        if def_pool_def:
            mock_rm_def_pool.assert_called_once()
        else:
            mock_rm_def_pool.assert_not_called()

        # A new default pool is added unconditionally.
        mock_add_def_pool.assert_called_once()
def test_config_is_any_pool_defined(mock_get_config):
    """is_any_pool_defined() is True only while a non-default pool exists."""
    config_store = ConfigStore()
    config = deepcopy(CONFIG_POOLS)

    mock_get_config.return_value = config
    # Direct truthiness assertion instead of '== True'.
    assert config_store.is_any_pool_defined()

    # Drop every non-default pool.  Iterate over a slice copy because the
    # list is mutated during iteration.  (Removed leftover debug print()s.)
    for pool in config['pools'][:]:
        if not pool['id'] == 0:
            config['pools'].remove(pool)

    mock_get_config.return_value = config
    assert not config_store.is_any_pool_defined()
def load_options(self):
    """Load debugger-UI options from the user's Hatari config file and
    apply the disassembly line count to the address window."""
    # TODO: move config to MemoryAddress class?
    # (depends on how monitoring of addresses should work)
    lines = self.address.get_lines()
    # ConfigStore does checks and type conversions based on names
    # of Hatari config sections and keys, so this needs to use
    # same names to avoid asserts, and it can't e.g. save
    # follow_pc option value, which will keep as run-time one
    defaults = {"[Debugger]": {"nDisasmLines": lines}}
    config = ConfigStore(".hatari", defaults)
    configpath = config.get_filepath(".debugui.cfg")
    config.load(configpath)  # set defaults
    try:
        self.address.set_lines(config.get("[Debugger]", "nDisasmLines"))
    except (KeyError, AttributeError):
        # Stale/mismatched config file: tell the user how to recover.
        ErrorDialog(None).run(
            "Debug UI configuration mismatch!\nTry again after removing: '%s'."
            % configpath)
    self.config = config
# Imports for the fin-detection/classification web service.
# NOTE(review): Flask, request, redirect and logging are referenced below but
# not imported in this chunk -- presumably imported earlier in the file.
from google.protobuf import json_format
from PIL import Image

import adapter as adp
from box import crop_image_for_box
from classifier import Classifier
from config import ConfigStore
from detector import FinDetector

# TODO: Centralize it into config.
DEFAULT_CFG_KEY = 'dolphin'

app = Flask(__name__)
# Module-level singletons: detector and classifier are loaded once at import.
fin_detector = FinDetector()
fin_classifier = Classifier(ConfigStore().get(DEFAULT_CFG_KEY))


@app.route('/prediction/image/', methods=['POST'])
def pred_image():
    # Accepts an uploaded image file and (presumably) runs detection on it.
    # NOTE(review): this handler appears truncated here -- a request with a
    # valid file falls through all guards with no return statement visible.
    # Confirm the remainder of the function against the full file.
    if request.method != 'POST':
        return redirect(request.url)
    if 'file' not in request.files:
        # NOTE(review): logging.error takes %-style lazy args, not print-style
        # extra positionals; this call will mis-format ("not all arguments
        # converted").  Should be: logging.error('No file in request: %s', ...)
        logging.error('No file in request:', request.files)
        return redirect(request.url)

    req_file = request.files['file']
    if req_file.filename == '':
        # NOTE(review): same logging-argument issue as above.
        logging.error('Filename is empty in request:', request.files)
        return redirect(request.url)
def test_config_pid_to_pool(mock_get_config, pid, pool_id):
    """pid_to_pool() maps a PID to the id of the pool that owns it."""
    mock_get_config.return_value = CONFIG
    store = ConfigStore()
    assert pool_id == store.pid_to_pool(pid)
def test_config_app_to_pool(mock_get_config, app, pool_id):
    """app_to_pool() maps an app id to the id of the pool containing it."""
    mock_get_config.return_value = CONFIG
    store = ConfigStore()
    assert pool_id == store.app_to_pool(app)
def test_config_pid_to_app(mock_get_config, pid, app):
    """pid_to_app() maps a PID to the app id that declares it."""
    mock_get_config.return_value = CONFIG
    store = ConfigStore()
    assert app == store.pid_to_app(pid)
def test_get_mba_ctrl_enabled(mock_get_config, cfg, result):
    """get_mba_ctrl_enabled() reflects the value carried in the config."""
    mock_get_config.return_value = cfg
    store = ConfigStore()
    assert result == store.get_mba_ctrl_enabled()
def test_get_rdt_iface(mock_get_config, cfg, result):
    """get_rdt_iface() reflects the RDT interface set in the config."""
    mock_get_config.return_value = cfg
    store = ConfigStore()
    assert result == store.get_rdt_iface()
Usage: benchmark.py [options] Options: --imgdir=DIR The directory contains images [default: ./data/detector/train/image/] --annodir=DIR The directory contains annotations [default: ./data/detector/train/annotation/] --outdir=DIR The directory containing output images. --match=TYPE The type for match function other than equal (fin, ku) [default: equal] """ if __name__ == '__main__': args = docopt(usage, help=True) img_folder = os.path.abspath(args['--imgdir']) anno_folder = os.path.abspath(args['--annodir']) out_folder = os.path.abspath(args.get('--outdir', '')) config = ConfigStore().get(DEFAULT_CFG_KEY) classifier = Classifier(config) detector = FinDetector() drawer = BoxDrawer() img_files = [x for x in os.listdir(img_folder)] anno_files = {x: True for x in os.listdir(anno_folder)} if len(img_files) != len(anno_files): print('No match for image and annotation files.') sys.exit(0) # data is the list of all the images and their corresponding annotations. data = [] for img_file in img_files: anno_file = xml_fname_from_jpg(img_file)
def get_vehicle(self):
    """Fetch this vehicle's registration record from the server.

    Returns the decoded JSON body of GET /vehicle/init/<vid>.
    """
    url = ConfigStore().get_server_uri() + '/vehicle/init/' + ConfigStore().get_vid()
    response = self.do_request("get", url=url)
    return response.json()
def startup(self):
    # NOTE(review): this fragment appears truncated -- `result` is never
    # used again and the function ends right after tagging the vehicle
    # record, with no store/return visible.  Compare the fuller startup()
    # variants elsewhere in this project before changing it.
    result = True
    # Fetch the vehicle record and attach the local device id to it.
    vehicle = API_Engine().get_vehicle()
    vehicle["did"] = ConfigStore().get_did()
if __name__ == "__main__":
    # CLI: classify a single image given either a local path or a URL.
    parser = argparse.ArgumentParser()
    parser.add_argument("--image", help="path to image")
    parser.add_argument("--image_url", help="url to image")
    parser.add_argument("--model")
    parser.add_argument('--config', help='Config key')
    args = parser.parse_args()

    # One of the two image sources is mandatory.
    if not (args.image or args.image_url):
        parser.print_help()
        sys.exit(1)

    if args.image:
        img = Image.open(args.image)
    else:
        response = requests.get(args.image_url)
        img = Image.open(BytesIO(response.content))

    cfg_store = ConfigStore()
    config = cfg_store.get(args.config)
    model = load_model(config['model'])
    labels = config['labels']

    preds = predict(model, img, target_size)
    # Pair each label with its score, highest probability first, keep top N.
    result = sorted(zip(labels, preds), key=lambda item: item[1], reverse=True)[:TOP_N]
    pprint(result)