def on_connected(self, *args, **kwargs):
    """Initialize the suzieq context once the CLI session starts.

    Loads the suzieq configuration (honoring -c if supplied), the table
    schemas, and the analysis engine selection. When the 'rest' engine is
    configured, also extracts the REST endpoint settings from the 'rest'
    section of the config.
    """
    if self._args.V:
        print_version()
        sys.exit(0)

    if self._args.config:
        self.ctxt.cfg = load_sq_config(validate=True,
                                       config_file=self._args.config)
    else:
        self.ctxt.cfg = load_sq_config(validate=True)

    if not self.ctxt.cfg:
        print('ERROR: No suzieq configuration found')
        print('Create a suzieq-cfg.yml under the homedir or current dir')
        print('OR pass a path to the config file via -c argument')
        sys.exit(1)

    self.ctxt.schemas = Schema(self.ctxt.cfg["schema-directory"])
    cfg = self.ctxt.cfg
    self.ctxt.engine = cfg.get('ux', {}).get('engine', 'pandas')
    if self.ctxt.engine == 'rest':
        # See if we can extract the REST info from the REST part
        restcfg = cfg.get('rest', {})
        self.ctxt.rest_server_ip = restcfg.get('address', '127.0.0.1')
        # BUGFIX: this read the 'address' key (not 'port') and assigned it
        # to a misspelled 'reset_server_port' attribute
        self.ctxt.rest_server_port = restcfg.get('port', '80')
        # BUGFIX: the old check compared against the string 'False', so a
        # YAML boolean false (no-https: false) wrongly selected http.
        # Compare via str() so both the string and the boolean work.
        if str(restcfg.get('no-https', False)).lower() == 'false':
            self.ctxt.transport = 'https'
        else:
            self.ctxt.transport = 'http'
        self.ctxt.rest_api_key = restcfg.get('API_KEY', '')
def __init__(self, engine_name: str = '',
             hostname: typing.List[str] = None,
             start_time: str = '', end_time: str = '',
             view: str = '', namespace: typing.List[str] = None,
             columns: typing.List[str] = None,
             context=None, table: str = '', config_file=None) -> None:
    """Initialize the sq-object for a given table.

    Builds (or reuses) the SqContext, resolves the engine and schema for
    *table*, and seeds the filter attributes (namespace, hostname, time
    window, view, columns) from the arguments, falling back to the
    context's defaults.

    Raises:
        ValueError: if no analysis engine could be resolved
    """
    if not context:
        self.ctxt = SqContext(cfg=load_sq_config(
            validate=True, config_file=config_file),
            engine=engine_name)
        self.ctxt.schemas = Schema(self.ctxt.cfg["schema-directory"])
    else:
        self.ctxt = context
        if not self.ctxt.cfg:
            self.ctxt.cfg = load_sq_config(validate=True,
                                           config_file=config_file)
            self.ctxt.schemas = Schema(self.ctxt.cfg["schema-directory"])
        if not self.ctxt.engine:
            self.ctxt.engine = engine_name

    self._cfg = self.ctxt.cfg
    self._schema = SchemaForTable(table, self.ctxt.schemas)
    self._table = table
    self._sort_fields = self._schema.key_fields()
    self._convert_args = {}

    self.namespace = namespace or self.ctxt.namespace or []
    self.hostname = hostname or self.ctxt.hostname or []
    self.start_time = start_time or self.ctxt.start_time
    self.end_time = end_time or self.ctxt.end_time

    view = view or self.ctxt.view
    # A time window without an explicit view implies all records are wanted
    if self.start_time and self.end_time and not view:
        self.view = 'all'
    else:
        self.view = view or 'latest'
    self.columns = columns or ['default']
    self._unique_def_column = ['hostname']

    # BUGFIX: ensure the attribute exists so the sanity check below raises
    # ValueError instead of AttributeError when no engine branch matches
    self.engine = None
    if engine_name and engine_name != '':
        self.engine = get_sqengine(engine_name, self._table)(self)
    elif self.ctxt.engine:
        self.engine = get_sqengine(self.ctxt.engine, self._table)(self)

    if not self.engine:
        raise ValueError('Unknown analysis engine')

    self.summarize_df = pd.DataFrame()

    # BUGFIX: these were chained assignments (a = b = []), which made each
    # pair of attributes alias the SAME list object — appending to one
    # silently mutated the other. Give every attribute its own list.
    self._addnl_filter = []
    self._addnl_fields = []
    self._valid_get_args = []
    self._valid_assert_args = []
    self._valid_arg_vals = []
    self._valid_find_args = []
    self._valid_summarize_args = []
async def test_coalescer_start(coalescer_cfg):
    """Launch a mock coalescer in order to check if it is possible to start
    it. The mock listen on TCP port 8303 waiting for a connection from this
    test.
    """
    cfg_file = coalescer_cfg
    cfg = load_sq_config(config_file=cfg_file)
    cl = CoalescerLauncher(cfg_file, cfg, MOCK_COALESCER)

    # Start coalescer
    task = asyncio.create_task(cl.start_and_monitor_coalescer())

    # Waiting for coalescer to start
    await asyncio.sleep(1)
    try:
        # Try to reach the coalescer mock; success proves the launcher
        # actually spawned it and it is listening
        _, writer = await asyncio.open_connection('127.0.0.1', 8303)
        writer.close()
        await writer.wait_closed()
        # Check if the task is still running
        if task.done():
            raise Exception('The start_and_monitor_coalescer task terminated '
                            'but the coalescer is still running.')
    except ConnectionRefusedError:
        pytest.fail('Unable to connect to the coalescer mock')
    except Exception as e:  # pylint: disable=broad-except
        pytest.fail(str(e))
    finally:
        # Tear down the monitoring task.
        # NOTE(review): awaiting a freshly cancelled task can raise
        # CancelledError — presumably start_and_monitor_coalescer absorbs
        # the cancellation; confirm against its implementation.
        task.cancel()
        await task
def _coalescer_init(pq_dir: str):
    """Prepare a scratch copy of a parquet dir plus a matching config file.

    :param pq_dir: the input parquet dir to be copied, this is the root of
                   the parquet dir (tests/data/nxos/parquet-out, for example)
    :returns: temporary dir where the parquet data has been copied to
    :rtype: TemporaryDirectory
    :returns: Temporary config file
    :rtype: NamedTemporaryFile
    """
    # Scratch area; the caller is responsible for cleaning it up
    # pylint: disable=consider-using-with
    scratch = TemporaryDirectory()
    copy_tree(pq_dir, scratch.name)

    # Point a dummy suzieq config at the copied data
    cfg = load_sq_config(config_file=create_dummy_config_file())
    cfg['data-directory'] = f'{scratch.name}/'

    # pylint: disable=consider-using-with
    cfg_file = NamedTemporaryFile(suffix='.yml', delete=False)
    with open(cfg_file.name, 'w') as fh:
        yaml.dump(cfg, fh)

    return scratch, cfg_file
async def test_coalescer_keep_on_failing(coalescer_cfg):
    """Try to start a coalescer which keep on failing
    """
    cfg_file = coalescer_cfg
    cfg = load_sq_config(config_file=cfg_file)
    cl = CoalescerLauncher(cfg_file, cfg, MOCK_COALESCER)

    # monitor_process is mocked out so the launcher only observes the
    # (fake) process exit status
    monitor_process_fn = get_async_task_mock()
    # The fake coalescer process always reports a non-zero return code,
    # making every start attempt look like a failure
    dummy_process = MagicMock()
    dummy_process.returncode = 1
    start_coalescer_fn = get_async_task_mock(dummy_process)
    try:
        with patch.multiple(CoalescerLauncher,
                            _start_coalescer=start_coalescer_fn), \
             patch.multiple(coalescer_launcher_module,
                            monitor_process=monitor_process_fn):
            # The launcher is expected to give up on its own before the
            # 5-second timeout expires
            await asyncio.wait_for(cl.start_and_monitor_coalescer(), 5)

            # Check if multiple attempts have been performed
            attempts_done = start_coalescer_fn.call_count
            assert attempts_done == cl.max_attempts, \
                f'Expected {cl.max_attempts} attempts, {attempts_done} done'
    except asyncio.TimeoutError:
        pytest.fail(
            'The coalescer launcher task expected to fail but it does not')
async def test_coalescer_wrong_attempts_number(coalescer_cfg):
    """Test if an exception is raised if a wrong number of attempts
    is passed to the coalescer launcher
    """
    cfg = load_sq_config(config_file=coalescer_cfg)

    # max_attempts=0 is invalid and must be rejected at construction time
    with pytest.raises(PollingError, match=r'The number of attempts*'):
        CoalescerLauncher(coalescer_cfg, cfg, MOCK_COALESCER,
                          max_attempts=0)
def manager_cfg():
    """Fixture yielding a dummy configuration file path and its parsed dict.

    The temporary config file is removed after the test completes.
    """
    args = {}
    args['config'] = create_dummy_config_file()
    # BUGFIX: the path was passed positionally, binding to load_sq_config's
    # first parameter (validate) instead of config_file, so the default
    # config was loaded rather than the dummy one
    args['config-dict'] = load_sq_config(config_file=args['config'])
    yield args
    os.remove(args['config'])
def test_poller_object_init_validation(poller_args):
    """Test Poller object user_args validation
    """
    # BUGFIX: pass the path via the config_file keyword; positionally it
    # would bind to load_sq_config's first parameter (validate)
    cfg = load_sq_config(config_file=poller_args.config)

    # Test invalid ssh config file
    poller_args.ssh_config_file = 'invalid'
    with pytest.raises(SqPollerConfError):
        Poller(poller_args, cfg)
def get_configured_api_key():
    """Return the REST API key from the suzieq config.

    Exits the process with status 1 if the key is absent.
    """
    cfg = load_sq_config(config_file=app.cfg_file)
    try:
        return cfg['rest']['API_KEY']
    except KeyError:
        print('missing API_KEY in config file')
        sys.exit(1)
def test_server_exec():
    '''Can we can get a valid response with & without https'''
    # Generate a random port
    port = randint(9000, 10000)

    # We need to change the port used to avoid conflicts
    cfgfile = create_dummy_config_file()
    # BUGFIX: the path was passed positionally, binding to load_sq_config's
    # first parameter (validate) instead of config_file
    sqcfg = load_sq_config(config_file=cfgfile)
    if 'rest' not in sqcfg:
        sqcfg['rest'] = {'port': port}
    else:
        sqcfg['rest']['port'] = port
    with open(cfgfile, 'w') as f:
        f.write(yaml.safe_dump(sqcfg))

    server_cmd_args = f'{suzieq_rest_server_path} -c {cfgfile}'.split()
    # pylint: disable=consider-using-with
    proc = subprocess.Popen(server_cmd_args)
    # BUGFIX: kill the server even if an assertion fails, otherwise a
    # failing test leaks the subprocess
    try:
        # Try a request from the server
        sleep(5)
        resp = requests.get(f'https://localhost:{port}/api/docs',
                            verify=False)
        assert (resp.status_code == 200)

        # Try a non-https request from the server
        sleep(5)
        try:
            resp = requests.get(f'http://localhost:{port}/api/docs',
                                verify=False)
            assert (resp.status_code != 200)
        except requests.exceptions.ConnectionError:
            pass
    finally:
        proc.kill()

    # Now test without https
    server_cmd_args = (
        f'{suzieq_rest_server_path} -c {cfgfile} --no-https'.split())
    # pylint: disable=consider-using-with
    proc = subprocess.Popen(server_cmd_args)
    try:
        # Try a request from the server
        sleep(5)
        resp = requests.get(f'http://localhost:{port}/api/docs',
                            verify=False)
        assert (resp.status_code == 200)

        # Try a https request from the server
        sleep(5)
        try:
            resp = requests.get(f'https://localhost:{port}/api/docs',
                                verify=False)
            assert (resp.status_code != 200)
        except requests.exceptions.ConnectionError:
            pass
    finally:
        proc.kill()
    os.remove(cfgfile)
def create_config(testvar):
    """Write a dummy config whose data-directory comes from the test vars.

    Returns the config file path, or None when the test variables carry no
    'data-directory' key.
    """
    if 'data-directory' in testvar:
        # We need to create a tempfile to hold the config
        tf = conftest.create_dummy_config_file()
        # BUGFIX: pass the path via the config_file keyword; positionally
        # it would bind to load_sq_config's first parameter (validate)
        tmpconfig = load_sq_config(config_file=tf)
        tmpconfig['data-directory'] = testvar['data-directory']

        with open(tf, 'w') as f:
            f.write(yaml.dump(tmpconfig))
        return tf
async def test_poller_run(poller_args):
    """Check if all the services are launched after calling Poller.run()
    """
    # BUGFIX: pass the path via the config_file keyword; positionally it
    # would bind to load_sq_config's first parameter (validate)
    cfg = load_sq_config(config_file=poller_args.config)
    poller = Poller(poller_args, cfg)
    mks = await run_poller_with_mocks(poller)

    # Check if all the functions have been called
    for mock_fn in mks.values():
        mock_fn.assert_called()
def test_coalescer_launcher_init_default_bin(coalescer_cfg):
    """Test coalescer initialization with default bin argument
    """
    cfg = load_sq_config(config_file=coalescer_cfg)
    launcher = CoalescerLauncher(coalescer_cfg, cfg)

    # The launcher must record the config and fall back to a default binary
    assert launcher.config_file == coalescer_cfg
    assert launcher.cfg == cfg
    assert launcher.coalescer_bin
def test_coalescer_launcher_init(coalescer_cfg):
    """Test coalescer initialization
    """
    cfg = load_sq_config(config_file=coalescer_cfg)
    binary = 'coalescer.py'
    launcher = CoalescerLauncher(coalescer_cfg, cfg, binary)

    # Every constructor argument must be stored verbatim
    assert launcher.config_file == coalescer_cfg
    assert launcher.cfg == cfg
    assert launcher.coalescer_bin == binary
def test_poller_inventory_init(poller_args):
    """Test if all the parameters are correctly passed to the Inventory
    """
    # BUGFIX: pass the path via the config_file keyword; positionally it
    # would bind to load_sq_config's first parameter (validate)
    cfg = load_sq_config(config_file=poller_args.config)
    poller_args.ssh_config_file = 'config/file'
    cfg['poller']['connect-timeout'] = 30
    # Skip argument validation so the fake ssh config path is accepted
    with patch.multiple(Poller, _validate_poller_args=MagicMock()):
        poller = Poller(poller_args, cfg)
        inv = poller.inventory
        # Check if all the values are valid
        assert inv.ssh_config_file == poller_args.ssh_config_file
        assert inv.connect_timeout == cfg['poller']['connect-timeout']
def test_poller_service_manager_init(poller_args):
    """Test if all the parameters are correctly passed to the ServiceManager
    """
    # BUGFIX: pass the path via the config_file keyword; positionally it
    # would bind to load_sq_config's first parameter (validate)
    cfg = load_sq_config(config_file=poller_args.config)
    poller_args.run_once = 'gather'
    cfg['poller']['period'] = 30
    poller = Poller(poller_args, cfg)
    mgr = poller.service_manager

    # Check if all the values are valid
    assert mgr.service_directory == cfg['service-directory']
    assert mgr.schema_dir == cfg['schema-directory']
    assert mgr.default_interval == cfg['poller']['period']
    assert mgr.run_mode == poller_args.run_once
def create_config(t_dir, suzieq_dir):
    '''Create dummy config'''
    # We need to create a tempfile to hold the config
    # BUGFIX: pass the path via the config_file keyword; positionally it
    # would bind to load_sq_config's first parameter (validate)
    tmpconfig = load_sq_config(
        config_file=conftest.create_dummy_config_file())
    tmpconfig['data-directory'] = f"{t_dir}/parquet-out"
    # Re-anchor the service/schema dirs under the suzieq source tree
    tmpconfig['service-directory'] = \
        f"{suzieq_dir}/{tmpconfig['service-directory']}"
    tmpconfig['schema-directory'] = \
        f"{suzieq_dir}/{tmpconfig['schema-directory']}"

    fname = f'{t_dir}/suzieq-cfg.yml'
    with open(fname, 'w') as f:
        f.write(yaml.dump(tmpconfig))
    return fname
async def test_add_pop_poller_task(poller_args):
    """Test the methods for adding and removing the poller tasks
    """
    # BUGFIX: pass the path via the config_file keyword; positionally it
    # would bind to load_sq_config's first parameter (validate)
    cfg = load_sq_config(config_file=poller_args.config)
    poller = Poller(poller_args, cfg)

    # Test add_poller_task()
    tasks = [asyncio.Future(), asyncio.Future()]
    await poller._add_poller_task(tasks)
    assert poller.waiting_tasks == tasks

    # Test _pop_waiting_poller_tasks()
    received = await poller._pop_waiting_poller_tasks()
    # Check if there aren't poller tasks
    assert not poller.waiting_tasks
    # Check if the received tasks are the expected ones
    assert received == tasks
def _coalescer_basic_test(pq_dir, namespace, path_src, path_dest):
    """Basic coalescer test

    Copies the given parquet dir into a temp dir, coalesces it, and
    verifies everything still looks the same: 'table show' and 'path'
    output must match before and after coalescing. Since the input is a
    run-once parquet capture, no duplicate entries should get merged away.
    path is used because it spans many tables, so one command exercises
    multiple tables at once.

    :param pq_dir: The original parquet dir
    :param namespace: The namespace to be used for checking info
    :param path_src: The source IP of the path
    :param path_dest: The destination IP of the path
    """
    temp_dir, tmpfile = _coalescer_init(pq_dir)

    tables_obj = get_sqobject('tables')(config_file=tmpfile.name)
    path_obj = get_sqobject('path')(config_file=tmpfile.name)

    # Capture the pre-coalescing state
    tables_before = tables_obj.get()
    path_before = path_obj.get(namespace=[namespace], source=path_src,
                               dest=path_dest)

    # Run the coalescer and sanity-check the resulting files
    cfg = load_sq_config(config_file=tmpfile.name)
    do_coalesce(cfg, None)
    _verify_coalescing(temp_dir)

    # Post-coalescing output must be identical to the pre-coalescing one
    tables_after = tables_obj.get()
    assert_df_equal(tables_before, tables_after, None)

    path_after = path_obj.get(namespace=[namespace], source=path_src,
                              dest=path_dest)
    assert_df_equal(path_before, path_after, None)

    _coalescer_cleanup(temp_dir, tmpfile)
def test_controller_invalid_args(config_file: str, args: Dict):
    """Test controller with invalid configuration

    Args:
        config_file (str): config file
        args (Dict): controller arguments
    """
    config = load_sq_config(config_file=config_file)

    # Corrupt the configuration according to the scenario under test
    if args['inventory'] is None:
        config['poller']['inventory-file'] = '/non-existent-inventory.yml'
    elif args['workers'] == 4:
        config['poller']['inventory-timeout'] = -1

    parsed = generate_argparse(args)
    with pytest.raises(SqPollerConfError):
        Controller(parsed, config)
def generate_controller(args: Dict, inv_file: str = None,
                        conf_file: str = '') -> Controller:
    """Generate a Controller object

    Args:
        args (Dict): controller input args
        inv_file (str, optional): controller inventory file.
            Defaults to None.
        conf_file (str, optional): controller config file. Defaults to ''.

    Returns:
        Controller: the newly created Controller
    """
    # Fall back to a dummy config when the caller supplied none
    if conf_file == '':
        conf_file = create_dummy_config_file()
    config = load_sq_config(config_file=conf_file)

    merged_args = update_args(args, inv_file, conf_file)
    parsed = generate_argparse(merged_args)
    return Controller(parsed, config)
def test_controller_valid_args(config_file: str, inv_file: str, args: Dict):
    """Test controller with valid configuration

    Args:
        config_file (str): configuration file
        inv_file (str): inventory
        args (Dict): input arguments
    """
    ctrl = generate_controller(args, inv_file, config_file)
    poller_config = load_sq_config(config_file=config_file)['poller']

    assert ctrl._input_dir == args['input_dir']

    # debug and input-dir force their own run-once modes
    if args['debug']:
        assert ctrl.run_once == 'debug'
    elif args['input_dir']:
        assert ctrl.run_once == 'input-dir'
    else:
        assert ctrl.run_once == args['run_once']

    # Source/manager/chunker settings only apply when polling real nodes
    if not args['input_dir']:
        assert ctrl._config['source']['path'] == args['inventory']
        assert (ctrl._config['manager']['type'] ==
                poller_config['manager']['type'])
        assert (ctrl._config['chunker']['type'] ==
                poller_config['chunker']['type'])
        assert ctrl.inventory_timeout == poller_config['inventory-timeout']

    # run-once mode always disables the coalescer
    if not args['run_once']:
        assert ctrl._no_coalescer == args['no_coalescer']
    else:
        assert ctrl._no_coalescer is True
    assert ctrl.period == args['update_period']

    # Every manager argument must be forwarded verbatim
    forwarded = ['debug', 'exclude-services', 'outputs', 'config',
                 'output-dir', 'service-only', 'ssh-config-file', 'workers']
    for mgr_key in forwarded:
        assert ctrl._config['manager'][mgr_key] == \
            args[mgr_key.replace('-', '_')]
def test_transform(input_file):
    '''Test transformation is captured by coalescer'''
    to_transform = Yaml2Class(input_file)

    try:
        data_directory = to_transform.transform.data_directory
    except AttributeError:
        print('Invalid transformation file, no data directory')
        pytest.fail('AttributeError', pytrace=True)

    # Make a copy of the data directory
    temp_dir, tmpfile = _coalescer_init(data_directory)

    cfg = load_sq_config(config_file=tmpfile.name)
    schemas = Schema(cfg['schema-directory'])

    # Phase 1: apply each transformation spec and write the mutated records
    # pylint: disable=too-many-nested-blocks, no-member
    for ele in to_transform.transform.transform:
        query_str_list = []
        # Each transformation has a record => write's happen per record
        for record in ele.record:
            changed_fields = set()
            new_df = pd.DataFrame()
            # The record's non-dunder attributes name the tables to mutate
            tables = [x for x in dir(record) if not x.startswith('_')]
            for table in tables:
                # Lets read the data in now that we know the table
                tblobj = get_sqobject(table)
                pq_db = get_sqdb_engine(cfg, table, None, None)
                columns = schemas.fields_for_table(table)
                mod_df = tblobj(config_file=tmpfile.name).get(columns=columns)

                for key in getattr(record, table):
                    # 'match' is a pandas query selecting the rows to
                    # change; the literal "all" selects every row
                    query_str = key.match
                    chg_df = pd.DataFrame()
                    if query_str != "all":
                        try:
                            chg_df = mod_df.query(query_str) \
                                .reset_index(drop=True)
                        except Exception as ex:  # pylint: disable=broad-except
                            assert (not ex)
                        query_str_list.append(query_str)
                    else:
                        chg_df = mod_df

                    _process_transform_set(key.set, chg_df, changed_fields)
                    if new_df.empty:
                        new_df = chg_df
                    elif not chg_df.empty:
                        new_df = pd.concat([new_df, chg_df])

                if new_df.empty:
                    continue

                # Write the records now
                _write_verify_transform(new_df, table, pq_db,
                                        SchemaForTable(table, schemas),
                                        tmpfile.name, query_str_list,
                                        changed_fields)

    # Phase 2: coalesce and verify 'table show' output is unchanged
    pre_table_df = get_sqobject('tables')(config_file=tmpfile.name).get()
    do_coalesce(cfg, None)
    _verify_coalescing(temp_dir)
    post_table_df = get_sqobject('tables')(config_file=tmpfile.name).get()
    assert_df_equal(pre_table_df, post_table_df, None)

    # Phase 3: run the spec's additional checks on the coalesced data
    for ele in to_transform.transform.verify:
        table = [x for x in dir(ele) if not x.startswith('_')][0]
        tblobj = get_sqobject(table)

        for tst in getattr(ele, table):
            start_time = tst.test.get('start-time', '')
            end_time = tst.test.get('end-time', '')

            columns = tst.test.get('columns', ['default'])
            df = tblobj(config_file=tmpfile.name, start_time=start_time,
                        end_time=end_time).get(columns=columns)
            if not df.empty and 'query' in tst.test:
                query_str = tst.test['query']
                df = df.query(query_str).reset_index(drop=True)

            # A verify entry asserts emptiness, an exact/wildcard shape,
            # or merely non-emptiness
            if 'assertempty' in tst.test:
                assert (df.empty)
            elif 'shape' in tst.test:
                shape = tst.test['shape'].split()
                if shape[0] != '*':
                    assert (int(shape[0]) == df.shape[0])
                if shape[1] != '*':
                    assert (int(shape[1]) == df.shape[1])
            else:
                assert (not df.empty)

    _coalescer_cleanup(temp_dir, tmpfile)
def worker_main(): """The routine that kicks things off including arg parsing """ # Get supported output, 'gather' cannot be manually selected supported_outputs = OutputWorker.get_plugins() if supported_outputs.get('gather', None): del supported_outputs['gather'] supported_outputs = list(supported_outputs.keys()) parser = argparse.ArgumentParser() parser.add_argument( '-i', '--input-dir', type=str, help='Directory where run-once=gather data is' ) parser.add_argument( '-o', '--outputs', nargs='+', default=['parquet'], choices=supported_outputs, type=str, help='Output formats to write to: parquet. Use ' 'this option multiple times for more than one output', ) parser.add_argument( '-s', '--service-only', type=str, help='Only run this space separated list of services', ) parser.add_argument( '-x', '--exclude-services', type=str, help='Exclude running this space separated list of services', ) parser.add_argument( '-c', '--config', type=str, help='alternate config file' ) parser.add_argument( '--run-once', type=str, choices=['gather', 'process', 'update'], help=argparse.SUPPRESS, ) parser.add_argument( '--output-dir', type=str, default=f'{os.path.abspath(os.curdir)}/sqpoller-output', help=argparse.SUPPRESS, ) parser.add_argument( '--ssh-config-file', type=str, default=None, help='Path to ssh config file, that you want to use' ) parser.add_argument( '--worker-id', type=str, default='0', help=argparse.SUPPRESS, ) userargs = parser.parse_args() uvloop.install() cfg = load_sq_config(config_file=userargs.config) if not cfg: print('Could not load config file, aborting') sys.exit(1) try: asyncio.run(start_worker(userargs, cfg)) except (KeyboardInterrupt, RuntimeError): pass except Exception: # pylint: disable=broad-except traceback.print_exc() sys.exit(0)
def controller_main():
    """The routine that kicks things off including arg parsing

    Parses the controller's CLI arguments, loads the suzieq config and
    runs start_controller() under uvloop until interrupted.
    """
    parser = argparse.ArgumentParser()

    # Get supported output, 'gather' cannot be manually selected
    supported_outputs = OutputWorker.get_plugins()
    if supported_outputs.get('gather', None):
        del supported_outputs['gather']
    supported_outputs = list(supported_outputs)

    # Two inputs are possible:
    # 1. Suzieq inventory file
    # 2. Input directory
    source_arg = parser.add_mutually_exclusive_group()
    source_arg.add_argument('-I', '--inventory', type=str,
                            help='Input inventory file')
    source_arg.add_argument(
        '-i', '--input-dir', type=str,
        help=('Directory where run-once=gather data is. Process the data in '
              'directory as they were retrieved by the hosts'))

    parser.add_argument('-c', '--config',
                        help='Controller configuration file',
                        type=str)
    parser.add_argument(
        '--debug', action='store_true',
        help='Build the node list and exit without polling the nodes')
    parser.add_argument(
        '-x', '--exclude-services', type=str,
        help='Exclude running this space separated list of services',
    )
    parser.add_argument(
        '--no-coalescer', default=False, action='store_true',
        help='Do not start the coalescer',
    )
    parser.add_argument(
        '-o', '--outputs', nargs='+', default=['parquet'],
        choices=supported_outputs, type=str,
        help='Output formats to write to: parquet. Use '
        'this option multiple times for more than one output',
    )
    # Hidden option, used internally for the workers' output location
    parser.add_argument(
        "--output-dir", type=str,
        default=f'{os.path.abspath(os.curdir)}/sqpoller-output',
        help=argparse.SUPPRESS,
    )
    parser.add_argument(
        '--run-once', type=str,
        choices=['gather', 'process', 'update'],
        help=('''The poller do not run forever, three modes are available:
        (1) gather: store the output as it has been collected,
        (2) process: performs some processing on the data.
        Both cases store the results in a plain output file, one for each
        service, and exit.
        (3) update: poll the nodes only once, write the result and stop'''))
    parser.add_argument(
        '-s', '--service-only', type=str,
        help='Only run this space separated list of services',
    )
    parser.add_argument('--ssh-config-file', type=str, default=None,
                        help='Path to ssh config file, that you want to use')
    parser.add_argument(
        '-p', '--update-period',
        help='How frequently the inventory updates [DEFAULT=3600]',
        type=int)
    parser.add_argument(
        '-w', '--workers', type=int,
        help='The number of workers polling the nodes',
    )
    parser.add_argument('-V', '--version', action='store_true',
                        help='Print suzieq version')

    args = parser.parse_args()

    if args.version:
        print_version()
        sys.exit(0)

    uvloop.install()
    cfg = load_sq_config(config_file=args.config)
    if not cfg:
        print("Could not load config file, aborting")
        sys.exit(1)

    try:
        asyncio.run(start_controller(args, cfg))
    except (KeyboardInterrupt, RuntimeError):
        # Normal shutdown path (Ctrl-C or event-loop teardown)
        pass
    except Exception:  # pylint: disable=broad-except
        traceback.print_exc()
def _test_data(topology, proto, scenario, testvar):
    """Run _test_sqcmds against the parquet capture for one scenario.

    The capture directory is derived from topology/proto/scenario under
    the module-level parquet_dir.
    """
    # pylint: disable=redefined-outer-name
    name = f'{topology}_{proto}_{scenario}'
    testvar['data-directory'] = f"{parquet_dir}/{name}/parquet-out"
    # BUGFIX: pass the path via the config_file keyword; positionally it
    # would bind to load_sq_config's first parameter (validate)
    dummy_config = load_sq_config(
        config_file=conftest.create_dummy_config_file())
    _test_sqcmds(dummy_config, testvar)
def coalescer_main():
    """Entry point for the standalone coalescer process.

    Parses CLI args, loads the suzieq config, sets up logging, guarantees
    a single running instance via a pid lockfile, derives the table list
    and coalescing period, then hands off to run_coalescer().
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-s",
        "--service-only",
        type=str,
        help="Only run this space separated list of services",
    )
    parser.add_argument(
        "-x",
        "--exclude-services",
        type=str,
        help="Exclude running this space separated list of services",
    )
    parser.add_argument("-c", "--config", type=str,
                        help="alternate config file")
    parser.add_argument(
        "--run-once",
        default=False,
        help='Run the coalescer once and exit',
        action='store_true',
    )
    parser.add_argument(
        "-p",
        "--period",
        type=str,
        help=('Override the period specified in config file with this. '
              'Format is <period><m|h|d|w>. 1h is 1 hour, 2w is 2 weeks etc.'))
    parser.add_argument("--no-sqpoller", action='store_true',
                        help=argparse.SUPPRESS)

    userargs = parser.parse_args()

    cfg = load_sq_config(config_file=userargs.config)
    if not cfg:
        print(f'Invalid Suzieq config file {userargs.config}')
        sys.exit(1)

    logfile, loglevel, logsize, log_stdout = get_log_params(
        'coalescer', cfg, '/tmp/sq-coalescer.log')
    logger = init_logger('suzieq.coalescer', logfile,
                         loglevel, logsize, log_stdout)

    # Ensure we're the only compacter
    coalesce_dir = cfg.get('coalescer', {})\
        .get('coalesce-directory',
             f'{cfg.get("data-directory")}/coalesced')
    fd = ensure_single_instance(f'{coalesce_dir}/.sq-coalescer.pid', False)
    if not fd:
        print('ERROR: Another coalescer process present')
        logger.error('Another coalescer process present')
        sys.exit(errno.EBUSY)

    # CLI period overrides the config; default is 1h
    timestr = userargs.period or (cfg.get('coalescer', {
        'period': '1h'
    }).get('period', '1h'))

    schemas = Schema(cfg.get('schema-directory'))
    if userargs.service_only or userargs.exclude_services:
        # Start from every non-derived table, then apply the
        # include/exclude filters from the command line
        tables = [
            x for x in schemas.tables()
            if (schemas.type_for_table(x) != "derivedRecord")
        ]
        if userargs.service_only:
            tables = [x for x in tables
                      if x in userargs.service_only.split()]
        if userargs.exclude_services:
            tables = [
                x for x in tables
                if x not in userargs.exclude_services.split()
            ]
    else:
        # Empty list means coalesce every table
        tables = []

    run_coalescer(cfg, tables, timestr, userargs.run_once, logger,
                  userargs.no_sqpoller or False)

    # Release the single-instance lock before exiting
    os.truncate(fd, 0)
    try:
        fcntl.flock(fd, fcntl.LOCK_UN)
        os.close(fd)
    except OSError:
        pass

    sys.exit(0)
def rest_main(*args) -> None:
    """The main function for the REST server

    Args:
        config_file (str): The suzieq config file
        no_https (bool): If true, disable https
    """
    if not args:
        args = sys.argv

    # NOTE(review): ArgumentParser's first positional parameter is 'prog';
    # passing the argv sequence here only affects the usage string, and
    # parse_args() below still reads sys.argv. Left as-is to avoid
    # changing the CLI's visible output — confirm intent.
    parser = argparse.ArgumentParser(args)
    parser.add_argument("-c", "--config", type=str,
                        help="alternate config file",
                        default=None)
    parser.add_argument(
        "--no-https",
        help="Turn off HTTPS",
        default=False, action='store_true',
    )
    parser.add_argument(
        "--version", "-V",
        help="print Suzieq version",
        default=False, action='store_true',
    )
    userargs = parser.parse_args()

    if userargs.version:
        print_version()
        sys.exit(0)

    config_file = sq_get_config_file(userargs.config)
    app = app_init(config_file)
    cfg = load_sq_config(config_file=config_file)
    try:
        _ = cfg['rest']['API_KEY']
    except KeyError:
        print('missing API_KEY in config file')
        # BUGFIX: use sys.exit() rather than the site builtin exit(),
        # which is meant for interactive use and may be absent
        sys.exit(1)

    logcfg, loglevel = get_log_config_level(cfg)

    # CLI flag or config can each disable https
    no_https = cfg.get('rest', {}).get('no-https', False) or \
        userargs.no_https

    srvr_addr = cfg.get('rest', {}).get('address', '127.0.0.1')
    srvr_port = cfg.get('rest', {}).get('port', 8000)

    if no_https:
        uvicorn.run(
            app, host=srvr_addr, port=srvr_port,
        )
    else:
        ssl_keyfile, ssl_certfile = get_cert_files(cfg)
        uvicorn.run(app, host=srvr_addr, port=srvr_port,
                    ssl_keyfile=ssl_keyfile,
                    ssl_certfile=ssl_certfile)