def setup(use=None, clargs=None):
    if clargs is None:
        logging.debug("loading configuration from command line")
        parser = argparse.ArgumentParser()
        parser.add_argument("-k", dest="insecure", action="store_true")
        parser.add_argument("-r", dest="readjson", action="store_true")
        parser.add_argument("-m", dest="metadata", action="store_true")
        parser.add_argument("-j", dest="json", action="store_true")
        parser.add_argument("-g", dest="github", action="store_true")
        parser.add_argument("-o", dest="outputfile")
        parser.add_argument(dest="configdir")
        clargs = parser.parse_args()
        for attr in dir(clargs):
            if not attr.startswith("_"):
                logging.debug(" option clargs.%s = %s" % (attr, getattr(clargs, attr)))
        conf_source_uri = "file://" + os.path.abspath(clargs.configdir)
    elif clargs.github:
        logging.debug("loading configuration from github repo")
        # make sure we actually ask github and that the repo string is clean
        github_repo = clargs.configdir
        github_repo = github_repo.replace("git@github.com:", "")
        github_repo = github_repo.replace("https://github.com/", "")
        if not re.match(r"^[a-z0-9\-./]*$", github_repo):
            raise Exception("suspicious characters in github repository name: " + github_repo)
        repo_url = "https://github.com/{}".format(github_repo)
        logging.debug("github repo/branch is %s/%s" % (repo_url, clargs.repobranch))
        clargs.configdir = tempfile.mkdtemp()
        try:
            # when started from PyCharm, GIT_PYTHON_GIT_EXECUTABLE may need to be set
            if clargs.repobranch:
                repo = git.Repo.clone_from(repo_url, clargs.configdir, branch=clargs.repobranch)
            else:
                repo = git.Repo.clone_from(repo_url, clargs.configdir)
        except Exception as e:
            logger.info("Failed to clone github repo " + github_repo)
        if not os.path.exists(clargs.configdir):
            raise Exception("could not create config dir from github repo {}".format(github_repo))
        clargs.readjson = True
        conf_source_uri = repo_url

    logging.debug("Configuration source: " + conf_source_uri)

    if clargs.readjson:
        json_file = os.path.join(clargs.configdir, "generated", "config.json")
        try:
            with open(json_file) as fp:
                data = json.load(fp)
            logging.debug("JSON configuration read from " + json_file)
        except Exception as e:
            e = configloader.ConfigFileNotReadable(e.errno, e.strerror, e.filename)
            # NB: the bare raise re-raises the *original* exception (e.g. FileNotFoundError),
            # which the web front-end catches; the wrapped exception above is not raised.
            raise
            # configloader.exit_on_mandatory_config_file(e) - does not work in web mode
        CONF = JsonConfig(data, clargs.configdir)
    else:
        loader = configloader.ConfigLoader(clargs.configdir)
        try:
            CONF = loader.conf_CONF()
            logging.debug("Python configuration read from " + clargs.configdir)
        except configloader.ConfigFileNotReadable as e:
            configloader.exit_on_mandatory_config_file(e)

    # support for YAML config files
    # try:
    #     with open(clargs.toolconf, 'r') as fd:
    #         conf = yaml.safe_load(fd)
    # except FileNotFoundError as e:
    #     raise Exception('unable to open tool configuration file: cwd=' + os.getcwd() + ', ' + str(e))
    # try:
    #     for yf in conf['flows']:
    #         fdef = load_flows(fdef, yf, use)
    # except KeyError:
    #     pass  # TODO: is it really OK not to have any flows?
    setattr(CONF, "SOURCE_URI", conf_source_uri)

    flow_definitions = {"Flows": {}, "Order": [], "Desc": {}}
    for flow_file in CONF.FLOWS:
        logging.debug("Loading test flow from " + flow_file)
        flow_definitions = load_flows(flow_definitions, flow_file, use)

    # Filter flows based on profile
    keep = []
    for key, val in flow_definitions["Flows"].items():
        for p in CONF.FLOWS_PROFILES:
            if p in val["profiles"]:
                keep.append(key)
    for key in list(flow_definitions["Flows"].keys()):
        if key not in keep:
            del flow_definitions["Flows"][key]

    spconf = copy.deepcopy(CONF.CONFIG)
    acnf = list(spconf.values())[0]
    mds = metadata.load(True, acnf, CONF.METADATA, "sp")

    setup_logger(logger)

    ch = []
    # TODO: this code still bows to the idea of having multiple comhandlers. Needs cleanup.
    try:
        if CONF.CONTENT_HANDLER_INTERACTION:
            rb = robobrowser.factory(CONF.CONTENT_HANDLER_INTERACTION)
            ch.append(rb)
        comhandler = ComHandler(ch)
        if not CONF.DO_NOT_VALIDATE_TLS:
            comhandler.verify_ssl = False
        comhandler.set_triggers(CONF.CONTENT_HANDLER_TRIGGER)
    except KeyError:
        comhandler = None

    mako_path = mako.__path__[0] + os.sep
    staticfiles_path = staticfiles.__path__[0] + os.sep

    kwargs = {
        "base_url": copy.copy(CONF.BASE),
        "spconf": spconf,
        "flows": flow_definitions["Flows"],
        "order": flow_definitions["Order"],
        "desc": flow_definitions["Desc"],
        "metadata": mds,
        "profile": CONF.FLOWS_PROFILES,
        "msg_factory": saml_message_factory,
        "check_factory": get_check,
        "profile_handler": ProfileHandler,
        "cache": {},
        "map_prof": map_prof,
        "make_entity": make_entity,
        "trace_cls": Trace,
        "conv_args": {"entcat": collect_ec()},
        "com_handler": comhandler,
        "conf": CONF,
        "response_cls": Response,
        "template_root": mako_path,
        "static": staticfiles_path,
    }

    try:
        kwargs["entity_id"] = CONF.ENTITY_ID
    except KeyError:
        kwargs["disco_srv"] = conf["disco_srv"]

    kwargs["insecure"] = CONF.DO_NOT_VALIDATE_TLS

    return clargs, kwargs, CONF
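
# Illustrative sketch only (not part of the original module): setup() can be driven
# without a command line by passing an object that carries the same attributes the
# argparse namespace would have, which is what urlhandler_swconf() below does with an
# ad-hoc type. The repository name and the helper name are hypothetical;
# argparse.Namespace is used here merely as a plain attribute container.
def _example_web_mode_setup():
    import argparse
    web_args = argparse.Namespace(
        github=True,                              # interpret configdir as a GitHub repo
        configdir="example-org/sp-test-config",   # hypothetical repository name
        readjson=True,                            # read generated/config.json from the clone
        repobranch=None,                          # or a specific branch name
    )
    return setup(use="wb", clargs=web_args)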
def urlhandler_swconf(self, environ, local_webenv, session, start_response, webio):
    """
    Switch config by user request URL params:
    ?github=<name of the github repo>&email=<user email>&branch=<repobranch>
    """
    formdata = parse_qs(environ['QUERY_STRING'])
    resp = dict([(k, v[0]) for k, v in formdata.items()])

    try:
        ac_file_name = local_webenv['conf'].ACCESS_CONTROL_FILE
    except Exception as e:
        ac_file_name = None
    if ac_file_name:
        try:
            ac_file = WebUserAccessControlFile(local_webenv['conf'].ACCESS_CONTROL_FILE)
        except Exception as e:
            return webio.sorry_response(local_webenv['base_url'], e,
                                        context='reading ' + local_webenv['conf'].ACCESS_CONTROL_FILE)
        has_access = ac_file.test(resp['github'], resp['email'])
        if not has_access:
            return webio.sorry_response(local_webenv['base_url'], 'permission denied',
                                        context="access checking: email does not match repo user.")

    # reading from github should set readjson, but to be sure ...
    setup_cargs = type('setupcarg', (object,), {
        'github': True,
        'configdir': resp['github'],
        'readjson': True,
    })()
    if 'branch' in resp:
        setattr(setup_cargs, 'repobranch', resp['branch'])
    else:
        setattr(setup_cargs, 'repobranch', None)

    try:
        user_cargs, user_kwargs, user_CONF = setup('wb', setup_cargs)
    except FileNotFoundError as e:
        return webio.sorry_response(local_webenv['base_url'], e,
                                    context="Configuration Setup via URL parameter - "
                                            "trying to read generated/config.json")
    except ConfigError as e:
        errstr = e.error_details_as_string()
        print('Error: {}'.format(errstr))
        return webio.sorry_response(local_webenv['base_url'], errstr,
                                    context="configuration setup",
                                    exception=traceback.format_exc())
    except Exception as e:
        return webio.sorry_response(local_webenv['base_url'], e,
                                    context="Configuration Setup",
                                    exception=traceback.format_exc())

    # pick the config items that the user is allowed to override
    local_webenv['conf'] = user_CONF
    local_webenv['flows'] = user_kwargs['flows']

    # TODO: this would be nicer without the clutter; in other words, setup.py needs refactoring
    local_webenv['entity_id'] = local_webenv['conf'].ENTITY_ID
    local_webenv["insecure"] = local_webenv['conf'].DO_NOT_VALIDATE_TLS
    local_webenv["profile"] = local_webenv['conf'].FLOWS_PROFILES

    import copy
    from saml2test import metadata
    spconf = copy.deepcopy(user_CONF.CONFIG)
    acnf = list(spconf.values())[0]
    mds = metadata.load(True, acnf, user_CONF.METADATA, 'sp')
    local_webenv["metadata"] = mds

    # put the new webenv into the session
    session['webenv'] = local_webenv

    sh = SessionHandler(**local_webenv)
    sh.session_init()
    session['session_info'] = sh
    webio = WebIO(session=sh, **local_webenv)
    webio.environ = environ
    webio.start_response = start_response
    tester = Tester(webio, sh, **local_webenv)
    return tester.display_test_list()
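
# For reference, a self-contained sketch (hypothetical values, not part of the original
# module) of how the query string documented in the docstring above is flattened into
# the single-valued dict that urlhandler_swconf() works with.
def _example_swconf_query():
    from urllib.parse import parse_qs
    qs = "github=example-org/sp-test-config&email=user%40example.org&branch=develop"
    formdata = parse_qs(qs)                      # values come back as lists
    resp = dict((k, v[0]) for k, v in formdata.items())
    # resp == {'github': 'example-org/sp-test-config',
    #          'email': 'user@example.org',
    #          'branch': 'develop'}
    return resp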
def setup(use='cl'):
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', dest="debug", action='store_true')
    parser.add_argument('-D', dest="dump", action='store_true')
    parser.add_argument('-e', dest="entity_id")
    parser.add_argument('-f', dest='flows')
    parser.add_argument('-i', dest="interaction")
    parser.add_argument('-k', dest="insecure", action='store_true')
    parser.add_argument('-l', dest="log_name")
    parser.add_argument('-p', dest="profile", action='append')
    parser.add_argument('-t', dest="testid")
    parser.add_argument('-y', dest='yamlflow', action='append')
    parser.add_argument('-T', dest="target_info")
    parser.add_argument(
        '-c', dest="ca_certs",
        help=("CA certs to use to verify HTTPS server certificates; "
              "if HTTPS is used and no server CA certs are defined then "
              "no cert verification will be done"))
    parser.add_argument(dest="config")
    cargs = parser.parse_args()

    fdef = {'Flows': {}, 'Order': [], 'Desc': []}
    for flow_def in cargs.yamlflow:
        spec = parse_yaml_conf(flow_def)
        fdef['Flows'].update(spec['Flows'])
        for param in ['Order', 'Desc']:
            try:
                fdef[param].extend(spec[param])
            except KeyError:
                pass

    # Filter based on profile
    keep = []
    for key, val in fdef['Flows'].items():
        for p in cargs.profile:
            if p in val['profiles']:
                keep.append(key)
    for key in list(fdef['Flows'].keys()):
        if key not in keep:
            del fdef['Flows'][key]

    CONF = importlib.import_module(cargs.config)
    idpconf = copy.deepcopy(CONF.CONFIG)
    acnf = list(idpconf.values())[0]
    mds = metadata.load(True, acnf, CONF.METADATA, 'idp')

    with open(cargs.target_info, 'r') as stream:
        target_info = yaml.safe_load(stream)

    if cargs.log_name:
        setup_logger(logger, cargs.log_name)
    elif cargs.testid:
        setup_logger(logger, "{}.log".format(cargs.testid))
    else:
        setup_logger(logger)

    kwargs = {
        "base_url": copy.copy(CONF.BASE),
        'idpconf': idpconf,
        "flows": fdef['Flows'],
        "orddesc": fdef['Order'],
        "desc": fdef['Desc'],
        'metadata': mds,
        "profile": cargs.profile,
        "msg_factory": saml_message_factory,
        "check_factory": get_check,
        'ca_certs': cargs.ca_certs,
        "cache": {},
        "entity_id": cargs.entity_id,
        "profile_handler": None,
        'map_prof': None,
        'make_entity': make_entity,
        'trace_cls': Trace,
        'conv_args': {'entcat': collect_ec(), 'target_info': target_info},
    }

    opargs = {}
    if cargs.debug:
        opargs["debug"] = True
    if cargs.dump:
        opargs["dump"] = True

    if cargs.interaction:
        kwargs['interaction_conf'] = importlib.import_module(cargs.interaction).INTERACTION

    if cargs.insecure:
        kwargs["insecure"] = True

    return cargs.testid, kwargs, opargs
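
# Possible refactoring sketch (not applied above): the profile-based filtering is
# duplicated in both setup() variants. A hypothetical helper along these lines could
# replace the two keep/delete loops; it returns a new dict instead of deleting in place.
def filter_flows_by_profile(flows, profiles):
    """Return only the flows whose 'profiles' list mentions at least one requested profile."""
    return {key: val for key, val in flows.items()
            if any(p in val['profiles'] for p in profiles)}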