Example #1
from twisted.logger import (FilteringLogObserver, LegacyLogObserverWrapper,
                            LogLevel, LogLevelFilterPredicate)


def make_wrapped_observer(observer, log_level_name):
    log_level = LogLevel.lookupByName(log_level_name.lower())
    observer = LegacyLogObserverWrapper(observer.emit)
    observer = wrap_observer(observer)
    predicate = LogLevelFilterPredicate(defaultLogLevel=log_level)
    observer = FilteringLogObserver(observer, [predicate])
    return observer
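
A brief usage sketch (assumptions not shown above: wrap_observer is a project-specific helper defined elsewhere, and the observer argument is a legacy twisted.python.log-style observer exposing .emit(), such as FileLogObserver). The wrapped, level-filtered observer can then be attached to the global publisher:

import sys

from twisted.logger import globalLogPublisher
from twisted.python.log import FileLogObserver

legacy = FileLogObserver(sys.stdout)              # legacy observer with an .emit() method
observer = make_wrapped_observer(legacy, "INFO")  # level name is lowercased before lookup
globalLogPublisher.addObserver(observer)
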
Example #2
    def addObserver(self, other):
        """
        Add a new observer.

        @type other: Provider of L{ILogObserver}
        @param other: A callable object that will be called with each new log
            message (a dict).
        """
        wrapped = LegacyLogObserverWrapper(other)
        self._legacyObservers.append(wrapped)
        self._observerPublisher.addObserver(wrapped)
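
A short usage sketch, where publisher stands in for an instance of the surrounding class (which is not shown here); any callable that accepts a legacy event dict can be registered:

def print_observer(event_dict):
    # Legacy-style observer: receives the old twisted.python.log event dict.
    print(event_dict.get("system"), event_dict.get("message"))

publisher.addObserver(print_observer)
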
Example #3
    def _startLogging(self, other, setStdout):
        """
        Begin logging to the L{LogBeginner} associated with this
        L{LogPublisher}.

        @param other: the observer to log to.
        @type other: Provider of L{ILogObserver}

        @param setStdout: if true, send standard I/O to the observer as well.
        @type setStdout: L{bool}
        """
        wrapped = LegacyLogObserverWrapper(other)
        self._legacyObservers.append(wrapped)
        self._logBeginner.beginLoggingTo([wrapped], True, setStdout)
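
For comparison, a rough sketch of the same wiring done directly against the public twisted.logger API, using globalLogBeginner here (the method above uses whichever LogBeginner its publisher was constructed with):

import sys

from twisted.logger import LegacyLogObserverWrapper, globalLogBeginner
from twisted.python.log import FileLogObserver

wrapped = LegacyLogObserverWrapper(FileLogObserver(sys.stderr).emit)
# redirectStandardIO=True plays the role of setStdout=True above.
globalLogBeginner.beginLoggingTo([wrapped], redirectStandardIO=True)
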
Example #4
def make_legacy_daily_logfile_observer(path, logoutputlevel):
    """
    Make a L{DefaultSystemFileLogObserver}.
    """
    import os  # used below for os.path.join() and os.getpid()

    from crossbar.twisted.processutil import DefaultSystemFileLogObserver
    from twisted.logger import LegacyLogObserverWrapper, LogLevel
    from twisted.python.logfile import DailyLogFile

    logfd = DailyLogFile.fromFullPath(os.path.join(path, 'node.log'))
    flo = LegacyLogObserverWrapper(
        DefaultSystemFileLogObserver(logfd,
                                     system="{:<10} {:>6}".format(
                                         "Controller", os.getpid())).emit)

    def _log(event):

        level = event["log_level"]

        if logoutputlevel == "none":
            return
        elif logoutputlevel == "quiet":
            # Quiet: Only print warnings and errors to stderr.
            if level not in (LogLevel.warn, LogLevel.error, LogLevel.critical):
                return
        elif logoutputlevel == "standard":
            # Standard: For users of Crossbar
            if level not in (LogLevel.info, LogLevel.warn, LogLevel.error,
                             LogLevel.critical):
                return
        elif logoutputlevel == "verbose":
            # Verbose: for developers
            # Adds the class source.
            if event.get("cb_level") == "trace":
                return
        elif logoutputlevel == "trace":
            # Trace: for developers
            # Adds "trace" output
            pass
        else:
            assert False, "Shouldn't ever get here."

        # Forward the event
        flo(event)

    return _log
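
A usage sketch with illustrative arguments (the path and output level below are made up, not taken from the snippet); the returned _log callable is itself an observer and can be attached to the global publisher:

from twisted.logger import globalLogPublisher

observer = make_legacy_daily_logfile_observer("/var/log/crossbar", "standard")
globalLogPublisher.addObserver(observer)
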
Example #5
def get_observers(main_config, twistd_user, log_group):
    log_debug = main_config.get_bool("debug", False)
    log_to_file = main_config.get_bool("log_file", False)
    log_stdout = main_config.get_bool("log_stdout", False)
    log_syslog = main_config.get_bool("log_syslog", False)
    log_auth_events = main_config.get_bool("log_auth_events", False)
    log_sso_events = main_config.get_bool("log_sso_events", True)

    # Log to file if nothing else is turned on
    log_to_file = log_to_file or not (log_to_file or log_syslog or log_stdout)

    log_dir = main_config.get_str("log_dir", "log")
    log_max_size = main_config.get_int("log_max_size", 10 * (1 << 20))
    log_max_files = main_config.get_int("log_max_files", 6)
    if log_max_files == 0:
        # we need to pass None explicitly if we want there to be no limit
        # 0 would just mean no logfiles would get kept...
        log_max_files = None

    observers = []
    if log_to_file:
        log_txt = create_log_file(
            "authproxy.log",
            log_dir,
            log_max_size,
            log_max_files,
            twistd_user,
            log_group,
        )
        text_observer = textFileLogObserver(log_txt)
        text_filter = FilteringLogObserver(text_observer, [only_default_log_predicate])
        observers.append(text_filter)

    if log_stdout:
        stdout_observer = textFileLogObserver(sys.stdout)
        filtered_stdout = FilteringLogObserver(
            stdout_observer, [only_default_log_predicate]
        )
        observers.append(filtered_stdout)

    if log_syslog:
        if syslog is None:
            raise config_error.ConfigError("syslog not supported on Windows")

        facility_dict = {
            "LOG_KERN": pySyslog.LOG_KERN,
            "LOG_USER": pySyslog.LOG_USER,
            "LOG_MAIL": pySyslog.LOG_MAIL,
            "LOG_DAEMON": pySyslog.LOG_DAEMON,
            "LOG_AUTH": pySyslog.LOG_AUTH,
            "LOG_LPR": pySyslog.LOG_LPR,
            "LOG_NEWS": pySyslog.LOG_NEWS,
            "LOG_UUCP": pySyslog.LOG_UUCP,
            "LOG_CRON": pySyslog.LOG_CRON,
            "LOG_SYSLOG": pySyslog.LOG_SYSLOG,
            "LOG_LOCAL0": pySyslog.LOG_LOCAL0,
            "LOG_LOCAL1": pySyslog.LOG_LOCAL1,
            "LOG_LOCAL2": pySyslog.LOG_LOCAL2,
            "LOG_LOCAL3": pySyslog.LOG_LOCAL3,
            "LOG_LOCAL4": pySyslog.LOG_LOCAL4,
            "LOG_LOCAL5": pySyslog.LOG_LOCAL5,
            "LOG_LOCAL6": pySyslog.LOG_LOCAL6,
            "LOG_LOCAL7": pySyslog.LOG_LOCAL7,
        }
        syslog_facilitystr = main_config.get_str("syslog_facility", "LOG_USER")
        syslog_facility = facility_dict.get(syslog_facilitystr, None)
        if syslog_facility is None:
            raise config_error.ConfigError(
                "Unknown syslog_facility: {0}".format(syslog_facilitystr)
            )

        syslog_observer = syslog.SyslogObserver("Authproxy", facility=syslog_facility)
        wrapped_syslog_observer = LegacyLogObserverWrapper(syslog_observer.emit)
        syslog_filtering_observer = FilteringLogObserver(
            wrapped_syslog_observer, [only_default_log_predicate],
        )
        observers.append(syslog_filtering_observer)
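
    # In debug mode, re-wrap each observer so that events at debug level and
    # above are admitted by an explicit level predicate.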

    if log_debug:
        debug_predicate = LogLevelFilterPredicate(LogLevel.debug)
        for i, observer in enumerate(observers):
            observers[i] = FilteringLogObserver(observer, [debug_predicate])

    if log_auth_events:
        auth_log_file = create_log_file(
            "authevents.log",
            log_dir,
            log_max_size,
            log_max_files,
            twistd_user,
            log_group,
        )
        auth_observer = jsonFileLogObserver(auth_log_file, "")
        observers.append(FilteringLogObserver(auth_observer, [auth_type_predicate]))

    if log_sso_events:
        sso_log_file = create_log_file(
            "ssoevents.log",
            log_dir,
            log_max_size,
            log_max_files,
            twistd_user,
            log_group,
        )
        sso_observer = jsonFileLogObserver(sso_log_file, "")
        observers.append(FilteringLogObserver(sso_observer, [sso_type_predicate]))

    return observers
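
A hedged sketch of how the returned observers might be activated (the surrounding wiring, including the twisted.logger imports the function relies on, is not shown; config stands in for the parsed main-section configuration the function expects):

from twisted.logger import globalLogBeginner

observers = get_observers(config, twistd_user=None, log_group=None)
globalLogBeginner.beginLoggingTo(observers)
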
Example #6
def create_application(args=None, twistd_user=None, log_group=None):
    home_dir = util.get_home_dir()
    os.chdir(home_dir)
    is_logging_insecure = False

    if syslog is not None:
        facility_dict = {
            'LOG_KERN': pySyslog.LOG_KERN,
            'LOG_USER': pySyslog.LOG_USER,
            'LOG_MAIL': pySyslog.LOG_MAIL,
            'LOG_DAEMON': pySyslog.LOG_DAEMON,
            'LOG_AUTH': pySyslog.LOG_AUTH,
            'LOG_LPR': pySyslog.LOG_LPR,
            'LOG_NEWS': pySyslog.LOG_NEWS,
            'LOG_UUCP': pySyslog.LOG_UUCP,
            'LOG_CRON': pySyslog.LOG_CRON,
            'LOG_SYSLOG': pySyslog.LOG_SYSLOG,
            'LOG_LOCAL0': pySyslog.LOG_LOCAL0,
            'LOG_LOCAL1': pySyslog.LOG_LOCAL1,
            'LOG_LOCAL2': pySyslog.LOG_LOCAL2,
            'LOG_LOCAL3': pySyslog.LOG_LOCAL3,
            'LOG_LOCAL4': pySyslog.LOG_LOCAL4,
            'LOG_LOCAL5': pySyslog.LOG_LOCAL5,
            'LOG_LOCAL6': pySyslog.LOG_LOCAL6,
            'LOG_LOCAL7': pySyslog.LOG_LOCAL7
        }

    # parse command-line args, if appropriate
    primary_only_time = None
    if args:
        option_parser = argparse.ArgumentParser()

        option_parser.add_argument(
            "--primary-only", type=int, nargs='?',
            help="This option disables secondary authentication for the specified number of minutes (default 60)",
            default=None, const=60
        )
        option_parser.add_argument(
            "--logging-insecure",
            action="store_true",
            help="This option enables debug, and prints logs containing passwords and possibly other secrets.",
            default=False
        )
        options = option_parser.parse_args()
        is_logging_insecure = options.logging_insecure
        primary_only_time = options.primary_only

    config_filename = os.path.join('conf', 'authproxy.cfg')
    configuration = config_provider.get_config(config_filename)

    if primary_only_time is not None:
        if primary_only_time > 240:
            print("Primary only mode can only be enabled for a maximum of 4 hours (240 minutes)")
            sys.exit(2)
        else:
            PrimaryOnlyManager.enable_primary_only(primary_only_time)

    main_config = configuration.get_main_section_config()
    if main_config:
        log.msg('Main Configuration:')
        log.config(main_config)

    fips_mode = main_config.get_bool('fips_mode', False)
    if fips_mode:
        fips_manager.enable()

    # handle log configuration
    log_to_file = main_config.get_bool('log_file', False)
    log_stdout = main_config.get_bool('log_stdout', False)
    log_syslog = main_config.get_bool('log_syslog', False)
    log_auth_events = main_config.get_bool('log_auth_events', False)
    log_sso_events = main_config.get_bool('log_sso_events', True)
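
    # When insecure logging is requested, everything goes unfiltered to stdout;
    # otherwise build filtered file/stdout/syslog observers as configured below.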

    if is_logging_insecure:
        globalLogPublisher.addObserver(textFileLogObserver(sys.stdout))
    else:
        if log_to_file or not (log_to_file or log_syslog or log_stdout):
            log_dir = main_config.get_str('log_dir', 'log')
            log_max_size = main_config.get_int('log_max_size', 10 * (1 << 20))
            log_max_files = main_config.get_int('log_max_files', 6)
            if log_max_files == 0:
                # we need to pass None explicitly if we want there to be no limit
                # 0 would just mean no logfiles would get kept...
                log_max_files = None

            log_file = create_log_file('authproxy.log', log_dir, log_max_size,
                                       log_max_files, twistd_user, log_group)
            log_file_observer = textFileLogObserver(log_file)

            if log_auth_events:
                auth_log_file = create_log_file('authevents.log', log_dir, log_max_size, log_max_files, twistd_user, log_group)
                auth_observer = FileLogObserver(auth_log_file, log.format_auth_event)
            else:
                auth_observer = log.no_op_observer

            if log_sso_events:
                sso_log_file = create_log_file('ssoevents.log', log_dir, log_max_size, log_max_files, twistd_user, log_group)
                sso_observer = FileLogObserver(sso_log_file, log.format_sso_event)
            else:
                sso_observer = log.no_op_observer

            auth_filtering_observer = FilteringLogObserver(auth_observer,
                                                           [log.auth_type_predicate],
                                                           log.no_op_observer)

            globalLogPublisher.addObserver(auth_filtering_observer)

            sso_filtering_observer = FilteringLogObserver(sso_observer,
                                                          [log.sso_type_predicate],
                                                          log.no_op_observer)

            globalLogPublisher.addObserver(sso_filtering_observer)

            # the default authproxy.log
            log_file_observer = FilteringLogObserver(log_file_observer,
                                                     [log.only_default_log_predicate],
                                                     log.no_op_observer)

            globalLogPublisher.addObserver(log_file_observer)

        if log_stdout:
            std_out_observer = textFileLogObserver(sys.stdout)
            std_out_filter = FilteringLogObserver(log.no_op_observer,
                                                  [log.auth_type_predicate],
                                                  std_out_observer)
            globalLogPublisher.addObserver(std_out_filter)
        if log_syslog:
            if syslog is None:
                raise config_error.ConfigError('syslog not supported on Windows')
            syslog_facilitystr = main_config.get_str('syslog_facility', 'LOG_USER')
            syslog_facility = facility_dict.get(syslog_facilitystr, None)
            if syslog_facility is None:
                raise config_error.ConfigError('Unknown syslog_facility: {0}'.format(syslog_facilitystr))
            syslog_observer = syslog.SyslogObserver('Authproxy', facility=syslog_facility)
            wrapped_syslog_observer = LegacyLogObserverWrapper(syslog_observer.emit)
            syslog_filtering_observer = FilteringLogObserver(log.no_op_observer,
                                                             [log.auth_type_predicate],
                                                             wrapped_syslog_observer)
            globalLogPublisher.addObserver(syslog_filtering_observer)

    # Global debug mode
    if is_logging_insecure:
        debug_mode = True
    else:
        debug_mode = main_config.get_bool('debug', False)

    http.set_debug(debug_mode)
    http.set_is_logging_insecure(is_logging_insecure)

    # Create main application.
    application = Application('duoauthproxy')
    LogReadyService().setServiceParent(application)

    fips_mode = fips_manager.status()
    if fips_mode:
        log.msg("FIPS mode {0} is enabled with {1}".format(fips_mode, fips_manager.get_openssl_version()))
    else:
        log.msg("FIPS mode is not enabled")

    # get ca certs file
    http_ca_certs_file = main_config.get_str('http_ca_certs_file', '')
    if http_ca_certs_file:
        http_ca_certs_file = util.resolve_file_path(http_ca_certs_file)
    else:
        http_ca_certs_file = os.path.join('conf', const.DEFAULT_HTTP_CERTS_FILE)

    # read ca certs
    if not os.path.isfile(http_ca_certs_file):
        http_ca_certs_file = os.path.join('conf', http_ca_certs_file)
    with open(http_ca_certs_file, 'r') as bundle_fp:
        http.set_ca_certs(ssl_verify.load_ca_bundle(bundle_fp))

    # get proxy settings
    http_proxy_host = main_config.get_str('http_proxy_host', '')
    http_proxy_port = main_config.get_int('http_proxy_port', 80)
    if http_proxy_host:
        http.set_proxy(http_proxy_host, http_proxy_port)

    sections = section.parse_sections(configuration, is_logging_insecure)
    module_factory = section.ModuleFactory(sections, application)
    modules_by_type = module_factory.make_modules()

    if not any(modules_by_type.values()):
        raise config_error.ConfigError('No integrations in config file.')

    # Setup forwarding/server pairs by port
    for port, interface in modules_by_type.get('server', []):
        server_networks = {}
        server_names = {}
        for section_name, server_module, server_config in modules_by_type['server'][(port, interface)]:
            client_name = configuration.get_section_client(section_name)

            if not client_name:
                if server_module.Module.no_client:
                    modules_by_type['client']['no_client'] = None
                    client_name = 'no_client'
                else:
                    raise config_error.ConfigError('Neither module %s nor main has a "client" value' % section_name)

            if section_name.startswith('ldap_server_auto') and not client_name.startswith('ad_client'):
                raise config_error.ConfigError('ad_client is required by ldap_server_auto. No ad_client found in config file. ')

            if client_name != 'radius_client' \
                    and server_config.get_str('pass_through_attr_names', ''):
                raise config_error.ConfigError('Can only pass through radius attributes if using a radius client')
            server_instance = server_module.Module(server_config,
                                                   modules_by_type['client'][client_name],
                                                   section_name)
            server_instance.setServiceParent(application)

            if section_name.startswith('radius_server_'):
                server_networks[server_instance] = parse_radius_secrets(server_config).keys()
                server_names[server_instance] = section_name

        if server_names:
            forward_module = forward_serv
            forward_instance = forward_module.Module(
                port=port,
                servers=server_networks,
                server_names=server_names,
                interface=interface,
                debug=debug_mode,
            )
            forward_instance.setServiceParent(application)

    # set user-agent
    sections = ','.join(sorted(set(configuration.list_sections())))
    user_agent = "duoauthproxy/{0} ({1}; Python{2}; {3})".format(
        get_version(),
        platform.platform(),
        platform.python_version(),
        sections)
    http.set_user_agent(user_agent)

    # Authproxy uses globalLogPublisher to emit events. Defining a no-op emitter
    # squelches the creation of the unwanted twistd default logging mechanisms.
    def no_op_emitter(eventDict):
        pass

    application.setComponent(ILogObserver, no_op_emitter)

    return application
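
A minimal sketch of how such an application factory is typically consumed from a .tac file run under "twistd -noy" (the file name and import path below are assumptions, not taken from the snippet):

# authproxy.tac  (hypothetical; the import path is assumed)
from duoauthproxy.proxy import create_application

# twistd looks for a module-level variable named "application".
application = create_application()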