Example #1
0
    def start(self):
        """Configure this worker's logging (unique + shared files) and run it."""
        from logging import getLogger, FileHandler, Formatter, DEBUG

        self.log = getLogger(self.name)
        self.log.setLevel(DEBUG)

        log_format = Formatter(
            '%(asctime)s [%(process)d:%(threadName)s] %(levelname)-8s %(name)s:  %(message)s'
        )

        def attach(new_handler):
            # Every handler gets the same level and format.
            new_handler.setLevel(DEBUG)
            new_handler.setFormatter(log_format)
            self.log.addHandler(new_handler)

        # Handler for this worker's own log file (single file per worker).
        attach(FileHandler(self.uniquefile, "w", encoding=ENCODING))

        # If you suspect that the diff stuff isn't working, uncomment the next
        # line.  You should see this show up once per-process.
        # self.log.info("Here is a line that should only be in the first output.")

        # Handler for the shared output file used for testing.
        attach(self.getLogHandler(self.sharedfile))

        if self.use_queue:
            # Route all configured loggers through a background queue.
            from concurrent_log_handler.queue import setup_logging_queues
            setup_logging_queues()

        # If this ever becomes a real "Thread", then remove this line:
        self.run()
Example #2
0
LOCALE_PATHS = [os.path.join(BASE_DIR, "locale")]

TIME_ZONE = os.getenv("PAPERLESS_TIME_ZONE", "UTC")

USE_I18N = True

USE_L10N = True

USE_TZ = True

###############################################################################
# Logging                                                                     #
###############################################################################

# Convert all configured loggers to use a background thread so logging
# does not block the worker threads.
setup_logging_queues()

os.makedirs(LOGGING_DIR, exist_ok=True)

# Cast to int: os.getenv returns a *string* whenever the variable is set in
# the environment, but an int when the default is used. The rotating-file
# handler configuration (maxBytes / backupCount) needs integers either way.
LOGROTATE_MAX_SIZE = int(os.getenv("PAPERLESS_LOGROTATE_MAX_SIZE", 1024 * 1024))
LOGROTATE_MAX_BACKUPS = int(os.getenv("PAPERLESS_LOGROTATE_MAX_BACKUPS", 20))
LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    'formatters': {
        'verbose': {
            'format': '[{asctime}] [{levelname}] [{name}] {message}',
            'style': '{',
        },
        'simple': {
def main():
    """Entry point for openvpn-auth-azure-ad.

    Builds the combined CLI / environment / config-file option parser,
    configures logging (routed through a background queue), optionally starts
    a Prometheus metrics endpoint, and hands control to AADAuthenticator.run().
    """
    parser = configargparse.ArgParser(default_config_files=[
        "/etc/openvpn-auth-azure-ad/config.conf",
        "~/.openvpn-auth-azure-ad",
    ])

    parser.add_argument(
        "-c",
        "--config",
        is_config_file=True,
        help="path of config file",
        env_var="AAD_CONFIG_PATH",
    )
    parser.add_argument(
        "-V",
        "--version",
        action="version",
        version="%(prog)s {version}".format(version=__version__),
    )
    parser.add_argument(
        "-t",
        "--threads",
        default=10,
        env_var="AAD_THREAD_COUNT",
        help="Amount of threads to handle authentication",
        type=int,
    )

    parser_authentication = parser.add_argument_group(
        "OpenVPN User Authentication")
    parser_authentication.add_argument(
        "-a",
        "--authenticators",
        default=AADAuthenticatorFlows.DEVICE_TOKEN,
        help=
        "Enable authenticators. Multiple authenticators can be separated with comma",
        env_var="AAD_AUTHENTICATORS",
    )
    parser_authentication.add_argument(
        "--auth-token",
        action="store_true",
        help="Use auth token to re-authenticate clients",
        env_var="AAD_AUTH_TOKEN",
    )
    # Option name "--auth-token-livetime" is kept for backward compatibility;
    # only the help text is spelled correctly.
    parser_authentication.add_argument(
        "--auth-token-livetime",
        type=int,
        default=86400,
        help="Lifetime of auth tokens in seconds",
        env_var="AAD_AUTH_TOKEN_LIFETIME",
    )
    parser_authentication.add_argument(
        "--remember-user",
        action="store_true",
        help=
        "If user authenticated once, the users refresh token is used to reauthenticate silently if possible.",
        env_var="AAD_REMEMBER_USER",
    )
    parser_authentication.add_argument(
        "--verify-common-name",
        action="store_true",
        help="Check if common_name matches Azure AD UPN",
        env_var="AAD_VERIFY_COMMON_NAME",
    )

    parser_openvpn = parser.add_argument_group(
        "OpenVPN Management Interface settings")
    parser_openvpn.add_argument(
        "-H",
        "--ovpn-host",
        help="Host of OpenVPN management interface.",
        env_var="AAD_OVPN_HOST",
    )
    parser_openvpn.add_argument(
        "-P",
        "--ovpn-port",
        help="Port of OpenVPN management interface.",
        env_var="AAD_OVPN_PORT",
        type=int,
    )
    parser_openvpn.add_argument(
        "-s",
        "--ovpn-socket",
        help="Path of socket or OpenVPN management interface.",
        env_var="AAD_OVPN_SOCKET_PATH",
    )
    parser_openvpn.add_argument(
        "-p",
        "--ovpn-password",
        help="Password for OpenVPN management interface.",
        env_var="AAD_OVPN_PASSWORD",
    )

    parser_aad = parser.add_argument_group("Azure AD settings")
    parser_aad.add_argument(
        "--client-id",
        required=True,
        help="Client ID of application.",
        env_var="AAD_CLIENT_ID",
    )
    # NOTE(review): the default also consults a bare "authority" environment
    # variable in addition to AAD_TOKEN_AUTHORITY — presumably intentional for
    # legacy deployments; confirm before removing.
    parser_aad.add_argument(
        "--token-authority",
        default=os.environ.get(
            "authority",
            default="https://login.microsoftonline.com/organizations"),
        env_var="AAD_TOKEN_AUTHORITY",
        help=
        "A URL that identifies a token authority. It should be of the format "
        "https://login.microsoftonline.com/your_tenant. By default, we will use "
        "https://login.microsoftonline.com/organizations",
    )
    parser_aad.add_argument(
        "--graph-endpoint",
        default="https://graph.microsoft.com/v1.0/",
        env_var="AAD_GRAPH_ENDPOINT",
        help="Endpoint of the graph API. See: "
        "https://developer.microsoft.com/en-us/graph/graph-explorer",
    )

    parser_prometheus = parser.add_argument_group("Prometheus settings")
    parser_prometheus.add_argument(
        "--prometheus",
        action="store_true",
        env_var="AAD_PROMETHEUS_ENABLED",
        help="Enable prometheus statistics",
    )
    parser_prometheus.add_argument(
        "--prometheus-listen-addr",
        env_var="AAD_PROMETHEUS_LISTEN_HOST",
        default="",
        help="prometheus listen addr",
    )
    parser_prometheus.add_argument(
        "--prometheus-listen-port",
        type=int,
        env_var="AAD_PROMETHEUS_PORT",
        help="Listen port for prometheus statistics",
        default=9723,
    )
    parser_prometheus.add_argument(
        "--log-level",
        default=logging.INFO,
        type=lambda x: getattr(logging, x),
        env_var="AAD_LOG_LEVEL",
        help="Configure the logging level.",
    )

    options = parser.parse_args()

    # convert all configured loggers to use a background thread
    setup_logging_queues()

    logging.basicConfig(level=options.log_level,
                        format="%(asctime)s %(levelname)s %(message)s")

    if options.prometheus:
        # Expose metrics plus a static version info gauge.
        start_http_server(options.prometheus_listen_port,
                          options.prometheus_listen_addr)
        i = Info("openvpn_auth_azure_ad_version",
                 "info of openvpn-auth-azure-ad")
        i.info({"version": __version__})

    app = msal.PublicClientApplication(options.client_id,
                                       authority=options.token_authority)

    # Blocks until the authenticator shuts down.
    authenticator = AADAuthenticator(
        app,
        options.graph_endpoint,
        options.authenticators,
        options.verify_common_name,
        options.auth_token,
        options.auth_token_livetime,
        options.remember_user,
        options.threads,
        options.ovpn_host,
        options.ovpn_port,
        options.ovpn_socket,
        options.ovpn_password,
    )

    authenticator.run()
def my_logging_setup(log_name='example.log', use_async=False):
    """Configure logging from a Python dict (dictConfig) with a concurrent handler.

    An outside .conf text file can be used instead; see
    ConcurrentLogHandler/README.md.

    For async logging, call this after your main logging setup:
    concurrent_log_handler.queue.setup_logging_queues()
    (done here automatically when use_async is True).
    """
    # Importing installs logging.handlers.ConcurrentRotatingFileHandler.
    # The noinspection thing is so PyCharm doesn't think we're using this for no reason
    # noinspection PyUnresolvedReferences
    import concurrent_log_handler

    formatters = {
        'default': {
            'format': '%(asctime)s %(levelname)s %(name)s %(message)s'
        },
        'example2': {
            'format':
            '[%(asctime)s][%(levelname)s][%(filename)s:%(lineno)s]'
            '[%(process)d][%(message)s]',
        },
    }

    # The concurrent handler: one of these is needed per unique file.
    concurrent_handler = {
        'level': 'DEBUG',
        'class': 'concurrent_log_handler.ConcurrentRotatingFileHandler',

        # Custom format for this log; 'default' would work too.
        'formatter': 'example2',
        'filename': log_name,

        # Optional: 'owner': ['greenfrog', 'admin'] would set file owner/group.

        # Owner and group read+write permissions on the log file.
        'chmod': 0o0660,

        # Abnormally small to make rollover easy to demonstrate;
        # a more reasonable value might be 10 MiB (10485760).
        'maxBytes': 240,

        # Number of rollover files to keep.
        'backupCount': 10,

        # 'use_gzip': True,
    }

    logging.config.dictConfig({
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': formatters,
        'handlers': {
            'my_concurrent_log': concurrent_handler,
        },
        # Route the root logger through the concurrent handler.
        'root': {
            'handlers': ['my_concurrent_log'],
            'level': 'DEBUG',
        },
    })

    if use_async:
        # Enable the background logging queue near the end of setup.
        from concurrent_log_handler.queue import setup_logging_queues
        setup_logging_queues()