Example #1
0
    def testCliLogging(self):
        """CLI initializes logging."""
        init_logging({"verbose": 3, "enable_loggers": ["test"]})

        # Map each logger's display name to its instance; insertion order
        # mirrors the original emit order (base, enabled, disabled).
        loggers = {
            "_baseLogger": logging.getLogger(BASE_LOGGER_NAME),
            "_enabledLogger": get_module_logger("test"),
            "_disabledLogger": get_module_logger("test2"),
        }

        # Emit one record per level on every logger.
        for logger_name, logger in loggers.items():
            for level in ("debug", "info", "warning", "error"):
                getattr(logger, level)("{}.{}".format(logger_name, level))

        rootOutput, baseOutput = self.getLogOutput()
        # Printed for debugging, when test fails:
        print("ROOT OUTPUT:\n'{}'\nBASE OUTPUT:\n'{}'".format(
            rootOutput, baseOutput))

        # init_logging() removes all other handlers
        assert rootOutput == ""
        assert baseOutput == ""
Example #2
0
    def testCliLogging(self):
        """CLI initializes logging.

        Emits records at every level on the base logger, an enabled module
        logger, and a disabled module logger, then asserts that neither the
        root nor the base capture buffer received any output (init_logging()
        removes all other handlers).
        """
        # Only the "test" module logger is explicitly enabled.
        # NOTE(review): the meaning of verbose=3 is defined by
        # init_logging() — confirm the level it maps to.
        config = {"verbose": 3, "enable_loggers": ["test"]}
        init_logging(config)

        _baseLogger = logging.getLogger(BASE_LOGGER_NAME)
        _enabledLogger = get_module_logger("test")  # listed in enable_loggers
        _disabledLogger = get_module_logger("test2")  # not enabled

        # One record per level on the base logger ...
        _baseLogger.debug("_baseLogger.debug")
        _baseLogger.info("_baseLogger.info")
        _baseLogger.warning("_baseLogger.warning")
        _baseLogger.error("_baseLogger.error")

        # ... on the enabled module logger ...
        _enabledLogger.debug("_enabledLogger.debug")
        _enabledLogger.info("_enabledLogger.info")
        _enabledLogger.warning("_enabledLogger.warning")
        _enabledLogger.error("_enabledLogger.error")

        # ... and on the disabled module logger.
        _disabledLogger.debug("_disabledLogger.debug")
        _disabledLogger.info("_disabledLogger.info")
        _disabledLogger.warning("_disabledLogger.warning")
        _disabledLogger.error("_disabledLogger.error")

        rootOutput, baseOutput = self.getLogOutput()
        # Printed for debugging, when test fails:
        print("ROOT OUTPUT:\n'{}'\nBASE OUTPUT:\n'{}'".format(rootOutput, baseOutput))

        # init_logging() removes all other handlers
        assert rootOutput == ""
        assert baseOutput == ""
Example #3
0
from wsgidav import compat, util
from wsgidav.dav_error import (
    DAVError,
    HTTP_FORBIDDEN,
    HTTP_INTERNAL_ERROR,
    PRECONDITION_CODE_ProtectedProperty,
)
from wsgidav.dav_provider import DAVCollection, DAVNonCollection, DAVProvider
from wsgidav.util import join_uri

import os
import stat

# Docstring markup convention for this module.
__docformat__ = "reStructuredText en"

# Module-level logger for this provider module.
_logger = util.get_module_logger(__name__)

# Chunk size in bytes — presumably used when streaming file content;
# confirm at the read/write call sites.
BUFFER_SIZE = 8192

# ============================================================================
# Fake hierarchical repository
# ============================================================================
"""
This is a dummy 'database', that serves as an example source for the
VirtualResourceProvider.

All files listed in resPathList are expected to exist in FILE_FOLDER.
"""
# Local folder that backs the example file resources (Windows path).
FILE_FOLDER = r"c:\temp\virtfiles"
_resourceData = [
Example #4
0
from wsgidav.lock_manager import (
    generate_lock_token,
    lock_string,
    normalize_lock_root,
    validate_lock,
)
from wsgidav.rw_lock import ReadWriteLock

import os
import shelve
import time


# Docstring markup convention for this module.
__docformat__ = "reStructuredText"

# Module-level logger (``util`` is imported elsewhere in this file).
_logger = util.get_module_logger(__name__)

# TODO: comments from Ian Bicking (2005)
# @@: Use of shelve means this is only really useful in a threaded environment.
#    And if you have just a single-process threaded environment, you could get
#    nearly the same effect with a dictionary of threading.Lock() objects.  Of course,
#    it would be better to move off shelve anyway, probably to a system with
#    a directory of per-file locks, using the file locking primitives (which,
#    sadly, are not quite portable).
# @@: It would probably be easy to store the properties as pickle objects
# in a parallel directory structure to the files you are describing.
# Pickle is expedient, but later you could use something more readable
# (pickles aren't particularly readable)


# ========================================================================
Example #5
0
from wsgidav.dc.pam_dc import PAMDomainController
from wsgidav.dav_error import DAVError
from wsgidav.util import get_module_logger
from redis import Redis
from time import time, sleep
from datetime import timedelta
from threading import Thread

_logger = get_module_logger(__name__)


# noinspection PyAbstractClass
class PAMLockoutController(PAMDomainController):
    """PAM domain controller extended with a timed lockout mechanism."""

    @staticmethod
    def get_real_remote_addr(environ: dict) -> str:
        """Return the client address, preferring proxy-forwarded headers.

        Header keys are normalized (lowercased, '-' and ' ' replaced by
        '_'); only string values that look address-like — containing a
        '.' or a ':' — are considered. Falls back to an empty string.
        """
        normalized = {}
        for key, value in environ.items():
            if not isinstance(value, str):
                continue
            if '.' not in value and ':' not in value:
                continue
            normalized[key.lower().replace('-', '_').replace(' ', '_')] = value

        forwarded = normalized.get('http_x_forwarded_for')
        if forwarded is not None:
            # First entry in X-Forwarded-For is the originating client.
            return forwarded.split(',')[0].strip()

        remote = normalized.get('http_remote_addr')
        if remote is not None:
            return remote.strip()

        return normalized.get('remote_addr', '').strip()
Example #6
0
import time
import six
import requests
from os.path import dirname
from wsgidav.dc.base_dc import BaseDomainController
from wsgidav.util import get_module_logger
from wsgidav.dav_error import DAVError, HTTP_FORBIDDEN, HTTP_METHOD_NOT_ALLOWED

_log = get_module_logger("pytho_dc")


def expiring_lru_cache(maxsize=128, timeout=60):

    if six.PY2:
        from repoze.lru import lru_cache as lru_cache_py2
        return lru_cache_py2(maxsize=maxsize, timeout=timeout)

    if six.PY3:
        from functools import lru_cache, wraps

        def py3_lru_cache(func):
            lru_cache(maxsize=maxsize)
            def cachedfunc(*args, **kwds):
                del kwds['_ttl_hash']
                return func(*args, **kwds)

            wraps(func)
            def wrapper(*args, **kwds):
                # the same value within <timeout> time period.
                ttl_hash = int(time.time()) // timeout
                return cachedfunc(*args, _ttl_hash=ttl_hash, **kwds)