Example #1
def sign_response(request, response):
    """
    This decorator is used to sign the response. It adds the nonce from the
    request, if it exists, and adds the nonce and the signature to the
    response.

    .. note:: This only works for JSON responses. So if we fail to decode the
       JSON, we just pass on.

    The usual way to use it is, to wrap the after_request, so that we can also
    sign errors.

    @postrequest(sign_response, request=request)
    def after_request(response):

    :param request: The Request object
    :param response: The Response object
    :return: the (possibly signed) response, or a (response, error_id)
        tuple when a tuple was passed in
    """
    # Signing can be switched off globally in the config
    if current_app.config.get("PI_NO_RESPONSE_SIGN"):
        return response

    priv_file_name = current_app.config.get("PI_AUDIT_KEY_PRIVATE")
    try:
        with open(priv_file_name, 'rb') as priv_file:
            priv_key = priv_file.read()
        sign_object = Sign(priv_key, public_key=None)
    except (IOError, ValueError, TypeError) as e:
        # If the private key cannot be loaded, return the response unsigned
        log.info('Could not load private key from '
                 'file {0!s}: {1!r}!'.format(priv_file_name, e))
        log.debug(traceback.format_exc())
        return response

    request.all_data = get_all_params(request.values, request.data)
    # response can be either a Response object or a Tuple (Response, ErrorID)
    response_value = 200
    response_is_tuple = False
    # isinstance is the idiomatic tuple check; unlike comparing
    # type(...).__name__ it also accepts tuple subclasses
    if isinstance(response, tuple):
        response_is_tuple = True
        response_value = response[1]
        response_object = response[0]
    else:
        response_object = response
    try:
        content = json.loads(response_object.data)
        nonce = request.all_data.get("nonce")
        if nonce:
            # Echo the request nonce so the client can match the response
            content["nonce"] = nonce

        # sort_keys=True gives a canonical serialization, so the signature
        # can be verified independently of dict ordering
        content["signature"] = sign_object.sign(
            json.dumps(content, sort_keys=True))
        response_object.data = json.dumps(content)
    except ValueError:
        # The response.data is no JSON (but CSV or policy export)
        # We do no signing in this case.
        log.info("We only sign JSON response data.")

    if response_is_tuple:
        resp = (response_object, response_value)
    else:
        resp = response_object
    return resp
Example #2
    def __init__(self, config=None):
        """
        Create the SQL audit object.

        Reads the signing configuration, builds the signing object (unless
        PI_AUDIT_NO_SIGN is set) and creates the SQLAlchemy engine/session.

        :param config: configuration dictionary (pi.cfg values); an empty
            dict is used if None is given
        """
        self.name = "sqlaudit"
        self.config = config or {}
        self.audit_data = {}
        # Audit entries are signed unless PI_AUDIT_NO_SIGN is set
        self.sign_data = not self.config.get("PI_AUDIT_NO_SIGN")
        self.sign_object = None
        # Whether verification should also accept old-style signatures
        self.verify_old_sig = self.config.get('PI_CHECK_OLD_SIGNATURES')
        if self.sign_data:
            # read_keys presumably stores the key material in self.private /
            # self.public, which are used right below — confirm in its impl
            self.read_keys(self.config.get("PI_AUDIT_KEY_PUBLIC"),
                           self.config.get("PI_AUDIT_KEY_PRIVATE"))
            self.sign_object = Sign(self.private, self.public)

        # We can use "sqlaudit" as the key because the SQLAudit connection
        # string is fixed for a running privacyIDEA instance.
        # In other words, we will not run into any problems with changing connect strings.
        self.engine = get_engine(self.name, self._create_engine)
        # create a configured "Session" class. ``scoped_session`` is not
        # necessary because we do not share session objects among threads.
        # We use it anyway as a safety measure.
        Session = scoped_session(sessionmaker(bind=self.engine))
        self.session = Session()
        # Ensure that the connection gets returned to the pool when the request has
        # been handled. This may close an already-closed session, but this is not a problem.
        register_finalizer(self.session.close)
        self.session._model_changes = {}
Example #3
 def __init__(self, config=None):
     """
     Create the SQL audit object.

     Sets up signing (unless PI_AUDIT_NO_SIGN is set), reads custom column
     lengths and creates the SQLAlchemy engine and session.

     :param config: configuration dictionary (pi.cfg values)
     """
     super(Audit, self).__init__(config)
     self.name = "sqlaudit"
     # Audit entries are signed unless PI_AUDIT_NO_SIGN is set
     self.sign_data = not self.config.get("PI_AUDIT_NO_SIGN")
     self.sign_object = None
     # Whether verification should also accept old-style signatures
     self.verify_old_sig = self.config.get('PI_CHECK_OLD_SIGNATURES')
     if self.sign_data:
         # read_keys presumably stores the key material in self.private /
         # self.public, which are used right below — confirm in base class
         self.read_keys(self.config.get("PI_AUDIT_KEY_PUBLIC"),
                        self.config.get("PI_AUDIT_KEY_PRIVATE"))
         self.sign_object = Sign(self.private, self.public)
     # Read column_length from the config file
     config_column_length = self.config.get("PI_AUDIT_SQL_COLUMN_LENGTH",
                                            {})
     # fill the missing parts with the default from the models
     self.custom_column_length = {
         k:
         (v if k not in config_column_length else config_column_length[k])
         for k, v in column_length.items()
     }
     # We can use "sqlaudit" as the key because the SQLAudit connection
     # string is fixed for a running privacyIDEA instance.
     # In other words, we will not run into any problems with changing connect strings.
     self.engine = get_engine(self.name, self._create_engine)
     # create a configured "Session" class. ``scoped_session`` is not
     # necessary because we do not share session objects among threads.
     # We use it anyway as a safety measure.
     Session = scoped_session(sessionmaker(bind=self.engine))
     self.session = Session()
     # Ensure that the connection gets returned to the pool when the request has
     # been handled. This may close an already-closed session, but this is not a problem.
     register_finalizer(self.session.close)
     self.session._model_changes = {}
Example #4
def sign_response(request, response):
    """
    Add a signature (and the request's nonce, if any) to a JSON response.

    .. note:: Only JSON responses can be signed; anything that does not
       decode as JSON is passed through unchanged.

    Typically used to wrap ``after_request`` so that error responses are
    signed as well::

        @postrequest(sign_response, request=request)
        def after_request(response):

    :param request: The Request object
    :param response: The Response object
    """
    if current_app.config.get("PI_NO_RESPONSE_SIGN"):
        return response

    key_file_name = current_app.config.get("PI_AUDIT_KEY_PRIVATE")
    try:
        with open(key_file_name, 'rb') as key_file:
            key_data = key_file.read()
        signer = Sign(key_data, public_key=None)
    except (IOError, ValueError, TypeError) as exx:
        # Without a usable private key the response is returned unsigned
        log.info('Could not load private key from '
                 'file {0!s}: {1!r}!'.format(key_file_name, exx))
        log.debug(traceback.format_exc())
        return response

    request.all_data = get_all_params(request.values, request.data)
    # response can be either a Response object or a Tuple (Response, ErrorID)
    is_tuple = type(response).__name__ == "tuple"
    if is_tuple:
        resp_obj, error_id = response[0], response[1]
    else:
        resp_obj, error_id = response, 200
    try:
        payload = json.loads(resp_obj.data)
        nonce = request.all_data.get("nonce")
        if nonce:
            payload["nonce"] = nonce
        # Canonical serialization (sorted keys) so the signature is stable
        payload["signature"] = signer.sign(json.dumps(payload, sort_keys=True))
        resp_obj.data = json.dumps(payload)
    except ValueError:
        # The response.data is no JSON (but CSV or policy export)
        # We do no signing in this case.
        log.info("We only sign JSON response data.")

    return (resp_obj, error_id) if is_tuple else resp_obj
Example #5
def sign_response(request, response):
    """
    This decorator is used to sign the response. It adds the nonce from the
    request, if it exists, and adds the nonce and the signature to the
    response.

    .. note:: This only works for JSON responses. So if we fail to decode the
       JSON, we just pass on.

    The usual way to use it is, to wrap the after_request, so that we can also
    sign errors.

    @postrequest(sign_response, request=request)
    def after_request(response):

    :param request: The Request object
    :param response: The Response object
    :return: the (possibly signed) response, or a (response, error_id)
        tuple when a tuple was passed in
    """
    # Signing can be switched off globally in the config
    if current_app.config.get("PI_NO_RESPONSE_SIGN"):
        return response

    # NOTE(review): in this version the configured values (file names per
    # the PI_AUDIT_KEY_* keys) are passed straight to Sign — confirm this
    # Sign implementation expects file names rather than key data.
    priv_file = current_app.config.get("PI_AUDIT_KEY_PRIVATE")
    pub_file = current_app.config.get("PI_AUDIT_KEY_PUBLIC")
    sign_object = Sign(priv_file, pub_file)
    request.all_data = get_all_params(request.values, request.data)
    # response can be either a Response object or a Tuple (Response, ErrorID)
    response_value = 200
    response_is_tuple = False
    # isinstance is the idiomatic tuple check; unlike comparing
    # type(...).__name__ it also accepts tuple subclasses
    if isinstance(response, tuple):
        response_is_tuple = True
        response_value = response[1]
        response_object = response[0]
    else:
        response_object = response
    try:
        content = json.loads(response_object.data)
        nonce = request.all_data.get("nonce")
        if nonce:
            # Echo the request nonce so the client can match the response
            content["nonce"] = nonce

        content["signature"] = sign_object.sign(json.dumps(content))
        response_object.data = json.dumps(content)
    except ValueError:
        # The response.data is no JSON (but CSV or policy export)
        # We do no signing in this case.
        log.info("We only sign JSON response data.")

    if response_is_tuple:
        resp = (response_object, response_value)
    else:
        resp = response_object
    return resp
Example #6
def sign_response(request, response):
    """
    This decorator is used to sign the response. It adds the nonce from the
    request, if it exists, and adds the nonce and the signature to the
    response.

    .. note:: This only works for JSON responses. So if we fail to decode the
       JSON, we just pass on.

    The usual way to use it is, to wrap the after_request, so that we can also
    sign errors.

    @postrequest(sign_response, request=request)
    def after_request(response):

    :param request: The Request object
    :param response: The Response object
    """
    # Signing can be switched off globally in the config
    if current_app.config.get("PI_NO_RESPONSE_SIGN"):
        return response

    # NOTE(review): the configured values (file names per the
    # PI_AUDIT_KEY_* keys) are passed straight to Sign — confirm this
    # Sign implementation expects file names rather than key data.
    priv_file = current_app.config.get("PI_AUDIT_KEY_PRIVATE")
    pub_file = current_app.config.get("PI_AUDIT_KEY_PUBLIC")
    sign_object = Sign(priv_file, pub_file)
    request.all_data = get_all_params(request.values, request.data)
    # response can be either a Response object or a Tuple (Response, ErrorID)
    response_value = 200
    response_is_tuple = False
    if type(response).__name__ == "tuple":
        response_is_tuple = True
        response_value = response[1]
        response_object = response[0]
    else:
        response_object = response
    try:
        content = json.loads(response_object.data)
        nonce = request.all_data.get("nonce")
        if nonce:
            # Echo the request nonce so the client can match the response
            content["nonce"] = nonce

        # NOTE: json.dumps is called without sort_keys here, so the signed
        # serialization depends on dict ordering
        content["signature"] = sign_object.sign(json.dumps(content))
        response_object.data = json.dumps(content)
    except ValueError:
        # The response.data is no JSON (but CSV or policy export)
        # We do no signing in this case.
        log.info("We only sign JSON response data.")

    if response_is_tuple:
        resp = (response_object, response_value)
    else:
        resp = response_object
    return resp
Example #7
    def read_keys(self, pub, priv):
        """
        Set the private and public key for the audit class. This is achieved by
        passing the entries.

        #priv = config.get("privacyideaAudit.key.private")
        #pub = config.get("privacyideaAudit.key.public")

        :param pub: Public key, used for verifying the signature
        :type pub: string with filename
        :param priv: Private key, used to sign the audit entry
        :type priv: string with filename
        :return: None
        """
        # Note the argument order: Sign takes the private key first
        self.sign_object = Sign(priv, pub)
Example #8
    def test_07_sign_response(self):
        """Verify that sign_response echoes the nonce and adds a signature."""
        # Build a fake POST request coming from 192.168.0.1
        builder = EnvironBuilder(method='POST', data={}, headers={})
        env = builder.get_environ()
        env["REMOTE_ADDR"] = "192.168.0.1"
        req = Request(env)
        # The nonce in the request must be echoed back in the signed response
        req.values = {
            "user": "******",
            "pass": "******",
            "nonce": "12345678"
        }

        res = {
            "jsonrpc": "2.0",
            "result": {
                "status": True,
                "value": True
            },
            "version": "privacyIDEA test",
            "id": 1
        }
        resp = Response(json.dumps(res))
        from privacyidea.lib.crypto import Sign
        # Use the fixed test key pair so the signature is deterministic
        g.sign_object = Sign("tests/testdata/private.pem",
                             "tests/testdata/public.pem")

        new_response = sign_response(req, resp)
        jresult = json.loads(new_response.data)
        self.assertEqual(jresult.get("nonce"), "12345678")
        # The expected signature is pinned against the test key pair
        self.assertEqual(
            jresult.get("signature"),
            "7220461805369685253863294214862525311437731987121534735993146952136348520396812489782945679627890785973634896605293523175424850299832912878523161817380029213546063467888018205435416020286712762804412024065559270543774578319469096483246637875013247101135063221604113204491121777932147776087110152414627230087278622508771143940031542890514380486863296102037208395371717795767683973979032142677315402422403254992482761563612174177151960004042109847122772813717599078313600692433727690239340230353616318691769042290314664126975201679642739717702497638318611217001361093950139025744740660953017413716736691777322916588328"
        )
Example #9
    def __init__(self, config=None):
        """
        Create the SQL audit object.

        Reads the signing configuration, builds the signing object (unless
        PI_AUDIT_NO_SIGN is set) and creates the SQLAlchemy engine/session.

        :param config: configuration dictionary (pi.cfg values); an empty
            dict is used if None is given
        """
        self.name = "sqlaudit"
        self.config = config or {}
        self.audit_data = {}
        # Audit entries are signed unless PI_AUDIT_NO_SIGN is set
        self.sign_data = not self.config.get("PI_AUDIT_NO_SIGN")
        self.sign_object = None
        # This variant reads PI_CHECK_OLD_SIGNATURES via the app config
        # helper instead of the local config dict
        self.verify_old_sig = get_app_config_value('PI_CHECK_OLD_SIGNATURES')
        if self.sign_data:
            # read_keys presumably stores the key material in self.private /
            # self.public, which are used right below — confirm in its impl
            self.read_keys(self.config.get("PI_AUDIT_KEY_PUBLIC"),
                           self.config.get("PI_AUDIT_KEY_PRIVATE"))
            self.sign_object = Sign(self.private, self.public)

        # We can use "sqlaudit" as the key because the SQLAudit connection
        # string is fixed for a running privacyIDEA instance.
        # In other words, we will not run into any problems with changing connect strings.
        self.engine = get_engine(self.name, self._create_engine)
        # create a configured "Session" class. ``scoped_session`` is not
        # necessary because we do not share session objects among threads.
        # We use it anyway as a safety measure.
        Session = scoped_session(sessionmaker(bind=self.engine))
        self.session = Session()
        # Ensure that the connection gets returned to the pool when the request has
        # been handled. This may close an already-closed session, but this is not a problem.
        register_finalizer(self.session.close)
        self.session._model_changes = {}
Example #10
def check_signature(subscription):
    """
    This function checks the signature of a subscription. If the signature
    checking fails, a SignatureError / Exception is raised.

    :param subscription: The dict of the subscription
    :return: True
    """
    # The vendor is the first word of the "by_name" entry
    vendor = subscription.get("by_name").split()[0]
    enckey = get_app_config_value("PI_ENCFILE", "/etc/privacyidea/enckey")
    dirname = os.path.dirname(enckey)
    # In dirname we are searching for <vendor>.pem
    filename = u"{0!s}/{1!s}.pem".format(dirname, vendor)

    try:
        # remove the minutes 00:00:00
        # The dates are serialized in place so that SIGN_FORMAT renders the
        # subscription exactly as it was rendered when it was signed.
        subscription["date_from"] = subscription.get("date_from").strftime(SUBSCRIPTION_DATE_FORMAT)
        subscription["date_till"] = subscription.get("date_till").strftime(SUBSCRIPTION_DATE_FORMAT)
        sign_string = SIGN_FORMAT.format(**subscription)
        with open(filename, 'rb') as key_file:
            sign_obj = Sign(private_key=None, public_key=key_file.read())

        # '100' is presumably a placeholder that can never verify — confirm
        signature = subscription.get('signature', '100')
        r = sign_obj.verify(sign_string, signature, verify_old_sigs=True)
        # Restore the date entries back to datetime objects
        subscription["date_from"] = datetime.datetime.strptime(
            subscription.get("date_from"),
            SUBSCRIPTION_DATE_FORMAT)
        subscription["date_till"] = datetime.datetime.strptime(
            subscription.get("date_till"),
            SUBSCRIPTION_DATE_FORMAT)
    except Exception as _e:
        # Any failure (missing key file, malformed dates, broken signature
        # format) is reported as a verification failure
        log.debug(traceback.format_exc())
        raise SubscriptionError("Verifying the signature of your subscription "
                                "failed.",
                                application=subscription.get("application"))

    if not r:
        raise SubscriptionError("Signature of your subscription does not "
                                "match.",
                                application=subscription.get("application"))

    return r
def check_signature(subscription):
    """
    Verify the signature of a subscription dictionary.

    Raises a SubscriptionError when the signature cannot be verified or
    does not match.

    :param subscription: The dict of the subscription
    :return: True
    """
    vendor = subscription.get("by_name").split()[0]
    enckey = get_app_config_value("PI_ENCFILE", "/etc/privacyidea/enckey")
    # The vendor certificate <vendor>.pem lives next to the enc key
    filename = u"{0!s}/{1!s}.pem".format(os.path.dirname(enckey), vendor)

    try:
        # Serialize the dates (dropping the 00:00:00 part) so SIGN_FORMAT
        # renders the subscription exactly as it was rendered for signing.
        subscription["date_from"] = subscription.get("date_from").strftime(SUBSCRIPTION_DATE_FORMAT)
        subscription["date_till"] = subscription.get("date_till").strftime(SUBSCRIPTION_DATE_FORMAT)
        sign_string = SIGN_FORMAT.format(**subscription)
        with open(filename, 'rb') as key_file:
            verifier = Sign(private_key=None, public_key=key_file.read())

        matches = verifier.verify(sign_string,
                                  subscription.get('signature', '100'),
                                  verify_old_sigs=True)
        # Restore the date entries back to datetime objects
        subscription["date_from"] = datetime.datetime.strptime(
            subscription.get("date_from"),
            SUBSCRIPTION_DATE_FORMAT)
        subscription["date_till"] = datetime.datetime.strptime(
            subscription.get("date_till"),
            SUBSCRIPTION_DATE_FORMAT)
    except Exception as _e:
        log.debug(traceback.format_exc())
        raise SubscriptionError("Verifying the signature of your subscription "
                                "failed.",
                                application=subscription.get("application"))

    if not matches:
        raise SubscriptionError("Signature of your subscription does not "
                                "match.",
                                application=subscription.get("application"))

    return matches
    def test_00_create_sign_object(self):
        """Check Sign construction with invalid, valid and partial keys."""
        # test with invalid key data
        with self.assertRaises(Exception):
            Sign(b'This is not a private key', b'This is not a public key')
        with self.assertRaises(Exception):
            priv_key = open(current_app.config.get("PI_AUDIT_KEY_PRIVATE"), 'rb').read()
            Sign(private_key=priv_key,
                 public_key=b'Still not a public key')
        # this should work
        priv_key = open(current_app.config.get("PI_AUDIT_KEY_PRIVATE"), 'rb').read()
        pub_key = open(current_app.config.get("PI_AUDIT_KEY_PUBLIC"), 'rb').read()
        so = Sign(priv_key, pub_key)
        self.assertEqual(so.sig_ver, 'rsa_sha256_pss')

        # test missing keys
        # Without a private key, signing returns an empty string
        so = Sign(public_key=pub_key)
        res = so.sign('testdata')
        self.assertEqual(res, '')
        # Without a public key, verification fails
        so = Sign(private_key=priv_key)
        res = so.verify('testdata', 'testsig')
        self.assertFalse(res)
    def test_00_create_sign_object(self):
        """Check Sign construction with invalid, valid and partial keys."""
        # Garbage key material must raise on construction
        with self.assertRaises(Exception):
            Sign(b'This is not a private key', b'This is not a public key')
        with self.assertRaises(Exception):
            private_pem = open(current_app.config.get("PI_AUDIT_KEY_PRIVATE"), 'rb').read()
            Sign(private_key=private_pem,
                 public_key=b'Still not a public key')
        # A valid key pair: construction succeeds and reports the scheme
        private_pem = open(current_app.config.get("PI_AUDIT_KEY_PRIVATE"), 'rb').read()
        public_pem = open(current_app.config.get("PI_AUDIT_KEY_PUBLIC"), 'rb').read()
        signer = Sign(private_pem, public_pem)
        self.assertEqual(signer.sig_ver, 'rsa_sha256_pss')

        # Missing private key: signing yields an empty string
        signer = Sign(public_key=public_pem)
        self.assertEqual(signer.sign('testdata'), '')
        # Missing public key: verification fails
        signer = Sign(private_key=private_pem)
        self.assertFalse(signer.verify('testdata', 'testsig'))
Example #14
    def read_keys(self, pub, priv):
        """
        Set the private and public key for the audit class. This is achieved by
        passing the entries.

        #priv = config.get("privacyideaAudit.key.private")
        #pub = config.get("privacyideaAudit.key.public")

        :param pub: Public key, used for verifying the signature
        :type pub: string with filename
        :param priv: Private key, used to sign the audit entry
        :type priv: string with filename
        :return: None
        """
        # Note the argument order: Sign takes the private key first
        self.sign_object = Sign(priv, pub)
    def test_01_sign_and_verify_data(self):
        """Round-trip sign/verify, broken signatures and old-style signatures."""
        # Load the configured audit key pair and build the signing object
        priv_key = open(current_app.config.get("PI_AUDIT_KEY_PRIVATE"),
                        'rb').read()
        pub_key = open(current_app.config.get("PI_AUDIT_KEY_PUBLIC"),
                       'rb').read()
        so = Sign(priv_key, pub_key)
        data = 'short text'
        sig = so.sign(data)
        # Signatures carry the signature-scheme version as a prefix
        self.assertTrue(sig.startswith(so.sig_ver), sig)
        self.assertTrue(so.verify(data, sig))

        data = b'A slightly longer text, this time in binary format.'
        sig = so.sign(data)
        self.assertTrue(so.verify(data, sig))

        # test with text larger than RSA key size
        data = b'\x01\x02' * 5000
        sig = so.sign(data)
        self.assertTrue(so.verify(data, sig))

        # now test a broken signature
        data = 'short text'
        sig = so.sign(data)
        # flip the last hex digit of the signature
        sig_broken = sig[:-1] + '{:x}'.format((int(sig[-1], 16) + 1) % 16)
        self.assertFalse(so.verify(data, sig_broken))

        # test with non hex string
        sig_broken = sig[:-1] + 'x'
        self.assertFalse(so.verify(data, sig_broken))

        # now try to verify old signatures
        # first without enabling old signatures in config
        short_text_sig = 15197717811878792093921885389298262311612396877333963031070812155820116863657342817645537537961773450510020137791036591085713379948816070430789598146539509027948592633362217308056639775153575635684961642110792013775709164803544619582232081442445758263838942315386909453927493644845757192298617925455779136340217255670113943560463286896994555184188496806420559078552626485909484729552861477888246423469461421103010299470836507229490718177625822972845024556897040292571751452383573549412451282884349017186147757238775308192484937929135306435242403555592741059466194258607967889051881221759976135386624406095324595765010
        data = 'short text'
        self.assertFalse(so.verify(data, short_text_sig))

        # now we enable the checking of old signatures
        short_text_sig = 15197717811878792093921885389298262311612396877333963031070812155820116863657342817645537537961773450510020137791036591085713379948816070430789598146539509027948592633362217308056639775153575635684961642110792013775709164803544619582232081442445758263838942315386909453927493644845757192298617925455779136340217255670113943560463286896994555184188496806420559078552626485909484729552861477888246423469461421103010299470836507229490718177625822972845024556897040292571751452383573549412451282884349017186147757238775308192484937929135306435242403555592741059466194258607967889051881221759976135386624406095324595765010
        data = 'short text'
        self.assertTrue(so.verify(data, short_text_sig, verify_old_sigs=True))

        # verify a broken old signature
        broken_short_text_sig = short_text_sig + 1
        self.assertFalse(
            so.verify(data, broken_short_text_sig, verify_old_sigs=True))

        long_data_sig = 991763198885165486007338893972384496025563436289154190056285376683148093829644985815692167116166669178171916463844829424162591848106824431299796818231239278958776853940831433819576852350691126984617641483209392489383319296267416823194661791079316704545017249491961092046751201670544843607206698682190381208022128216306635574292359600514603728560982584561531193227312370683851459162828981766836503134221347324867936277484738573153562229478151744446530191383660477390958159856842222437156763388859923477183453362567547792824054461704970820770533637185477922709297916275611571003099205429044820469679520819043851809079
        long_data = b'\x01\x02' * 5000
        self.assertTrue(
            so.verify(long_data, long_data_sig, verify_old_sigs=True))
    def test_01_sign_and_verify_data(self):
        """Round-trip sign/verify, broken signatures and old-style signatures."""
        # Load the configured audit key pair and build the signing object
        priv_key = open(current_app.config.get("PI_AUDIT_KEY_PRIVATE"), 'rb').read()
        pub_key = open(current_app.config.get("PI_AUDIT_KEY_PUBLIC"), 'rb').read()
        so = Sign(priv_key, pub_key)
        data = 'short text'
        sig = so.sign(data)
        # Signatures carry the signature-scheme version as a prefix
        self.assertTrue(sig.startswith(so.sig_ver), sig)
        self.assertTrue(so.verify(data, sig))

        data = b'A slightly longer text, this time in binary format.'
        sig = so.sign(data)
        self.assertTrue(so.verify(data, sig))

        # test with text larger than RSA key size
        data = b'\x01\x02' * 5000
        sig = so.sign(data)
        self.assertTrue(so.verify(data, sig))

        # now test a broken signature
        data = 'short text'
        sig = so.sign(data)
        # flip the last hex digit of the signature
        sig_broken = sig[:-1] + '{:x}'.format((int(sig[-1], 16) + 1) % 16)
        self.assertFalse(so.verify(data, sig_broken))

        # test with non hex string
        sig_broken = sig[:-1] + 'x'
        self.assertFalse(so.verify(data, sig_broken))

        # now try to verify old signatures
        # first without enabling old signatures in config
        short_text_sig = 15197717811878792093921885389298262311612396877333963031070812155820116863657342817645537537961773450510020137791036591085713379948816070430789598146539509027948592633362217308056639775153575635684961642110792013775709164803544619582232081442445758263838942315386909453927493644845757192298617925455779136340217255670113943560463286896994555184188496806420559078552626485909484729552861477888246423469461421103010299470836507229490718177625822972845024556897040292571751452383573549412451282884349017186147757238775308192484937929135306435242403555592741059466194258607967889051881221759976135386624406095324595765010
        data = 'short text'
        self.assertFalse(so.verify(data, short_text_sig))

        # now we enable the checking of old signatures
        short_text_sig = 15197717811878792093921885389298262311612396877333963031070812155820116863657342817645537537961773450510020137791036591085713379948816070430789598146539509027948592633362217308056639775153575635684961642110792013775709164803544619582232081442445758263838942315386909453927493644845757192298617925455779136340217255670113943560463286896994555184188496806420559078552626485909484729552861477888246423469461421103010299470836507229490718177625822972845024556897040292571751452383573549412451282884349017186147757238775308192484937929135306435242403555592741059466194258607967889051881221759976135386624406095324595765010
        data = 'short text'
        self.assertTrue(so.verify(data, short_text_sig, verify_old_sigs=True))

        # verify a broken old signature
        broken_short_text_sig = short_text_sig + 1
        self.assertFalse(so.verify(data, broken_short_text_sig, verify_old_sigs=True))

        long_data_sig = 991763198885165486007338893972384496025563436289154190056285376683148093829644985815692167116166669178171916463844829424162591848106824431299796818231239278958776853940831433819576852350691126984617641483209392489383319296267416823194661791079316704545017249491961092046751201670544843607206698682190381208022128216306635574292359600514603728560982584561531193227312370683851459162828981766836503134221347324867936277484738573153562229478151744446530191383660477390958159856842222437156763388859923477183453362567547792824054461704970820770533637185477922709297916275611571003099205429044820469679520819043851809079
        long_data = b'\x01\x02' * 5000
        self.assertTrue(so.verify(long_data, long_data_sig, verify_old_sigs=True))
Example #17
class Audit(AuditBase):
    """
    This is the SQLAudit module, which writes the audit entries
    to an SQL database table.
    It requires the configuration parameters in pi.cfg:
    * PI_AUDIT_KEY_PUBLIC
    * PI_AUDIT_KEY_PRIVATE

    If you want to host the SQL Audit database in another DB than the
    token DB, you can use:
    * PI_AUDIT_SQL_URI

    It also takes the optional parameters:
    * PI_AUDIT_POOL_SIZE
    * PI_AUDIT_POOL_RECYCLE
    * PI_AUDIT_SQL_TRUNCATE
    * PI_AUDIT_NO_SIGN

    You can use PI_AUDIT_NO_SIGN = True to avoid signing of the audit log.

    If PI_CHECK_OLD_SIGNATURES = True old style signatures (text-book RSA) will
    be checked as well, otherwise they will be marked as 'FAIL'.
    """

    is_readable = True

    def __init__(self, config=None):
        super(Audit, self).__init__(config)
        self.name = "sqlaudit"
        self.sign_data = not self.config.get("PI_AUDIT_NO_SIGN")
        self.sign_object = None
        self.verify_old_sig = self.config.get('PI_CHECK_OLD_SIGNATURES')
        if self.sign_data:
            self.read_keys(self.config.get("PI_AUDIT_KEY_PUBLIC"),
                           self.config.get("PI_AUDIT_KEY_PRIVATE"))
            self.sign_object = Sign(self.private, self.public)
        # Read column_length from the config file
        config_column_length = self.config.get("PI_AUDIT_SQL_COLUMN_LENGTH",
                                               {})
        # fill the missing parts with the default from the models
        self.custom_column_length = {
            k:
            (v if k not in config_column_length else config_column_length[k])
            for k, v in column_length.items()
        }
        # We can use "sqlaudit" as the key because the SQLAudit connection
        # string is fixed for a running privacyIDEA instance.
        # In other words, we will not run into any problems with changing connect strings.
        self.engine = get_engine(self.name, self._create_engine)
        # create a configured "Session" class. ``scoped_session`` is not
        # necessary because we do not share session objects among threads.
        # We use it anyway as a safety measure.
        Session = scoped_session(sessionmaker(bind=self.engine))
        self.session = Session()
        # Ensure that the connection gets returned to the pool when the request has
        # been handled. This may close an already-closed session, but this is not a problem.
        register_finalizer(self.session.close)
        self.session._model_changes = {}

    def _create_engine(self):
        """
        :return: a new SQLAlchemy engine connecting to the database specified in PI_AUDIT_SQL_URI.
        """
        # an Engine, which the Session will use for connection
        # resources
        connect_string = self.config.get(
            "PI_AUDIT_SQL_URI", self.config.get("SQLALCHEMY_DATABASE_URI"))
        log.debug("using the connect string {0!s}".format(
            censor_connect_string(connect_string)))
        try:
            pool_size = self.config.get("PI_AUDIT_POOL_SIZE", 20)
            engine = create_engine(connect_string,
                                   pool_size=pool_size,
                                   pool_recycle=self.config.get(
                                       "PI_AUDIT_POOL_RECYCLE", 600))
            log.debug("Using SQL pool size of {}".format(pool_size))
        except TypeError:
            # SQLite does not support pool_size
            engine = create_engine(connect_string)
            log.debug("Using no SQL pool_size.")
        return engine

    def _truncate_data(self):
        """
        Truncate self.audit_data according to the self.custom_column_length.

        :return: None
        """
        for column, max_length in self.custom_column_length.items():
            if column in self.audit_data:
                data = self.audit_data[column]
                if isinstance(data, string_types):
                    if column == "policies":
                        # The policies column is shortened per comma entry
                        data = truncate_comma_list(data, max_length)
                    else:
                        data = data[:max_length]
                self.audit_data[column] = data

    @staticmethod
    def _create_filter(param, timelimit=None):
        """
        create a filter condition for the logentry

        :param param: dictionary mapping LogEntry column names to search
            values. "*" acts as a wildcard; a value of only "*" (or empty)
            means no filter on that column.
        :param timelimit: if given, only entries newer than
            now - timelimit are matched
        :type timelimit: datetime.timedelta
        :return: all conditions combined with AND
        """
        conditions = []
        param = param or {}
        for search_key in param.keys():
            search_value = param.get(search_key)
            if search_key == "allowed_audit_realm":
                # Add each realm in the allowed_audit_realm list to the
                # search condition
                realm_conditions = []
                for realm in search_value:
                    realm_conditions.append(LogEntry.realm == realm)
                filter_realm = or_(*realm_conditions)
                conditions.append(filter_realm)
            # We do not search if the search value only consists of '*'
            elif search_value.strip() != '' and search_value.strip('*') != '':
                try:
                    if search_key == "success":
                        # "success" is the only integer.
                        search_value = search_value.strip("*")
                        conditions.append(
                            getattr(LogEntry, search_key) == int(search_value))
                    else:
                        # All other keys are compared as strings
                        column = getattr(LogEntry, search_key)
                        if search_key == "date":
                            # but we cast "date" to a string first (required on postgresql)
                            column = cast(column, String)
                        search_value = search_value.replace('*', '%')
                        if '%' in search_value:
                            conditions.append(column.like(search_value))
                        else:
                            conditions.append(column == search_value)
                except Exception as exx:
                    # The search_key was no search key but some
                    # bullshit stuff in the param
                    log.debug("Not a valid searchkey: {0!s}".format(exx))

        if timelimit:
            conditions.append(
                LogEntry.date >= datetime.datetime.now() - timelimit)
        # Combine them with or to a BooleanClauseList
        filter_condition = and_(*conditions)
        return filter_condition

    def get_total(self, param, AND=True, display_error=True, timelimit=None):
        """
        This method returns the total number of audit entries
        in the audit store

        :param param: search filters, see _create_filter
        :param timelimit: only count entries newer than now - timelimit
        :type timelimit: datetime.timedelta
        :return: number of matching entries
        :rtype: int
        """
        count = 0
        # if param contains search filters, we build the search filter
        # to only return the number of those entries
        filter_condition = self._create_filter(param, timelimit=timelimit)

        try:
            count = self.session.query(LogEntry.id) \
                .filter(filter_condition) \
                .count()
        finally:
            self.session.close()
        return count

    def finalize_log(self):
        """
        This method is used to log the data.
        It should hash the data and do a hash chain and sign the data
        """
        try:
            self.audit_data["policies"] = ",".join(
                self.audit_data.get("policies", []))
            if self.config.get("PI_AUDIT_SQL_TRUNCATE"):
                self._truncate_data()
            if "tokentype" in self.audit_data:
                log.warning(
                    "We have a wrong 'tokentype' key. This should not happen. Fix it!. "
                    "Error occurs in action: {0!r}.".format(
                        self.audit_data.get("action")))
                if "token_type" not in self.audit_data:
                    self.audit_data["token_type"] = self.audit_data.get(
                        "tokentype")
            le = LogEntry(
                action=self.audit_data.get("action"),
                success=int(self.audit_data.get("success", 0)),
                serial=self.audit_data.get("serial"),
                token_type=self.audit_data.get("token_type"),
                user=self.audit_data.get("user"),
                realm=self.audit_data.get("realm"),
                resolver=self.audit_data.get("resolver"),
                administrator=self.audit_data.get("administrator"),
                action_detail=self.audit_data.get("action_detail"),
                info=self.audit_data.get("info"),
                privacyidea_server=self.audit_data.get("privacyidea_server"),
                client=self.audit_data.get("client", ""),
                loglevel=self.audit_data.get("log_level"),
                clearance_level=self.audit_data.get("clearance_level"),
                policies=self.audit_data.get("policies"))
            self.session.add(le)
            # Commit first, so that the entry gets its id assigned
            # before we sign it.
            self.session.commit()
            # Add the signature
            if self.sign_data and self.sign_object:
                s = self._log_to_string(le)
                sign = self.sign_object.sign(s)
                le.signature = sign
                self.session.merge(le)
                self.session.commit()
        except Exception as exx:  # pragma: no cover
            # in case of a Unicode Error in _log_to_string() we won't have
            # a signature, but the log entry is available
            log.error("exception {0!r}".format(exx))
            log.error("DATA: {0!s}".format(self.audit_data))
            log.debug("{0!s}".format(traceback.format_exc()))
            self.session.rollback()

        finally:
            self.session.close()
            # clear the audit data
            self.audit_data = {}

    def _check_missing(self, audit_id):
        """
        Check if the audit log contains the entries before and after
        the given id.

        TODO: We can not check at the moment if the first or the last entries
              were deleted. If we want to do this, we need to store some signed
              meta information:
              1. Which one was the first entry. (use initialize_log)
              2. Which one was the last entry.

        :param audit_id: the id of the audit entry to check
        :return: True if both neighbouring entries exist
        :rtype: bool
        """
        res = False
        try:
            id_bef = self.session.query(
                LogEntry.id).filter(LogEntry.id == int(audit_id) - 1).count()
            id_aft = self.session.query(
                LogEntry.id).filter(LogEntry.id == int(audit_id) + 1).count()
            # We may not do a commit!
            # self.session.commit()
            if id_bef and id_aft:
                res = True
        except Exception as exx:  # pragma: no cover
            log.error("exception {0!r}".format(exx))
            log.debug("{0!s}".format(traceback.format_exc()))
            # self.session.rollback()
        finally:
            # self.session.close()
            pass

        return res

    @staticmethod
    def _log_to_string(le):
        """
        This function creates a string from the logentry so
        that this string can be signed.

        Note: Not all elements of the LogEntry are used to generate the
        string (the Signature is not!), otherwise we could have used pickle

        :param le: LogEntry object containing the data
        :type le: LogEntry
        :rtype str
        """
        s = u"id=%s,date=%s,action=%s,succ=%s,serial=%s,t=%s,u=%s,r=%s,adm=%s," \
            u"ad=%s,i=%s,ps=%s,c=%s,l=%s,cl=%s" % (le.id,
                                                   le.date,
                                                   le.action,
                                                   le.success,
                                                   le.serial,
                                                   le.token_type,
                                                   le.user,
                                                   le.realm,
                                                   le.administrator,
                                                   le.action_detail,
                                                   le.info,
                                                   le.privacyidea_server,
                                                   le.client,
                                                   le.loglevel,
                                                   le.clearance_level)
        return s

    @staticmethod
    def _get_logentry_attribute(key):
        """
        This function returns the LogEntry attribute for the given key value

        :param key: name of the sort column, e.g. "number" or "user"
        :return: the LogEntry attribute or None for an unknown key
        """
        sortname = {
            'number': LogEntry.id,
            'action': LogEntry.action,
            'success': LogEntry.success,
            'serial': LogEntry.serial,
            'date': LogEntry.date,
            'token_type': LogEntry.token_type,
            'user': LogEntry.user,
            'realm': LogEntry.realm,
            'administrator': LogEntry.administrator,
            'action_detail': LogEntry.action_detail,
            'info': LogEntry.info,
            'privacyidea_server': LogEntry.privacyidea_server,
            'client': LogEntry.client,
            'loglevel': LogEntry.loglevel,
            'policies': LogEntry.policies,
            'clearance_level': LogEntry.clearance_level
        }
        return sortname.get(key)

    def csv_generator(self, param=None, user=None, timelimit=None):
        """
        Returns the audit log as csv file.
        :param timelimit: Limit the number of dumped entries by time
        :type timelimit: datetime.timedelta
        :param param: The request parameters
        :type param: dict
        :param user: The user, who issued the request
        :return: None. It yields results as a generator
        """
        filter_condition = self._create_filter(param, timelimit=timelimit)
        logentries = self.session.query(LogEntry).filter(
            filter_condition).all()

        for le in logentries:
            audit_dict = self.audit_entry_to_dict(le)
            yield u",".join(
                [u"'{0!s}'".format(x) for x in audit_dict.values()]) + u"\n"

    def get_count(self, search_dict, timedelta=None, success=None):
        """
        Count the audit entries matching the given filters.

        :param search_dict: search filters, see _create_filter
        :param timedelta: only count entries newer than now - timedelta
        :param success: if given, only count entries with this success value
        :return: number of matching entries
        """
        # create filter condition
        filter_condition = self._create_filter(search_dict)
        conditions = [filter_condition]
        if success is not None:
            conditions.append(LogEntry.success == success)

        if timedelta is not None:
            conditions.append(
                LogEntry.date >= datetime.datetime.now() - timedelta)

        filter_condition = and_(*conditions)
        log_count = self.session.query(LogEntry).filter(
            filter_condition).count()

        return log_count

    def search(self,
               search_dict,
               page_size=15,
               page=1,
               sortorder="asc",
               timelimit=None):
        """
        This function returns the audit log as a Pagination object.

        :param timelimit: Only audit entries newer than this timedelta will
            be searched
        :type timelimit: timedelta
        """
        page = int(page)
        page_size = int(page_size)
        paging_object = Paginate()
        paging_object.page = page
        paging_object.total = self.get_total(search_dict, timelimit=timelimit)
        if page > 1:
            paging_object.prev = page - 1
        if paging_object.total > (page_size * page):
            paging_object.next = page + 1

        auditIter = self.search_query(search_dict,
                                      page_size=page_size,
                                      page=page,
                                      sortorder=sortorder,
                                      timelimit=timelimit)
        while True:
            try:
                le = next(auditIter)
                # Fill the list
                paging_object.auditdata.append(self.audit_entry_to_dict(le))
            except StopIteration as _e:
                log.debug("Iteration stopped.")
                break
            except UnicodeDecodeError as _e:
                # Unfortunately if one of the audit entries fails, the whole
                # iteration stops and we return an empty paging_object.
                # TODO: Check if we can return the other entries in the auditIter
                #  or some meaningful error for the user.
                log.warning('Could not read audit log entry! '
                            'Possible database encoding mismatch.')
                log.debug("{0!s}".format(traceback.format_exc()))

        return paging_object

    def search_query(self,
                     search_dict,
                     page_size=15,
                     page=1,
                     sortorder="asc",
                     sortname="number",
                     timelimit=None):
        """
        This function returns the audit log as an iterator on the result

        :param timelimit: Only audit entries newer than this timedelta will
            be searched
        :type timelimit: timedelta
        """
        logentries = None
        try:
            limit = int(page_size)
            offset = (int(page) - 1) * limit

            # create filter condition
            filter_condition = self._create_filter(search_dict,
                                                   timelimit=timelimit)

            if sortorder == "desc":
                logentries = self.session.query(LogEntry).filter(
                    filter_condition).order_by(
                        desc(self._get_logentry_attribute("number"))).limit(
                            limit).offset(offset)
            else:
                logentries = self.session.query(LogEntry).filter(
                    filter_condition).order_by(
                        asc(self._get_logentry_attribute("number"))).limit(
                            limit).offset(offset)

        except Exception as exx:  # pragma: no cover
            log.error("exception {0!r}".format(exx))
            log.debug("{0!s}".format(traceback.format_exc()))
            self.session.rollback()
        finally:
            self.session.close()

        if logentries is None:
            return iter([])
        else:
            return iter(logentries)

    def clear(self):
        """
        Deletes all entries in the database table.
        This is only used for test cases!
        :return:
        """
        self.session.query(LogEntry).delete()
        self.session.commit()

    def audit_entry_to_dict(self, audit_entry):
        """
        Convert a LogEntry to an OrderedDict, adding the result of the
        signature verification ("sig_check") and of the missing-line
        check ("missing_line").

        :param audit_entry: the LogEntry to convert
        :return: OrderedDict with all audit columns
        """
        sig = None
        if self.sign_data:
            try:
                sig = self.sign_object.verify(self._log_to_string(audit_entry),
                                              audit_entry.signature,
                                              self.verify_old_sig)
            except UnicodeDecodeError as _e:
                # TODO: Unless we trace and eliminate the broken unicode in the
                #  audit_entry, we will get issues when packing the response.
                log.warning(
                    'Could not verify log entry! We get invalid values '
                    'from the database, please check the encoding.')
                log.debug('{0!s}'.format(traceback.format_exc()))

        is_not_missing = self._check_missing(int(audit_entry.id))
        # is_not_missing = True
        audit_dict = OrderedDict()
        audit_dict['number'] = audit_entry.id
        audit_dict['date'] = audit_entry.date.isoformat()
        audit_dict['sig_check'] = "OK" if sig else "FAIL"
        audit_dict['missing_line'] = "OK" if is_not_missing else "FAIL"
        audit_dict['action'] = audit_entry.action
        audit_dict['success'] = audit_entry.success
        audit_dict['serial'] = audit_entry.serial
        audit_dict['token_type'] = audit_entry.token_type
        audit_dict['user'] = audit_entry.user
        audit_dict['realm'] = audit_entry.realm
        audit_dict['resolver'] = audit_entry.resolver
        audit_dict['administrator'] = audit_entry.administrator
        audit_dict['action_detail'] = audit_entry.action_detail
        audit_dict['info'] = audit_entry.info
        audit_dict['privacyidea_server'] = audit_entry.privacyidea_server
        audit_dict['policies'] = audit_entry.policies
        audit_dict['client'] = audit_entry.client
        audit_dict['log_level'] = audit_entry.loglevel
        audit_dict['clearance_level'] = audit_entry.clearance_level
        return audit_dict
# Beispiel #18
class Audit(AuditBase):
    """
    This is the SQLAudit module, which writes the audit entries
    to an SQL database table.
    It requires the configuration parameters.
    PI_AUDIT_SQL_URI
    """
    
    def __init__(self, config=None):
        """
        Create the SQL audit object and open a database session.

        :param config: configuration dictionary, may contain
            PI_AUDIT_SQL_URI, PI_AUDIT_KEY_PUBLIC and PI_AUDIT_KEY_PRIVATE
        :type config: dict
        """
        self.name = "sqlaudit"
        self.config = config or {}
        self.audit_data = {}
        self.sign_object = None
        self.read_keys(self.config.get("PI_AUDIT_KEY_PUBLIC"),
                       self.config.get("PI_AUDIT_KEY_PRIVATE"))

        # The engine supplies the connection resources for the session.
        # Fall back to the token database if no dedicated audit URI is set.
        connect_string = self.config.get(
            "PI_AUDIT_SQL_URI", self.config.get("SQLALCHEMY_DATABASE_URI"))
        log.debug("using the connect string {0!s}".format(connect_string))
        self.engine = create_engine(connect_string)

        # Build a configured session factory and open one session.
        session_factory = sessionmaker(bind=self.engine)
        self.session = session_factory()
        self.session._model_changes = {}
        try:
            metadata.create_all(self.engine)
        except OperationalError as exx:  # pragma: no cover
            log.info("{0!r}".format(exx))

    @staticmethod
    def _create_filter(param):
        """
        Create a filter condition for the LogEntry query.

        :param param: dictionary mapping LogEntry column names to search
            values. "*" acts as a wildcard; a value consisting only of "*"
            (or an empty string) means no filter on that column.
        :type param: dict or None
        :return: all conditions combined with AND
        """
        conditions = []
        # Be robust against a missing parameter dictionary instead of
        # crashing on param.keys() with None.
        param = param or {}
        for search_key in param.keys():
            search_value = param.get(search_key)
            # We do not search if the search value only consists of '*'
            if search_value.strip() != '' and search_value.strip('*') != '':
                try:
                    search_value = search_value.replace('*', '%')
                    if '%' in search_value:
                        conditions.append(getattr(LogEntry,
                                                  search_key).like(search_value))
                    else:
                        conditions.append(getattr(LogEntry, search_key) ==
                                          search_value)
                except Exception as exx:
                    # The search_key was no search key but some
                    # bullshit stuff in the param
                    log.debug("Not a valid searchkey: {0!s}".format(exx))
        # Combine them with or to a BooleanClauseList
        filter_condition = and_(*conditions)
        return filter_condition

    def get_total(self, param, AND=True, display_error=True):
        """
        Return the total number of audit entries in the audit store that
        match the given search filters.

        :param param: search filters (see _create_filter)
        :param AND: unused, kept for interface compatibility
        :param display_error: unused, kept for interface compatibility
        :return: number of matching log entries
        :rtype: int
        """
        # Build the search filter so that only matching entries are counted.
        filter_condition = self._create_filter(param)
        try:
            total = (self.session.query(LogEntry.id)
                     .filter(filter_condition)
                     .count())
        finally:
            # Always return the connection to the pool.
            self.session.close()
        return total

    def log(self, param):
        """
        Add new log details in param to the internal log data
        self.audit_data.

        :param param: Log data that is to be added
        :type param: dict
        :return: None
        """
        # dict.update performs the same per-key assignment as a loop.
        self.audit_data.update(param)

    def add_to_log(self, param):
        """
        Append new text to already existing entries of the internal log
        data.

        :param param: dict whose values are appended (via +=) to the
            existing values stored under the same keys
        :return: None
        """
        for key, value in param.items():
            self.audit_data[key] += value

    def finalize_log(self):
        """
        This method is used to log the data.
        It should hash the data and do a hash chain and sign the data
        """
        try:
            le = LogEntry(action=self.audit_data.get("action"),
                          success=int(self.audit_data.get("success", 0)),
                          serial=self.audit_data.get("serial"),
                          token_type=self.audit_data.get("token_type"),
                          user=self.audit_data.get("user"),
                          realm=self.audit_data.get("realm"),
                          administrator=self.audit_data.get("administrator"),
                          action_detail=self.audit_data.get("action_detail"),
                          info=self.audit_data.get("info"),
                          privacyidea_server=self.audit_data.get("privacyidea_server"),
                          client=self.audit_data.get("client", ""),
                          loglevel=self.audit_data.get("log_level"),
                          clearance_level=self.audit_data.get("clearance_level")
                          )
            self.session.add(le)
            # Commit first so that the entry gets its id assigned before
            # it is serialized and signed (the id is part of the signed
            # string, see _log_to_string).
            self.session.commit()
            # Add the signature
            if self.sign_object:
                s = self._log_to_string(le)
                sign = self.sign_object.sign(s)
                le.signature = sign
                # merge + second commit persists the signature on the row
                # that was already written above.
                self.session.merge(le)
                self.session.commit()
        except Exception as exx:  # pragma: no cover
            # The unsigned entry may already be committed; we only roll
            # back whatever is still pending.
            log.error("exception {0!r}".format(exx))
            log.error("DATA: {0!s}".format(self.audit_data))
            log.debug("{0!s}".format(traceback.format_exc()))
            self.session.rollback()

        finally:
            self.session.close()
            # clear the audit data
            self.audit_data = {}

    def read_keys(self, pub, priv):
        """
        Initialize the signing object for this audit class from the given
        key files.

        :param pub: Public key, used for verifying the signature
        :type pub: string with filename
        :param priv: Private key, used to sign the audit entry
        :type priv: string with filename
        :return: None
        """
        self.sign_object = Sign(priv, pub)

    def _check_missing(self, audit_id):
        """
        Check whether the audit log contains the entries directly before
        and after the given id.

        TODO: We can not check at the moment if the first or the last
              entries were deleted. If we want to do this, we need to
              store some signed meta information:
              1. Which one was the first entry. (use initialize_log)
              2. Which one was the last entry.

        :param audit_id: id of the audit entry to check
        :return: True if both neighbouring entries exist
        :rtype: bool
        """
        complete = False
        entry_id = int(audit_id)
        try:
            predecessor = self.session.query(LogEntry.id).filter(
                LogEntry.id == entry_id - 1).count()
            successor = self.session.query(LogEntry.id).filter(
                LogEntry.id == entry_id + 1).count()
            # We may not do a commit here!
            if predecessor and successor:
                complete = True
        except Exception as exx:  # pragma: no cover
            log.error("exception {0!r}".format(exx))
            log.debug("{0!s}".format(traceback.format_exc()))

        return complete

    @staticmethod
    def _log_to_string(le):
        """
        Create the canonical string representation of a LogEntry which is
        used for signing.

        Note: Not all elements of the LogEntry are used to generate the
        string (the Signature is not!), otherwise we could have used
        pickle.

        :param le: LogEntry object containing the data
        :rtype: str
        """
        fields = (le.id, le.date, le.action, le.success, le.serial,
                  le.token_type, le.user, le.realm, le.administrator,
                  le.action_detail, le.info, le.privacyidea_server,
                  le.client, le.loglevel, le.clearance_level)
        template = ("id=%s,date=%s,action=%s,succ=%s,serial=%s,t=%s,u=%s,"
                    "r=%s,adm=%s,ad=%s,i=%s,ps=%s,c=%s,l=%s,cl=%s")
        return template % fields

    @staticmethod
    def _get_logentry_attribute(key):
        """
        Map a sort-key name to the corresponding LogEntry column.

        :param key: name of the sort column, e.g. "number" or "user"
        :return: the LogEntry attribute, or None for an unknown key
        """
        column_map = {
            'number': LogEntry.id,
            'action': LogEntry.action,
            'success': LogEntry.success,
            'serial': LogEntry.serial,
            'date': LogEntry.date,
            'token_type': LogEntry.token_type,
            'user': LogEntry.user,
            'realm': LogEntry.realm,
            'administrator': LogEntry.administrator,
            'action_detail': LogEntry.action_detail,
            'info': LogEntry.info,
            'privacyidea_server': LogEntry.privacyidea_server,
            'client': LogEntry.client,
            'loglevel': LogEntry.loglevel,
            'clearance_level': LogEntry.clearance_level,
        }
        return column_map.get(key)

    def csv_generator(self, param=None, user=None):
        """
        Yield the audit log as lines of a csv file.

        :param param: The request parameters
        :type param: dict
        :param user: The user, who issued the request
        :return: None. It yields results as a generator
        """
        for entry in self.session.query(LogEntry).all():
            values = self.audit_entry_to_dict(entry).values()
            yield ",".join("'{0!s}'".format(v) for v in values) + "\n"

    def get_count(self, search_dict, timedelta=None, success=None):
        """
        Count the audit entries matching the search filters, optionally
        restricted by success status and maximum age.

        :param search_dict: search filters (see _create_filter)
        :param timedelta: only count entries newer than now - timedelta
        :param success: if given, only count entries with this success
            value
        :return: number of matching entries
        """
        conditions = [self._create_filter(search_dict)]
        if success is not None:
            conditions.append(LogEntry.success == success)
        if timedelta is not None:
            earliest = datetime.datetime.now() - timedelta
            conditions.append(LogEntry.date >= earliest)

        combined = and_(*conditions)
        return self.session.query(LogEntry).filter(combined).count()

    def search(self, search_dict, page_size=15, page=1, sortorder="asc"):
        """
        This function returns the audit log as a Pagination object.

        :param search_dict: search filters (see _create_filter)
        :type search_dict: dict
        :param page_size: number of entries per page
        :param page: the page to return (1-based)
        :param sortorder: "asc" or "desc"
        :return: a Paginate object filled with the matching entries
        """
        page = int(page)
        page_size = int(page_size)
        paging_object = Paginate()
        paging_object.page = page
        paging_object.total = self.get_total(search_dict)
        if page > 1:
            paging_object.prev = page - 1
        if paging_object.total > (page_size * page):
            paging_object.next = page + 1

        auditIter = self.search_query(search_dict, page_size=page_size,
                                      page=page, sortorder=sortorder)
        try:
            # Use the builtin next() instead of the Python-2-only
            # iterator method .next(), so this also works on Python 3.
            while True:
                le = next(auditIter)
                # Fill the list
                paging_object.auditdata.append(self.audit_entry_to_dict(le))
        except StopIteration:
            log.debug("Iteration stopped.")

        return paging_object
        
    def search_query(self, search_dict, page_size=15, page=1, sortorder="asc",
                     sortname="number"):
        """
        Return an iterator over the audit log entries matching search_dict.

        :param search_dict: search parameters used to build the filter
        :param page_size: number of entries per page
        :param page: the page number (1-based)
        :param sortorder: "asc" or "desc"
        :param sortname: name of the sort column (only "number" is used)
        :return: an iterator over the matching LogEntry rows
        """
        entries = None
        try:
            limit = int(page_size)
            offset = (int(page) - 1) * limit

            # create filter condition
            filter_condition = self._create_filter(search_dict)

            # Pick the sort direction and build one query for both cases.
            direction = desc if sortorder == "desc" else asc
            order_column = direction(self._get_logentry_attribute("number"))
            entries = self.session.query(LogEntry).filter(
                filter_condition).order_by(order_column).limit(
                limit).offset(offset)

        except Exception as exx:  # pragma: no cover
            log.error("exception {0!r}".format(exx))
            log.debug("{0!s}".format(traceback.format_exc()))
            self.session.rollback()
        finally:
            self.session.close()

        return iter([]) if entries is None else iter(entries)

    def get_dataframe(self, start_time=None, end_time=None):
        """
        The Audit module can handle its data the best. This function is used
        to return a pandas.dataframe with all audit data in the given time
        frame.

        This dataframe then can be used for extracting statistics.

        :param start_time: The start time of the data. Defaults to seven
            days before the call. (The previous default was evaluated once
            at import time and therefore frozen.)
        :type start_time: datetime
        :param end_time: The end time of the data. Defaults to now.
        :type end_time: datetime
        :return: Audit data
        :rtype: dataframe
        """
        # Compute the time window at call time, not at definition time.
        if end_time is None:
            end_time = datetime.datetime.now()
        if start_time is None:
            start_time = end_time - datetime.timedelta(days=7)
        if not PANDAS_READY:
            log.warning("If you want to use statistics, you need to install "
                        "python-pandas.")
            return None
        # Query via the ORM instead of %-interpolating the time values into
        # a raw SQL string, which was vulnerable to SQL injection.
        q = self.session.query(LogEntry).filter(LogEntry.date > start_time,
                                                LogEntry.date < end_time)
        rows = [r.__dict__ for r in q.all()]
        df = DataFrame(rows)
        return df

    def clear(self):
        """
        Delete all entries from the audit database table.

        .. note:: This is only used for test cases!

        :return: None
        """
        # Remove every LogEntry row and persist the deletion.
        self.session.query(LogEntry).delete()
        self.session.commit()
    
    def audit_entry_to_dict(self, audit_entry):
        """
        Convert a LogEntry database row into a plain dictionary, verifying
        the entry's signature and checking whether the neighboring entries
        (id-1 and id+1) still exist.

        :param audit_entry: the LogEntry row to convert
        :return: dictionary with the audit information
        """
        sig = self.sign_object.verify(self._log_to_string(audit_entry),
                                      audit_entry.signature)
        is_not_missing = self._check_missing(int(audit_entry.id))
        # is_not_missing = True
        audit_dict = {'number': audit_entry.id,
                      'date': audit_entry.date.isoformat(),
                      'sig_check': "OK" if sig else "FAIL",
                      'missing_line': "OK" if is_not_missing else "FAIL",
                      'action': audit_entry.action,
                      'success': audit_entry.success,
                      'serial': audit_entry.serial,
                      'token_type': audit_entry.token_type,
                      'user': audit_entry.user,
                      'realm': audit_entry.realm,
                      # finalize_log stores the resolver column, but it was
                      # previously not exported here; include it for
                      # consistency with the stored data.
                      'resolver': audit_entry.resolver,
                      'administrator': audit_entry.administrator,
                      'action_detail': audit_entry.action_detail,
                      'info': audit_entry.info,
                      'privacyidea_server': audit_entry.privacyidea_server,
                      'client': audit_entry.client,
                      'log_level': audit_entry.loglevel,
                      'clearance_level': audit_entry.clearance_level
                      }
        return audit_dict
# Beispiel #19
# 0
class Audit(AuditBase):
    """
    This is the SQLAudit module, which writes the audit entries
    to an SQL database table.
    It requires the configuration parameters.
    PI_AUDIT_SQL_URI
    """
    def __init__(self, config=None):
        """
        Create the SQL audit object: read the signing keys and set up the
        SQLAlchemy engine and session.

        :param config: The privacyIDEA configuration dictionary
        :type config: dict
        """
        self.name = "sqlaudit"
        self.config = config or {}
        self.audit_data = {}
        self.sign_object = None
        self.read_keys(self.config.get("PI_AUDIT_KEY_PUBLIC"),
                       self.config.get("PI_AUDIT_KEY_PRIVATE"))

        # an Engine, which the Session will use for connection
        # resources
        connect_string = self.config.get(
            "PI_AUDIT_SQL_URI", self.config.get("SQLALCHEMY_DATABASE_URI"))
        log.debug("using the connect string {0!s}".format(connect_string))
        try:
            pool_size = self.config.get("PI_AUDIT_POOL_SIZE", 20)
            self.engine = create_engine(connect_string,
                                        pool_size=pool_size,
                                        pool_recycle=self.config.get(
                                            "PI_AUDIT_POOL_RECYCLE", 600))
            log.debug("Using SQL pool_size of {0!s}".format(pool_size))
        except TypeError:
            # SQLite does not support pool_size
            self.engine = create_engine(connect_string)
            log.debug("Using no SQL pool_size.")

        # create a configured "Session" class
        Session = sessionmaker(bind=self.engine)

        # create a Session
        self.session = Session()
        self.session._model_changes = {}

    def _truncate_data(self):
        """
        Truncate self.audit_data according to the column_length.
        :return: None
        """
        # Use items() instead of the Python-2-only iteritems(), so this
        # works on Python 2 and 3 (the other Audit class already does so).
        for column, l in column_length.items():
            if column in self.audit_data:
                self.audit_data[column] = self.audit_data[column][:l]

    @staticmethod
    def _create_filter(param, timelimit=None):
        """
        create a filter condition for the logentry
        """
        conditions = []
        param = param or {}
        for search_key in param.keys():
            search_value = param.get(search_key)
            # We do not search if the search value only consists of '*'
            if search_value.strip() != '' and search_value.strip('*') != '':
                try:
                    if search_key == "success":
                        # "success" is the only integer.
                        search_value = search_value.strip("*")
                        conditions.append(
                            getattr(LogEntry, search_key) == int(search_value))
                    else:
                        # All other keys are strings
                        search_value = search_value.replace('*', '%')
                        if '%' in search_value:
                            conditions.append(
                                getattr(LogEntry,
                                        search_key).like(search_value))
                        else:
                            conditions.append(
                                getattr(LogEntry, search_key) == search_value)
                except Exception as exx:
                    # The search_key was no search key but some
                    # bullshit stuff in the param
                    log.debug("Not a valid searchkey: {0!s}".format(exx))

        if timelimit:
            conditions.append(
                LogEntry.date >= datetime.datetime.now() - timelimit)
        # Combine them with AND to a BooleanClauseList
        filter_condition = and_(*conditions)
        return filter_condition

    def get_total(self, param, AND=True, display_error=True, timelimit=None):
        """
        This method returns the total number of audit entries
        in the audit store
        """
        count = 0
        # if param contains search filters, we build the search filter
        # to only return the number of those entries
        filter_condition = self._create_filter(param, timelimit=timelimit)

        try:
            count = self.session.query(LogEntry.id)\
                .filter(filter_condition)\
                .count()
        finally:
            self.session.close()
        return count

    def log(self, param):
        """
        Add new log details in param to the internal log data self.audit_data.

        :param param: Log data that is to be added
        :type param: dict
        :return: None
        """
        for k, v in param.items():
            self.audit_data[k] = v

    def add_to_log(self, param):
        """
        Add new text to an existing log entry
        :param param: Log data to be appended to existing entries
        :type param: dict
        :return: None
        """
        for k, v in param.items():
            self.audit_data[k] += v

    def finalize_log(self):
        """
        This method is used to log the data.
        It should hash the data and do a hash chain and sign the data
        """
        try:
            if self.config.get("PI_AUDIT_SQL_TRUNCATE"):
                self._truncate_data()
            le = LogEntry(
                action=self.audit_data.get("action"),
                success=int(self.audit_data.get("success", 0)),
                serial=self.audit_data.get("serial"),
                token_type=self.audit_data.get("token_type"),
                user=self.audit_data.get("user"),
                realm=self.audit_data.get("realm"),
                resolver=self.audit_data.get("resolver"),
                administrator=self.audit_data.get("administrator"),
                action_detail=self.audit_data.get("action_detail"),
                info=self.audit_data.get("info"),
                privacyidea_server=self.audit_data.get("privacyidea_server"),
                client=self.audit_data.get("client", ""),
                loglevel=self.audit_data.get("log_level"),
                clearance_level=self.audit_data.get("clearance_level"))
            # Commit first so that the entry gets its id, then sign it.
            self.session.add(le)
            self.session.commit()
            # Add the signature
            if self.sign_object:
                s = self._log_to_string(le)
                sign = self.sign_object.sign(s)
                le.signature = sign
                self.session.merge(le)
                self.session.commit()
        except Exception as exx:  # pragma: no cover
            log.error("exception {0!r}".format(exx))
            log.error("DATA: {0!s}".format(self.audit_data))
            log.debug("{0!s}".format(traceback.format_exc()))
            self.session.rollback()

        finally:
            self.session.close()
            # clear the audit data
            self.audit_data = {}

    def read_keys(self, pub, priv):
        """
        Set the private and public key for the audit class. This is achieved by
        passing the entries.

        :param pub: Public key, used for verifying the signature
        :type pub: string with filename
        :param priv: Private key, used to sign the audit entry
        :type priv: string with filename
        :return: None
        """
        self.sign_object = Sign(priv, pub)

    def _check_missing(self, audit_id):
        """
        Check if the audit log contains the entries before and after
        the given id.

        TODO: We can not check at the moment if the first or the last entries
        were deleted. If we want to do this, we need to store some signed
        meta information
        1. Which one was the first entry. (use initialize_log)
        2. Which one was the last entry.
        """
        res = False
        try:
            id_bef = self.session.query(
                LogEntry.id).filter(LogEntry.id == int(audit_id) - 1).count()
            id_aft = self.session.query(
                LogEntry.id).filter(LogEntry.id == int(audit_id) + 1).count()
            # We may not do a commit!
            # self.session.commit()
            if id_bef and id_aft:
                res = True
        except Exception as exx:  # pragma: no cover
            log.error("exception {0!r}".format(exx))
            log.debug("{0!s}".format(traceback.format_exc()))
            # self.session.rollback()
        finally:
            # self.session.close()
            pass

        return res

    @staticmethod
    def _log_to_string(le):
        """
        This function creates a string from the logentry so
        that this string can be signed.

        Note: Not all elements of the LogEntry are used to generate the
        string (the Signature is not!), otherwise we could have used pickle
        """
        s = "id=%s,date=%s,action=%s,succ=%s,serial=%s,t=%s,u=%s,r=%s,adm=%s,"\
            "ad=%s,i=%s,ps=%s,c=%s,l=%s,cl=%s" % (le.id,
                                                  le.date,
                                                  le.action,
                                                  le.success,
                                                  le.serial,
                                                  le.token_type,
                                                  le.user,
                                                  le.realm,
                                                  le.administrator,
                                                  le.action_detail,
                                                  le.info,
                                                  le.privacyidea_server,
                                                  le.client,
                                                  le.loglevel,
                                                  le.clearance_level)
        return s

    @staticmethod
    def _get_logentry_attribute(key):
        """
        This function returns the LogEntry attribute for the given key value
        """
        sortname = {
            'number': LogEntry.id,
            'action': LogEntry.action,
            'success': LogEntry.success,
            'serial': LogEntry.serial,
            'date': LogEntry.date,
            'token_type': LogEntry.token_type,
            'user': LogEntry.user,
            'realm': LogEntry.realm,
            'administrator': LogEntry.administrator,
            'action_detail': LogEntry.action_detail,
            'info': LogEntry.info,
            'privacyidea_server': LogEntry.privacyidea_server,
            'client': LogEntry.client,
            'loglevel': LogEntry.loglevel,
            'clearance_level': LogEntry.clearance_level
        }
        return sortname.get(key)

    def csv_generator(self, param=None, user=None, timelimit=None):
        """
        Returns the audit log as csv file.
        :param param: The request parameters, used to filter the entries
        :type param: dict
        :param user: The user, who issued the request
        :param timelimit: only entries newer than now - timelimit are exported
        :type timelimit: timedelta
        :return: None. It yields results as a generator
        """
        filter_condition = self._create_filter(param, timelimit=timelimit)
        logentries = self.session.query(LogEntry).filter(
            filter_condition).all()

        for le in logentries:
            audit_dict = self.audit_entry_to_dict(le)
            audit_list = audit_dict.values()
            string_list = ["'{0!s}'".format(x) for x in audit_list]
            yield ",".join(string_list) + "\n"

    def get_count(self, search_dict, timedelta=None, success=None):
        """
        Count the audit entries matching search_dict, optionally restricted
        to a success value and to entries newer than now - timedelta.

        :return: number of matching log entries
        """
        # create filter condition
        filter_condition = self._create_filter(search_dict)
        conditions = [filter_condition]
        if success is not None:
            conditions.append(LogEntry.success == success)

        if timedelta is not None:
            conditions.append(
                LogEntry.date >= datetime.datetime.now() - timedelta)

        filter_condition = and_(*conditions)
        log_count = self.session.query(LogEntry).filter(
            filter_condition).count()

        return log_count

    def search(self,
               search_dict,
               page_size=15,
               page=1,
               sortorder="asc",
               timelimit=None):
        """
        This function returns the audit log as a Pagination object.

        :param timelimit: Only audit entries newer than this timedelta will
            be searched
        :type timelimit: timedelta
        """
        page = int(page)
        page_size = int(page_size)
        paging_object = Paginate()
        paging_object.page = page
        paging_object.total = self.get_total(search_dict, timelimit=timelimit)
        if page > 1:
            paging_object.prev = page - 1
        if paging_object.total > (page_size * page):
            paging_object.next = page + 1

        auditIter = self.search_query(search_dict,
                                      page_size=page_size,
                                      page=page,
                                      sortorder=sortorder,
                                      timelimit=timelimit)
        try:
            # Use the next() builtin instead of the Python-2-only
            # iterator method .next(), so this works on Python 2 and 3.
            le = next(auditIter)
            while le:
                # Fill the list
                paging_object.auditdata.append(self.audit_entry_to_dict(le))
                le = next(auditIter)
        except StopIteration:
            log.debug("Iteration stopped.")

        return paging_object

    def search_query(self,
                     search_dict,
                     page_size=15,
                     page=1,
                     sortorder="asc",
                     sortname="number",
                     timelimit=None):
        """
        This function returns the audit log as an iterator on the result

        :param timelimit: Only audit entries newer than this timedelta will
            be searched
        :type timelimit: timedelta
        """
        logentries = None
        try:
            limit = int(page_size)
            offset = (int(page) - 1) * limit

            # create filter condition
            filter_condition = self._create_filter(search_dict,
                                                   timelimit=timelimit)

            if sortorder == "desc":
                logentries = self.session.query(LogEntry).filter(
                    filter_condition).order_by(
                        desc(self._get_logentry_attribute("number"))).limit(
                            limit).offset(offset)
            else:
                logentries = self.session.query(LogEntry).filter(
                    filter_condition).order_by(
                        asc(self._get_logentry_attribute("number"))).limit(
                            limit).offset(offset)

        except Exception as exx:  # pragma: no cover
            log.error("exception {0!r}".format(exx))
            log.debug("{0!s}".format(traceback.format_exc()))
            self.session.rollback()
        finally:
            self.session.close()

        if logentries is None:
            return iter([])
        else:
            return iter(logentries)

    def get_dataframe(self, start_time=None, end_time=None):
        """
        The Audit module can handle its data the best. This function is used
        to return a pandas.dataframe with all audit data in the given time
        frame.

        This dataframe then can be used for extracting statistics.

        :param start_time: The start time of the data. Defaults to seven
            days before the call. (The previous default was evaluated once
            at import time and therefore frozen.)
        :type start_time: datetime
        :param end_time: The end time of the data. Defaults to now.
        :type end_time: datetime
        :return: Audit data
        :rtype: dataframe
        """
        # Compute the time window at call time, not at definition time.
        if end_time is None:
            end_time = datetime.datetime.now()
        if start_time is None:
            start_time = end_time - datetime.timedelta(days=7)
        if not PANDAS_READY:
            log.warning("If you want to use statistics, you need to install "
                        "python-pandas.")
            return None

        q = self.session.query(LogEntry)\
            .filter(LogEntry.date > start_time,
                    LogEntry.date < end_time)
        rows = q.all()
        rows = [r.__dict__ for r in rows]
        df = DataFrame(rows)
        return df

    def clear(self):
        """
        Deletes all entries in the database table.
        This is only used for test cases!
        :return:
        """
        self.session.query(LogEntry).delete()
        self.session.commit()

    def audit_entry_to_dict(self, audit_entry):
        """
        Convert a LogEntry database row into a plain dictionary, verifying
        the entry's signature and checking for missing neighbor entries.

        :param audit_entry: the LogEntry row to convert
        :return: dictionary with the audit information
        """
        sig = self.sign_object.verify(self._log_to_string(audit_entry),
                                      audit_entry.signature)
        is_not_missing = self._check_missing(int(audit_entry.id))
        # is_not_missing = True
        audit_dict = {
            'number': audit_entry.id,
            'date': audit_entry.date.isoformat(),
            'sig_check': "OK" if sig else "FAIL",
            'missing_line': "OK" if is_not_missing else "FAIL",
            'action': audit_entry.action,
            'success': audit_entry.success,
            'serial': audit_entry.serial,
            'token_type': audit_entry.token_type,
            'user': audit_entry.user,
            'realm': audit_entry.realm,
            'resolver': audit_entry.resolver,
            'administrator': audit_entry.administrator,
            'action_detail': audit_entry.action_detail,
            'info': audit_entry.info,
            'privacyidea_server': audit_entry.privacyidea_server,
            'client': audit_entry.client,
            'log_level': audit_entry.loglevel,
            'clearance_level': audit_entry.clearance_level
        }
        return audit_dict
# Beispiel #20
# 0
class Audit(AuditBase):
    """
    This is the SQLAudit module, which writes the audit entries
    to an SQL database table.
    It requires the configuration parameters in pi.cfg:
    * PI_AUDIT_KEY_PUBLIC
    * PI_AUDIT_KEY_PRIVATE

    If you want to host the SQL Audit database in another DB than the
    token DB, you can use:
    * PI_AUDIT_SQL_URI

    It also takes the optional parameters:
    * PI_AUDIT_POOL_SIZE
    * PI_AUDIT_POOL_RECYCLE
    * PI_AUDIT_SQL_TRUNCATE
    * PI_AUDIT_NO_SIGN

    You can use PI_AUDIT_NO_SIGN = True to avoid signing of the audit log.
    """
    
    def __init__(self, config=None):
        """
        Create the SQL audit object.

        Reads the signing keys (unless PI_AUDIT_NO_SIGN is set), sets up the
        SQLAlchemy engine and session, and registers a finalizer that
        returns the DB connection to the pool when the request is done.

        :param config: The privacyIDEA configuration dictionary
        :type config: dict
        """
        self.name = "sqlaudit"
        self.config = config or {}
        self.audit_data = {}
        # Signing of audit entries can be disabled via PI_AUDIT_NO_SIGN.
        self.sign_data = not self.config.get("PI_AUDIT_NO_SIGN")
        self.sign_object = None
        if self.sign_data:
            self.read_keys(self.config.get("PI_AUDIT_KEY_PUBLIC"),
                           self.config.get("PI_AUDIT_KEY_PRIVATE"))

        # We can use "sqlaudit" as the key because the SQLAudit connection
        # string is fixed for a running privacyIDEA instance.
        # In other words, we will not run into any problems with changing connect strings.
        self.engine = get_engine(self.name, self._create_engine)
        # create a configured "Session" class. ``scoped_session`` is not
        # necessary because we do not share session objects among threads.
        # We use it anyway as a safety measure.
        Session = scoped_session(sessionmaker(bind=self.engine))
        self.session = Session()
        # Ensure that the connection gets returned to the pool when the request has
        # been handled. This may close an already-closed session, but this is not a problem.
        register_finalizer(self.session.close)
        self.session._model_changes = {}

    def _create_engine(self):
        """
        Build the SQLAlchemy engine for the audit database.

        :return: a new SQLAlchemy engine connecting to the database
            specified in PI_AUDIT_SQL_URI (falling back to
            SQLALCHEMY_DATABASE_URI).
        """
        connect_string = self.config.get("PI_AUDIT_SQL_URI", self.config.get(
            "SQLALCHEMY_DATABASE_URI"))
        log.debug("using the connect string {0!s}".format(censor_connect_string(connect_string)))
        pool_size = self.config.get("PI_AUDIT_POOL_SIZE", 20)
        pool_recycle = self.config.get("PI_AUDIT_POOL_RECYCLE", 600)
        try:
            engine = create_engine(connect_string,
                                   pool_size=pool_size,
                                   pool_recycle=pool_recycle)
            log.debug("Using SQL pool size of {}".format(pool_size))
        except TypeError:
            # SQLite does not support pool_size
            engine = create_engine(connect_string)
            log.debug("Using no SQL pool_size.")
        return engine

    def _truncate_data(self):
        """
        Truncate the entries of self.audit_data to the configured column
        lengths.
        :return: None
        """
        for column, max_len in column_length.items():
            if column not in self.audit_data:
                continue
            data = self.audit_data[column]
            if isinstance(data, string_types):
                if column == "policies":
                    # The policies column is shortend per comma entry
                    data = truncate_comma_list(data, max_len)
                else:
                    data = data[:max_len]
            self.audit_data[column] = data

    @staticmethod
    def _create_filter(param, timelimit=None):
        """
        create a filter condition for the logentry

        :param param: dictionary of search keys and values; values may
            contain '*' wildcards. The special key "allowed_audit_realm"
            carries a list of realms that are OR-combined.
        :param timelimit: if given, only entries newer than
            now - timelimit match
        :return: an SQLAlchemy clause combining all conditions with AND
        """
        conditions = []
        param = param or {}
        for search_key in param.keys():
            search_value = param.get(search_key)
            if search_key == "allowed_audit_realm":
                # Add each realm in the allowed_audit_realm list to the
                # search condition
                realm_conditions = []
                for realm in search_value:
                    realm_conditions.append(LogEntry.realm == realm)
                filter_realm = or_(*realm_conditions)
                conditions.append(filter_realm)
            # We do not search if the search value only consists of '*'
            elif search_value.strip() != '' and search_value.strip('*') != '':
                try:
                    if search_key == "success":
                        # "success" is the only integer.
                        search_value = search_value.strip("*")
                        conditions.append(getattr(LogEntry, search_key) ==
                                          int(search_value))
                    else:
                        # All other keys are compared as strings
                        column = getattr(LogEntry, search_key)
                        if search_key == "date":
                            # but we cast "date" to a string first (required on postgresql)
                            column = cast(column, String)
                        search_value = search_value.replace('*', '%')
                        if '%' in search_value:
                            conditions.append(column.like(search_value))
                        else:
                            conditions.append(column == search_value)
                except Exception as exx:
                    # The search_key was no search key but some
                    # bullshit stuff in the param
                    log.debug("Not a valid searchkey: {0!s}".format(exx))

        if timelimit:
            conditions.append(LogEntry.date >= datetime.datetime.now() -
                              timelimit)
        # Combine them with AND to a BooleanClauseList
        filter_condition = and_(*conditions)
        return filter_condition

    def get_total(self, param, AND=True, display_error=True, timelimit=None):
        """
        Return the total number of matching audit entries in the audit
        store.

        :param param: search parameters used to build the filter
        :param AND: unused in this implementation, kept for interface
            compatibility
        :param display_error: unused in this implementation, kept for
            interface compatibility
        :param timelimit: restrict the count to entries newer than
            now - timelimit
        :return: number of matching entries
        """
        # if param contains search filters, we build the search filter
        # to only return the number of those entries
        filter_condition = self._create_filter(param, timelimit=timelimit)
        try:
            return self.session.query(LogEntry.id).filter(
                filter_condition).count()
        finally:
            self.session.close()

    def finalize_log(self):
        """
        This method is used to log the data.
        It should hash the data and do a hash chain and sign the data
        """
        try:
            # The "policies" entry is a list; store it as a
            # comma-separated string.
            self.audit_data["policies"] = ",".join(self.audit_data.get("policies",[]))
            if self.config.get("PI_AUDIT_SQL_TRUNCATE"):
                self._truncate_data()
            le = LogEntry(action=self.audit_data.get("action"),
                          success=int(self.audit_data.get("success", 0)),
                          serial=self.audit_data.get("serial"),
                          token_type=self.audit_data.get("token_type"),
                          user=self.audit_data.get("user"),
                          realm=self.audit_data.get("realm"),
                          resolver=self.audit_data.get("resolver"),
                          administrator=self.audit_data.get("administrator"),
                          action_detail=self.audit_data.get("action_detail"),
                          info=self.audit_data.get("info"),
                          privacyidea_server=self.audit_data.get("privacyidea_server"),
                          client=self.audit_data.get("client", ""),
                          loglevel=self.audit_data.get("log_level"),
                          clearance_level=self.audit_data.get("clearance_level"),
                          policies=self.audit_data.get("policies")
                          )
            # Commit first so that the entry gets its id, then sign it.
            self.session.add(le)
            self.session.commit()
            # Add the signature
            if self.sign_data and self.sign_object:
                s = self._log_to_string(le)
                sign = self.sign_object.sign(s)
                le.signature = sign
                self.session.merge(le)
                self.session.commit()
        except Exception as exx:  # pragma: no cover
            log.error("exception {0!r}".format(exx))
            log.error("DATA: {0!s}".format(self.audit_data))
            log.debug("{0!s}".format(traceback.format_exc()))
            self.session.rollback()

        finally:
            self.session.close()
            # clear the audit data
            self.audit_data = {}

    def read_keys(self, pub, priv):
        """
        Set the private and public key for the audit class by constructing
        the Sign object from the given key files.

        :param pub: Public key, used for verifying the signature
        :type pub: string with filename
        :param priv: Private key, used to sign the audit entry
        :type priv: string with filename
        :return: None
        """
        # The Sign object performs both signing and verification.
        self.sign_object = Sign(priv, pub)

    def _check_missing(self, audit_id):
        """
        Check whether the audit log contains the entries directly before
        and after the given id.

        TODO: We can not check at the moment if the first or the last
        entries were deleted. If we want to do this, we need to store some
        signed meta information:
        1. Which one was the first entry. (use initialize_log)
        2. Which one was the last entry.

        :param audit_id: id of the entry whose neighbors are checked
        :return: True if both neighboring entries exist
        """
        neighbors_exist = False
        try:
            entry_id = int(audit_id)
            before = self.session.query(LogEntry.id).filter(
                LogEntry.id == entry_id - 1).count()
            after = self.session.query(LogEntry.id).filter(
                LogEntry.id == entry_id + 1).count()
            # We may not do a commit!
            # self.session.commit()
            neighbors_exist = bool(before and after)
        except Exception as exx:  # pragma: no cover
            log.error("exception {0!r}".format(exx))
            log.debug("{0!s}".format(traceback.format_exc()))
            # self.session.rollback()
        finally:
            # self.session.close()
            pass

        return neighbors_exist

    @staticmethod
    def _log_to_string(le):
        """
        This function creates a string from the logentry so
        that this string can be signed.
        
        Note: Not all elements of the LogEntry are used to generate the
        string (the Signature is not!), otherwise we could have used pickle

        :param le: LogEntry object containing the data
        :type le: LogEntry
        :rtype str
        """
        s = u"id=%s,date=%s,action=%s,succ=%s,serial=%s,t=%s,u=%s,r=%s,adm=%s," \
            u"ad=%s,i=%s,ps=%s,c=%s,l=%s,cl=%s" % (le.id,
                                                   le.date,
                                                   le.action,
                                                   le.success,
                                                   le.serial,
                                                   le.token_type,
                                                   le.user,
                                                   le.realm,
                                                   le.administrator,
                                                   le.action_detail,
                                                   le.info,
                                                   le.privacyidea_server,
                                                   le.client,
                                                   le.loglevel,
                                                   le.clearance_level)
        return s

    @staticmethod
    def _get_logentry_attribute(key):
        """
        Map an external sort/filter key to the corresponding LogEntry column.

        :param key: name of the column as used by the API
        :return: the LogEntry attribute, or None for an unknown key
        """
        column_map = {'number': LogEntry.id,
                      'action': LogEntry.action,
                      'success': LogEntry.success,
                      'serial': LogEntry.serial,
                      'date': LogEntry.date,
                      'token_type': LogEntry.token_type,
                      'user': LogEntry.user,
                      'realm': LogEntry.realm,
                      'administrator': LogEntry.administrator,
                      'action_detail': LogEntry.action_detail,
                      'info': LogEntry.info,
                      'privacyidea_server': LogEntry.privacyidea_server,
                      'client': LogEntry.client,
                      'loglevel': LogEntry.loglevel,
                      'policies': LogEntry.policies,
                      'clearance_level': LogEntry.clearance_level}
        return column_map.get(key)

    def csv_generator(self, param=None, user=None, timelimit=None):
        """
        Yield the audit log line by line as CSV data.

        :param timelimit: Limit the number of dumped entries by time
        :type timelimit: datetime.timedelta
        :param param: The request parameters
        :type param: dict
        :param user: The user, who issued the request
        :return: None. It yields results as a generator
        """
        condition = self._create_filter(param, timelimit=timelimit)
        entries = self.session.query(LogEntry).filter(condition).all()

        for entry in entries:
            values = self.audit_entry_to_dict(entry).values()
            # Each value is quoted with single quotes; one entry per line.
            yield u",".join(u"'{0!s}'".format(value)
                            for value in values) + "\n"

    def get_count(self, search_dict, timedelta=None, success=None):
        """
        Return the number of audit entries matching the given criteria.

        :param search_dict: The search parameters
        :type search_dict: dict
        :param timedelta: only count entries newer than now - timedelta
        :type timedelta: datetime.timedelta
        :param success: if given, restrict to entries with this success value
        :return: the number of matching entries
        :rtype: int
        """
        conditions = [self._create_filter(search_dict)]
        if success is not None:
            conditions.append(LogEntry.success == success)
        if timedelta is not None:
            cutoff = datetime.datetime.now() - timedelta
            conditions.append(LogEntry.date >= cutoff)

        return self.session.query(LogEntry).filter(
            and_(*conditions)).count()

    def search(self, search_dict, page_size=15, page=1, sortorder="asc",
               timelimit=None):
        """
        This function returns the audit log as a Pagination object.

        :param search_dict: The search parameters
        :type search_dict: dict
        :param page_size: number of entries per page
        :param page: the page to return (1-based)
        :param sortorder: "asc" or "desc"
        :param timelimit: Only audit entries newer than this timedelta will
            be searched
        :type timelimit: timedelta
        :return: a filled Paginate object
        """
        page = int(page)
        page_size = int(page_size)
        paging_object = Paginate()
        paging_object.page = page
        paging_object.total = self.get_total(search_dict, timelimit=timelimit)
        if page > 1:
            paging_object.prev = page - 1
        if paging_object.total > (page_size * page):
            paging_object.next = page + 1

        auditIter = self.search_query(search_dict, page_size=page_size,
                                      page=page, sortorder=sortorder,
                                      timelimit=timelimit)
        # A plain for-loop handles StopIteration implicitly and avoids the
        # previous manual next()/while pattern (which also mislogged
        # "Interation stopped.").
        for le in auditIter:
            paging_object.auditdata.append(self.audit_entry_to_dict(le))

        return paging_object
        
    def search_query(self, search_dict, page_size=15, page=1, sortorder="asc",
                     sortname="number", timelimit=None):
        """
        This function returns the audit log as an iterator on the result.

        :param search_dict: The search parameters
        :type search_dict: dict
        :param page_size: number of entries per page
        :param page: the page to return (1-based)
        :param sortorder: "asc" or "desc"
        :param sortname: the column to sort by (see _get_logentry_attribute);
            unknown keys fall back to "number".
        :param timelimit: Only audit entries newer than this timedelta will
            be searched
        :type timelimit: timedelta
        """
        logentries = None
        try:
            limit = int(page_size)
            offset = (int(page) - 1) * limit

            # create filter condition
            filter_condition = self._create_filter(search_dict,
                                                   timelimit=timelimit)

            # BUGFIX: the sortname parameter was accepted but ignored; the
            # result was always sorted by "number". Honor it now, falling
            # back to "number" for unknown keys.
            sort_attr = (self._get_logentry_attribute(sortname) or
                         self._get_logentry_attribute("number"))
            order = desc(sort_attr) if sortorder == "desc" else asc(sort_attr)
            logentries = self.session.query(LogEntry).filter(
                filter_condition).order_by(order).limit(limit).offset(offset)

        except Exception as exx:  # pragma: no cover
            log.error("exception {0!r}".format(exx))
            log.debug("{0!s}".format(traceback.format_exc()))
            self.session.rollback()
        finally:
            self.session.close()

        if logentries is None:
            return iter([])
        return iter(logentries)

    def clear(self):
        """
        Deletes all entries in the database table.
        This is only used for test cases!
        :return: None
        """
        # Bulk-delete every LogEntry row, then commit so the change is
        # persisted immediately.
        self.session.query(LogEntry).delete()
        self.session.commit()
    
    def audit_entry_to_dict(self, audit_entry):
        """
        Convert a LogEntry database object into a plain dictionary, verifying
        the entry's signature and checking for missing neighbouring entries.

        :param audit_entry: the database log entry
        :type audit_entry: LogEntry
        :return: dict with the audit data plus 'sig_check' and 'missing_line'
        :rtype: dict
        """
        sig = None
        # Consistency fix: the signing path guards on both sign_data and
        # sign_object; do the same here to avoid an AttributeError when no
        # keys were loaded via read_keys().
        if self.sign_data and self.sign_object:
            sig = self.sign_object.verify(self._log_to_string(audit_entry),
                                          audit_entry.signature)

        is_not_missing = self._check_missing(int(audit_entry.id))
        audit_dict = {'number': audit_entry.id,
                      'date': audit_entry.date.isoformat(),
                      'sig_check': "OK" if sig else "FAIL",
                      'missing_line': "OK" if is_not_missing else "FAIL",
                      'action': audit_entry.action,
                      'success': audit_entry.success,
                      'serial': audit_entry.serial,
                      'token_type': audit_entry.token_type,
                      'user': audit_entry.user,
                      'realm': audit_entry.realm,
                      'resolver': audit_entry.resolver,
                      'administrator': audit_entry.administrator,
                      'action_detail': audit_entry.action_detail,
                      'info': audit_entry.info,
                      'privacyidea_server': audit_entry.privacyidea_server,
                      'policies': audit_entry.policies,
                      'client': audit_entry.client,
                      'log_level': audit_entry.loglevel,
                      'clearance_level': audit_entry.clearance_level
                      }
        return audit_dict