Code Example #1
File: jsonrpc_objects.py Project: gonicus/gosa
    def continueObjectEditing(self, user, ref):
        """
        Objects which have been opened but not edited for a certain amount of time are automatically closed by the backend.
        This command delays this behaviour by increasing the timeout.

        ================= ==========================
        Parameter         Description
        ================= ==========================
        ref               UUID / object reference
        ================= ==========================

        """
        objdsc = self.__get_ref(ref)
        if not objdsc:
            raise ValueError(C.make_error("REFERENCE_NOT_FOUND", ref=ref))

        if not self.__check_user(ref, user):
            raise ValueError(C.make_error("NOT_OBJECT_OWNER"))

        objdsc['last_interaction'] = datetime.datetime.now()
        with make_session() as session:
            obj = session.query(OpenObject).filter(OpenObject.ref == ref).one()
            obj.last_interaction = objdsc['last_interaction']
            session.commit()

        if 'mark_for_deletion' in objdsc:
            # as this object has been marked for deletion, we have to run the garbage collection
            # to remove this mark now
            self.__gc()
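
All of the examples in this listing revolve around GOsa's make_session context manager. Its implementation is not shown here, but a minimal sketch of such a helper, assuming a standard SQLAlchemy sessionmaker (the connection URL is purely illustrative), could look like this:

from contextlib import contextmanager

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

# Illustrative engine; the real GOsa code reads its connection
# settings from the environment configuration.
engine = create_engine("postgresql://localhost/gosa")
Session = sessionmaker(bind=engine)


@contextmanager
def make_session():
    """Yield a database session and make sure it is cleaned up afterwards."""
    session = Session()
    try:
        yield session
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()
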
Code Example #2
File: main.py Project: GOsa3/gosa
 def getForemanReleaseName(self, operatingsystem_id):
     if operatingsystem_id is not None:
         with make_session() as session:
             res = session.query(Cache.data).filter(Cache.key == "foreman.operating_system.%s" % operatingsystem_id).one_or_none()
             if res is not None:
                 return res[0]["release_name"]
     return None
Code Example #3
File: jsonrpc_objects.py Project: gonicus/gosa
    def checkObjectRef(self, user, session_id, ref):
        """
        Check whether the given object reference is still open and owned by the
        given user. If so, the stored session id is refreshed on the fly and
        ``True`` is returned.

        ================= ==========================
        Parameter         Description
        ================= ==========================
        session_id        Current session id of the user
        ref               UUID / object reference
        ================= ==========================

        ``Return``: boolean
        """
        objdsc = self.__get_ref(ref)
        if objdsc and objdsc['user'] == user:
            # update session-id, which might have changed and is needed to inform the user about object closing
            if objdsc['session_id'] != session_id:
                objdsc['session_id'] = session_id
                with make_session() as session:
                    obj = session.query(OpenObject).filter(OpenObject.ref == ref).one()
                    obj.session_id = session_id
                    session.commit()
            return True

        return False
Code Example #4
File: methods.py Project: gonicus/gosa
    def getContainerTree(self, user, base, object_type=None):
        table = inspect(ObjectInfoIndex)
        o2 = aliased(ObjectInfoIndex)
        base = ObjectProxy.get_adjusted_dn(base, self.env.base)
        query = and_(getattr(ObjectInfoIndex, "_adjusted_parent_dn") == base, getattr(ObjectInfoIndex, "_type").in_(self.containers))
        count = func.count(getattr(o2, "_parent_dn"))
        parent_join_condition = getattr(o2, "_parent_dn") == getattr(ObjectInfoIndex, "dn")

        with make_session() as session:
            # materialize the result while the session is still open
            query_result = session.query(ObjectInfoIndex, count) \
                .outerjoin(o2, and_(getattr(o2, "_invisible").is_(False), parent_join_condition)) \
                .filter(query) \
                .group_by(*table.c).all()

        res = {}
        factory = ObjectFactory.getInstance()
        for item, children in query_result:
            self.update_res(res, item, user, 1)

            if item.dn in res:
                res[item.dn]['hasChildren'] = children > 0
                res[item.dn]['adjusted_dn'] = ObjectProxy.get_adjusted_dn(item.dn, self.env.base)
                if object_type is not None:
                    # check if object_type is allowed in this container
                    allowed = factory.getAllowedSubElementsForObject(res[item.dn]['tag'], includeInvisible=False)
                    if "*" in object_type:
                        # all allowed
                        res[item.dn]['allowed_move_target'] = True
                    elif isinstance(object_type, list):
                        res[item.dn]['allowed_move_target'] = len(set(object_type).intersection(allowed)) > 0
                    else:
                        res[item.dn]['allowed_move_target'] = object_type in allowed
        return res
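
The outer join against the aliased ObjectInfoIndex (o2) is what yields the per-container child count used for hasChildren. A stripped-down sketch of the same pattern, using a hypothetical self-referencing Node model in place of GOsa's index tables:

from sqlalchemy import Column, Integer, String, func
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import aliased

Base = declarative_base()


class Node(Base):
    """Illustrative stand-in for ObjectInfoIndex."""
    __tablename__ = "node"
    id = Column(Integer, primary_key=True)
    dn = Column(String)
    parent_dn = Column(String)


def container_tree(session, base_dn):
    child = aliased(Node)
    # one row per container below base_dn together with its child count;
    # the outer join keeps containers that have no children at all
    return session.query(Node, func.count(child.id)) \
        .outerjoin(child, child.parent_dn == Node.dn) \
        .filter(Node.parent_dn == base_dn) \
        .group_by(Node.id) \
        .all()
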
Code Example #5
File: jsonrpc_objects.py Project: gonicus/gosa
    def __get_ref(self, ref):
        if ref in self.__stack:
            return self.__stack[ref]
        else:
            # check DB
            with make_session() as session:
                obj = session.query(OpenObject).filter(OpenObject.ref == ref).one_or_none()
                if obj is not None:
                    if obj.backend_uuid != self.env.core_uuid:
                        self.log.debug("open object '%s' is now handled by backend %s" % (obj.uuid, self.env.core_uuid))
                        # obj is bound to the surrounding session; committing a second,
                        # freshly opened session would not persist this change
                        obj.backend_uuid = self.env.core_uuid
                        session.commit()

                    self.openObject(obj.user, obj.session_id, obj.oid, obj.uuid, db_object=obj)
                    return self.__stack[ref]
        return None
Code Example #6
File: jsonrpc_service.py Project: gonicus/gosa
 def __get_user_session(self, sid):
     if self.env.mode == "proxy":
         if sid in sessions:
             return sessions[sid]
         else:
             return None
     else:
         with make_session() as session:
             return session.query(UserSession).filter(UserSession.sid == sid).one_or_none()
Code Example #7
File: jsonrpc_service.py Project: gonicus/gosa
 def __delete_user_session(self, user_session):
     if user_session is None:
         return
     if self.env.mode == "proxy":
         if user_session.sid in sessions:
             del sessions[user_session.sid]
     else:
         with make_session() as session:
             session.delete(user_session)
Code Example #8
File: jsonrpc_service.py Project: gonicus/gosa
 def __gc_sessions():
     """ delete sessions that not have been used for 10 hours """
     threshold_date = (datetime.datetime.now() - datetime.timedelta(hours=10))
     if Environment.getInstance().mode == "proxy":
         for sid, user_session in sessions.items():
             if user_session.last_used < threshold_date:
                 del sessions[sid]
     else:
         with make_session() as session:
             return session.query(UserSession).filter(UserSession.last_used < threshold_date).delete()
Code Example #9
File: main.py Project: GOsa3/gosa
 def sync_release_names(self):
     """
     The GOsa proxies need to know the release names of the operating systems.
     As we do not store them within any object, we save them in the cache database;
     proxies can query their replicated database for them.
     """
     if self.client:
         data = self.client.get("operatingsystems")
         if "results" in data:
             # clear Database
             with make_session() as session:
                 session.query(Cache).filter((Cache.key.ilike("foreman.operating_system.%"))).delete(synchronize_session='fetch')
                 for entry in data["results"]:
                     self.sync_release_name(entry, session)
                 session.commit()
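
Read together with Code Example #2, the cache rows written here presumably look like the following (values illustrative); getForemanReleaseName then simply reads data["release_name"] back:

# key follows the "foreman.operating_system.<id>" convention queried above
Cache(key="foreman.operating_system.7",
      data={"release_name": "xenial"})
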
Code Example #10
File: methods.py Project: gonicus/gosa
 def isContainerForObjectType(self, user, container_dn, object_type):
     with make_session() as session:
         container_type_query = session.query(getattr(ObjectInfoIndex, "_type")).filter(
             getattr(ObjectInfoIndex, "dn") == container_dn).one()
         container_type = container_type_query[0]
         allowed = ObjectFactory.getInstance().getAllowedSubElementsForObject(container_type)
         if object_type is None:
             return True
         elif "*" in object_type:
             # all allowed
             return True
         elif isinstance(object_type, list):
             return len(set(object_type).intersection(allowed)) > 0
         else:
             return object_type in allowed
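
Note that the check "*" in object_type covers both call styles, because the in operator tests substrings on strings and membership on lists. Hedged usage examples (instance name, user and DN are illustrative):

rpc.isContainerForObjectType(user, "ou=people,dc=example,dc=net", None)           # True: no type requested
rpc.isContainerForObjectType(user, "ou=people,dc=example,dc=net", "*")            # True: everything allowed
rpc.isContainerForObjectType(user, "ou=people,dc=example,dc=net", ["User", "*"])  # True: wildcard in list
rpc.isContainerForObjectType(user, "ou=people,dc=example,dc=net", "GroupOfNames") # plain membership test
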
Code Example #11
File: ppd_proxy.py Project: gonicus/gosa
    def getPPDURL(self, source_url):
        """
        Downloads the source_url, stores it locally and returns the local URL

        :param source_url: remote PPD URL
        :return: local URL to the cached PPD
        """
        source = parse_url(source_url)
        host = source.host
        if host is None or host == "localhost":
            # no host: we assume that the PPD can be found on the current active master backend
            with make_session() as session:
                # get any other registered backend
                master_backend = session.query(RegisteredBackend) \
                    .filter(RegisteredBackend.uuid != self.env.core_uuid,
                            RegisteredBackend.type == BackendTypes.active_master).first()
                if master_backend is None:
                    self.log.error(C.make_error("NO_MASTER_BACKEND_FOUND"))
                    return source_url

                # take the host from the master backend's URL
                url = parse_url(master_backend.url)
                host = url.host

        # check if file exists locally
        rel_path = source.path[1:] if source.path.startswith("/") else source.path
        local_path = path.join(self.ppd_dir, host, rel_path)
        if not path.exists(local_path):
            # cache locally
            try:
                r = requests.get(source_url)
                if r.ok:
                    local_dir = path.dirname(local_path)
                    if not path.exists(local_dir):
                        makedirs(local_dir)
                    with open(local_path, "w") as f:
                        f.write(r.text)
                else:
                    self.log.error("requesting PPD from %s failed with status code: %s" % (source_url, r.status_code))
                    return source_url
            except requests.exceptions.ConnectionError as e:
                self.log.error("requesting PPD from %s failed with error: %s" % (source_url, str(e)))
                return source_url

        return "%s%s/%s" % (self.base_url, host, rel_path)
Code Example #12
File: jsonrpc_objects.py Project: GOsa3/gosa
    def setObjectProperty(self, user, ref, name, value, skip_db_update=False):
        """
        Set a property on an existing stack object.

        ================= ==========================
        Parameter         Description
        ================= ==========================
        ref               UUID / object reference
        name              Property name
        value             Property value
        ================= ==========================

        """
        objdsc = self.__get_ref(ref)
        if not objdsc:
            raise ValueError(C.make_error("REFERENCE_NOT_FOUND", ref=ref))

        if name not in objdsc['object']['properties']:
            raise ValueError(C.make_error("PROPERTY_NOT_FOUND", property=name))

        details = objdsc['object']['object'].get_attributes(True)
        if 'auto' in details[name] and details[name]['auto']:
            raise ValueError(C.make_error("AUTOGENERATED_ATTRIBUTE"))

        if not self.__check_user(ref, user):
            raise ValueError(C.make_error("NOT_OBJECT_OWNER"))

        objdsc['last_interaction'] = datetime.datetime.now()
        if 'mark_for_deletion' in objdsc:
            # as this object has been marked for deletion, we have to run the garbage collection
            # to remove this mark now
            self.__gc()

        setattr(objdsc['object']['object'], name, value)

        if skip_db_update is False:
            with make_session() as session:
                obj = session.query(OpenObject).filter(
                    OpenObject.ref == ref).one()
                obj.last_interaction = objdsc['last_interaction']
                if obj.data is None:
                    obj.data = {name: value}
                else:
                    obj.data[name] = value
                session.commit()
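
A subtlety in the update branch above: obj.data[name] = value mutates a JSON column in place, which SQLAlchemy only detects if the column type is wrapped in a mutation-tracking extension. Presumably OpenObject.data is declared along these lines (a sketch, not GOsa's actual model code):

from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.ext.mutable import MutableDict

# MutableDict makes in-place changes such as obj.data[name] = value
# mark the attribute as dirty, so session.commit() persists them
data = Column(MutableDict.as_mutable(JSONB))
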
Code Example #13
File: ppd_proxy.py Project: GOsa3/gosa
    def getPPDURL(self, source_url):
        """
        Downloads the source_url, stores it locally and returns the local URL

        :param source_url: remote PPD URL
        :return: local URL to the cached PPD
        """
        source = parse_url(source_url)
        host = source.host
        if host is None or host == "localhost":
            # no host: we assume that the PPD can be found on the current active master backend
            with make_session() as session:
                # get any other registered backend
                master_backend = session.query(RegisteredBackend) \
                    .filter(RegisteredBackend.uuid != self.env.core_uuid,
                            RegisteredBackend.type == BackendTypes.active_master).first()
                if master_backend is None:
                    self.log.error(C.make_error("NO_MASTER_BACKEND_FOUND"))
                    return source_url

                # take the host from the master backend's URL
                url = parse_url(master_backend.url)
                host = url.host

        # check if file exists locally
        rel_path = source.path[1:] if source.path.startswith("/") else source.path
        local_path = path.join(self.ppd_dir, host, rel_path)
        if not path.exists(local_path):
            # cache locally
            r = requests.get(source_url)
            if r.ok:
                local_dir = path.dirname(local_path)
                if not path.exists(local_dir):
                    makedirs(local_dir)
                with open(local_path, "w") as f:
                    f.write(r.text)
            else:
                self.log.error(
                    "requesting PPD from %s failed with status code: %s" %
                    (source_url, r.status_code))
                return source_url

        return "%s%s/%s" % (self.base_url, host, rel_path)
Code Example #14
File: main.py Project: GOsa3/gosa
    def getForemanDiscoveredHosts(self, user):
        methods = PluginRegistry.getInstance("RPCMethods")

        query = and_(
            ObjectInfoIndex.uuid == ExtensionIndex.uuid,
            ObjectInfoIndex.uuid == KeyValueIndex.uuid,
            ObjectInfoIndex._type == "Device",
            ExtensionIndex.extension == "ForemanHost",
            KeyValueIndex.key == "status",
            KeyValueIndex.value == "discovered"
        )

        with make_session() as session:
            # run the query while the session is still open
            query_result = session.query(ObjectInfoIndex).filter(query).all()
        res = {}
        for item in query_result:
            methods.update_res(res, item, user, 1)

        return list(res.values())
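
The and_ filter above spans three index tables and relies on SQLAlchemy turning the cross-table equality conditions into implicit joins. An equivalent formulation with explicit joins (a sketch, same semantics assumed):

query_result = session.query(ObjectInfoIndex) \
    .join(ExtensionIndex, ExtensionIndex.uuid == ObjectInfoIndex.uuid) \
    .join(KeyValueIndex, KeyValueIndex.uuid == ObjectInfoIndex.uuid) \
    .filter(ObjectInfoIndex._type == "Device",
            ExtensionIndex.extension == "ForemanHost",
            KeyValueIndex.key == "status",
            KeyValueIndex.value == "discovered") \
    .all()
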
Code Example #15
File: jsonrpc_objects.py Project: gonicus/gosa
    def closeObject(self, user, ref):
        """
        Close an object by its reference. This will free the object on
        the agent side.

        ================= ==========================
        Parameter         Description
        ================= ==========================
        ref               UUID / object reference
        ================= ==========================
        """
        if not self.__get_ref(ref):
            raise ValueError(C.make_error("REFERENCE_NOT_FOUND", ref=ref))

        if not self.__check_user(ref, user):
            raise ValueError(C.make_error("NOT_OBJECT_OWNER"))

        with make_session() as session:
            session.query(OpenObject).filter(OpenObject.ref == ref).delete()

        del self.__stack[ref]
Code Example #16
File: methods.py Project: GOsa3/gosa
    def getContainerTree(self, user, base, object_type=None):
        types = []
        table = inspect(ObjectInfoIndex)
        o2 = aliased(ObjectInfoIndex)
        for container in self.containers:
            types.append(getattr(ObjectInfoIndex, "_type") == container)

        query = and_(
            getattr(ObjectInfoIndex, "_adjusted_parent_dn") == base,
            or_(*types))

        with make_session() as session:
            # materialize the result while the session is still open
            query_result = session.query(ObjectInfoIndex, func.count(getattr(o2, "_adjusted_parent_dn"))) \
                .outerjoin(o2, and_(getattr(o2, "_invisible").is_(False), getattr(o2, "_adjusted_parent_dn") == getattr(ObjectInfoIndex, "dn"))) \
                .filter(query) \
                .group_by(*table.c).all()

        res = {}
        factory = ObjectFactory.getInstance()
        for item, children in query_result:
            self.update_res(res, item, user, 1)

            if object_type is not None and item.dn in res:
                res[item.dn]['hasChildren'] = children > 0
                # check if object_type is allowed in this container
                allowed = factory.getAllowedSubElementsForObject(
                    res[item.dn]['tag'], includeInvisible=False)
                if "*" in object_type:
                    # all allowed
                    res[item.dn]['allowed_move_target'] = True
                elif isinstance(object_type, list):
                    res[item.dn]['allowed_move_target'] = len(
                        set(object_type).intersection(allowed)) > 0
                else:
                    res[item.dn]['allowed_move_target'] = object_type in allowed
        return res
Code Example #17
File: test_integration.py Project: gonicus/gosa
    async def test_provision_host(self, m_get, m_del, m_put, m_post):
        """ convert a discovered host to a 'real' host  """
        self._test_dn = GosaTestCase.create_test_data()
        container = ObjectProxy(self._test_dn, "IncomingDeviceContainer")
        container.commit()

        mocked_foreman = MockForeman()
        m_get.side_effect = mocked_foreman.get
        m_del.side_effect = mocked_foreman.delete
        m_put.side_effect = mocked_foreman.put
        m_post.side_effect = mocked_foreman.post

        # create the discovered host + foremanHostgroup
        d_host = ObjectProxy(container.dn, "Device")
        d_host.cn = "mac00262df16a2c"
        d_host.extend("ForemanHost")
        d_host.status = "discovered"
        d_host.extend("ieee802Device")
        d_host.macAddress = "00:26:2d:f1:6a:2c"
        d_host.extend("IpHost")
        d_host.ipHostNumber = "192.168.0.1"
        d_host.commit()

        hostgroup = ObjectProxy("%s" % self._test_dn, "GroupOfNames")
        hostgroup.extend("ForemanHostGroup")
        hostgroup.cn = "Test"
        hostgroup.foremanGroupId = "4"
        hostgroup.commit()

        # add host to group
        logging.getLogger("test.foreman-integration").info("########### START: Add Host to group ############# %s" % AsyncHTTPTestCase.get_url(self, "/hooks/"))
        d_host = ObjectProxy("cn=mac00262df16a2c,%s" % container.dn)

        def check():
            logging.getLogger("test.foreman-integration").info("check condition: %s, %s" % (d_host.cn, d_host.status))
            return d_host.cn == "mac00262df16a2c" and d_host.status == "discovered"

        def check2():
            logging.getLogger("test.foreman-integration").info("check2 condition: %s" % d_host.cn)
            return d_host.cn == "Testhost"

        base_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data")
        with open(os.path.join(base_dir, "discovered_hosts", "mac00262df16a2c.json")) as f:
            mocked_foreman.register_conditional_response("http://localhost:8000/api/v2/discovered_hosts/mac00262df16a2c",
                                                         "get",
                                                         check,
                                                         f.read())
        with open(os.path.join(base_dir, "conditional", "Testhost.json")) as f:
            mocked_foreman.register_conditional_response("http://localhost:8000/api/v2/hosts/Testhost",
                                                         "get",
                                                         check2,
                                                         f.read())

        def activate(**kwargs):
            return True

        mocked_foreman.register_trigger("http://localhost:8000/api/v2/discovered_hosts/mac00262df16a2c",
                                        "put",
                                        activate,
                                        self.execute)

        with make_session() as session:
            assert session.query(ObjectInfoIndex.dn)\
                       .join(ObjectInfoIndex.properties)\
                       .filter(and_(KeyValueIndex.key == "cn", KeyValueIndex.value == "Testhost"))\
                       .count() == 0

        d_host.cn = "Testhost"
        d_host.groupMembership = hostgroup.dn
        d_host.commit()
        logging.getLogger("test.foreman-integration").info("waiting for 2 seconds")
        await asyncio.sleep(2)

        logging.getLogger("test.foreman-integration").info("########### END: Add Host to group #############")

        # now move the host to the final destination
        d_host = ObjectProxy("cn=Testhost,ou=incoming,%s" % self._test_dn)
        assert d_host.status != "discovered"
        assert d_host.name == "Testhost"
        assert d_host.hostgroup_id == "4"
        assert d_host.is_extended_by("RegisteredDevice") is True
        assert len(d_host.userPassword[0]) > 0
        assert d_host.deviceUUID is not None

        with make_session() as session:
            assert session.query(ObjectInfoIndex.dn) \
                       .join(ObjectInfoIndex.properties) \
                       .filter(and_(KeyValueIndex.key == "cn", KeyValueIndex.value == "Testhost")) \
                       .count() == 1

        logging.getLogger("test.foreman-integration").info("########### START: moving host #############")
        d_host.move("%s" % self._test_dn)
        logging.getLogger("test.foreman-integration").info("########### END: moving host #############")

        # let's check that everything is fine in the database
        d_host = ObjectProxy("cn=Testhost,ou=devices,%s" % self._test_dn, read_only=True)
        assert d_host is not None
        assert d_host.status == "unknown"
        assert d_host.groupMembership == hostgroup.dn
Code Example #18
File: methods.py Project: gonicus/gosa
    def search(self, user, base, scope, qstring, fltr=None):
        """
       Performs a query based on a simple search string consisting of keywords.

       Query the database using the given query string and an optional filter
       dict - and return the result set.

       ========== ==================
       Parameter  Description
       ========== ==================
       base       Query base
       scope      Query scope (SUB, BASE, ONE, CHILDREN)
       qstring    Query string
       fltr       Hash for extra parameters
       ========== ==================

       ``Return``: List of dicts
       """

        res = {}
        keywords = None
        dn_hook = "_parent_dn"
        fallback = fltr and "fallback" in fltr and fltr["fallback"]

        if not base:
            return []

        adjusted_base = base
        # Set defaults
        if not fltr:
            fltr = {}
        if 'category' not in fltr:
            fltr['category'] = "all"
        if 'secondary' not in fltr:
            fltr['secondary'] = "enabled"
        if 'mod-time' not in fltr:
            fltr['mod-time'] = "all"
        if 'adjusted-dn' in fltr and fltr['adjusted-dn'] is True:
            dn_hook = "_adjusted_parent_dn"
            adjusted_base = ObjectProxy.get_adjusted_dn(base, self.env.base)

        actions = 'actions' in fltr and fltr['actions'] is True

        # Sanity checks
        scope = scope.upper()
        if scope not in ["SUB", "BASE", "ONE", "CHILDREN"]:
            raise GOsaException(C.make_error("INVALID_SEARCH_SCOPE", scope=scope))
        if fltr['mod-time'] not in ["hour", "day", "week", "month", "year", "all"]:
            raise GOsaException(C.make_error("INVALID_SEARCH_DATE", date=fltr['mod-time']))

        # Build query: join attributes and keywords
        queries = []

        # Build query: assemble
        query = None
        if scope == "SUB":
            if queries:
                query = and_(or_(ObjectInfoIndex._parent_dn == base, ObjectInfoIndex._parent_dn.like("%," + base)), or_(*queries))
            else:
                query = or_(ObjectInfoIndex._parent_dn == base, ObjectInfoIndex._parent_dn.like("%," + base))

        elif scope == "ONE":
            query = and_(or_(ObjectInfoIndex.dn == adjusted_base, getattr(ObjectInfoIndex, dn_hook) == adjusted_base), or_(*queries))

        elif scope == "CHILDREN":
            query = and_(getattr(ObjectInfoIndex, dn_hook) == adjusted_base, or_(*queries))

        else:
            if queries:
                query = and_(ObjectInfoIndex.dn == base, or_(*queries))
            else:
                query = ObjectInfoIndex.dn == base

        # Build query: eventually extend with timing information
        td = None
        if fltr['mod-time'] != "all":
            now = datetime.datetime.now()
            if fltr['mod-time'] == 'hour':
                td = now - datetime.timedelta(hours=1)
            elif fltr['mod-time'] == 'day':
                td = now - datetime.timedelta(days=1)
            elif fltr['mod-time'] == 'week':
                td = now - datetime.timedelta(weeks=1)
            elif fltr['mod-time'] == 'month':
                td = now - datetime.timedelta(days=31)
            elif fltr['mod-time'] == 'year':
                td = now - datetime.timedelta(days=365)

            query = and_(ObjectInfoIndex._last_modified >= td, query)

        order_by = None
        if 'order-by' in fltr:
            is_desc = 'order' in fltr and fltr['order'] == 'desc'
            order_by = "_last_changed"
            if fltr['order-by'] == "last-changed":
                order_by = "_last_modified"
            order_by = desc(getattr(ObjectInfoIndex, order_by)) if is_desc else getattr(ObjectInfoIndex, order_by)

        # Perform the primary query and collect the results
        squery = []
        these = dict([(x, 1) for x in self.__search_aid['used_attrs']])
        these.update(dict(dn=1, _type=1, _uuid=1, _last_changed=1))
        these = list(these.keys())

        with make_session() as session:
            query_result, ranked = self.finalize_query(query, fltr, session, qstring=qstring, order_by=order_by)

            # try:
            #     self.log.debug(str(query_result.statement.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True})))
            # except Exception as e:
            #     self.log.warning(str(e))
            #     self.log.debug(str(query_result))
            #     pass

            # limit only secondary enabled searches, because e.g. the treeitems use this search to resolve and we do not want to limit those results
            if fltr['secondary'] == "enabled":
                max_results = self.env.config.get("backend.max-results", default=1000)
            else:
                max_results = math.inf

            counter = 0
            total = query_result.count()
            response = {}
            if total == 0 and fallback is True and PluginRegistry.getInstance("ObjectIndex").fuzzy is True:
                # do fuzzy search
                if qstring:
                    try:
                        keywords = [s.strip("'").strip('"') for s in shlex.split(qstring)]
                    except ValueError:
                        keywords = [s.strip("'").strip('"') for s in qstring.split(" ")]
                    # Make keywords unique
                    keywords = list(set(keywords))

                    # find most similar words
                    for i, kw in enumerate(keywords):
                        r = session.execute("SELECT word, similarity(word, '{0}') as sim FROM unique_lexeme WHERE length(word) > 2 AND similarity(word, '{0}') > {1} ORDER BY similarity(word, '{0}') DESC LIMIT 3;".format(kw, self.__fuzzy_similarity_threshold)).fetchall()
                        if len(r) > 0 and self.__fuzzy_similarity_threshold < 0.5 <= r[0]['sim']:
                            # we have good results to take only those and ignore the lower similarity threshold
                            r = [x for x in r if x['sim'] >= 0.5]

                        keywords[i] = " or ".join([x['word'] for x in r])

                    keywords_string = " ".join(keywords)
                    if keywords_string != "":
                        self.log.info("no results found for: '%s' => re-trying with: '%s'" % (qstring, keywords_string))
                        response['orig'] = qstring
                        if qstring != keywords_string:
                            response['fuzzy'] = keywords_string
                            query_result, ranked = self.finalize_query(query, fltr, session, qstring=" ".join(keywords), order_by=order_by)
                            total = query_result.count()

            response['primary_total'] = total
            self.log.debug("Query: %s Keywords: %s, Filter: %s => %s results" % (qstring, keywords, fltr, total))

            squery_constraints = {}
            primary_uuids = []
            for row in query_result.all():
                if ranked is True:
                    item = row[0]
                    rank = row[1]
                    # make sure that the primary rank is higher than the secondaries
                    rank += 1
                else:
                    item = row
                    rank = 1
                self.update_res(res, item, user, rank, these=these, actions=actions)
                counter += 1
                if counter >= max_results:
                    break
                primary_uuids.append(item.uuid)
                # Collect information for secondary search?
                if fltr['secondary'] != "enabled":
                    continue

                if item._type in self.__search_aid['resolve']:
                    if len(self.__search_aid['resolve'][item._type]) == 0:
                        continue

                    kv = self.__index_props_to_key_value(item.properties)
                    for r in self.__search_aid['resolve'][item._type]:
                        if r['attribute'] in kv:
                            tag = r['type'] if r['type'] else item._type

                            # If a category was chosen and it does not fit the
                            # desired target tag - skip that one
                            if not (fltr['category'] == "all" or fltr['category'] == tag):
                                continue

                            if hasattr(ObjectInfoIndex, r['filter']):
                                if tag == "*":
                                    squery.append(getattr(ObjectInfoIndex, r['filter']).in_(kv[r['attribute']]))
                                else:
                                    squery.append(and_(ObjectInfoIndex._type == tag, getattr(ObjectInfoIndex, r['filter']).in_(kv[r['attribute']])))
                            else:
                                if tag not in squery_constraints:
                                    squery_constraints[tag] = {}
                                if r['filter'] not in squery_constraints[tag]:
                                    squery_constraints[tag][r['filter']] = []
                                squery_constraints[tag][r['filter']].extend(kv[r['attribute']])

            for type, constraints in squery_constraints.items():
                for key, values in constraints.items():
                    values = list(set(values))
                    if len(values) > 0:
                        if type == "*":
                            # list.append takes a single argument, so both conditions have to be combined
                            squery.append(and_(KeyValueIndex.key == key, KeyValueIndex.value.in_(values)))
                        else:
                            squery.append(and_(ObjectInfoIndex._type == type, KeyValueIndex.key == key, KeyValueIndex.value.in_(values)))

            # Perform secondary query and update the result
            if fltr['secondary'] == "enabled" and squery:
                query = and_(or_(*squery), ~ObjectInfoIndex.uuid.in_(primary_uuids))

                # Add "_last_changed" information to query
                if fltr['mod-time'] != "all":
                    query = and_(query, ObjectInfoIndex._last_modified >= td)

                # Execute query and update results
                sec_result = session.query(ObjectInfoIndex).join(ObjectInfoIndex.properties).options(contains_eager(ObjectInfoIndex.properties)).filter(query)
                # try:
                #     self.log.debug("Secondary query: %s " % str(sec_result.statement.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True})))
                # except Exception as e:
                #     self.log.warning(str(e))
                #     self.log.debug("Secondary query: %s " % str(sec_result))
                #     pass

                results = sec_result.all()
                total += len(results)
                if counter < max_results:
                    for item in results:
                        self.update_res(res, item, user, self.__make_relevance(item, keywords, fltr, True), secondary=True, these=these, actions=actions)
                        counter += 1
                        if counter >= max_results:
                            break

        response['total'] = total
        response['results'] = list(res.values())
        return response
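
One caveat in the fuzzy-search block above: each keyword is interpolated directly into the raw SQL string, although the keywords stem from user-supplied search input. A bound-parameter version of the same pg_trgm query would be the safer pattern (a sketch; the threshold value stands in for self.__fuzzy_similarity_threshold):

from sqlalchemy import text

stmt = text(
    "SELECT word, similarity(word, :kw) AS sim "
    "FROM unique_lexeme "
    "WHERE length(word) > 2 AND similarity(word, :kw) > :threshold "
    "ORDER BY similarity(word, :kw) DESC LIMIT 3"
)
r = session.execute(stmt, {"kw": kw, "threshold": 0.3}).fetchall()
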
Code Example #19
File: test_filters.py Project: gonicus/gosa
    def test_ImageProcessor(self):
        # read example image
        with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), "test.jpg"), "r+b") as f:
            byte = f.read()

        user = mock.MagicMock()
        with make_session() as session:
            res = session.query(ObjectInfoIndex.uuid).filter(ObjectInfoIndex.dn == "cn=Frank Reich,ou=people,dc=example,dc=net").one()
            user.uuid = res[0]
        user.modifyTimestamp = datetime.datetime.now()
        test_dict = {
            "image": {
                "value": [Binary(byte)]
            }
        }
        image_dir = os.path.join(Environment.getInstance().config.get("user.image-path", "/tmp/images"), user.uuid)
        tmp_image = mock.MagicMock()

        with mock.patch("gosa.backend.plugins.user.filters.Base.metadata.create_all") as m_create_all, \
                mock.patch("gosa.backend.plugins.user.filters.os.path.exists", return_value=True), \
                mock.patch("gosa.backend.plugins.user.filters.os.path.isdir", return_value=True), \
                mock.patch("gosa.backend.plugins.user.filters.ImageOps.fit", return_value=tmp_image):
            filter = ImageProcessor(None)
            with mock.patch("gosa.backend.plugins.user.filters.make_session") as m:
                mocked_db_query = m.return_value.__enter__.return_value.query.return_value.filter.return_value.one_or_none
                mocked_db_query.side_effect = OperationalError(None, None, None)

                filter.process(user, "image", test_dict, "32", "64")
                assert m_create_all.called
                m_create_all.reset_mock()

                mocked_db_query.side_effect = [None, OperationalError(None, None, None)]
                filter.process(user, "image", test_dict, "32")
                assert m_create_all.called
                assert tmp_image.save.called

        filter = ImageProcessor(None)

        with pytest.raises(ElementFilterException):
            filter.process(None, None, None)

        with pytest.raises(ElementFilterException), \
                mock.patch("gosa.backend.plugins.user.filters.os.path.exists", return_value=True),\
                mock.patch("gosa.backend.plugins.user.filters.os.path.isdir", return_value=False), \
                mock.patch("gosa.backend.plugins.user.filters.make_session"):
            filter.process(user, "image", test_dict, "32", "64")

        with mock.patch("gosa.backend.plugins.user.filters.make_session") as m:
            m_session = m.return_value.__enter__.return_value
            m_session.query.return_value.filter.return_value.one_or_none.return_value = None
            filter.process(user, "image", test_dict, "32", "64")
            assert m_session.add.called
            assert m_session.commit.called
            assert os.path.exists(os.path.join(image_dir, "image", "0", "32.jpg"))
            assert os.path.exists(os.path.join(image_dir, "image", "0", "64.jpg"))

        shutil.rmtree(image_dir)

        found = mock.MagicMock()
        found.filter.return_value.one_or_none.return_value.modified = user.modifyTimestamp
        with mock.patch("gosa.backend.plugins.user.filters.make_session") as m:
            m_session = m.return_value.__enter__.return_value
            m_session.query.return_value = found
            filter.process(user, "image", test_dict, "32", "64")
            assert not m_session.add.called
            assert not m_session.commit.called
            assert not os.path.exists(os.path.join(image_dir, "image", "0", "32.jpg"))
            assert not os.path.exists(os.path.join(image_dir, "image", "0", "64.jpg"))

            filter.process(user, "image", {'image': {'value': [Binary(b"wrong binary data")]}}, "32", "64")

        with mock.patch("gosa.backend.plugins.user.filters.make_session") as m:
            m_session = m.return_value.__enter__.return_value
            m_session.query.return_value.filter.return_value.one_or_none.return_value = None
            filter.process(user, "image", {'image': {'value': [Binary(b"wrong binary data")]}}, "32", "64")
            assert m_session.add.called
            assert m_session.commit.called
            assert not os.path.exists(os.path.join(image_dir, "image", "0", "32.jpg"))
            assert not os.path.exists(os.path.join(image_dir, "image", "0", "64.jpg"))
Code Example #20
File: jsonrpc_service.py Project: gonicus/gosa
 def check_session(cls, sid, user):
     with make_session() as session:
         return session.query(UserSession).filter(UserSession.sid == sid, UserSession.user == user).count() > 0
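
count() > 0 asks the database to count every matching row; when only existence matters, an EXISTS query is a common lighter-weight alternative (a sketch, not how GOsa implements it):

from sqlalchemy import and_, exists

def check_session(cls, sid, user):
    with make_session() as session:
        # EXISTS lets the database stop at the first matching row
        return session.query(
            exists().where(and_(UserSession.sid == sid,
                                UserSession.user == user))
        ).scalar()
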
Code Example #21
File: jsonrpc_service.py Project: gonicus/gosa
 def __save_user_session(self, user_session):
     if self.env.mode == "proxy":
         sessions[user_session.sid] = user_session
     else:
         with make_session() as session:
             session.add(user_session)
Code Example #22
File: jsonrpc_objects.py Project: gonicus/gosa
    def __gc(self):
        self.env.log.debug("running garbage collector on object store")
        inactivity_threshold = datetime.datetime.now() - datetime.timedelta(minutes=self.__inactivity_timeout)
        e = EventMaker()
        command = PluginRegistry.getInstance("CommandRegistry")
        sched = PluginRegistry.getInstance("SchedulerService").getScheduler()

        for ref, item in list(self.__stack.items()):
            uuid = item['object']['uuid']
            if uuid is None:
                # new items without uuid do not need to be closed by timeout
                continue
            last_interaction_time = item['last_interaction'] if 'last_interaction' in item else item['created']
            if last_interaction_time < inactivity_threshold:
                if 'mark_for_deletion' in item:
                    if item['mark_for_deletion'] <= datetime.datetime.now():
                        if 'countdown_job' in item:
                            try:
                                sched.unschedule_job(item['countdown_job'])
                            except KeyError:
                                pass
                            finally:
                                del item['countdown_job']

                        del self.__stack[ref]
                        with make_session() as session:
                            session.query(OpenObject).filter(OpenObject.ref == ref).delete()

                        event = e.Event(
                            e.ObjectCloseAnnouncement(
                                e.Target(item['user']),
                                e.SessionId(item['session_id']),
                                e.State("closed"),
                                e.UUID(uuid)
                            )
                        )
                        command.sendEvent(item['user'], event)
                else:
                    # notify the user to do something, otherwise the lock gets removed in 1 minute
                    event = e.Event(
                        e.ObjectCloseAnnouncement(
                            e.Target(item['user']),
                            e.SessionId(item['session_id']),
                            e.State("closing"),
                            e.UUID(uuid),
                            e.Minutes("1")
                        )
                    )
                    command.sendEvent(item['user'], event)
                    item['mark_for_deletion'] = datetime.datetime.now() + datetime.timedelta(seconds=59)
                    if 'countdown_job' in item:
                        try:
                            sched.unschedule_job(item['countdown_job'])
                        except KeyError:
                            pass
                        finally:
                            del item['countdown_job']

                    item['countdown_job'] = sched.add_date_job(self.__gc,
                                                               datetime.datetime.now() + datetime.timedelta(minutes=1),
                                                               tag="_internal",
                                                               jobstore="ram")

            elif 'mark_for_deletion' in item:
                # item has been modified -> remove the deletion mark
                del item['mark_for_deletion']
                event = e.Event(
                    e.ObjectCloseAnnouncement(
                        e.Target(item['user']),
                        e.SessionId(item['session_id']),
                        e.State("closing_aborted"),
                        e.UUID(uuid)
                    )
                )
                command.sendEvent(item['user'], event)
                if 'countdown_job' in item:
                    try:
                        sched.unschedule_job(item['countdown_job'])
                    except KeyError:
                        pass
                    finally:
                        del item['countdown_job']
Code Example #23
File: main.py Project: GOsa3/gosa
    def handle_request(self, request_handler):
        foreman = PluginRegistry.getInstance("Foreman")
        data = loads(request_handler.request.body)
        self.log.debug(data)

        # TODO disable hook logging to file
        with open("foreman-log.json", "a") as f:
            f.write("%s,\n" % dumps(data, indent=4, sort_keys=True))

        if data["event"] in ForemanHookReceiver.skip_next_event and data["object"] in ForemanHookReceiver.skip_next_event[data["event"]]:
            ForemanHookReceiver.skip_next_event[data["event"]].remove(data["object"])
            self.log.info("skipped '%s' event for object: '%s'" % (data["event"], data["object"]))
            return

        data_keys = list(data['data'].keys())
        if len(data_keys) == 1:
            type = data_keys[0]
        else:
            # no type given -> skipping this event as others might come with more information
            self.log.warning("skipping event '%s' for object '%s' as no type information is given in data: '%s'" % (data["event"],
                                                                                                                  data["object"],
                                                                                                                  data["data"]))
            return

        # search for real data
        if len(data['data'][type].keys()) == 1:
            # something like {data: 'host': {host: {...}}}
            #             or {data: 'discovered_host': {host: {...}}}
            payload_data = data['data'][type][list(data['data'][type].keys())[0]]
        else:
            payload_data = data['data'][type]

        if type == "operatingsystem":
            with make_session() as session:
                foreman.sync_release_name(payload_data, session, event=data['event'])
                session.commit()
                return

        factory = ObjectFactory.getInstance()
        foreman_type = type
        if type == "discovered_host":
            type = "host"

        object_types = factory.getObjectNamesWithBackendSetting("Foreman", "type", "%ss" % type)
        object_type = object_types[0] if len(object_types) else None

        backend_attributes = factory.getObjectBackendProperties(object_type) if object_type is not None else None
        self.log.debug("Hookevent: '%s' for '%s' (%s)" % (data['event'], data['object'], object_type))

        uuid_attribute = None
        if "Foreman" in backend_attributes:
            uuid_attribute = backend_attributes["Foreman"]["_uuidSourceAttribute"] \
                if '_uuidSourceAttribute' in backend_attributes["Foreman"] else backend_attributes["Foreman"]["_uuidAttribute"]

        ForemanBackend.modifier = "foreman"
        update_data = {}

        if data['event'] in ["update", "create"] and foreman_type == "host":
            id = payload_data["id"] if "id" in payload_data else None
            try:
                foreman.write_parameters(id if id is not None else data['object'])
            except Exception:
                foreman.mark_for_parameter_setting(data['object'], {
                    "status": "created",
                    "use_id": id
                })

        if data['event'] == "after_commit" or data['event'] == "update" or data['event'] == "after_create" or data['event'] == "create":
            host = None
            if data['event'] == "update" and foreman_type == "host" and "mac" in payload_data and payload_data["mac"] is not None:
                # check if we have a discovered host for this mac
                index = PluginRegistry.getInstance("ObjectIndex")
                res = index.search({
                    "_type": "Device",
                    "extension": ["ForemanHost", "ieee802Device"],
                    "macAddress": payload_data["mac"],
                    "status": "discovered"
                }, {"dn": 1})

                if len(res):
                    self.log.debug("update received for existing host with dn: %s" % res[0]["dn"])
                    host = ObjectProxy(res[0]["dn"])

                    if foreman_type != "discovered_host" and host.is_extended_by("ForemanHost"):
                        host.status = "unknown"

            foreman_object = foreman.get_object(object_type, payload_data[uuid_attribute], create=host is None)
            if foreman_object and host:
                if foreman_object != host:
                    self.log.debug("using known host instead of creating a new one")
                    # host is the formerly discovered host, which might have been changed in GOsa for provisioning
                    # so we want to use this one, foreman_object is the joined one, so copy the credentials from foreman_object to host
                    if not host.is_extended_by("RegisteredDevice"):
                        host.extend("RegisteredDevice")
                    if not host.is_extended_by("simpleSecurityObject"):
                        host.extend("simpleSecurityObject")
                    host.deviceUUID = foreman_object.deviceUUID
                    host.userPassword = foreman_object.userPassword
                    host.otp = foreman_object.otp
                    host.cn = foreman_object.cn

                    # now delete the formerly joined host
                    foreman_object.remove()
                    foreman_object = host

            elif foreman_object is None and host is not None:
                foreman_object = host

            elif foreman_type == "discovered_host":
                self.log.debug("setting discovered state for %s" % payload_data[uuid_attribute])
                if not foreman_object.is_extended_by("ForemanHost"):
                    foreman_object.extend("ForemanHost")
                foreman_object.status = "discovered"

            if foreman_type == "host":
                old_build_state = foreman_object.build

            foreman.update_type(object_type, foreman_object, payload_data, uuid_attribute, update_data=update_data)
            if foreman_type == "host" and old_build_state is True and foreman_object.build is False and \
                            foreman_object.status == "ready":
                # send notification
                e = EventMaker()
                ev = e.Event(e.Notification(
                    e.Title(N_("Host ready")),
                    e.Body(N_("Host '%s' has been successfully built." % foreman_object.cn)),
                    e.Icon("@Ligature/pc"),
                    e.Timeout("10000")
                ))
                event_object = objectify.fromstring(etree.tostring(ev, pretty_print=True).decode('utf-8'))
                SseHandler.notify(event_object)

        elif data['event'] == "after_destroy":
            # print("Payload: %s" % payload_data)
            foreman.remove_type(object_type, payload_data[uuid_attribute])

            # because foreman sends the after_commit event after the after_destroy event
            # we need to skip this event, otherwise the host would be re-created
            if "after_commit" not in ForemanHookReceiver.skip_next_event:
                ForemanHookReceiver.skip_next_event["after_commit"] = [data['object']]
            else:
                ForemanHookReceiver.skip_next_event["after_commit"].append(data['object'])

            # add garbage collection for skip
            sobj = PluginRegistry.getInstance("SchedulerService")
            sobj.getScheduler().add_date_job(self.cleanup_event_skipper,
                                             datetime.datetime.now() + datetime.timedelta(minutes=1),
                                             args=("after_commit", data['object']),
                                             tag='_internal', jobstore='ram')

        else:
            self.log.info("unhandled hook event '%s' received for '%s'" % (data['event'], type))

        ForemanBackend.modifier = None
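
For context, cleanup_event_skipper referenced above is the scheduled callback that drops a stale skip mark again. A minimal sketch of what it could look like, assuming the skip_next_event class dict shown above (illustrative, not necessarily the project's actual implementation):

    def cleanup_event_skipper(self, event_name, object_name):
        # sketched cleanup: forget the skip mark so later events for this
        # object are processed normally again
        marks = ForemanHookReceiver.skip_next_event.get(event_name)
        if marks and object_name in marks:
            marks.remove(object_name)
            if not marks:
                del ForemanHookReceiver.skip_next_event[event_name]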
Code example #30
File: methods.py Project: GOsa3/gosa
    def search(self, user, base, scope, qstring, fltr=None):
        """
        Performs a query based on a simple search string consisting of keywords.

        Query the database using the given query string and an optional filter
        dict - and return the result set.

        ========== ==================
        Parameter  Description
        ========== ==================
        base       Query base
        scope      Query scope (SUB, BASE, ONE, CHILDREN)
        qstring    Query string
        fltr       Hash for extra parameters
        ========== ==================

        ``Return``: List of dicts
        """

        res = {}
        keywords = None
        dn_hook = "_parent_dn"
        fallback = fltr and "fallback" in fltr and fltr["fallback"]

        if not base:
            return []

        adjusted_base = base
        # Set defaults
        if not fltr:
            fltr = {}
        if not 'category' in fltr:
            fltr['category'] = "all"
        if not 'secondary' in fltr:
            fltr['secondary'] = "enabled"
        if not 'mod-time' in fltr:
            fltr['mod-time'] = "all"
        if 'adjusted-dn' in fltr and fltr['adjusted-dn'] is True:
            dn_hook = "_adjusted_parent_dn"
            adjusted_base = ObjectProxy.get_adjusted_dn(base, self.env.base)

        actions = 'actions' in fltr and fltr['actions'] is True

        # Sanity checks
        scope = scope.upper()
        if not scope in ["SUB", "BASE", "ONE", "CHILDREN"]:
            raise GOsaException(
                C.make_error("INVALID_SEARCH_SCOPE", scope=scope))
        if not fltr['mod-time'] in [
                "hour", "day", "week", "month", "year", "all"
        ]:
            raise GOsaException(
                C.make_error("INVALID_SEARCH_DATE", date=fltr['mod-time']))

        # Build query: join attributes and keywords
        queries = []

        # Build query: assemble
        query = None
        if scope == "SUB":
            if queries:
                query = and_(
                    or_(ObjectInfoIndex._parent_dn == base,
                        ObjectInfoIndex._parent_dn.like("%," + base)),
                    or_(*queries))
            else:
                query = or_(ObjectInfoIndex._parent_dn == base,
                            ObjectInfoIndex._parent_dn.like("%," + base))

        elif scope == "ONE":
            query = and_(
                or_(ObjectInfoIndex.dn == adjusted_base,
                    getattr(ObjectInfoIndex, dn_hook) == adjusted_base),
                or_(*queries))

        elif scope == "CHILDREN":
            query = and_(
                getattr(ObjectInfoIndex, dn_hook) == adjusted_base,
                or_(*queries))

        else:
            if queries:
                query = and_(ObjectInfoIndex.dn == base, or_(*queries))
            else:
                query = ObjectInfoIndex.dn == base

        # Build query: eventually extend with timing information
        td = None
        if fltr['mod-time'] != "all":
            now = datetime.datetime.now()
            if fltr['mod-time'] == 'hour':
                td = now - datetime.timedelta(hours=1)
            elif fltr['mod-time'] == 'day':
                td = now - datetime.timedelta(days=1)
            elif fltr['mod-time'] == 'week':
                td = now - datetime.timedelta(weeks=1)
            elif fltr['mod-time'] == 'month':
                td = now - datetime.timedelta(days=31)
            elif fltr['mod-time'] == 'year':
                td = now - datetime.timedelta(days=365)

            query = and_(ObjectInfoIndex._last_modified >= td, query)

        order_by = None
        if 'order-by' in fltr:
            is_desc = 'order' in fltr and fltr['order'] == 'desc'
            order_by = "_last_changed"
            if fltr['order-by'] == "last-changed":
                order_by = "_last_modified"
            order_by = desc(getattr(ObjectInfoIndex,
                                    order_by)) if is_desc else getattr(
                                        ObjectInfoIndex, order_by)

        # Perform primary query and collect the results
        squery = []
        these = dict([(x, 1) for x in self.__search_aid['used_attrs']])
        these.update(dict(dn=1, _type=1, _uuid=1, _last_changed=1))
        these = list(these.keys())
        ranked = False

        with make_session() as session:
            query_result, ranked = self.finalize_query(query,
                                                       fltr,
                                                       session,
                                                       qstring=qstring,
                                                       order_by=order_by)

            # try:
            #     self.log.debug(str(query_result.statement.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True})))
            # except Exception as e:
            #     self.log.warning(str(e))
            #     self.log.debug(str(query_result))
            #     pass

            # only limit secondary-enabled searches: e.g. the tree items use this search to resolve entries, and those results must not be truncated
            if fltr['secondary'] == "enabled":
                max_results = self.env.config.get("backend.max-results",
                                                  default=1000)
            else:
                max_results = math.inf

            counter = 0
            total = query_result.count()
            response = {}
            if total == 0 and fallback is True and PluginRegistry.getInstance(
                    "ObjectIndex").fuzzy is True:
                # do fuzzy search
                if qstring:
                    try:
                        keywords = [
                            s.strip("'").strip('"')
                            for s in shlex.split(qstring)
                        ]
                    except ValueError:
                        keywords = [
                            s.strip("'").strip('"') for s in qstring.split(" ")
                        ]
                    # Make keywords unique
                    keywords = list(set(keywords))

                    # find most similar words
                    for i, kw in enumerate(keywords):
                        r = session.execute(
                            "SELECT word FROM unique_lexeme WHERE length(word) > 2 AND similarity(word, '{0}') > {1} ORDER BY levenshtein(word, '{0}', 2, 2, 1) LIMIT 3;"
                            .format(
                                kw,
                                self.__fuzzy_similarity_threshold)).fetchall()
                        keywords[i] = " or ".join([x['word'] for x in r])

                    self.log.info(
                        "no results found for: '%s' => re-trying with: '%s'" %
                        (qstring, " ".join(keywords)))
                    response['orig'] = qstring
                    response['fuzzy'] = " ".join(keywords)
                    query_result, ranked = self.finalize_query(
                        query,
                        fltr,
                        session,
                        qstring=" ".join(keywords),
                        order_by=order_by)
                    total = query_result.count()

            response['primary_total'] = total
            self.log.debug("Query: %s Keywords: %s, Filter: %s => %s results" %
                           (qstring, keywords, fltr, total))

            squery_constraints = {}
            primary_uuids = []
            for tuple in query_result:
                if ranked is True:
                    item = tuple[0]
                    rank = tuple[1]
                else:
                    item = tuple
                    rank = 0
                self.update_res(res,
                                item,
                                user,
                                rank,
                                these=these,
                                actions=actions)
                counter += 1
                if counter >= max_results:
                    break
                primary_uuids.append(item.uuid)
                # Collect information for secondary search?
                if fltr['secondary'] != "enabled":
                    continue

                if item._type in self.__search_aid['resolve']:
                    if len(self.__search_aid['resolve'][item._type]) == 0:
                        continue

                    kv = self.__index_props_to_key_value(item.properties)
                    for r in self.__search_aid['resolve'][item._type]:
                        if r['attribute'] in kv:
                            tag = r['type'] if r['type'] else item._type

                            # If a category was chosen and it does not fit the
                            # desired target tag - skip that one
                            if not (fltr['category'] == "all"
                                    or fltr['category'] == tag):
                                continue

                            if hasattr(ObjectInfoIndex, r['filter']):
                                if tag == "*":
                                    squery.append(
                                        getattr(ObjectInfoIndex,
                                                r['filter']).in_(
                                                    kv[r['attribute']]))
                                else:
                                    squery.append(
                                        and_(
                                            ObjectInfoIndex._type == tag,
                                            getattr(ObjectInfoIndex,
                                                    r['filter']).in_(
                                                        kv[r['attribute']])))
                            else:
                                if tag not in squery_constraints:
                                    squery_constraints[tag] = {}
                                if r['filter'] not in squery_constraints[tag]:
                                    squery_constraints[tag][r['filter']] = []
                                squery_constraints[tag][r['filter']].extend(
                                    kv[r['attribute']])

            for type, constraints in squery_constraints.items():
                for key, values in constraints.items():
                    values = list(set(values))
                    if len(values) > 0:
                        if type == "*":
                            squery.append(
                                and_(KeyValueIndex.key == key,
                                     KeyValueIndex.value.in_(values)))
                        else:
                            squery.append(
                                and_(ObjectInfoIndex._type == type,
                                     KeyValueIndex.key == key,
                                     KeyValueIndex.value.in_(values)))

            # Perform secondary query and update the result
            if fltr['secondary'] == "enabled" and squery:
                query = and_(or_(*squery),
                             ~ObjectInfoIndex.uuid.in_(primary_uuids))

                # Add "_last_changed" information to query
                if fltr['mod-time'] != "all":
                    query = and_(query, ObjectInfoIndex._last_modified >= td)

                # Execute query and update results
                sec_result = session.query(ObjectInfoIndex).join(
                    ObjectInfoIndex.properties).options(
                        contains_eager(
                            ObjectInfoIndex.properties)).filter(query)
                # try:
                #     self.log.debug("Secondary query: %s " % str(sec_result.statement.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True})))
                # except Exception as e:
                #     self.log.warning(str(e))
                #     self.log.debug("Secondary query: %s " % str(sec_result))
                #     pass

                results = sec_result.all()
                total += len(results)
                if counter < max_results:
                    for item in results:
                        self.update_res(res,
                                        item,
                                        user,
                                        self.__make_relevance(
                                            item, keywords, fltr, True),
                                        secondary=True,
                                        these=these,
                                        actions=actions)
                        counter += 1
                        if counter >= max_results:
                            break

        response['total'] = total
        response['results'] = list(res.values())
        return response
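
A typical invocation of this method, for illustration only (the user name, base DN and the rpc handle are placeholders):

    # hypothetical call: full-text search below a base DN, restricted to the
    # last week, with the fuzzy-search fallback enabled
    result = rpc.search("admin", "dc=example,dc=net", "SUB", "frank",
                        fltr={"mod-time": "week", "fallback": True})
    print(result["total"], len(result["results"]))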
Code example #31
File: jsonrpc_service.py Project: gonicus/gosa
    def user_sessions_available(cls, user):
        with make_session() as session:
            if user is not None:
                return session.query(UserSession).filter(UserSession.user == user).count() > 0
            else:
                return session.query(UserSession).count() > 0
Code example #33
File: filters.py Project: gonicus/gosa
    def process(self, obj, key, valDict, *sizes):

        # Sanity check
        if len(sizes) == 0:
            raise ElementFilterException(C.make_error("USER_IMAGE_SIZE_MISSING"))
        
        with make_session() as session:
            # Do we have an attribute to process?
            if key in valDict:
    
                if valDict[key]['value']:
    
                    # Check if a cache entry exists...
                    try:
                        entry = session.query(ImageIndex).filter(and_(ImageIndex.uuid == obj.uuid, ImageIndex.attribute == key)).one_or_none()
                    except OperationalError:
                        session.rollback()
                        Base.metadata.create_all(Environment.getInstance().getDatabaseEngine("backend-database"), tables=tables)
                        entry = None
    
                    if entry:
    
                        # Nothing to do if it's unmodified
                        if obj.modifyTimestamp == entry.modified:
                            return key, valDict
    
                    # Create new cache entry
                    else:
                        entry = ImageIndex(uuid=obj.uuid, attribute=key)
                        session.add(entry)
    
                    # Convert all images to all requested sizes
                    entry.modified = obj.modifyTimestamp
    
                    for idx in range(0, len(valDict[key]['value'])):
                        image = BytesIO(valDict[key]['value'][idx].get())
                        try:
                            im = Image.open(image) #@UndefinedVariable
                        except IOError:
                            continue
    
                        # Check for target directory
                        wd = os.path.join(self.__path, obj.uuid, key, str(idx))
                        if os.path.exists(wd) and not os.path.isdir(wd):
                            raise ElementFilterException(C.make_error("USER_IMAGE_CACHE_BROKEN"))
                        if not os.path.exists(wd):
                            os.makedirs(wd)
    
                        for size in sizes:
                            wds = os.path.join(wd, size + ".jpg")
                            s = int(size)
                            tmp = ImageOps.fit(im, (s, s), Image.ANTIALIAS) #@UndefinedVariable
                            tmp.save(wds, "JPEG")
    
                            # Save size reference if not there yet
                            try:
                                se = session.query(ImageSize.size).filter(and_(ImageSize.uuid == obj.uuid, ImageSize.size == s)).one_or_none()
                            except OperationalError:
                                Base.metadata.create_all(Environment.getInstance().getDatabaseEngine("backend-database"), tables=tables)
                                se = None
                            if not se:
                                se = ImageSize(uuid=obj.uuid, size=s, path=wds)
                                session.add(se)
    
                    # Flush
                    session.commit()
    
                elif 'last_value' in valDict[key] and valDict[key]['last_value']:
    
                    # Delete from db index
                    try:
                        entry = session.query(ImageIndex).filter(and_(ImageIndex.uuid == obj.uuid, ImageIndex.attribute == key)).one_or_none()
                        if entry is not None:
                            session.delete(entry)
                    except OperationalError:
                        pass
    
                    # delete from file system
                    for idx in range(0, len(valDict[key]['last_value'])):
    
                        # Check for target directory
                        wd = os.path.join(self.__path, obj.uuid, key, str(idx))
                        if os.path.exists(wd) and os.path.isdir(wd):
                            # delete
                            shutil.rmtree(wd)
    
                    # Flush
                    session.commit()

        return key, valDict
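
For illustration, the filter is driven per attribute with the requested image sizes passed as strings; a sketched call (the object, attribute name and value dict are placeholders):

    # hypothetical call: cache 32px and 64px JPEG variants of a user's photo
    key, valDict = image_filter.process(user_obj, "jpegPhoto", valDict, "32", "64")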
Code example #34
File: main.py Project: GOsa3/gosa
    def getForemanLsbNames(self):
        res = {}
        with make_session() as session:
            for r in session.query(Cache.data).filter(Cache.key.ilike("foreman.operating_system.%")).all():
                res[r[0]["release_name"]] = {"value": r[0]["release_name"]}
        return res
Code example #35
def check_trigger():
    with make_session() as session:
        return session.execute(
            "select * from pg_trigger WHERE tgname LIKE 'so_index%'"
        ).rowcount == 0
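
The helper returns True when none of the so_index triggers are installed yet; a sketched guard around the trigger setup (the setup function is a placeholder):

    # hypothetical guard: only install the index triggers when they are missing
    if check_trigger():
        create_index_triggers()  # placeholder for the actual DDL statements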
Code example #36
File: jsonrpc_service.py Project: GOsa3/gosa
    def check_session(cls, sid, user):
        with make_session() as session:
            return session.query(UserSession).filter(UserSession.sid == sid, UserSession.user == user).count() > 0
Code example #37
File: hsts_request_handler.py Project: GOsa3/gosa
    def _execute(self, transforms, *args, **kwargs):
        with make_session(skip_context_check=True) as session:
            with StackContext(SessionContext(session)):
                super(HSTSRequestHandler, self)._execute(transforms, *args, **kwargs)
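
Handlers deriving from this class share one database session per request; a minimal sketch of such a subclass (the handler itself is illustrative):

    class PingHandler(HSTSRequestHandler):
        # any make_session() call made while this request is handled should
        # resolve to the session established in _execute() above
        def get(self):
            self.write("pong")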
Code example #38
File: jsonrpc_objects.py Project: gonicus/gosa
    def openObject(self, user, session_id, oid, *args, **kwargs):
        """
        Open object on the agent side. This creates an instance on the
        stack and returns a JSON description of the object and its
        values.

        ================= ==========================
        Parameter         Description
        ================= ==========================
        oid               OID of the object to create
        args/kwargs       Arguments to be used when getting an object instance
        ================= ==========================

        ``Return``: JSON encoded object description
        """

        # In case of "object" we want to check the lock
        if oid == 'object':
            lck = self.__get_lock(args[0])
            if lck and (not session_id or lck['user'] != user or lck['session_id'] != session_id):
                raise Exception(C.make_error("OBJECT_LOCKED", object=args[0],
                                             user=lck['user'],
                                             when=lck['created'].strftime("%Y-%m-%d (%H:%M:%S)")))

        # Use oid to find the object type
        obj_type = self.__get_object_type(oid)
        methods, properties = self.__inspect(obj_type)

        # Load instance, fill with dummy stuff
        db_object = None
        if "db_object" in kwargs:
            db_object = kwargs["db_object"]
            ref = db_object.ref
            del kwargs["db_object"]
        else:
            ref = str(uuid.uuid1())

        # Make object instance and store it
        kwargs['user'] = user
        kwargs['session_id'] = session_id
        obj = obj_type(*args, **kwargs)

        # Merge in methods that may be available later due to extending more addons
        methods += obj.get_all_method_names()

        # Add dynamic information - if available
        if hasattr(obj, 'get_attributes'):
            properties = properties + obj.get_attributes()
        if hasattr(obj, 'get_methods'):
            methods = methods + obj.get_methods()

        objdsc = {
            'oid': oid,
            'dn': obj.dn if hasattr(obj, 'dn') else None,
            'uuid': obj.uuid if hasattr(obj, 'uuid') else None,
            'object': obj,
            'methods': list(set(methods)),
            'properties': properties
        }

        self.__stack[ref] = {
            'user': user,
            'session_id': session_id,
            'object': objdsc,
            'created': datetime.datetime.now() if db_object is None else db_object.created
        }

        # Build property dict
        propvals = {}
        if properties:
            propvals = dict([(p, getattr(obj, p)) for p in properties])

        propvals['uuid'] = obj.uuid

        # Build result
        result = {"__jsonclass__": ["json.JSONObjectFactory", [obj_type.__name__, ref, obj.dn, oid, methods, properties]]}
        result.update(propvals)

        if db_object is None:
            # store in DB
            with make_session() as session:
                oo = OpenObject(
                    ref=ref,
                    uuid=obj.uuid,
                    oid=oid,
                    user=user,
                    session_id=session_id,
                    backend_uuid=self.env.core_uuid,
                    created=self.__stack[ref]["created"],
                    last_interaction=self.__stack[ref]["created"]
                )
                session.add(oo)
                session.commit()
        elif db_object.data is not None:
            # apply changes to opened object
            for prop, value in db_object.data.items():
                self.setObjectProperty(user, ref, prop, value, skip_db_update=True)


        return result
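
Seen from a caller, the interesting part of the result is the reference inside the __jsonclass__ envelope; an illustrative use (handler, user and DN are placeholders):

    # hypothetical call: open an existing object by DN and keep its reference
    # for follow-up calls such as setObjectProperty or closeObject
    desc = handler.openObject("admin", session_id, "object",
                              "cn=Frank Reich,ou=people,dc=example,dc=net")
    ref = desc["__jsonclass__"][1][1]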
Code example #39
File: jsonrpc_objects.py Project: GOsa3/gosa
    def openObject(self, user, session_id, oid, *args, **kwargs):
        """
        Open object on the agent side. This creates an instance on the
        stack and returns a JSON description of the object and its
        values.

        ================= ==========================
        Parameter         Description
        ================= ==========================
        oid               OID of the object to create
        args/kwargs       Arguments to be used when getting an object instance
        ================= ==========================

        ``Return``: JSON encoded object description
        """

        # In case of "object" we want to check the lock
        if oid == 'object':
            lck = self.__get_lock(args[0])
            if lck and (not session_id or lck['user'] != user
                        or lck['session_id'] != session_id):
                raise Exception(
                    C.make_error(
                        "OBJECT_LOCKED",
                        object=args[0],
                        user=lck['user'],
                        when=lck['created'].strftime("%Y-%m-%d (%H:%M:%S)")))

        # Use oid to find the object type
        obj_type = self.__get_object_type(oid)
        methods, properties = self.__inspect(obj_type)

        # Load instance, fill with dummy stuff
        db_object = None
        if "db_object" in kwargs:
            db_object = kwargs["db_object"]
            ref = db_object.ref
            del kwargs["db_object"]
        else:
            ref = str(uuid.uuid1())

        # Make object instance and store it
        kwargs['user'] = user
        kwargs['session_id'] = session_id
        obj = obj_type(*args, **kwargs)

        # Merge in methods that may be available later due to extending more addons
        methods += obj.get_all_method_names()

        # Add dynamic information - if available
        if hasattr(obj, 'get_attributes'):
            properties = properties + obj.get_attributes()
        if hasattr(obj, 'get_methods'):
            methods = methods + obj.get_methods()

        objdsc = {
            'oid': oid,
            'dn': obj.dn if hasattr(obj, 'dn') else None,
            'uuid': obj.uuid if hasattr(obj, 'uuid') else None,
            'object': obj,
            'methods': list(set(methods)),
            'properties': properties
        }

        self.__stack[ref] = {
            'user': user,
            'session_id': session_id,
            'object': objdsc,
            'created': datetime.datetime.now() if db_object is None else db_object.created
        }

        # Build property dict
        propvals = {}
        if properties:
            propvals = dict([(p, getattr(obj, p)) for p in properties])

        propvals['uuid'] = obj.uuid

        # Build result
        result = {
            "__jsonclass__": [
                "json.JSONObjectFactory",
                [obj_type.__name__, ref, obj.dn, oid, methods, properties]
            ]
        }
        result.update(propvals)

        if db_object is None:
            # store in DB
            with make_session() as session:
                oo = OpenObject(ref=ref,
                                uuid=obj.uuid,
                                oid=oid,
                                user=user,
                                session_id=session_id,
                                backend_uuid=self.env.core_uuid,
                                created=self.__stack[ref]["created"],
                                last_interaction=self.__stack[ref]["created"])
                session.add(oo)
                session.commit()
        elif db_object.data is not None:
            # apply changes to opened object
            for prop, value in db_object.data.items():
                self.setObjectProperty(user,
                                       ref,
                                       prop,
                                       value,
                                       skip_db_update=True)

        return result
Code example #40
File: filters.py Project: GOsa3/gosa
    def process(self, obj, key, valDict, *sizes):

        # Sanity check
        if len(sizes) == 0:
            raise ElementFilterException(
                C.make_error("USER_IMAGE_SIZE_MISSING"))

        with make_session() as session:
            # Do we have an attribute to process?
            if key in valDict:

                if valDict[key]['value']:

                    # Check if a cache entry exists...
                    try:
                        entry = session.query(ImageIndex).filter(
                            and_(ImageIndex.uuid == obj.uuid,
                                 ImageIndex.attribute == key)).one_or_none()
                    except OperationalError:
                        session.rollback()
                        Base.metadata.create_all(
                            Environment.getInstance().getDatabaseEngine(
                                "backend-database"))
                        entry = None

                    if entry:

                        # Nothing to do if it's unmodified
                        if obj.modifyTimestamp == entry.modified:
                            return key, valDict

                    # Create new cache entry
                    else:
                        entry = ImageIndex(uuid=obj.uuid, attribute=key)
                        session.add(entry)

                    # Convert all images to all requested sizes
                    entry.modified = obj.modifyTimestamp

                    for idx in range(0, len(valDict[key]['value'])):
                        image = BytesIO(valDict[key]['value'][idx].get())
                        try:
                            im = Image.open(image)  #@UndefinedVariable
                        except IOError:
                            continue

                        # Check for target directory
                        wd = os.path.join(self.__path, obj.uuid, key, str(idx))
                        if os.path.exists(wd) and not os.path.isdir(wd):
                            raise ElementFilterException(
                                C.make_error("USER_IMAGE_CACHE_BROKEN"))
                        if not os.path.exists(wd):
                            os.makedirs(wd)

                        for size in sizes:
                            wds = os.path.join(wd, size + ".jpg")
                            s = int(size)
                            tmp = ImageOps.fit(
                                im, (s, s),
                                Image.ANTIALIAS)  #@UndefinedVariable
                            tmp.save(wds, "JPEG")

                            # Save size reference if not there yet
                            try:
                                se = session.query(ImageSize.size).filter(
                                    and_(ImageSize.uuid == obj.uuid,
                                         ImageSize.size == s)).one_or_none()
                            except OperationalError:
                                Base.metadata.create_all(
                                    Environment.getInstance(
                                    ).getDatabaseEngine("backend-database"))
                                se = None
                            if not se:
                                se = ImageSize(uuid=obj.uuid, size=s, path=wds)
                                session.add(se)

                    # Flush
                    session.commit()

                elif 'last_value' in valDict[key] and valDict[key][
                        'last_value']:

                    # Delete from db index
                    try:
                        entry = session.query(ImageIndex).filter(
                            and_(ImageIndex.uuid == obj.uuid,
                                 ImageIndex.attribute == key)).one_or_none()
                        if entry is not None:
                            session.delete(entry)
                    except OperationalError:
                        pass

                    # delete from file system
                    for idx in range(0, len(valDict[key]['last_value'])):

                        # Check for target directory
                        wd = os.path.join(self.__path, obj.uuid, key, str(idx))
                        if os.path.exists(wd) and os.path.isdir(wd):
                            # delete
                            shutil.rmtree(wd)

                    # Flush
                    session.commit()

        return key, valDict
Code example #41
File: jsonrpc_objects.py Project: GOsa3/gosa
    def __gc(self):
        self.env.log.debug("running garbage collector on object store")
        ten_minutes_ago = datetime.datetime.now() - datetime.timedelta(
            minutes=self.__inactivity_timeout)
        e = EventMaker()
        command = PluginRegistry.getInstance("CommandRegistry")
        sched = PluginRegistry.getInstance("SchedulerService").getScheduler()

        for ref, item in list(self.__stack.items()):
            uuid = item['object']['uuid']
            if uuid is None:
                # new items without uuid do not need to be closed by timeout
                continue
            last_interaction_time = item.get('last_interaction', item['created'])
            if last_interaction_time < ten_minutes_ago:
                if 'mark_for_deletion' in item:
                    if item['mark_for_deletion'] <= datetime.datetime.now():
                        if 'countdown_job' in item:
                            try:
                                sched.unschedule_job(item['countdown_job'])
                            except KeyError:
                                pass
                            finally:
                                del item['countdown_job']

                        del self.__stack[ref]
                        with make_session() as session:
                            session.query(OpenObject).filter(
                                OpenObject.ref == ref).delete()

                        event = e.Event(
                            e.ObjectCloseAnnouncement(
                                e.Target(item['user']),
                                e.SessionId(item['session_id']),
                                e.State("closed"), e.UUID(uuid)))
                        command.sendEvent(item['user'], event)
                else:
                    # notify user to do something otherwise the lock gets removed in 1 minute
                    event = e.Event(
                        e.ObjectCloseAnnouncement(
                            e.Target(item['user']),
                            e.SessionId(item['session_id']),
                            e.State("closing"), e.UUID(uuid), e.Minutes("1")))
                    command.sendEvent(item['user'], event)
                    item['mark_for_deletion'] = datetime.datetime.now() + datetime.timedelta(seconds=59)
                    if 'countdown_job' in item:
                        try:
                            sched.unschedule_job(item['countdown_job'])
                        except KeyError:
                            pass
                        finally:
                            del item['countdown_job']

                    item['countdown_job'] = sched.add_date_job(
                        self.__gc,
                        datetime.datetime.now() +
                        datetime.timedelta(minutes=1),
                        tag="_internal",
                        jobstore="ram")

            elif 'mark_for_deletion' in item:
                # item has been modified -> remove the deletion mark
                del item['mark_for_deletion']
                event = e.Event(
                    e.ObjectCloseAnnouncement(e.Target(item['user']),
                                              e.SessionId(item['session_id']),
                                              e.State("closing_aborted"),
                                              e.UUID(uuid)))
                command.sendEvent(item['user'], event)
                if 'countdown_job' in item:
                    try:
                        sched.unschedule_job(item['countdown_job'])
                    except KeyError:
                        pass
                    finally:
                        del item['countdown_job']
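
Besides the self-rescheduling above, the collector needs an initial periodic trigger. With the APScheduler 2.x style API used throughout this code, a startup hook could wire it up roughly like this (method name, interval and options are assumptions):

    def serve(self):
        # hypothetical startup wiring: sweep the open-object store once per minute
        sched = PluginRegistry.getInstance("SchedulerService").getScheduler()
        sched.add_interval_job(self.__gc, minutes=1, tag="_internal",
                               jobstore="ram")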
Code example #42
File: test_methods.py Project: gonicus/gosa
def check_trigger():
    with make_session() as session:
        return session.execute("select * from pg_trigger WHERE tgname LIKE 'so_index%'").rowcount == 0
Code example #43
File: test_back_object.py Project: gonicus/gosa
    def setUp(self):
        self.back = ObjectHandler()
        with make_session() as session:
            res = session.query(ObjectInfoIndex.uuid).filter(ObjectInfoIndex.dn == "cn=Frank Reich,ou=people,dc=example,dc=net").one()
            self.user_uuid = res[0]
Code example #44
File: jsonrpc_service.py Project: GOsa3/gosa
    def process(self, data):
        """
        Process an incoming JSONRPC request and dispatch it through the
        *CommandRegistry*.

        ================= ==========================
        Parameter         Description
        ================= ==========================
        data              Incoming body data
        ================= ==========================

        ``Return``: varies
        """
        try:
            json = loads(data)
        except ValueError as e:
            raise ValueError(C.make_error("INVALID_JSON", data=str(e)))

        try:
            method = json['method']
            params = json['params']
            jid = json['id']
        except KeyError:
            raise ValueError(C.make_error("JSON_MISSING_PARAMETER"))

        if method.startswith('_'):
            raise tornado.web.HTTPError(403, "Bad method name %s: must not start with _" % method)
        if not isinstance(params, list) and not isinstance(params, dict):
            raise ValueError(C.make_error("PARAMETER_LIST_OR_DICT"))

        # execute command if it is allowed without login
        if method in no_login_commands:
            return self.dispatch(method, params, jid)

        # Check if we're globally locked currently
        if GlobalLock.exists("scan_index"):
            raise FilterException(C.make_error('INDEXING', "base"))

        cls = self.__class__

        twofa_manager = PluginRegistry.getInstance("TwoFactorAuthManager")

        # Create an authentication cookie on login
        if method == 'login':
            (user, password) = params

            # Check password and create session id on success
            sid = str(uuid.uuid1())
            result = {
                'state': AUTH_FAILED
            }

            lock_result = self.get_lock_result(user)
            if lock_result is not None:
                return dict(result=lock_result, error=None, id=jid)

            dn = self.authenticate(user, password)
            if dn is not False:
                # user and password match, so remove the user from the observer list
                if user in cls.__dos_manager:
                    del cls.__dos_manager[user]

                with make_session() as session:
                    us = UserSession(
                        sid=sid,
                        user=user,
                        dn=dn
                    )

                    self.set_secure_cookie('REMOTE_USER', user)
                    self.set_secure_cookie('REMOTE_SESSION', sid)
                    factor_method = twofa_manager.get_method_from_user(dn)
                    if factor_method is None:
                        result['state'] = AUTH_SUCCESS
                        self.log.info("login succeeded for user '%s'" % user)
                    elif factor_method == "otp":
                        result['state'] = AUTH_OTP_REQUIRED
                        self.log.info("login succeeded for user '%s', proceeding with OTP two-factor authentication" % user)
                    elif factor_method == "u2f":
                        self.log.info("login succeeded for user '%s', proceeding with U2F two-factor authentication" % user)
                        result['state'] = AUTH_U2F_REQUIRED
                        result['u2f_data'] = twofa_manager.sign(user, dn)

                    us.auth_state = result['state']
                    session.add(us)
            else:
                # Remove current sid if present
                with make_session() as session:
                    db_session = session.query(UserSession).filter(UserSession.sid == sid).one_or_none()
                    if not self.get_secure_cookie('REMOTE_SESSION') and db_session is not None:
                        session.delete(db_session)

                self.log.error("login failed for user '%s'" % user)
                result['state'] = AUTH_FAILED

                # log login tries
                if user in cls.__dos_manager:
                    login_stats = cls.__dos_manager[user]
                    # stop counting after 6 tries to avoid an "infinite" lock; at this point the user is already locked out for more than an hour
                    if login_stats['count'] < 6:
                        login_stats['count'] += 1
                    login_stats['timestamp'] = time.time()
                    cls.__dos_manager[user] = login_stats
                else:
                    cls.__dos_manager[user] = {
                        'count': 1,
                        'timestamp': time.time(),
                        'ip': self.request.remote_ip
                    }
                lock_result = self.get_lock_result(user)
                if lock_result is not None:
                    return dict(result=lock_result, error=None, id=jid)

            return dict(result=result, error=None, id=jid)

        # Don't let calls pass beyond this point if we've no valid session ID
        with make_session() as session:
            cookie = self.get_secure_cookie('REMOTE_SESSION')
            if cookie is None:
                self.log.error("blocked unauthenticated call of method '%s'" % method)
                raise tornado.web.HTTPError(401, "Please use the login method to authorize yourself.")

            db_session = session.query(UserSession).filter(UserSession.sid == cookie.decode('ascii')).one_or_none()
            if db_session is None:
                self.log.error("blocked unauthenticated call of method '%s'" % method)
                raise tornado.web.HTTPError(401, "Please use the login method to authorize yourself.")

            # Remove remote session on logout
            if method == 'logout':

                # Remove current sid if present
                if self.get_secure_cookie('REMOTE_SESSION'):
                    if db_session is not None:
                        session.delete(db_session)

                # Show logout message
                if self.get_secure_cookie('REMOTE_USER'):
                    self.log.info("logout for user '%s' succeeded" % self.get_secure_cookie('REMOTE_USER'))

                self.clear_cookie("REMOTE_USER")
                self.clear_cookie("REMOTE_SESSION")
                return dict(result=True, error=None, id=jid)

            # check two-factor authentication
            if method == 'verify':
                (key,) = params

                if db_session.auth_state == AUTH_OTP_REQUIRED or db_session.auth_state == AUTH_U2F_REQUIRED:

                    if twofa_manager.verify(db_session.user, db_session.dn, key):
                        db_session.auth_state = AUTH_SUCCESS
                        return dict(result={'state': AUTH_SUCCESS}, error=None, id=jid)
                    else:
                        return dict(result={'state': AUTH_FAILED}, error=None, id=jid)

            if db_session.auth_state != AUTH_SUCCESS:
                raise tornado.web.HTTPError(401, "Please use the login method to authorize yourself.")

        return self.dispatch(method, params, jid)
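
On the wire, the login branch above corresponds to a plain JSON-RPC body; an illustrative request built client-side (credentials are placeholders):

    import json

    # hypothetical request for the login branch; on success the backend answers
    # with {"result": {"state": ...}, "error": null, "id": 1}
    payload = json.dumps({"method": "login", "params": ["admin", "secret"], "id": 1})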