def test_update_type(self, m_get, m_del, m_put, m_post):
    # Verify that Foreman.update_type() applies API payload values
    # (ip, build status) to an already-joined host object.
    self._create_test_data()
    self.foreman.client = ForemanClient()
    m_get.return_value = MockResponse({"name": "testhost"}, 200)
    self.foreman.add_host("testhost", base=self._test_dn)
    device = ObjectProxy("cn=testhost,ou=devices,%s" % self._test_dn)
    dn = device.dn
    assert device.cn == "testhost"
    assert device.ip is None
    # simulated Foreman API answer for the host
    data = {
        "name": "testhost",
        "ip": "192.168.0.3",
        "global_status": 0,
        "build_status": 1
    }
    m_get.return_value = MockResponse(data, 200)
    logging.getLogger("gosa.backend.objects").info(
        "------------------------START UPDATING TYPE---------------------------------------"
    )
    self.foreman.update_type("ForemanHost", device, data)
    # opening the object calls the mocked m_get method
    device = ObjectProxy(dn)
    assert device.ipHostNumber == "192.168.0.3"
    # build_status == 1 maps to the "pending" state
    assert device.status == "pending"
def getContainerTree(self, user, base, object_type=None):
    """
    Return the container objects directly below *base*, keyed by DN.

    Each entry carries a ``hasChildren`` flag (visible children exist) and,
    when *object_type* is given, an ``allowed_move_target`` flag telling
    whether that type may be placed inside the container.
    """
    table = inspect(ObjectInfoIndex)
    o2 = aliased(ObjectInfoIndex)
    base = ObjectProxy.get_adjusted_dn(base, self.env.base)
    # containers whose (adjusted) parent is the requested base
    query = and_(getattr(ObjectInfoIndex, "_adjusted_parent_dn") == base,
                 getattr(ObjectInfoIndex, "_type").in_(self.containers))
    # count visible children per container via a self outer-join
    count = func.count(getattr(o2, "_parent_dn"))
    parent_join_condition = getattr(o2, "_parent_dn") == getattr(ObjectInfoIndex, "dn")
    with make_session() as session:
        query_result = session.query(ObjectInfoIndex, count) \
            .outerjoin(o2, and_(getattr(o2, "_invisible").is_(False), parent_join_condition)) \
            .filter(query) \
            .group_by(*table.c)
        res = {}
        factory = ObjectFactory.getInstance()
        for item, children in query_result:
            self.update_res(res, item, user, 1)
            # update_res may skip entries the user has no permission on
            if item.dn in res:
                res[item.dn]['hasChildren'] = children > 0
                res[item.dn]['adjusted_dn'] = ObjectProxy.get_adjusted_dn(item.dn, self.env.base)
                if object_type is not None:
                    # check if object_type is allowed in this container
                    allowed = factory.getAllowedSubElementsForObject(res[item.dn]['tag'], includeInvisible=False)
                    if "*" in object_type:
                        # all allowed
                        res[item.dn]['allowed_move_target'] = True
                    elif isinstance(object_type, list):
                        res[item.dn]['allowed_move_target'] = len(set(object_type).intersection(allowed)) > 0
                    else:
                        res[item.dn]['allowed_move_target'] = object_type in allowed
    return res
def diffObject(self, user, ref):
    """
    Opens a copy of the object given as ref and returns a diff - if any.

    The diff compares the current directory state against the cached copy
    on the stack and reports added/removed extensions, added/removed/changed
    attributes and per-attribute blocking dependencies.
    """
    if not ref in self.__stack:
        return None

    # only the user who opened the reference may diff it
    if not self.__check_user(ref, user):
        raise ValueError(C.make_error("NOT_OBJECT_OWNER"))

    # Load current object
    item = self.__stack[ref]
    current_obj = ObjectProxy(item['object']['dn'])

    # Load cache object
    cache_obj = item['object']['object']

    ##
    ## Generate delta
    ##
    delta = {'attributes': {'added': {}, 'removed': [], 'changed': {}, 'blocked_by': {}},
             'extensions': {'added': [], 'removed': []}}

    # Compare extension list
    crnt_extensions = set(current_obj.get_object_info()['extensions'].items())
    cche_extensions = set(cache_obj.get_object_info()['extensions'].items())
    for _e, _s in crnt_extensions - cche_extensions:
        # _s is the extension's active flag: True -> newly added, False -> removed
        if _s:
            delta['extensions']['added'].append(_e)
        else:
            delta['extensions']['removed'].append(_e)

    # Compare attribute contents (ignore unset values on both sides)
    crnt_attributes = dict(filter(lambda x: x[1] is not None, current_obj.get_attribute_values()['value'].items()))
    cche_attributes = dict(filter(lambda x: x[1] is not None, cache_obj.get_attribute_values()['value'].items()))
    all_attributes = []
    for _k, _v in crnt_attributes.items():
        if _k in cche_attributes:
            if _v != cche_attributes[_k]:
                delta['attributes']['changed'][_k] = _v
                all_attributes.append(_k)
        else:
            delta['attributes']['added'][_k] = _v
            all_attributes.append(_k)

    for _k, _v in cche_attributes.items():
        # Don't add the individual attributes of extensions that are removed anyway
        if current_obj.get_extension_off_attribute(_k) in delta['extensions']['removed']:
            continue
        if not _k in crnt_attributes:
            delta['attributes']['removed'].append(_k)
            all_attributes.append(_k)

    # Find blocking dependencies between attributes
    details = current_obj.get_attributes(detail=True)
    for attribute_name in all_attributes:
        delta['attributes']['blocked_by'] = delta['attributes']['blocked_by']
        delta['attributes']['blocked_by'][attribute_name] = list(map(lambda x: x['name'], details[attribute_name]['blocked_by']))

    return delta
def tearDown(self):
    """Run the base teardown, then best-effort-remove every queued test object."""
    super(ACLResolverTestCase, self).tearDown()
    for entry_dn in self.__remove_objects:
        try:
            ObjectProxy(entry_dn).remove()
        except ProxyException:
            # object was never created or is already gone - ignore
            pass
def test_hostgroup_request(self, m_get, m_delete):
    # Simulate Foreman "after_create" / "after_destroy" hostgroup webhooks
    # and verify the mirrored group object appears and disappears.
    m_get.return_value = MockResponse('{"name": "Testgroup", "id": 999}', 200)
    self._host_dn = "cn=Testgroup,ou=groups,dc=example,dc=net"
    payload_data = {
        "event": "after_create",
        "object": "Testgroup",
        "data": {
            "hostgroup": {
                "hostgroup": {
                    "id": 999,
                    "name": "Testgroup"
                }
            }
        }
    }
    headers, payload = self._create_request(payload_data)
    AsyncHTTPTestCase.fetch(self, "/hooks/", method="POST", headers=headers, body=payload)
    # check if the host has been updated
    device = ObjectProxy(self._host_dn)
    assert device.cn == "Testgroup"
    assert device.foremanGroupId == "999"
    # delete the host
    payload_data = {
        "event": "after_destroy",
        "object": "Testgroup",
        "data": {
            "hostgroup": {
                "hostgroup": {
                    "id": 999,
                    "name": "Testgroup"
                }
            }
        }
    }
    headers, payload = self._create_request(payload_data)
    AsyncHTTPTestCase.fetch(self, "/hooks/", method="POST", headers=headers, body=payload)
    # the group object must be gone now
    with pytest.raises(ProxyException):
        ObjectProxy(self._host_dn)
    self._host_dn = None
def sync_type(self, object_type, foreman_type=None):
    """
    sync foreman objects, request data from foreman API and apply those values to the object

    :param object_type: GOsa object type to synchronize (e.g. "ForemanHost")
    :param foreman_type: Foreman API resource name; derived from the backend
                         configuration when omitted
    """
    index = PluginRegistry.getInstance("ObjectIndex")
    backend_attributes = self.factory.getObjectBackendProperties(object_type)

    if "Foreman" not in backend_attributes:
        self.log.warning("no foreman backend attributes found for '%s' object" % object_type)
        return

    if foreman_type is None:
        foreman_type = backend_attributes["Foreman"]["type"]

    new_data = self.client.get(foreman_type)
    found_ids = []
    # mark all changes as coming from foreman to avoid write-back loops
    ForemanBackend.modifier = "foreman"

    # attribute whose value identifies the foreman object; an explicit
    # source attribute takes precedence over the uuid attribute
    uuid_attribute = backend_attributes["Foreman"]["_uuidSourceAttribute"] \
        if '_uuidSourceAttribute' in backend_attributes["Foreman"] else backend_attributes["Foreman"]["_uuidAttribute"]

    for data in new_data["results"]:
        found_ids.append(str(data[uuid_attribute]))
        self.log.debug(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")
        self.log.debug(">>> START syncing foreman object of type '%s' with id '%s'" % (object_type, data[uuid_attribute]))
        foreman_object = self.get_object(object_type, data[uuid_attribute], data=data)
        if foreman_type == "discovered_hosts":
            # add status to data
            if not foreman_object.is_extended_by("ForemanHost"):
                foreman_object.extend("ForemanHost")
            foreman_object.status = "discovered"
            self.update_type(object_type, foreman_object, data, uuid_attribute)
        else:
            self.update_type(object_type, foreman_object, data, uuid_attribute)
        self.log.debug("<<< DONE syncing foreman object of type '%s' with id '%s'" % (object_type, data[uuid_attribute]))
        self.log.debug("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")

    types = self.factory.getObjectTypes()[object_type]
    base_type = object_type if types["base"] is True else types["extends"][0]

    # delete not existing ones: everything of this type whose foreman id
    # was not part of the API answer
    query = {'_type': base_type}
    if base_type != object_type:
        query["extension"] = object_type
    if len(found_ids):
        query[backend_attributes["Foreman"]["_uuidAttribute"]] = {'not_in_': found_ids}
    if foreman_type == "discovered_hosts":
        # only purge hosts still in the "discovered" state
        query["status"] = "discovered"

    res = index.search(query, {'dn': 1})
    for entry in res:
        foreman_object = ObjectProxy(entry['dn'])
        self.log.debug("removing %s '%s'" % (base_type, foreman_object.dn))
        foreman_object.remove()

    ForemanBackend.modifier = None
def test_request(self, m_get):
    # A "create" hook event must create the host in ou=incoming and return
    # a one-time password; a "delete" event must remove it again.
    m_get.return_value = MockResponse('{"status": 0, "status_label": "Build"}', 200)
    token = bytes(self.token, 'ascii')
    payload = bytes(dumps({
        "action": "create",
        "hostname": "new-foreman-host",
        "parameters": {}
    }), 'utf-8')
    signature_hash = hmac.new(token, msg=payload, digestmod="sha512")
    # NOTE(review): the prefix says 'sha1=' although the digest is sha512 -
    # the handler apparently only parses the hex part; confirm against it.
    signature = 'sha1=' + signature_hash.hexdigest()
    headers = {
        'Content-Type': 'application/vnd.foreman.hostevent+json',
        'HTTP_X_HUB_SENDER': 'test-webhook',
        'HTTP_X_HUB_SIGNATURE': signature
    }
    response = AsyncHTTPTestCase.fetch(self, "/hooks/", method="POST", headers=headers, body=payload)
    otp_response = loads(response.body.decode("utf-8"))
    assert "randompassword" in otp_response
    assert otp_response["randompassword"] is not None
    # check if the host has been created
    device = ObjectProxy("cn=new-foreman-host,ou=incoming,dc=example,dc=net")
    assert device.cn == "new-foreman-host"
    # delete the host
    payload = bytes(dumps({
        "action": "delete",
        "hostname": "new-foreman-host",
        "parameters": {}
    }), 'utf-8')
    signature_hash = hmac.new(token, msg=payload, digestmod="sha512")
    signature = 'sha1=' + signature_hash.hexdigest()
    headers['HTTP_X_HUB_SIGNATURE'] = signature
    AsyncHTTPTestCase.fetch(self, "/hooks/", method="POST", headers=headers, body=payload)
    with pytest.raises(ProxyException):
        ObjectProxy("cn=new-foreman-host,ou=incoming,dc=example,dc=net")
def saveUserPreferences(self, userid, name, value):
    """
    Store GUI preference *name* = *value* for the user identified by uid *userid*.

    :raises GOsaException: when no user with that uid exists
    :return: True on success
    """
    index = PluginRegistry.getInstance("ObjectIndex")
    res = index.search({'_type': 'User', 'uid': userid}, {'dn': 1})
    if len(res) == 0:
        raise GOsaException(C.make_error("UNKNOWN_USER", target=userid))

    user = ObjectProxy(res[0]['dn'])
    if user.guiPreferences is None:
        user.guiPreferences = {}

    # NOTE(review): mutating the dict in place assumes the proxy tracks item
    # assignment as a modification - confirm commit() persists this change.
    user.guiPreferences[name] = value
    user.commit()

    return True
def getDestinationIndicator(self, client_id, uid, cn_query, rotate=True):
    """
    :param client_id: UUID of the client used to find the closest destinationIndicators
    :param uid: uid of the user
    :param cn_query: filter for destinationIndicator-cns (e.g. 'lts-%' for wildcards)
    :param rotate: rotate the destinationIndicators (do not use the last one twice in a row)
    :return: FQDN of the server marked as destinationIndicator
    """
    index = PluginRegistry.getInstance('ObjectIndex')
    res = index.search({'_type': 'User', 'uid': uid}, {'dn': 1})
    if len(res) == 0:
        raise ValueError(C.make_error("USER_NOT_FOUND", user=uid, status_code=404))

    user = ObjectProxy(res[0]['dn'])
    if rotate is False and user.destinationIndicator is not None:
        # nothing to rotate, take the stored one
        return user.destinationIndicator

    # look for matching servers next to the client ...
    client = self.__open_device(client_id, read_only=True)
    parent_dn = client.get_adjusted_parent_dn()
    res = index.search({'_type': 'Device', 'extension': 'GoServer', 'cn': cn_query, '_adjusted_parent_dn': parent_dn}, {'dn': 1})
    # ... walking up the tree until something is found or the base is reached
    while len(res) == 0 and len(parent_dn) > len(self.env.base):
        parent_dn = dn2str(str2dn(parent_dn, flags=ldap.DN_FORMAT_LDAPV3)[1:])
        # NOTE(review): this fallback query drops the 'extension': 'GoServer'
        # filter used above - confirm whether that is intentional.
        res = index.search({'_type': 'Device', 'cn': cn_query, '_adjusted_parent_dn': parent_dn}, {'dn': 1})

    if len(res) > 0:
        # deterministic pool order so rotation cycles the same sequence
        di_pool = sorted([x['dn'] for x in res])
        if user.destinationIndicator is None:
            # nothing to rotate, take the first one
            user.destinationIndicator = di_pool[0]
            user.commit()
        elif rotate is True:
            if user.destinationIndicator in di_pool:
                # take the next one from list, wrapping around at the end
                position = di_pool.index(user.destinationIndicator)+1
                if position >= len(di_pool):
                    position = 0
                user.destinationIndicator = di_pool[position]
                user.commit()
            else:
                # nothing to rotate, take the first one
                user.destinationIndicator = di_pool[0]
                user.commit()
        return user.destinationIndicator
    return None
def get_application(self, application):
    """
    Resolve an application entry (dict with at least 'name' and 'dn') into a
    flat dict of mapped entry attributes, or None when it cannot be resolved.
    """
    result = None
    if 'name' in application and 'dn' in application:
        result = {'name': application.get('name')}
        if 'gosaApplicationParameter' in application:
            result['gosaApplicationParameter'] = application.get('gosaApplicationParameter')
        # replace the raw dict with the opened directory object
        application = ObjectProxy(application.get('dn'))
    if application is None:
        return result
    for attribute in self.entry_attributes:
        if not hasattr(application, attribute):
            continue
        mapped_name = self.entry_map.get(attribute, attribute)
        result[mapped_name] = getattr(application, attribute)
    return result
def getTwoFactorMethod(self, user_name, object_dn):
    """Return the two-factor method configured for the object at *object_dn*."""
    # read access on the target object is required
    self.__check_acl(user_name, object_dn, "r")
    target = ObjectProxy(object_dn)
    return self.get_method_from_user(target)
def verify(self, user_name, object_dn, key):
    """
    Verify the second-factor response *key* for the user behind *object_dn*.

    Returns True when no second factor is configured, a bool for OTP, and a
    dict with touch/counter data for U2F; False for unknown methods.
    """
    # Do we have read permissions for the requested attribute
    self.__check_acl(user_name, object_dn, "r")

    # Get the object for the given dn
    user = ObjectProxy(object_dn)
    factor_method = self.get_method_from_user(user)
    user_settings = self.__settings[user.uuid] if user.uuid in self.__settings else {}
    if factor_method == "otp":
        totp = TOTP(user_settings.get('otp_secret'))
        return totp.verify(key)
    elif factor_method == "u2f":
        devices = [DeviceRegistration.wrap(device)
                   for device in user_settings.get('_u2f_devices_', [])]
        # the challenge is single-use: pop it so it cannot be replayed
        challenge = user_settings.pop('_u2f_challenge_')
        data = loads(key)
        c, t = verify_authenticate(devices, challenge, data, [self.facet])
        return {'touch': t, 'counter': c}
    elif factor_method is None:
        # no second factor configured - nothing to verify
        return True
    return False
def __refresh_reference_object(self, new_dn, override=False):
    """
    Keep the workflow's attribute values in sync with the referenced object.

    Handles four cases: initial set, unchanged dn (no-op), reset (new_dn is
    None) and switch to another dn. Notifies the GUI when values changed.
    """
    attributes_changed = []
    if not self.__reference_object:
        if new_dn is None:
            return
        # initial setting
        self.__reference_object = ObjectProxy(new_dn)
    elif self.__reference_object.dn == new_dn:
        # no change
        return
    elif new_dn is None:
        # reset object: clear non-mandatory values that were copied verbatim
        # from the old reference
        for key in self.__attribute:
            if hasattr(self.__reference_object, key) and \
                    getattr(self.__reference_object, key) is not None and \
                    getattr(self, key) == getattr(self.__reference_object, key) and \
                    self.__attribute_map[key]['mandatory'] is False:
                setattr(self, key, None)
                attributes_changed.append(key)
        self.__reference_object = None
        return
    else:
        self.__reference_object = ObjectProxy(new_dn)

    # update all attribute values that are not set yet
    if self.__reference_object is not None:
        for key in self.__attribute:
            if hasattr(self.__reference_object, key) and \
                    getattr(self.__reference_object, key) is not None and \
                    (getattr(self, key) is None or override is True):
                setattr(self, key, getattr(self.__reference_object, key))
                attributes_changed.append(key)

    if len(attributes_changed) > 0:
        # tell the GUI to reload the changes attributes
        e = EventMaker()
        ev = e.Event(e.ObjectChanged(
            e.UUID(self.uuid),
            e.DN(new_dn),
            e.ModificationTime(datetime.datetime.now().strftime("%Y%m%d%H%M%SZ")),
            e.ChangeType("update")
        ))
        event_object = objectify.fromstring(etree.tostring(ev, pretty_print=True).decode('utf-8'))
        SseHandler.notify(event_object, channel="user.%s" % self.__user)
def getUserPPDs(self, user):
    """Return the PPD files of printers assigned to *user* directly or via groups."""
    index = PluginRegistry.getInstance("ObjectIndex")
    res = index.search({"_type": "User", "uid": user}, {"dn": 1})
    if len(res) == 0:
        raise EntryNotFound(C.make_error("USER_NOT_FOUND", topic=user))
    # NOTE: `object` shadows the builtin of the same name
    object = ObjectProxy(res[0]["dn"])
    printer_cns = []
    if object.is_extended_by("GotoEnvironment"):
        # NOTE(review): gotoPrinters appears to be a list; append() nests it
        # inside printer_cns - confirm whether extend() was intended for the
        # "in_" query below.
        printer_cns.append(object.gotoPrinters)
    if object.is_extended_by("PosixUser"):
        for group_cn in object.groupMembership:
            group = ObjectProxy(group_cn)
            if group.is_extended_by("GotoEnvironment"):
                printer_cns.append(group.gotoPrinters)
    # collect all PPDs
    res = index.search({
        "_type": "GotoPrinter",
        "cn": {
            "in_": printer_cns
        }
    }, {"gotoPrinterPPD": 1})
    ppds = []
    for r in res:
        ppds.append(r["gotoPrinterPPD"])
    return ppds
def systemSetStatus(self, device_uuid, status): """ TODO """ # Write to backend index = PluginRegistry.getInstance("ObjectIndex") res = index.search({'_type': 'Device', 'deviceUUID': device_uuid}, {'_uuid': 1}) if len(res) != 1: raise ValueError(C.make_error("CLIENT_NOT_FOUND", device_uuid)) device = ObjectProxy(res[0]['_uuid']) r = re.compile(r"([+-].)") for stat in r.findall(status): if stat[1] not in mapping: raise ValueError(C.make_error("CLIENT_STATUS_INVALID", device_uuid, status=stat[1])) setattr(device, mapping[stat[1]], stat.startswith("+")) device.commit()
def get_application(self, application):
    """
    Build a flat attribute dict for an application entry; the entry must
    provide 'name' and 'dn' to be resolved via the directory.
    """
    result = None
    has_required_keys = 'name' in application and 'dn' in application
    if has_required_keys:
        result = {'name': application.get('name')}
        if 'gosaApplicationParameter' in application:
            result['gosaApplicationParameter'] = application.get('gosaApplicationParameter')
        # from here on work with the opened directory object
        application = ObjectProxy(application.get('dn'))
    if application is not None:
        for attribute in self.entry_attributes:
            if hasattr(application, attribute):
                result[self.entry_map.get(attribute, attribute)] = getattr(application, attribute)
    return result
def __maintain_user_session(self, client_id, user_name):
    # save login time and system<->user references
    client = self.__open_device(client_id)
    client.gotoLastUser = user_name
    # mark the client as busy ("+B")
    self.systemSetStatus(client, "+B")
    index = PluginRegistry.getInstance("ObjectIndex")
    res = index.search({"_type": "User", "uid": user_name}, {"dn": 1})
    for u in res:
        user = ObjectProxy(u["dn"])
        # gotoLastSystem* live on the GosaAccount extension
        if not user.is_extended_by("GosaAccount"):
            user.extend("GosaAccount")
        user.gotoLastSystemLogin = datetime.datetime.now()
        user.gotoLastSystem = client.dn
        user.commit()
def saveUserPreferences(self, userid, name, value):
    """
    Persist GUI preference *name* = *value* for uid *userid*.

    guiPreferences is stored as a JSON-encoded dict on the user object.

    :raises GOsaException: when no user with that uid exists
    :return: True on success
    """
    index = PluginRegistry.getInstance("ObjectIndex")
    res = index.search({'_type': 'User', 'uid': userid}, {'dn': 1})
    if len(res) == 0:
        raise GOsaException(C.make_error("UNKNOWN_USER", target=userid))

    user = ObjectProxy(res[0]['dn'])
    prefs = user.guiPreferences
    if not prefs:
        prefs = {}
    else:
        # stored value is a JSON string - decode before updating
        prefs = loads(prefs)

    prefs[name] = value
    user.guiPreferences = dumps(prefs)
    user.commit()

    return True
def __init__(self, _id, what=None, user=None, session_id=None):
    """
    Build a workflow instance from its XML description.

    :param _id: workflow id; also the directory name below the workflow path
    :param what: optional DN of a reference object whose attribute values
                 are copied into the workflow
    :param user: user the workflow runs for (used for GUI notifications)
    :param session_id: session the workflow belongs to
    """
    # validate workflow.xml against the shipped XSD while parsing
    schema = etree.XMLSchema(file=resource_filename("gosa.backend", "data/workflow.xsd"))
    parser = objectify.makeparser(schema=schema)
    self.env = Environment.getInstance()
    self.parent = self
    self.uuid = _id
    self.dn = self.env.base
    self.__xml_parsing = XmlParsing('Workflows')
    self.__validator = Validator(self)
    self.__attribute_config = {}
    self.__user = user
    self.__session_id = session_id
    self.__log = logging.getLogger(__name__)
    self.__attribute_type = {}
    # collect all registered attribute type handlers
    for entry in pkg_resources.iter_entry_points("gosa.object.type"):
        mod = entry.load()
        self.__attribute_type[mod.__alias__] = mod()
    self._path = self.env.config.get("core.workflow_path", "/var/lib/gosa/workflows")
    self._xml_root = objectify.parse(os.path.join(self._path, _id, "workflow.xml"), parser).getroot()
    self.__attribute = {key: None for key in self.get_attributes()}
    self.__method_map = {
        "commit": None,
        "get_templates": None,
        "get_translations": None,
    }
    self.__fill_method_map()

    if what is not None:
        # load object and copy attribute values to workflow
        try:
            self.__reference_object = ObjectProxy(what)
            # suppress reference refreshes while values are copied in bulk
            self.__skip_refresh = True

            # set the reference dn if possible
            if 'reference_dn' in self.__attribute:
                setattr(self, 'reference_dn', what)

            # copy all other available attribute values to workflow object
            for key in self.__attribute:
                if hasattr(self.__reference_object, key) and getattr(self.__reference_object, key) is not None:
                    setattr(self, key, getattr(self.__reference_object, key))
            self.__skip_refresh = False
        except Exception as e:
            # could not open the reference object
            self.__log.error(e)
def create_container(self):
    """Ensure the container objects for incoming hosts and host groups exist."""
    index = PluginRegistry.getInstance("ObjectIndex")
    wanted = (
        ("ForemanHost", "IncomingDeviceContainer"),
        ("ForemanHostGroup", "GroupContainer"),
    )
    for base_key, container_type in wanted:
        base = self.type_bases[base_key]
        found = index.search({'_parent_dn': base, '_type': container_type}, {'dn': 1})
        if not found:
            # container missing below the configured base - create it
            ObjectProxy(base, container_type).commit()
def test_add_host(self, m_get, m_del, m_put, m_post):
    # add_host() must create a Device below ou=devices carrying an
    # SSHA-hashed one-time password.
    self._create_test_data()
    m_get.return_value = MockResponse({
        "status": 0,
        "status_label": "Build"
    }, 200)
    logging.getLogger("gosa.backend.objects.index").setLevel(logging.DEBUG)
    key = self.foreman.add_host("testhost", base=self._test_dn)
    device = ObjectProxy("cn=testhost,ou=devices,%s" % self._test_dn)
    assert device.dn == "cn=testhost,ou=devices,%s" % self._test_dn
    assert device.cn == "testhost"
    # generated password is stored with the SSHA scheme prefix
    assert device.userPassword[0][0:6] == "{SSHA}"
    m_del.return_value = MockResponse('{}', 200)
    device.remove()
def test_load_from_object_database(self):
    # prepare some AclRoles
    role = ObjectProxy('ou=aclroles,dc=example,dc=net', 'AclRole')
    role.name = "tester"
    role.AclRoles = []
    aclentry = {
        "priority": 0,
        "rolename": "tester"
    }
    role.AclRoles.append(aclentry)
    role.commit()
    self.__remove_objects.append('name=tester,ou=aclroles,dc=example,dc=net')

    # feed the resolver a mocked index: one role hit, then no ACLSets
    with mock.patch("gosa.backend.acl.PluginRegistry.getInstance") as m_index:
        m_index.return_value.search.side_effect = [
            [{'dn': 'name=tester,ou=aclroles,dc=example,dc=net'}],
            []  # no ACLSets
        ]
        self.resolver.load_from_object_database()
def test_remove_type(self, m_get, m_del, m_put, m_post):
    # remove_type() must delete the mirrored host object from the directory.
    self._create_test_data()
    m_get.return_value = MockResponse({
        "status": 0,
        "status_label": "Build"
    }, 200)
    self.foreman.add_host("testhost", base=self._test_dn)
    device = ObjectProxy("cn=testhost,ou=devices,%s" % self._test_dn)
    dn = device.dn
    m_del.return_value = MockResponse('{}', 200)
    self.foreman.remove_type("ForemanHost", device.cn)
    # the object must be gone afterwards
    with pytest.raises(ProxyException):
        ObjectProxy(dn)
def __open_device(self, device_uuid):
    """Open the Device object registered under *device_uuid* or raise ValueError."""
    device_uuid = self.get_client_uuid(device_uuid)
    index = PluginRegistry.getInstance("ObjectIndex")
    matches = index.search({'_type': 'Device', 'deviceUUID': device_uuid}, {'dn': 1})
    # exactly one registered device must match
    if len(matches) != 1:
        raise ValueError(C.make_error("CLIENT_NOT_FOUND", device_uuid, status_code=404))
    return ObjectProxy(matches[0]['dn'])
def getUserPPDs(self, user):
    """Return the PPD files of printers assigned to *user* directly or via groups."""
    index = PluginRegistry.getInstance("ObjectIndex")
    res = index.search({"_type": "User", "uid": user}, {"dn": 1})
    if len(res) == 0:
        raise EntryNotFound(C.make_error("USER_NOT_FOUND", topic=user))
    # NOTE: `object` shadows the builtin of the same name
    object = ObjectProxy(res[0]["dn"])
    printer_cns = []
    if object.is_extended_by("GotoEnvironment"):
        # NOTE(review): gotoPrinters appears to be a list; append() nests it -
        # confirm whether extend() was intended for the "in_" query below.
        printer_cns.append(object.gotoPrinters)
    if object.is_extended_by("PosixUser"):
        for group_cn in object.groupMembership:
            group = ObjectProxy(group_cn)
            if group.is_extended_by("GotoEnvironment"):
                printer_cns.append(group.gotoPrinters)
    # collect all PPDs
    res = index.search({"_type": "GotoPrinter", "cn": {"in_": printer_cns}}, {"gotoPrinterPPD": 1})
    ppds = []
    for r in res:
        ppds.append(r["gotoPrinterPPD"])
    return ppds
def process(res):
    # Closure: `self` and `user` come from the enclosing method.
    # Maps an index search result for the user's default printer to a
    # config dict; None = printer unknown, False = result ambiguous.
    if len(res) == 0:
        self.log.warning("users defaultPrinter not found: %s" % user.gosaDefaultPrinter)
        return None
    elif len(res) == 1:
        # add this one to the result set
        printer = ObjectProxy(res[0]["dn"])
        p_conf = {}
        for attr in self.printer_attributes:
            p_conf[attr] = getattr(printer, attr)
        return p_conf
    return False
def tearDown(self):
    """Delete all objects created during the test run with the Foreman delete API mocked."""
    backend = ObjectBackendRegistry.getBackend("Foreman")
    backend.client.foreman_host = None
    # remove them all
    with mock.patch("gosa.backend.objects.backend.back_foreman.requests.delete") as m_del:
        m_del.return_value = MockResponse({}, 200)
        for dn in self.dns_to_delete:
            try:
                self.log.info("deleting dn: %s" % dn)
                obj = ObjectProxy(dn)
                obj.remove()
            except Exception as e:
                # best-effort cleanup: log and continue with the next dn
                self.log.error("%s" % e)
                pass
    super(ForemanSyncTestCase, self).tearDown()
def _get_object(self, dn):
    """Open *dn* as an ObjectProxy, returning None when it cannot be loaded."""
    try:
        return ObjectProxy(dn)
    except ProxyException as e:
        # entry does not exist (anymore)
        self.log.warning("not found %s: %s" % (dn, str(e)))
    except ObjectException as e:
        # entry exists but cannot be mapped to an object
        self.log.warning("not indexing %s: %s" % (dn, str(e)))
    return None
def create_test_data(): """ Insert new data just for testing purposes """ index = PluginRegistry.getInstance("ObjectIndex") res = index.search({"dn": "dc=test,dc=example,dc=net"}, {"dn": 1}) if len(res) > 0: new_domain = ObjectProxy("dc=test,dc=example,dc=net") new_domain.remove(True) new_domain = ObjectProxy("dc=example,dc=net", "DomainComponent") new_domain.dc = "test" new_domain.description = "Domain for testing purposes" new_domain.commit() return "dc=test,dc=example,dc=net"
def get_object(self, object_type, oid, create=True, data=None):
    """
    Find the GOsa object mirroring the foreman object *oid*, creating it
    below the configured type base when missing and *create* is True.

    :return: ObjectProxy or None when the type has no foreman backend
    """
    backend_attributes = self.factory.getObjectBackendProperties(object_type)
    foreman_object = None
    if "Foreman" not in backend_attributes:
        self.log.warning("no foreman backend attributes found for '%s' object" % object_type)
        return

    types = self.factory.getObjectTypes()[object_type]
    base_type = object_type if types["base"] is True else types["extends"][0]
    index = PluginRegistry.getInstance("ObjectIndex")

    # check if the object already exists
    query = {
        '_type': base_type,
        backend_attributes["Foreman"]["_uuidAttribute"]: str(oid)
    }
    if types["base"] is False:
        query["extension"] = object_type
    res = index.search(query, {'dn': 1})

    if len(res) == 0:
        if create is True:
            # no object found -> create one
            self.log.debug(">>> creating new %s" % object_type)
            base_dn = self.env.base
            if object_type == "ForemanHost":
                # get the IncomingDevice-Container
                res = index.search({"_type": "IncomingDeviceContainer", "_parent_dn": self.type_bases["ForemanHost"]}, {"dn": 1})
                if len(res) > 0:
                    base_dn = res[0]["dn"]
                else:
                    base_dn = self.type_bases["ForemanHost"]
            elif object_type in self.type_bases:
                base_dn = self.type_bases[object_type]
            foreman_object = ObjectProxy(base_dn, base_type)
            # make sure the extension carrying the uuid attribute is active
            uuid_extension = foreman_object.get_extension_off_attribute(backend_attributes["Foreman"]["_uuidAttribute"])
            if base_type != uuid_extension and not foreman_object.is_extended_by(uuid_extension):
                foreman_object.extend(uuid_extension)
            setattr(foreman_object, backend_attributes["Foreman"]["_uuidAttribute"], str(oid))
    else:
        # open existing object, seeding the backend with *data* if provided
        self.log.debug(">>> open existing %s with DN: %s" % (object_type, res[0]["dn"]))
        foreman_object = ObjectProxy(res[0]["dn"], data={object_type: {"Foreman": data}} if data is not None else None)

    return foreman_object
def loadUserPreferences(self, userid, name):
    """
    Return the GUI preference *name* stored for uid *userid*.

    :raises GOsaException: when no user with that uid exists
    :return: the stored value, or None when unset
    """
    index = PluginRegistry.getInstance("ObjectIndex")
    res = index.search({'_type': 'User', 'uid': userid}, {'dn': 1})
    if len(res) == 0:
        raise GOsaException(C.make_error("UNKNOWN_USER", target=userid))

    user = ObjectProxy(res[0]['dn'])
    prefs = user.guiPreferences
    if not prefs:
        return None
    if isinstance(prefs, str):
        # BUGFIX: saveUserPreferences persists guiPreferences as a JSON
        # string (dumps); the previous `name in prefs` on the raw string was
        # a substring test, and `prefs[name]` raised TypeError. Decode first.
        prefs = loads(prefs)
    return prefs.get(name)
def systemGetStatus(self, device_uuid):
    """Return the deviceStatus of the client registered under *device_uuid*."""
    index = PluginRegistry.getInstance("ObjectIndex")
    hits = index.search({'_type': 'Device', 'deviceUUID': device_uuid}, {'_uuid': 1})
    # exactly one device must match the uuid
    if len(hits) != 1:
        raise ValueError(C.make_error("CLIENT_NOT_FOUND", device_uuid))
    return ObjectProxy(hits[0]['_uuid']).deviceStatus
def post_process(self): ObjectIndex.importing = False # Some object may have queued themselves to be re-indexed, process them now. self.log.info("need to refresh index for %d objects" % (len(ObjectIndex.to_be_updated))) for uuid in ObjectIndex.to_be_updated: dn = self.__session.query(ObjectInfoIndex.dn).filter( ObjectInfoIndex.uuid == uuid).one_or_none() if dn: obj = ObjectProxy(dn[0]) self.update(obj) ObjectIndex.to_be_updated = []
def sign(self, user_name, object_dn, key):
    """
    Start a U2F authentication for the user behind *object_dn* and return
    the challenge the client has to sign.
    """
    # Do we have read permissions for the requested attribute
    self.__check_acl(user_name, object_dn, "r")
    user = ObjectProxy(object_dn)
    user_settings = self.__settings[user.uuid] if user.uuid in self.__settings else {}
    devices = [DeviceRegistration.wrap(device)
               for device in user_settings.get('_u2f_devices_', [])]
    challenge = start_authenticate(devices)
    # remember the challenge so verify() can check the signed response
    user_settings['_u2f_challenge_'] = challenge.json
    self.__save_settings()
    return challenge.json
def get_method_from_user(self, user):
    """
    Get the currently used two-factor authentication method of the given user

    :param user: User to check (ObjectProxy, or a DN string that gets opened)
    :type user: ObjectProxy
    :return: the two-factor method of the user
    :rtype: string or None
    """
    if isinstance(user, str):
        user = ObjectProxy(user)
    settings = self.__settings.get(user.uuid, {})
    # otp takes precedence over u2f, mirroring the stored-settings layout
    if 'otp_secret' in settings:
        return "otp"
    if '_u2f_devices_' in settings:
        return "u2f"
    return None
def __collect_printer_settings(self, object):
    """
    Collect printer configurations and the default printer for *object*
    (a user or group carrying the GotoEnvironment extension).
    """
    settings = {"printers": [], "defaultPrinter": None}
    if object is not None and object.is_extended_by("GotoEnvironment") and len(object.gotoPrinters):
        # get default printer
        settings["defaultPrinter"] = object.gotoDefaultPrinter

        # collect printer PPDs
        for printer_dn in object.gotoPrinters:
            printer = ObjectProxy(printer_dn)
            p_conf = {}
            for attr in self.printer_attributes:
                # unset attributes are exported as empty strings
                p_conf[attr] = getattr(printer, attr) if getattr(printer, attr) is not None else ""
                if attr == "gotoPrinterPPD" and p_conf[attr] != "":
                    # replace the raw PPD reference with a downloadable URL
                    p_conf[attr] = self.ppd_proxy.getPPDURL(p_conf[attr])
            settings["printers"].append(p_conf)
    return settings
def setTwoFactorMethod(self, user_name, object_dn, factor_method, user_password=None):
    """
    Switch the two-factor method for the user behind *object_dn*.

    Changing away from an active method requires the user's password.
    Returns the enrollment data of the new method, or None.
    """
    # Do we have write permissions for the requested attribute
    self.__check_acl(user_name, object_dn, "w")

    # the GUI sends the string "None" to disable two-factor auth
    if factor_method == "None":
        factor_method = None

    if factor_method not in self.methods:
        raise UnknownTwoFAMethod(C.make_error("UNKNOWN_2FA_METHOD", method=factor_method))

    # Get the object for the given dn
    user = ObjectProxy(object_dn)
    current_method = self.get_method_from_user(user)
    if current_method == factor_method:
        # nothing to change
        return None
    if current_method is not None:
        # we need to be verified by user password in order to change the method
        if user_password is None or not check_auth(user_name, user_password):
            raise ChangingNotAllowed(C.make_error('CHANGE_2FA_METHOD_FORBIDDEN'))

    if factor_method == "otp":
        return self.__enable_otp(user)
    elif factor_method == "u2f":
        return self.__enable_u2f(user)
    elif factor_method is None:
        # disable two factor auth (current_method was not None here, so the
        # settings entry is guaranteed to exist)
        del self.__settings[user.uuid]
        self.__save_settings()
    return None
def completeU2FRegistration(self, user_name, object_dn, data):
    """
    Finish a pending U2F enrollment with the device's registration response
    *data* and store the new device binding.
    """
    # Do we have write permissions for the requested attribute
    self.__check_acl(user_name, object_dn, "w")

    user = ObjectProxy(object_dn)
    user_settings = self.__settings[user.uuid]
    data = loads(data)
    # consume the pending enroll challenge and validate the device response
    binding, cert = complete_register(user_settings.pop('_u2f_enroll_'), data, [self.facet])
    devices = [DeviceRegistration.wrap(device)
               for device in user_settings.get('_u2f_devices_', [])]
    devices.append(binding)
    user_settings['_u2f_devices_'] = [d.json for d in devices]
    self.__save_settings()
    self.__log.info("U2F device enrolled. Username: %s", user_name)
    self.__log.debug("Attestation certificate:\n%s", cert.public_bytes(Encoding.PEM))
    return True
def update(self, uuid, data, back_attrs):
    """
    Write back changes collected for foreign objects relations.

    E.g. If group memberships where modified from the user plugin
    we will forward the changes to the group objects.
    """
    # Extract usable information out of the backend attributes
    mapping = self.extractBackAttrs(back_attrs)
    index = PluginRegistry.getInstance("ObjectIndex")

    # Ensure that we have a configuration for all attributes
    for attr in data.keys():
        if attr not in mapping:
            raise BackendError(C.make_error("BACKEND_ATTRIBUTE_CONFIG_MISSING", attribute=attr))

    # Walk through each mapped foreign-object-attribute
    for targetAttr in mapping:
        if not targetAttr in data:
            continue

        # Get the matching attribute for the current object
        foreignObject, foreignAttr, foreignMatchAttr, matchAttr = mapping[targetAttr]
        res = index.search({'uuid': uuid, matchAttr: "%"}, {matchAttr: 1})
        if len(res) == 0:
            raise BackendError(C.make_error("SOURCE_OBJECT_NOT_FOUND", object=targetAttr))
        matchValue = res[0][matchAttr][0]

        # Collect all objects that match the given value
        allvalues = data[targetAttr]['orig'] + data[targetAttr]['value']
        object_mapping = {}
        for value in allvalues:
            res = index.search({'_type': foreignObject, foreignAttr: value}, {'dn': 1})
            if len(res) != 1:
                raise EntryNotFound(C.make_error("NO_UNIQUE_ENTRY", object=foreignObject, attribute=foreignAttr, value=value))
            else:
                object_mapping[value] = ObjectProxy(res[0]['dn'])

        # Calculate value that have to be removed/added
        remove = list(set(data[targetAttr]['orig']) - set(data[targetAttr]['value']))
        add = list(set(data[targetAttr]['value']) - set(data[targetAttr]['orig']))

        # Remove ourselves from the foreign object
        for item in remove:
            if object_mapping[item]:
                current_state = getattr(object_mapping[item], foreignMatchAttr)
                new_state = [x for x in current_state if x != matchValue]
                setattr(object_mapping[item], foreignMatchAttr, new_state)

        # Add ourselves to the foreign object
        for item in add:
            if object_mapping[item]:
                current_state = getattr(object_mapping[item], foreignMatchAttr)
                current_state.append(matchValue)
                setattr(object_mapping[item], foreignMatchAttr, current_state)

        # Save changes
        for item in object_mapping:
            if object_mapping[item]:
                object_mapping[item].commit()
def joinClient(self, user, device_uuid, mac, info=None):
    """
    Join a client device into the directory.

    Validates the supplied device UUID and optional info dict, generates a
    random join key, creates a new ``Device`` entry and returns the
    credentials the client needs to authenticate later.

    :param user: uid of the user performing the join (becomes the manager)
    :param device_uuid: UUID reported by the client (validated by regex)
    :param mac: MAC address of the client
    :param info: optional dict with extra attributes (serialNumber, ou, o,
                 l, description, deviceType, owner)
    :return: list ``[key, cn]`` — the plain-text join key and the generated cn
    :raises ValueError: on invalid UUID, invalid info data or unknown owner
    :raises GOtoException: if the MAC is already joined or the user cannot
                           be resolved uniquely
    """
    index = PluginRegistry.getInstance("ObjectIndex")

    uuid_check = re.compile(r"^[0-9a-f]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$", re.IGNORECASE)
    if not uuid_check.match(device_uuid):
        raise ValueError(C.make_error("CLIENT_UUID_INVALID", device_uuid))

    # Handle info, if present
    more_info = []

    if info:
        # Check string entries
        for entry in filter(lambda x: x in info, ["serialNumber", "ou", "o", "l", "description"]):
            if not re.match(r"^[\w\s]+$", info[entry]):
                raise ValueError(C.make_error("CLIENT_DATA_INVALID", device_uuid, entry=entry, data=info[entry]))
            more_info.append((entry, info[entry]))

        # Check desired device type if set
        if "deviceType" in info:
            if re.match(r"^(terminal|workstation|server|sipphone|switch|router|printer|scanner)$", info["deviceType"]):
                more_info.append(("deviceType", info["deviceType"]))
            else:
                raise ValueError(C.make_error("CLIENT_TYPE_INVALID", device_uuid, type=info["deviceType"]))

        # Check owner for presence
        if "owner" in info:
            # Take a look at the directory to see if there's such an owner DN
            res = index.search({'_dn': info["owner"]}, {'_dn': 1})
            if len(res) == 0:
                raise ValueError(C.make_error("CLIENT_OWNER_NOT_FOUND", device_uuid, owner=info["owner"]))
            more_info.append(("owner", info["owner"]))

    # Generate random client key
    random.seed()
    key = ''.join(random.Random().sample(string.ascii_letters + string.digits, 32))
    salt = os.urandom(4)
    h = hashlib.sha1(key.encode('ascii'))
    h.update(salt)

    # Take a look at the directory to see if there's already a joined client with this uuid
    res = index.search({'_type': 'Device', 'macAddress': mac}, {'_uuid': 1})
    if len(res):
        raise GOtoException(C.make_error("DEVICE_EXISTS", mac))

    # While the client is going to be joined, generate a random uuid and an encoded join key
    cn = str(uuid4())
    device_key = self.__encrypt_key(device_uuid.replace("-", ""), cn + key)

    # Resolve manager
    res = index.search({'_type': 'User', 'uid': user}, {'dn': 1})
    if len(res) != 1:
        raise GOtoException(C.make_error("USER_NOT_UNIQUE" if res else "UNKNOWN_USER", target=user))
    manager = res[0]['dn']

    # Create new machine entry
    dn = ",".join([self.env.config.get("goto.machine-rdn", default="ou=devices,ou=systems"), self.env.base])
    record = ObjectProxy(dn, "Device")
    record.extend("RegisteredDevice")
    record.extend("ieee802Device")
    record.extend("simpleSecurityObject")
    record.deviceUUID = cn
    record.deviceKey = Binary(device_key)
    record.cn = cn
    record.manager = manager
    record.status_Offline = True
    record.macAddress = mac.encode("ascii", "ignore")
    record.userPassword = "******" + encode(h.digest() + salt).decode()

    # BUGFIX: the loop variable was previously named ``key``, clobbering the
    # join key generated above, so a corrupted value was returned whenever
    # ``info`` contained any attribute. Use a distinct name instead.
    for attr_name, value in more_info:
        setattr(record, attr_name, value)

    record.commit()
    self.log.info("UUID '%s' joined as %s" % (device_uuid, record.dn))
    # (removed unreachable ``return None`` that followed this return)
    return [key, cn]
def applyClientRights(self, device_uuid): # check rights acl = PluginRegistry.getInstance("ACLResolver") allowed_commands = [ 'joinClient', 'preUserSession', 'postUserSession', 'getMethods', 'getDestinationIndicator' ] missing = [x for x in allowed_commands if not acl.check(device_uuid, "%s.%s.%s" % (self.env.domain, "command", x), "x")] reload = False role_name = "$$ClientDevices" if len(missing) > 0: # create AclRole for joining if not exists index = PluginRegistry.getInstance("ObjectIndex") res = index.search({"_type": "AclRole", "name": role_name}, {"dn": 1}) if len(res) == 0: # create role = ObjectProxy(self.env.base, "AclRole") role.name = role_name else: role = ObjectProxy(res[0]['dn']) # create rule aclentry = { "priority": 0, "scope": "sub", "actions": [ { "topic": "%s\.command\.(%s)" % (self.env.domain, "|".join(allowed_commands)), "acl": "x", "options": {} } ]} role.AclRoles = [aclentry] role.commit() reload = True # check if device has role found = False base = ObjectProxy(self.env.base) if base.is_extended_by("Acl"): for entry in base.AclSets: if entry["rolename"] == role_name and device_uuid in entry["members"]: found = True break else: base.extend("Acl") if found is False: acl_entry = {"priority": 0, "members": [device_uuid], "rolename": role_name} base.AclSets.append(acl_entry) if self.env.mode != "proxy": self.__acl_change_checks.append({"role": role_name, "member": device_uuid}) base.commit() reload = True if reload is True: # reload acls to make sure that they are applied in the current instance acl.load_acls()
def createAddGroupIfNotExists(self, user_dn, user_name, gid_number): if user_dn is None or user_name is None or gid_number is None: return index = PluginRegistry.getInstance("ObjectIndex") res = index.search({"_type": "PosixGroup", "gidNumber": gid_number}, {"dn": 1}) if len(res) == 0: # create group user = ObjectProxy(user_dn) group = ObjectProxy(user.get_adjusted_parent_dn(), "PosixGroup") group.cn = user_name group.description = N_("Group of user %s" % user_name) group.autoGid = False group.gidNumber = gid_number group.memberUid = [user_name] group.commit() elif len(res) == 1: group = ObjectProxy(res[0]["dn"]) if user_name not in group.memberUid: group.memberUid.append(user_name) group.commit() else: raise GosaException(C.make_error('GROUP_ID_IS_AMBIGUOUS', gid=gid_number))
def joinClient(self, user, device_uuid, mac, info=None):
    """
    Join a client device into the directory (re-join aware variant).

    Validates the device UUID and optional info dict, generates a random
    client key, and either updates an already-registered device matching the
    MAC address or creates a new ``Device`` entry. Finally ensures the client
    has the ACLs it needs via :meth:`applyClientRights`.

    :param user: uid of the joining user (becomes the manager for new devices)
    :param device_uuid: UUID reported by the client (validated by regex)
    :param mac: MAC address of the client
    :param info: optional dict with extra attributes (serialNumber, ou, o, l,
                 description, ipHostNumber, hostname, deviceType, owner)
    :return: list ``[key, cn]`` — the plain-text join key and the deviceUUID cn
    :raises ValueError: on invalid UUID, invalid info data or unknown owner
    :raises GOtoException: if the user cannot be resolved uniquely
    """
    index = PluginRegistry.getInstance("ObjectIndex")

    uuid_check = re.compile(r"^[0-9a-f]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$", re.IGNORECASE)
    if not uuid_check.match(device_uuid):
        raise ValueError(C.make_error("CLIENT_UUID_INVALID", uuid=device_uuid))

    # Handle info, if present
    more_info = []
    # extensions every joined device gets; IpHost is appended on demand below
    extensions = ["simpleSecurityObject", "ieee802Device"]

    if info:
        # Check string entries
        for entry in filter(lambda x: x in info, ["serialNumber", "ou", "o", "l", "description"]):
            if not re.match(r"^[\w\s]+$", info[entry]):
                raise ValueError(C.make_error("CLIENT_DATA_INVALID", client=device_uuid, entry=entry, data=info[entry]))
            more_info.append((entry, info[entry]))

        if "ipHostNumber" in info:
            # simple dotted-quad check (octet ranges are not validated here)
            if re.match(r"^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}", info["ipHostNumber"]):
                more_info.append(("ipHostNumber", info["ipHostNumber"]))
                extensions.append("IpHost")
            else:
                raise ValueError(C.make_error("CLIENT_DATA_INVALID", client=device_uuid, entry="ipHostNumber", data=info["ipHostNumber"]))

        if "hostname" in info:
            # RFC-1035-style label check per dot-separated component;
            # an invalid hostname is silently ignored (no error raised)
            allowed = re.compile("(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
            if all(allowed.match(x) for x in info["hostname"].split(".")):
                more_info.append(("description", info["hostname"]))

        # Check desired device type if set
        if "deviceType" in info:
            if re.match(r"^(terminal|workstation|server|sipphone|switch|router|printer|scanner)$", info["deviceType"]):
                more_info.append(("deviceType", info["deviceType"]))
            else:
                raise ValueError(C.make_error("CLIENT_TYPE_INVALID", client=device_uuid, type=info["deviceType"]))

        # Check owner for presence
        if "owner" in info:
            # Take a look at the directory to see if there's such an owner DN
            res = index.search({'_dn': info["owner"]}, {'_dn': 1})
            if len(res) == 0:
                raise ValueError(C.make_error("CLIENT_OWNER_NOT_FOUND", client=device_uuid, owner=info["owner"]))
            more_info.append(("owner", info["owner"]))

    # Generate random client key
    h, key, salt = generate_random_key()

    # Take a look at the directory to see if there's already a joined client with this uuid
    res = index.search({'_type': 'Device', 'macAddress': mac, 'extension': 'RegisteredDevice'}, {'dn': 1})
    if len(res) > 0:
        # Device already registered: re-join it in place
        record = ObjectProxy(res[0]['dn'])
        for ext in extensions:
            if not record.is_extended_by(ext):
                record.extend(ext)
        # a pending Foreman one-time-password is consumed by the join
        if record.is_extended_by("ForemanHost") and record.otp is not None:
            record.otp = None
        record.userPassword = ["{SSHA}" + encode(h.digest() + salt).decode()]
        for k, value in more_info:
            setattr(record, k, value)
        cn = record.deviceUUID
        # reset status flags: freshly joined devices start offline
        record.status_Online = False
        record.status_Offline = True
        record.status_InstallationInProgress = False
        record.commit()
        self.log.info("UUID '%s' joined as %s" % (device_uuid, record.dn))
    else:
        # While the client is going to be joined, generate a random uuid and an encoded join key
        cn = str(uuid4())
        device_key = encrypt_key(device_uuid.replace("-", ""), cn + key)

        # Resolve manager
        res = index.search({'_type': 'User', 'uid': user}, {'dn': 1})
        if len(res) != 1:
            raise GOtoException(C.make_error("USER_NOT_UNIQUE" if res else "UNKNOWN_USER", user=user))
        manager = res[0]['dn']

        # Create new machine entry
        dn = ",".join([self.env.config.get("goto.machine-rdn", default="ou=systems"), self.env.base])
        record = ObjectProxy(dn, "Device")
        record.extend("RegisteredDevice")
        for ext in extensions:
            record.extend(ext)
        record.deviceUUID = cn
        record.deviceKey = Binary(device_key)
        record.cn = "mac%s" % mac.replace(":", "")
        record.manager = manager
        record.status_Offline = True
        record.macAddress = mac.encode("ascii", "ignore")
        record.userPassword = ["{SSHA}" + encode(h.digest() + salt).decode()]
        for k, value in more_info:
            setattr(record, k, value)
        record.commit()
        self.log.info("UUID '%s' joined as %s" % (device_uuid, record.dn))

    # make sure the client has the access rights he needs
    self.applyClientRights(cn)

    return [key, cn]
def __collect_user_configuration(self, client_id, users):
    """
    Collect the per-user desktop configuration (menu, printer setup, screen
    resolution) for all given users logged in on a client.

    :param client_id: deviceUUID or hostname (or an already opened ObjectProxy)
    :param users: list of currently logged in users on the client
    :return: dict keyed by uid with "menu", "printer-setup" and "resolution"
    """
    if isinstance(client_id, ObjectProxy):
        client = client_id
    else:
        client = self.__open_device(client_id, read_only=True)
    group = None
    index = PluginRegistry.getInstance("ObjectIndex")
    # the first GroupOfNames the client is member of acts as its primary group
    res = index.search({"_type": "GroupOfNames", "member": client.dn}, {"dn": 1})
    if len(res) > 0:
        group = ObjectProxy(res[0]["dn"], read_only=True)

    config = {}
    # resolution: group setting first, client setting overrides it
    resolution = None
    if group is not None and group.is_extended_by("GotoEnvironment") and group.gotoXResolution is not None:
        resolution = group.gotoXResolution
    if client.is_extended_by("GotoEnvironment") and client.gotoXResolution is not None:
        resolution = client.gotoXResolution

    # release: from the client's own menu, otherwise walk up the Foreman
    # host-group hierarchy until a group provides one
    release = None
    if client.is_extended_by("GotoMenu"):
        release = client.getReleaseName()
    elif group is not None and group.is_extended_by("ForemanHostGroup"):
        release = group.getReleaseName()
        parent_group = group
        while release is None and parent_group is not None and parent_group.parent_id is not None:
            res = index.search({"_type": "GroupOfNames", "extension": "ForemanHostGroup", "foremanGroupId": parent_group.parent_id}, {"dn": 1})
            if len(res) == 0:
                break
            else:
                parent_group = ObjectProxy(res[0]["dn"], read_only=True)
                release = parent_group.getReleaseName()

    if release is None:
        self.log.error("no release found for client/user combination (%s/%s)" % (client_id, users))

    client_menu = None
    if hasattr(client, "gotoMenu") and client.gotoMenu is not None:
        client_menu = loads(client.gotoMenu)

    # collect users DNs
    query_result = index.search({"_type": "User", "uid": {"in_": users}}, {"dn": 1})
    for entry in query_result:
        user = ObjectProxy(entry["dn"], read_only=True)
        config[user.uid] = {}
        if release is not None:
            menus = []
            if client_menu is not None:
                menus.append(client_menu)
            # get all groups the user is member of which have a menu for the given release
            query = {'_type': 'GroupOfNames', "member": user.dn, "extension": "GotoMenu", "gotoLsbName": release}
            for res in index.search(query, {"gotoMenu": 1}):
                # collect user menus
                for m in res.get("gotoMenu", []):
                    menus.append(loads(m))

            if len(menus):
                # merge all collected menus into a single tree
                user_menu = None
                for menu_entry in menus:
                    if user_menu is None:
                        user_menu = self.get_submenu(menu_entry)
                    else:
                        self.merge_submenu(user_menu, self.get_submenu(menu_entry))
                config[user.uid]["menu"] = user_menu

        # collect printer settings for user, starting with the clients printers
        settings = self.__collect_printer_settings(group)
        printer_names = [x["cn"] for x in settings["printers"]]

        # get all GroupOfNames with GotoEnvironment the user or client is member of
        for res in index.search({'_type': 'GroupOfNames', "member": {"in_": [user.dn, client.dn]}, "extension": "GotoEnvironment"}, {"dn": 1}):
            user_group = ObjectProxy(res["dn"], read_only=True)
            if group is not None and user_group.dn == group.dn:
                # this group has already been handled
                continue
            s = self.__collect_printer_settings(user_group)
            if user_group.gotoXResolution is not None:
                resolution = user_group.gotoXResolution
            for p in s["printers"]:
                if p["cn"] not in printer_names:
                    settings["printers"].append(p)
                    printer_names.append(p["cn"])
            if s["defaultPrinter"] is not None:
                settings["defaultPrinter"] = s["defaultPrinter"]

        # override group environment settings if the client has one
        s = self.__collect_printer_settings(client)
        if len(s["printers"]) > 0:
            settings["printers"] = s["printers"]
            settings["defaultPrinter"] = s["defaultPrinter"]

        if user.is_extended_by("GosaAccount") and user.gosaDefaultPrinter is not None:
            # check if the users default printer is send to the client
            found = False
            for printer_settings in settings["printers"]:
                if printer_settings["cn"] == user.gosaDefaultPrinter:
                    found = True
                    break

            def process(res):
                # helper: resolve an index result to a printer config dict;
                # returns None if not found, False if ambiguous
                if len(res) == 0:
                    self.log.warning("users defaultPrinter not found: %s" % user.gosaDefaultPrinter)
                    return None
                elif len(res) == 1:
                    # add this one to the result set
                    printer = ObjectProxy(res[0]["dn"], read_only=True)
                    p_conf = {}
                    for attr in self.printer_attributes:
                        p_conf[attr] = getattr(printer, attr)
                    return p_conf
                return False

            if found is False:
                # find the printer and add it to the settings
                res = index.search({"_type": "GotoPrinter", "cn": user.gosaDefaultPrinter}, {"dn": 1})
                printer_config = process(res)
                if printer_config is False:
                    # more than 1 printers found by this CN, try to look in the users subtree
                    res = index.search({
                        "_type": "GotoPrinter",
                        "cn": user.gosaDefaultPrinter,
                        "_adjusted_parent_dn": user.get_adjusted_parent_dn()
                    }, {"dn": 1})
                    printer_config = process(res)
                if isinstance(printer_config, dict):
                    settings["printers"].append(printer_config)
                    settings["defaultPrinter"] = user.gosaDefaultPrinter
                else:
                    self.log.warning("users defaultPrinter not found: %s" % user.gosaDefaultPrinter)
            else:
                settings["defaultPrinter"] = user.gosaDefaultPrinter

        config[user.uid]["printer-setup"] = settings

        config[user.uid]["resolution"] = None
        if resolution is not None:
            # resolution string looks like "1024x768" -> [1024, 768]
            config[user.uid]["resolution"] = [int(x) for x in resolution.split("x")]

    # TODO: collect and send login scripts to client
    return config
def remove_test_data(dn): if dn is not None: new_domain = ObjectProxy(dn) new_domain.remove(True)
def search(self, user, base, scope, qstring, fltr=None):
    """
    Performs a query based on a simple search string consisting of keywords.

    Query the database using the given query string and an optional
    filter dict - and return the result set.

    ========== ==================
    Parameter  Description
    ========== ==================
    base       Query base
    scope      Query scope (SUB, BASE, ONE, CHILDREN)
    qstring    Query string
    fltr       Hash for extra parameters
    ========== ==================

    ``Return``: List of dicts
    """
    res = {}
    keywords = None
    dn_hook = "_parent_dn"
    fallback = fltr and "fallback" in fltr and fltr["fallback"]
    if not base:
        return []
    adjusted_base = base

    # Set defaults
    if not fltr:
        fltr = {}
    if not 'category' in fltr:
        fltr['category'] = "all"
    if not 'secondary' in fltr:
        fltr['secondary'] = "enabled"
    if not 'mod-time' in fltr:
        fltr['mod-time'] = "all"
    if 'adjusted-dn' in fltr and fltr['adjusted-dn'] is True:
        dn_hook = "_adjusted_parent_dn"
        adjusted_base = ObjectProxy.get_adjusted_dn(base, self.env.base)

    actions = 'actions' in fltr and fltr['actions'] is True

    # Sanity checks
    scope = scope.upper()
    if not scope in ["SUB", "BASE", "ONE", "CHILDREN"]:
        raise GOsaException(C.make_error("INVALID_SEARCH_SCOPE", scope=scope))
    if not fltr['mod-time'] in ["hour", "day", "week", "month", "year", "all"]:
        raise GOsaException(C.make_error("INVALID_SEARCH_DATE", date=fltr['mod-time']))

    # Build query: join attributes and keywords
    queries = []

    # Build query: assemble
    query = None
    if scope == "SUB":
        if queries:
            query = and_(or_(ObjectInfoIndex._parent_dn == base, ObjectInfoIndex._parent_dn.like("%," + base)), or_(*queries))
        else:
            query = or_(ObjectInfoIndex._parent_dn == base, ObjectInfoIndex._parent_dn.like("%," + base))

    elif scope == "ONE":
        query = and_(or_(ObjectInfoIndex.dn == adjusted_base, getattr(ObjectInfoIndex, dn_hook) == adjusted_base), or_(*queries))

    elif scope == "CHILDREN":
        query = and_(getattr(ObjectInfoIndex, dn_hook) == adjusted_base, or_(*queries))

    else:
        if queries:
            query = and_(ObjectInfoIndex.dn == base, or_(*queries))
        else:
            query = ObjectInfoIndex.dn == base

    # Build query: eventually extend with timing information
    td = None
    if fltr['mod-time'] != "all":
        now = datetime.datetime.now()
        if fltr['mod-time'] == 'hour':
            td = now - datetime.timedelta(hours=1)
        elif fltr['mod-time'] == 'day':
            td = now - datetime.timedelta(days=1)
        elif fltr['mod-time'] == 'week':
            td = now - datetime.timedelta(weeks=1)
        elif fltr['mod-time'] == 'month':
            td = now - datetime.timedelta(days=31)
        elif fltr['mod-time'] == 'year':
            td = now - datetime.timedelta(days=365)
        query = and_(ObjectInfoIndex._last_modified >= td, query)

    order_by = None
    if 'order-by' in fltr:
        is_desc = 'order' in fltr and fltr['order'] == 'desc'
        order_by = "_last_changed"
        if fltr['order-by'] == "last-changed":
            order_by = "_last_modified"
        order_by = desc(getattr(ObjectInfoIndex, order_by)) if is_desc else getattr(ObjectInfoIndex, order_by)

    # Perform primary query and collect the results
    squery = []
    these = dict([(x, 1) for x in self.__search_aid['used_attrs']])
    these.update(dict(dn=1, _type=1, _uuid=1, _last_changed=1))
    these = list(these.keys())

    with make_session() as session:
        query_result, ranked = self.finalize_query(query, fltr, session, qstring=qstring, order_by=order_by)

        # limit only secondary enabled searches, because e.g. the treeitems use
        # this search to resolve and we do not want to limit those results
        if fltr['secondary'] == "enabled":
            max_results = self.env.config.get("backend.max-results", default=1000)
        else:
            max_results = math.inf
        counter = 0
        total = query_result.count()
        response = {}

        if total == 0 and fallback is True and PluginRegistry.getInstance("ObjectIndex").fuzzy is True:
            # do fuzzy search
            if qstring:
                try:
                    keywords = [s.strip("'").strip('"') for s in shlex.split(qstring)]
                except ValueError:
                    keywords = [s.strip("'").strip('"') for s in qstring.split(" ")]
                # Make keywords unique
                keywords = list(set(keywords))

                # find most similar words
                for i, kw in enumerate(keywords):
                    # SECURITY FIX: the keyword comes from user input and was
                    # previously interpolated into the SQL string via
                    # str.format() — use bound parameters instead.
                    r = session.execute(
                        "SELECT word, similarity(word, :kw) as sim FROM unique_lexeme "
                        "WHERE length(word) > 2 AND similarity(word, :kw) > :threshold "
                        "ORDER BY similarity(word, :kw) DESC LIMIT 3;",
                        {"kw": kw, "threshold": self.__fuzzy_similarity_threshold}
                    ).fetchall()
                    if len(r) > 0 and self.__fuzzy_similarity_threshold < 0.5 <= r[0]['sim']:
                        # we have good results to take only those and ignore the lower similarity threshold
                        r = [x for x in r if x['sim'] >= 0.5]
                    keywords[i] = " or ".join([x['word'] for x in r])

                keywords_string = " ".join(keywords)
                if keywords_string != "":
                    self.log.info("no results found for: '%s' => re-trying with: '%s'" % (qstring, keywords_string))
                    response['orig'] = qstring
                    if qstring != keywords_string:
                        response['fuzzy'] = keywords_string
                    query_result, ranked = self.finalize_query(query, fltr, session, qstring=" ".join(keywords), order_by=order_by)
                    total = query_result.count()

        response['primary_total'] = total
        self.log.debug("Query: %s Keywords: %s, Filter: %s => %s results" % (qstring, keywords, fltr, total))
        squery_constraints = {}
        primary_uuids = []
        for row in query_result.all():  # renamed from `tuple` (shadowed builtin)
            if ranked is True:
                item = row[0]
                rank = row[1]
                # make sure that the primary rank is higher that the secondaries
                rank += 1
            else:
                item = row
                rank = 1
            self.update_res(res, item, user, rank, these=these, actions=actions)
            counter += 1
            if counter >= max_results:
                break
            primary_uuids.append(item.uuid)

            # Collect information for secondary search?
            if fltr['secondary'] != "enabled":
                continue

            if item._type in self.__search_aid['resolve']:
                if len(self.__search_aid['resolve'][item._type]) == 0:
                    continue
                kv = self.__index_props_to_key_value(item.properties)
                for r in self.__search_aid['resolve'][item._type]:
                    if r['attribute'] in kv:
                        tag = r['type'] if r['type'] else item._type

                        # If a category was choosen and it does not fit the
                        # desired target tag - skip that one
                        if not (fltr['category'] == "all" or fltr['category'] == tag):
                            continue

                        if hasattr(ObjectInfoIndex, r['filter']):
                            if tag == "*":
                                squery.append(getattr(ObjectInfoIndex, r['filter']).in_(kv[r['attribute']]))
                            else:
                                squery.append(and_(ObjectInfoIndex._type == tag, getattr(ObjectInfoIndex, r['filter']).in_(kv[r['attribute']])))
                        else:
                            if tag not in squery_constraints:
                                squery_constraints[tag] = {}
                            if r['filter'] not in squery_constraints[tag]:
                                squery_constraints[tag][r['filter']] = []
                            squery_constraints[tag][r['filter']].extend(kv[r['attribute']])

        for obj_type, constraints in squery_constraints.items():  # renamed from `type` (shadowed builtin)
            for key, values in constraints.items():
                values = list(set(values))
                if len(values) > 0:
                    if obj_type == "*":
                        # BUGFIX: list.append takes exactly one argument — the
                        # two conditions were previously passed as two
                        # arguments (TypeError at runtime); combine with and_()
                        # like the sibling branch below.
                        squery.append(and_(KeyValueIndex.key == key, KeyValueIndex.value.in_(values)))
                    else:
                        squery.append(and_(ObjectInfoIndex._type == obj_type, KeyValueIndex.key == key, KeyValueIndex.value.in_(values)))

        # Perform secondary query and update the result
        if fltr['secondary'] == "enabled" and squery:
            query = and_(or_(*squery), ~ObjectInfoIndex.uuid.in_(primary_uuids))

            # Add "_last_changed" information to query
            if fltr['mod-time'] != "all":
                query = and_(query, ObjectInfoIndex._last_modified >= td)

            # Execute query and update results
            sec_result = session.query(ObjectInfoIndex).join(ObjectInfoIndex.properties).options(contains_eager(ObjectInfoIndex.properties)).filter(query)
            results = sec_result.all()
            total += len(results)
            if counter < max_results:
                for item in results:
                    self.update_res(res, item, user, self.__make_relevance(item, keywords, fltr, True), secondary=True, these=these, actions=actions)
                    counter += 1
                    if counter >= max_results:
                        break

    response['total'] = total
    response['results'] = list(res.values())
    return response
async def test_provision_host(self, m_get, m_del, m_put, m_post):
    """
    Convert a discovered host to a 'real' host.

    End-to-end flow: create a discovered ForemanHost and a ForemanHostGroup,
    assign the host to the group (which triggers provisioning via the mocked
    Foreman API), then move it to its final container and verify the index
    and object state.

    :param m_get/m_del/m_put/m_post: patched HTTP method mocks, wired to
                                     a MockForeman instance below
    """
    self._test_dn = GosaTestCase.create_test_data()

    container = ObjectProxy(self._test_dn, "IncomingDeviceContainer")
    container.commit()

    # route all HTTP verbs through the Foreman mock
    mocked_foreman = MockForeman()
    m_get.side_effect = mocked_foreman.get
    m_del.side_effect = mocked_foreman.delete
    m_put.side_effect = mocked_foreman.put
    m_post.side_effect = mocked_foreman.post

    # create the discovered host + foremanHostgroup
    d_host = ObjectProxy(container.dn, "Device")
    d_host.cn = "mac00262df16a2c"
    d_host.extend("ForemanHost")
    d_host.status = "discovered"
    d_host.extend("ieee802Device")
    d_host.macAddress = "00:26:2d:f1:6a:2c"
    d_host.extend("IpHost")
    d_host.ipHostNumber = "192.168.0.1"
    d_host.commit()

    hostgroup = ObjectProxy("%s" % self._test_dn, "GroupOfNames")
    hostgroup.extend("ForemanHostGroup")
    hostgroup.cn = "Test"
    hostgroup.foremanGroupId = "4"
    hostgroup.commit()

    # add host to group
    logging.getLogger("test.foreman-integration").info("########### START: Add Host to group ############# %s" % AsyncHTTPTestCase.get_url(self, "/hooks/"))
    d_host = ObjectProxy("cn=mac00262df16a2c,%s" % container.dn)

    def check():
        # mock response is only served while the host is still "discovered"
        logging.getLogger("test.foreman-integration").info("check condition: %s, %s" % (d_host.cn, d_host.status))
        return d_host.cn == "mac00262df16a2c" and d_host.status == "discovered"

    def check2():
        # mock response for the provisioned host, served after the rename
        logging.getLogger("test.foreman-integration").info("check2 condition: %s" % d_host.cn)
        return d_host.cn == "Testhost"

    base_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data")
    with open(os.path.join(base_dir, "discovered_hosts", "mac00262df16a2c.json")) as f:
        mocked_foreman.register_conditional_response("http://localhost:8000/api/v2/discovered_hosts/mac00262df16a2c", "get", check, f.read())

    with open(os.path.join(base_dir, "conditional", "Testhost.json")) as f:
        mocked_foreman.register_conditional_response("http://localhost:8000/api/v2/hosts/Testhost", "get", check2, f.read())

    def activate(**kwargs):
        return True

    mocked_foreman.register_trigger("http://localhost:8000/api/v2/discovered_hosts/mac00262df16a2c", "put", activate, self.execute)

    # sanity: no "Testhost" entry in the index yet
    with make_session() as session:
        assert session.query(ObjectInfoIndex.dn)\
            .join(ObjectInfoIndex.properties)\
            .filter(and_(KeyValueIndex.key == "cn", KeyValueIndex.value == "Testhost"))\
            .count() == 0

    d_host.cn = "Testhost"
    d_host.groupMembership = hostgroup.dn
    d_host.commit()
    # give the asynchronous provisioning hook time to run
    logging.getLogger("test.foreman-integration").info("waiting for 2 seconds")
    await asyncio.sleep(2)
    logging.getLogger("test.foreman-integration").info("########### END: Add Host to group #############")

    # now move the host to the final destination
    d_host = ObjectProxy("cn=Testhost,ou=incoming,%s" % self._test_dn)
    assert d_host.status != "discovered"
    assert d_host.name == "Testhost"
    assert d_host.hostgroup_id == "4"
    assert d_host.is_extended_by("RegisteredDevice") is True
    assert len(d_host.userPassword[0]) > 0
    assert d_host.deviceUUID is not None

    # index must now contain exactly one "Testhost" entry
    with make_session() as session:
        assert session.query(ObjectInfoIndex.dn) \
            .join(ObjectInfoIndex.properties) \
            .filter(and_(KeyValueIndex.key == "cn", KeyValueIndex.value == "Testhost")) \
            .count() == 1

    logging.getLogger("test.foreman-integration").info("########### START: moving host #############")
    d_host.move("%s" % self._test_dn)
    logging.getLogger("test.foreman-integration").info("########### END: moving host #############")

    # lets check if everything is fine in the database
    d_host = ObjectProxy("cn=Testhost,ou=devices,%s" % self._test_dn, read_only=True)
    assert d_host is not None
    assert d_host.status == "unknown"
    assert d_host.groupMembership == hostgroup.dn