def build_snap_ent(entry):
    basefields = []
    if entry.tag in ['Package', 'Service']:
        basefields += ['type']
    desired = dict([(key, u_str(entry.get(key))) for key in basefields])
    state = dict([(key, u_str(entry.get(key))) for key in basefields])
    desired.update([(key, u_str(entry.get(key)))
                    for key in datafields[entry.tag]])
    if entry.tag == 'ConfigFile' or \
       ((entry.tag == 'Path') and (entry.get('type') == 'file')):
        if entry.text is None:
            desired['contents'] = None
        else:
            if entry.get('encoding', 'ascii') == 'ascii':
                desired['contents'] = u_str(entry.text)
            else:
                desired['contents'] = u_str(b64decode(entry.text))
        if 'current_bfile' in entry.attrib:
            state['contents'] = u_str(b64decode(entry.get('current_bfile')))
        elif 'current_bdiff' in entry.attrib:
            diff = b64decode(entry.get('current_bdiff'))
            state['contents'] = u_str(
                '\n'.join(difflib.restore(diff.split('\n'), 1)))
    state.update([(key, u_str(entry.get('current_' + key, entry.get(key))))
                  for key in datafields[entry.tag]])
    if entry.tag in ['ConfigFile', 'Path'] and \
       entry.get('exists', 'true') == 'false':
        state = None
    return [desired, state]
def ssl_decrypt(data, passwd, algorithm=None):
    """ Decrypt openssl-encrypted data.  This can decrypt data
    encrypted by :func:`ssl_encrypt`, or ``openssl enc``.  It performs
    a base64 decode first if the data is base64 encoded, and
    automatically determines the salt and initialization vector (both
    of which are embedded in the encrypted data).

    :param data: The encrypted data (either base64-encoded or raw
                 binary) to decrypt
    :type data: string
    :param passwd: The password to use to decrypt the data
    :type passwd: string
    :param algorithm: The cipher algorithm to use
    :type algorithm: string
    :returns: string - The decrypted data
    """
    # base64-decode the data
    data = b64decode(data)
    salt = data[8:16]
    # pylint: disable=E1101,E1121
    hashes = [md5(passwd + salt).digest()]
    for i in range(1, 3):
        hashes.append(md5(hashes[i - 1] + passwd + salt).digest())
    # pylint: enable=E1101,E1121
    key = hashes[0] + hashes[1]
    iv = hashes[2]
    return str_decrypt(data[16:], key=key, iv=iv, algorithm=algorithm)
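# For reference, a minimal standalone sketch of the key/IV schedule that
# ssl_decrypt() relies on: three chained MD5 digests over (previous digest +
# password + salt), matching OpenSSL's legacy EVP_BytesToKey derivation.
# The helper name, password, and salt below are made-up illustration values.
import hashlib

def _derive_key_iv_sketch(passwd, salt):
    """Hypothetical helper mirroring the derivation done in ssl_decrypt()."""
    hashes = [hashlib.md5(passwd + salt).digest()]
    for i in range(1, 3):
        hashes.append(hashlib.md5(hashes[i - 1] + passwd + salt).digest())
    # 32-byte key (e.g. AES-256) and 16-byte IV
    return hashes[0] + hashes[1], hashes[2]

# ``openssl enc -salt`` output is "Salted__" + 8-byte salt + ciphertext,
# which is why ssl_decrypt() reads the salt from data[8:16].
key, iv = _derive_key_iv_sketch(b"secret", b"\x01\x02\x03\x04\x05\x06\x07\x08")
assert len(key) == 32 and len(iv) == 16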
def _import_Path(self, entry, state):
    name = entry.get("name")
    exists = entry.get("current_exists", default="true").lower() == "true"
    path_type = entry.get("type").lower()
    act_dict = dict(name=name, state=state, exists=exists,
                    path_type=path_type)

    target_dict = dict(
        owner=entry.get("owner", default="root"),
        group=entry.get("group", default="root"),
        mode=entry.get("mode", default=entry.get("perms", default=""))
    )
    fperm, created = FilePerms.objects.get_or_create(**target_dict)
    act_dict["target_perms"] = fperm

    current_dict = dict(
        owner=entry.get("current_owner", default=""),
        group=entry.get("current_group", default=""),
        mode=entry.get("current_mode",
                       default=entry.get("current_perms", default=""))
    )
    fperm, created = FilePerms.objects.get_or_create(**current_dict)
    act_dict["current_perms"] = fperm

    if path_type in ("symlink", "hardlink"):
        act_dict["target_path"] = entry.get("to", default="")
        act_dict["current_path"] = entry.get("current_to", default="")
        self.logger.debug("Adding link %s" % name)
        return LinkEntry.entry_get_or_create(act_dict)
    elif path_type == "device":
        # TODO devices
        self.logger.warn("device path types are not supported yet")
        return

    # TODO - vcs output
    act_dict["detail_type"] = PathEntry.DETAIL_UNUSED
    if path_type == "directory" and entry.get("prune", "false") == "true":
        unpruned_elist = [e.get("path") for e in entry.findall("Prune")]
        if unpruned_elist:
            act_dict["detail_type"] = PathEntry.DETAIL_PRUNED
            act_dict["details"] = "\n".join(unpruned_elist)
    elif entry.get("sensitive", "false").lower() == "true":
        act_dict["detail_type"] = PathEntry.DETAIL_SENSITIVE
    else:
        cdata = None
        if entry.get("current_bfile", None):
            act_dict["detail_type"] = PathEntry.DETAIL_BINARY
            cdata = entry.get("current_bfile")
        elif entry.get("current_bdiff", None):
            act_dict["detail_type"] = PathEntry.DETAIL_DIFF
            cdata = b64decode(entry.get("current_bdiff"))
        elif entry.get("current_diff", None):
            act_dict["detail_type"] = PathEntry.DETAIL_DIFF
            # plain-text diffs are stored unencoded
            cdata = entry.get("current_diff")
        if cdata:
            if len(cdata) > Bcfg2.Options.setup.file_limit:
                act_dict["detail_type"] = PathEntry.DETAIL_SIZE_LIMIT
                act_dict["details"] = md5(cdata).hexdigest()
            else:
                act_dict["details"] = cdata

    self.logger.debug("Adding path %s" % name)
    return PathEntry.entry_get_or_create(act_dict)
def GetCurrentEntry(self, client, e_type, e_name):
    try:
        c_inst = Client.objects.filter(name=client)[0]
    except IndexError:
        self.logger.error("Unknown client: %s" % client)
        raise Bcfg2.Server.Plugin.PluginExecutionError
    result = c_inst.current_interaction.bad().filter(entry__kind=e_type,
                                                     entry__name=e_name)
    if not result:
        raise Bcfg2.Server.Plugin.PluginExecutionError
    entry = result[0]
    ret = []
    data = ("owner", "group", "perms")
    for t in data:
        if getattr(entry.reason, "current_%s" % t) == "":
            ret.append(getattr(entry.reason, t))
        else:
            ret.append(getattr(entry.reason, "current_%s" % t))
    if entry.reason.is_sensitive:
        raise Bcfg2.Server.Plugin.PluginExecutionError
    elif len(entry.reason.unpruned) != 0:
        ret.append("\n".join(entry.reason.unpruned))
    elif entry.reason.current_diff != "":
        if entry.reason.is_binary:
            ret.append(b64decode(entry.reason.current_diff))
        else:
            ret.append("\n".join(
                difflib.restore(entry.reason.current_diff.split("\n"), 1)))
    elif entry.reason.is_binary:
        # If len is zero the object was too large to store
        raise Bcfg2.Server.Plugin.PluginExecutionError
    else:
        ret.append(None)
    return ret
def do_authn(self):
    """ Perform authentication by calling
    :func:`Bcfg2.Server.Core.NetworkCore.authenticate`. This is
    implemented as a CherryPy tool."""
    try:
        header = cherrypy.request.headers['Authorization']
    except KeyError:
        self.critical_error("No authentication data presented")
    auth_content = header.split()[1]
    auth_content = b64decode(auth_content)
    try:
        username, password = auth_content.split(":")
    except ValueError:
        username = auth_content
        password = ""
    # FIXME: Get client cert
    cert = None
    address = (cherrypy.request.remote.ip, cherrypy.request.remote.port)

    rpcmethod = xmlrpcutil.process_body()[1]
    if rpcmethod == 'ERRORMETHOD':
        raise Exception("Unknown error processing XML-RPC request body")

    if (not self.check_acls(address[0], rpcmethod) or
            not self.authenticate(cert, username, password, address)):
        raise cherrypy.HTTPError(401)
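# Illustration only: a hedged, framework-free sketch (hypothetical helper,
# no CherryPy involved) of how an HTTP Basic ``Authorization`` header maps
# onto the header.split()[1] / b64decode / split(":") steps used above.
import base64

def _split_basic_auth_sketch(header):
    """Hypothetical helper: return (username, password) from a Basic header."""
    b64_part = header.split()[1]                       # drop the "Basic" token
    decoded = base64.b64decode(b64_part).decode("utf-8")
    user, _, password = decoded.partition(":")
    return user, password

# "dXNlcjpzM2NyZXQ=" is base64 for "user:s3cret" (made-up credentials)
assert _split_basic_auth_sketch("Basic dXNlcjpzM2NyZXQ=") == ("user", "s3cret")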
def verify_file(self, filename, contents, metadata):
    """ Service the FAM events queued up by the key generation so
    the data structure entries will be available for binding.

    NOTE: We wait for up to ten seconds. There is some potential
    for race condition, because if the file monitor doesn't get
    notified about the new key files in time, those entries won't
    be available for binding. In practice, this seems "good
    enough"."""
    entry = self.entries[metadata.hostname][filename]
    cfg = self.core.plugins['Cfg']
    tries = 0
    updated = False
    while not updated:
        if tries >= 10:
            self.logger.error("%s still not registered" % filename)
            return
        self.core.fam.handle_events_in_interval(1)
        try:
            cfg.entries[filename].bind_entry(entry, metadata)
        except Bcfg2.Server.Plugin.PluginExecutionError:
            tries += 1
            continue

        # get current entry data
        if entry.get("encoding") == "base64":
            entrydata = b64decode(entry.text)
        else:
            entrydata = entry.text
        if entrydata == contents:
            updated = True
        tries += 1
def is_encrypted(val):
    """ Make a best guess if the value is encrypted or not.  This
    just checks to see if ``val`` is a base64-encoded string whose
    content starts with "Salted\\_\\_", so it may have (rare) false
    positives.  It will not have false negatives. """
    try:
        return b64decode(val).startswith("Salted__")
    except:  # pylint: disable=W0702
        return False
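# A quick, self-contained illustration of the heuristic above: payloads
# produced by ``openssl enc -salt`` begin with the literal marker "Salted__"
# once base64-decoded.  The sample payloads are invented, and byte strings
# are used so the sketch runs unmodified on Python 3.
import base64

def _looks_encrypted_sketch(val):
    """Sketch of the same best-guess check with explicit exception types."""
    try:
        return base64.b64decode(val).startswith(b"Salted__")
    except (TypeError, ValueError):
        return False

fake_cipher = base64.b64encode(b"Salted__" + b"\x00" * 8 + b"ciphertext")
assert _looks_encrypted_sketch(fake_cipher) is True
assert _looks_encrypted_sketch(b"just some plain text") is False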
def write_data(self, data, metadata):
    """Write the probed file data to the bcfg2 specification."""
    filename = data.get("name")
    contents = b64decode(data.text)
    entry = self.entries[metadata.hostname][filename]
    cfg = self.core.plugins['Cfg']
    specific = "%s.H_%s" % (os.path.basename(filename), metadata.hostname)
    # we can't use os.path.join() for this because specific
    # already has a leading /, which confuses os.path.join()
    fileloc = os.path.join(cfg.data,
                           os.path.join(filename, specific).lstrip("/"))

    create = False
    try:
        cfg.entries[filename].bind_entry(entry, metadata)
    except (KeyError, Bcfg2.Server.Plugin.PluginExecutionError):
        create = True

    # get current entry data
    if entry.text and entry.get("encoding") == "base64":
        entrydata = b64decode(entry.text)
    else:
        entrydata = entry.text

    if create:
        self.logger.info("Writing new probed file %s" % fileloc)
        self.write_file(fileloc, contents)
        self.verify_file(filename, contents, metadata)
        infoxml = os.path.join(cfg.data, filename.lstrip("/"), "info.xml")
        self.write_infoxml(infoxml, entry, data)
    elif entrydata == contents:
        self.debug_log("Existing %s contents match probed contents" %
                       filename)
        return
    elif entry.get('update', 'false').lower() == "true":
        self.logger.info("Writing updated probed file %s" % fileloc)
        self.write_file(fileloc, contents)
        self.verify_file(filename, contents, metadata)
    else:
        self.logger.info("Skipping updated probed file %s" % fileloc)
        return
def GetCurrentEntry(self, client, e_type, e_name):
    curr = self.FindCurrent(client)
    entry = curr.xpath('.//Bad/%s[@name="%s"]' % (e_type, e_name))
    if not entry:
        raise Bcfg2.Server.Plugin.PluginExecutionError
    cfentry = entry[-1]

    owner = cfentry.get("current_owner", cfentry.get("owner"))
    group = cfentry.get("current_group", cfentry.get("group"))
    perms = cfentry.get("current_perms", cfentry.get("perms"))
    if cfentry.get("sensitive") in ["true", "True"]:
        raise Bcfg2.Server.Plugin.PluginExecutionError
    elif "current_bfile" in cfentry.attrib:
        contents = b64decode(cfentry.get("current_bfile"))
    elif "current_bdiff" in cfentry.attrib:
        diff = b64decode(cfentry.get("current_bdiff"))
        contents = "\n".join(difflib.restore(diff.split("\n"), 1))
    else:
        contents = None

    return (owner, group, perms, contents)
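# The ``current_bdiff`` attribute holds an ndiff-style delta joined with
# newlines; difflib.restore(delta, 1) extracts the lines that came from the
# first of the two sequences that produced the delta, which the code above
# uses to recover the recorded file contents.  Self-contained example with
# invented config lines:
import difflib

current = ["root:x:0:0:root:/root:/bin/bash"]
target = ["root:x:0:0:root:/root:/bin/sh"]

delta = "\n".join(difflib.ndiff(current, target))
restored = "\n".join(difflib.restore(delta.split("\n"), 1))
assert restored == current[0]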
def GetCurrentEntry(self, client, e_type, e_name):
    curr = self.FindCurrent(client)
    entry = curr.xpath('.//Bad/%s[@name="%s"]' % (e_type, e_name))
    if not entry:
        raise Bcfg2.Server.Plugin.PluginExecutionError
    cfentry = entry[-1]

    owner = cfentry.get('current_owner', cfentry.get('owner'))
    group = cfentry.get('current_group', cfentry.get('group'))
    perms = cfentry.get('current_perms', cfentry.get('perms'))
    if cfentry.get('sensitive') in ['true', 'True']:
        raise Bcfg2.Server.Plugin.PluginExecutionError
    elif 'current_bfile' in cfentry.attrib:
        contents = b64decode(cfentry.get('current_bfile'))
    elif 'current_bdiff' in cfentry.attrib:
        diff = b64decode(cfentry.get('current_bdiff'))
        contents = '\n'.join(difflib.restore(diff.split('\n'), 1))
    else:
        contents = None

    return (owner, group, perms, contents)
def _is_encrypted(self, data):
    """ Pretty crappy check for whether or not data is encrypted:
    just see if it's a valid base64-encoded string whose contents
    start with "Salted__".  But without decrypting, which rather
    begs the question in a set of crypto unit tests, I'm not sure
    how to do a better test."""
    try:
        return b64decode(data).startswith("Salted__")
    except UnicodeDecodeError:
        # decoded base64, resulting value contained non-ASCII text
        return True
    except TypeError:
        # couldn't decode base64
        return False
def build_reason_kwargs(r_ent, encoding, logger):
    binary_file = False
    sensitive_file = False
    unpruned_entries = ''
    if r_ent.get('sensitive') in ['true', 'True']:
        sensitive_file = True
        rc_diff = ''
    elif r_ent.get('current_bfile', False):
        binary_file = True
        rc_diff = r_ent.get('current_bfile')
        if len(rc_diff) > 1024 * 1024:
            rc_diff = ''
        elif len(rc_diff) == 0:
            # No point in flagging binary if we have no data
            binary_file = False
    elif r_ent.get('current_bdiff', False):
        rc_diff = b64decode(r_ent.get('current_bdiff'))
    elif r_ent.get('current_diff', False):
        rc_diff = r_ent.get('current_diff')
    else:
        rc_diff = ''
    # detect unmanaged entries in pruned directories
    if r_ent.get('prune', 'false') == 'true' and r_ent.get('qtest'):
        unpruned_elist = [e.get('path') for e in r_ent.findall('Prune')]
        unpruned_entries = "\n".join(unpruned_elist)
    if not binary_file:
        try:
            rc_diff = rc_diff.decode(encoding)
        except:
            logger.error("Reason isn't %s encoded, cannot decode it"
                         % encoding)
            rc_diff = ''
    return dict(owner=r_ent.get('owner', default=""),
                current_owner=r_ent.get('current_owner', default=""),
                group=r_ent.get('group', default=""),
                current_group=r_ent.get('current_group', default=""),
                perms=r_ent.get('perms', default=""),
                current_perms=r_ent.get('current_perms', default=""),
                status=r_ent.get('status', default=""),
                current_status=r_ent.get('current_status', default=""),
                to=r_ent.get('to', default=""),
                current_to=r_ent.get('current_to', default=""),
                version=r_ent.get('version', default=""),
                current_version=r_ent.get('current_version', default=""),
                current_exists=r_ent.get('current_exists',
                                         default="True").capitalize() == "True",
                current_diff=rc_diff,
                is_binary=binary_file,
                is_sensitive=sensitive_file,
                unpruned=unpruned_entries)
def do_authn(self):
    try:
        header = cherrypy.request.headers['Authorization']
    except KeyError:
        self.critical_error("No authentication data presented")
    auth_type, auth_content = header.split()
    auth_content = b64decode(auth_content)
    try:
        username, password = auth_content.split(":")
    except ValueError:
        username = auth_content
        password = ""
    # FIXME: Get client cert
    cert = None
    address = (cherrypy.request.remote.ip, cherrypy.request.remote.name)
    return self.authenticate(cert, username, password, address)
def _get_data(self, entry):
    """ Get a tuple of (<file data>, <is binary>) for the given entry """
    is_binary = entry.get('encoding', 'ascii') == 'base64'
    if entry.get('empty', 'false') == 'true' or not entry.text:
        tempdata = ''
    elif is_binary:
        tempdata = b64decode(entry.text)
    else:
        tempdata = entry.text
        if isinstance(tempdata, unicode) and unicode != str:
            try:
                tempdata = tempdata.encode(Bcfg2.Options.setup.encoding)
            except UnicodeEncodeError:
                err = sys.exc_info()[1]
                self.logger.error("POSIX: Error encoding file %s: %s" %
                                  (entry.get('name'), err))
    return (tempdata, is_binary)
def _get_data(self, entry):
    """ Get a tuple of (<file data>, <is binary>) for the given entry """
    is_binary = entry.get('encoding', 'ascii') == 'base64'
    if entry.get('empty', 'false') == 'true' or not entry.text:
        tempdata = ''
    elif is_binary:
        tempdata = b64decode(entry.text)
    else:
        tempdata = entry.text
        if isinstance(tempdata, unicode) and unicode != str:
            try:
                tempdata = tempdata.encode(self.setup['encoding'])
            except UnicodeEncodeError:
                err = sys.exc_info()[1]
                self.logger.error("POSIX: Error encoding file %s: %s" %
                                  (entry.get('name'), err))
    return (tempdata, is_binary)
def GetCurrentEntry(self, client, e_type, e_name):
    """GetCurrentEntry: Used by PullSource"""
    try:
        c_inst = Client.objects.get(name=client)
    except ObjectDoesNotExist:
        self.logger.error("Unknown client: %s" % client)
        raise PluginExecutionError
    except MultipleObjectsReturned:
        self.logger.error("%s Inconsistency: Multiple entries for %s." %
                          (self.__class__.__name__, client))
        raise PluginExecutionError
    try:
        cls = BaseEntry.entry_from_name(e_type + "Entry")
        result = cls.objects.filter(name=e_name, state=TYPE_BAD,
                                    interaction=c_inst.current_interaction)
    except ValueError:
        self.logger.error("Unhandled type %s" % e_type)
        raise PluginExecutionError
    if not result:
        raise PluginExecutionError
    entry = result[0]
    ret = []
    for p_entry in ('owner', 'group', 'mode'):
        this_entry = getattr(entry.current_perms, p_entry)
        if this_entry == '':
            ret.append(getattr(entry.target_perms, p_entry))
        else:
            ret.append(this_entry)
    if entry.entry_type == 'Path':
        if entry.is_sensitive():
            raise PluginExecutionError
        elif entry.detail_type == PathEntry.DETAIL_PRUNED:
            ret.append('\n'.join(entry.details))
        elif entry.is_binary():
            ret.append(b64decode(entry.details))
        elif entry.is_diff():
            ret.append('\n'.join(difflib.restore(
                entry.details.split('\n'), 1)))
        elif entry.is_too_large():
            # If len is zero the object was too large to store
            raise PluginExecutionError
        else:
            ret.append(None)
    return ret
def _get_data(self, entry):
    is_binary = False
    if entry.get('encoding', 'ascii') == 'base64':
        tempdata = b64decode(entry.text)
        is_binary = True
    elif entry.get('empty', 'false') == 'true':
        tempdata = ''
    else:
        tempdata = entry.text
        if isinstance(tempdata, unicode) and unicode != str:
            try:
                tempdata = tempdata.encode(self.setup['encoding'])
            except UnicodeEncodeError:
                err = sys.exc_info()[1]
                self.logger.error("POSIX: Error encoding file %s: %s" %
                                  (entry.get('name'), err))
    return (tempdata, is_binary)
def do_authn(self):
    """ perform authentication """
    try:
        header = cherrypy.request.headers['Authorization']
    except KeyError:
        self.critical_error("No authentication data presented")
    auth_content = header.split()[1]
    auth_content = b64decode(auth_content)
    try:
        username, password = auth_content.split(":")
    except ValueError:
        username = auth_content
        password = ""
    # FIXME: Get client cert
    cert = None
    address = (cherrypy.request.remote.ip, cherrypy.request.remote.name)
    return self.authenticate(cert, username, password, address)
def _get_data(self, entry):
    """ Get a tuple of (<file data>, <is binary>) for the given entry """
    is_binary = False
    if entry.get("encoding", "ascii") == "base64":
        tempdata = b64decode(entry.text)
        is_binary = True
    elif entry.get("empty", "false") == "true":
        tempdata = ""
    else:
        tempdata = entry.text
        if isinstance(tempdata, unicode) and unicode != str:
            try:
                tempdata = tempdata.encode(self.setup["encoding"])
            except UnicodeEncodeError:
                err = sys.exc_info()[1]
                self.logger.error("POSIX: Error encoding file %s: %s" %
                                  (entry.get("name"), err))
    return (tempdata, is_binary)
def do_authn(self):
    """ Perform authentication by calling
    :func:`Bcfg2.Server.Core.BaseCore.authenticate`. This is
    implemented as a CherryPy tool."""
    try:
        header = cherrypy.request.headers['Authorization']
    except KeyError:
        self.critical_error("No authentication data presented")
    auth_content = header.split()[1]
    auth_content = b64decode(auth_content)
    try:
        username, password = auth_content.split(":")
    except ValueError:
        username = auth_content
        password = ""
    # FIXME: Get client cert
    cert = None
    address = (cherrypy.request.remote.ip, cherrypy.request.remote.name)
    return self.authenticate(cert, username, password, address)
def authenticate(self):
    try:
        header = self.headers['Authorization']
    except KeyError:
        self.logger.error("No authentication data presented")
        return False
    auth_content = b64decode(header.split()[1])
    try:
        # py3k compatibility
        try:
            username, password = auth_content.split(":")
        except TypeError:
            username, pw = auth_content.split(bytes(":", encoding='utf-8'))
            password = pw.decode('utf-8')
    except ValueError:
        username = auth_content
        password = ""
    cert = self.request.getpeercert()
    client_address = self.request.getpeername()
    return self.server.instance.authenticate(cert, username,
                                             password, client_address)
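# The nested try/except TypeError above exists because b64decode() returns
# bytes on Python 3, and bytes.split() rejects a str separator.  A minimal
# sketch of both code paths, using made-up credentials:
from base64 import b64decode

auth_content = b64decode("dXNlcjpzM2NyZXQ=")      # b"user:s3cret" on Python 3
try:
    # Python 2: the decoded value is a str, so a str separator works
    username, password = auth_content.split(":")
except TypeError:
    # Python 3: split on a bytes separator, then decode the password
    username, pw = auth_content.split(bytes(":", encoding='utf-8'))
    password = pw.decode('utf-8')
assert password == "s3cret"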
def _import_Path(self, entry, state):
    name = entry.get('name')
    exists = entry.get('current_exists', default="true").lower() == "true"
    path_type = entry.get("type").lower()
    act_dict = dict(name=name, state=state, exists=exists,
                    path_type=path_type)

    target_dict = dict(
        owner=entry.get('owner', default="root"),
        group=entry.get('group', default="root"),
        mode=entry.get('mode', default=entry.get('perms', default=""))
    )
    fperm, created = FilePerms.objects.get_or_create(**target_dict)
    act_dict['target_perms'] = fperm

    current_dict = dict(
        owner=entry.get('current_owner', default=""),
        group=entry.get('current_group', default=""),
        mode=entry.get('current_mode',
                       default=entry.get('current_perms', default=""))
    )
    fperm, created = FilePerms.objects.get_or_create(**current_dict)
    act_dict['current_perms'] = fperm

    if path_type in ('symlink', 'hardlink'):
        act_dict['target_path'] = entry.get('to', default="")
        act_dict['current_path'] = entry.get('current_to', default="")
        self.logger.debug("Adding link %s" % name)
        return LinkEntry.entry_get_or_create(act_dict)
    elif path_type == 'device':
        # TODO devices
        self.logger.warn("device path types are not supported yet")
        return

    # TODO - vcs output
    act_dict['detail_type'] = PathEntry.DETAIL_UNUSED
    if path_type == 'directory' and entry.get('prune', 'false') == 'true':
        unpruned_elist = [e.get('path') for e in entry.findall('Prune')]
        if unpruned_elist:
            act_dict['detail_type'] = PathEntry.DETAIL_PRUNED
            act_dict['details'] = "\n".join(unpruned_elist)
    elif entry.get('sensitive', 'false').lower() == 'true':
        act_dict['detail_type'] = PathEntry.DETAIL_SENSITIVE
    else:
        cdata = None
        if entry.get('current_bfile', None):
            act_dict['detail_type'] = PathEntry.DETAIL_BINARY
            cdata = entry.get('current_bfile')
        elif entry.get('current_bdiff', None):
            act_dict['detail_type'] = PathEntry.DETAIL_DIFF
            cdata = b64decode(entry.get('current_bdiff'))
        elif entry.get('current_diff', None):
            act_dict['detail_type'] = PathEntry.DETAIL_DIFF
            # plain-text diffs are stored unencoded
            cdata = entry.get('current_diff')
        if cdata:
            if len(cdata) > self.size_limit:
                act_dict['detail_type'] = PathEntry.DETAIL_SIZE_LIMIT
                act_dict['details'] = md5(cdata).hexdigest()
            else:
                act_dict['details'] = cdata

    self.logger.debug("Adding path %s" % name)
    return PathEntry.entry_get_or_create(act_dict)
def _import_interaction(self, interaction):
    """Real import function"""
    hostname = interaction['hostname']
    stats = etree.fromstring(interaction['stats'])
    metadata = interaction['metadata']
    server = metadata['server']

    client = cache.get(hostname)
    if not client:
        client, created = Client.objects.get_or_create(name=hostname)
        if created:
            self.logger.debug("Client %s added to the db" % hostname)
        cache.set(hostname, client)

    timestamp = datetime(*strptime(stats.get('time'))[0:6])
    if len(Interaction.objects.filter(client=client,
                                      timestamp=timestamp)) > 0:
        self.logger.warn("Interaction for %s at %s already exists" %
                         (hostname, timestamp))
        return

    if 'profile' in metadata:
        profile, created = Group.objects.get_or_create(
            name=metadata['profile'])
    else:
        profile = None
    inter = Interaction(client=client,
                        timestamp=timestamp,
                        state=stats.get('state', default="unknown"),
                        repo_rev_code=stats.get('revision',
                                                default="unknown"),
                        good_count=stats.get('good', default="0"),
                        total_count=stats.get('total', default="0"),
                        server=server,
                        profile=profile)
    inter.save()
    self.logger.debug("Interaction for %s at %s INSERTED into db" %
                      (client.id, timestamp))

    # FIXME - this should be more efficient
    for group_name in metadata['groups']:
        group = cache.get("GROUP_" + group_name)
        if not group:
            group, created = Group.objects.get_or_create(name=group_name)
            if created:
                self.logger.debug("Added group %s" % group)
            cache.set("GROUP_" + group_name, group)
        inter.groups.add(group)
    for bundle_name in metadata['bundles']:
        bundle = cache.get("BUNDLE_" + bundle_name)
        if not bundle:
            bundle, created = Bundle.objects.get_or_create(name=bundle_name)
            if created:
                self.logger.debug("Added bundle %s" % bundle)
            cache.set("BUNDLE_" + bundle_name, bundle)
        inter.bundles.add(bundle)
    inter.save()

    counter_fields = {TYPE_BAD: 0, TYPE_MODIFIED: 0, TYPE_EXTRA: 0}
    pattern = [('Bad/*', TYPE_BAD),
               ('Extra/*', TYPE_EXTRA),
               ('Modified/*', TYPE_MODIFIED)]
    updates = dict(failures=[], paths=[], packages=[], actions=[],
                   services=[])
    for (xpath, state) in pattern:
        for entry in stats.findall(xpath):
            counter_fields[state] = counter_fields[state] + 1

            entry_type = entry.tag
            name = entry.get('name')
            exists = entry.get('current_exists',
                               default="true").lower() == "true"

            # handle server failures differently
            failure = entry.get('failure', '')
            if failure:
                act_dict = dict(name=name, entry_type=entry_type,
                                message=failure)
                newact = FailureEntry.entry_get_or_create(act_dict)
                updates['failures'].append(newact)
                continue

            act_dict = dict(name=name, state=state, exists=exists)

            if entry_type == 'Action':
                act_dict['status'] = entry.get('status', default="check")
                act_dict['output'] = entry.get('rc', default=-1)
                self.logger.debug("Adding action %s" % name)
                updates['actions'].append(
                    ActionEntry.entry_get_or_create(act_dict))
            elif entry_type == 'Package':
                act_dict['target_version'] = entry.get('version', default='')
                act_dict['current_version'] = entry.get('current_version',
                                                        default='')
                # extra entries are a bit different.  They can have
                # Instance objects
                if not act_dict['target_version']:
                    for instance in entry.findall("Instance"):
                        # TODO - this probably only works for rpms
                        release = instance.get('release', '')
                        arch = instance.get('arch', '')
                        act_dict['current_version'] = instance.get('version')
                        if release:
                            act_dict['current_version'] += "-" + release
                        if arch:
                            act_dict['current_version'] += "." + arch
                    self.logger.debug("Adding package %s %s" %
                                      (name, act_dict['current_version']))
                    updates['packages'].append(
                        PackageEntry.entry_get_or_create(act_dict))
                else:
                    self.logger.debug("Adding package %s %s" %
                                      (name, act_dict['target_version']))
                    # not implemented yet
                    act_dict['verification_details'] = \
                        entry.get('verification_details', '')
                    updates['packages'].append(
                        PackageEntry.entry_get_or_create(act_dict))
            elif entry_type == 'Path':
                path_type = entry.get("type").lower()
                act_dict['path_type'] = path_type

                target_dict = dict(
                    owner=entry.get('owner', default="root"),
                    group=entry.get('group', default="root"),
                    mode=entry.get('mode',
                                   default=entry.get('perms', default=""))
                )
                fperm, created = FilePerms.objects.get_or_create(
                    **target_dict)
                act_dict['target_perms'] = fperm

                current_dict = dict(
                    owner=entry.get('current_owner', default=""),
                    group=entry.get('current_group', default=""),
                    mode=entry.get('current_mode',
                                   default=entry.get('current_perms',
                                                     default=""))
                )
                fperm, created = FilePerms.objects.get_or_create(
                    **current_dict)
                act_dict['current_perms'] = fperm

                if path_type in ('symlink', 'hardlink'):
                    act_dict['target_path'] = entry.get('to', default="")
                    act_dict['current_path'] = entry.get('current_to',
                                                         default="")
                    self.logger.debug("Adding link %s" % name)
                    updates['paths'].append(
                        LinkEntry.entry_get_or_create(act_dict))
                    continue
                elif path_type == 'device':
                    # TODO devices
                    self.logger.warn("device path types are not "
                                     "supported yet")
                    continue

                # TODO - vcs output
                act_dict['detail_type'] = PathEntry.DETAIL_UNUSED
                if path_type == 'directory' and \
                        entry.get('prune', 'false') == 'true':
                    unpruned_elist = [e.get('path')
                                      for e in entry.findall('Prune')]
                    if unpruned_elist:
                        act_dict['detail_type'] = PathEntry.DETAIL_PRUNED
                        act_dict['details'] = "\n".join(unpruned_elist)
                elif entry.get('sensitive', 'false').lower() == 'true':
                    act_dict['detail_type'] = PathEntry.DETAIL_SENSITIVE
                else:
                    cdata = None
                    if entry.get('current_bfile', None):
                        act_dict['detail_type'] = PathEntry.DETAIL_BINARY
                        cdata = entry.get('current_bfile')
                    elif entry.get('current_bdiff', None):
                        act_dict['detail_type'] = PathEntry.DETAIL_DIFF
                        cdata = b64decode(entry.get('current_bdiff'))
                    elif entry.get('current_diff', None):
                        act_dict['detail_type'] = PathEntry.DETAIL_DIFF
                        # plain-text diffs are stored unencoded
                        cdata = entry.get('current_diff')
                    if cdata:
                        if len(cdata) > self.size_limit:
                            act_dict['detail_type'] = \
                                PathEntry.DETAIL_SIZE_LIMIT
                            act_dict['details'] = md5(cdata).hexdigest()
                        else:
                            act_dict['details'] = cdata

                self.logger.debug("Adding path %s" % name)
                updates['paths'].append(
                    PathEntry.entry_get_or_create(act_dict))

                # TODO - secontext
                # TODO - acls
            elif entry_type == 'Service':
                act_dict['target_status'] = entry.get('status', default='')
                act_dict['current_status'] = entry.get('current_status',
                                                       default='')
                self.logger.debug("Adding service %s" % name)
                updates['services'].append(
                    ServiceEntry.entry_get_or_create(act_dict))
            elif entry_type == 'SELinux':
                self.logger.info("SELinux not implemented yet")
            else:
                self.logger.error("Unknown type %s not handled by "
                                  "reporting yet" % entry_type)

    inter.bad_count = counter_fields[TYPE_BAD]
    inter.modified_count = counter_fields[TYPE_MODIFIED]
    inter.extra_count = counter_fields[TYPE_EXTRA]
    inter.save()
    for entry_type in updates.keys():
        getattr(inter, entry_type).add(*updates[entry_type])

    # performance metrics
    for times in stats.findall('OpStamps'):
        for metric, value in list(times.items()):
            Performance(interaction=inter, metric=metric,
                        value=value).save()
def _import_interaction(self, interaction):
    """Real import function"""
    hostname = interaction['hostname']
    stats = etree.fromstring(interaction['stats'])
    metadata = interaction['metadata']
    server = metadata['server']

    client = cache.get(hostname)
    if not client:
        client, created = Client.objects.get_or_create(name=hostname)
        if created:
            self.logger.debug("Client %s added to the db" % hostname)
        cache.set(hostname, client)

    timestamp = datetime(*strptime(stats.get('time'))[0:6])
    if len(Interaction.objects.filter(client=client,
                                      timestamp=timestamp)) > 0:
        self.logger.warn("Interaction for %s at %s already exists" %
                         (hostname, timestamp))
        return

    profile, created = Group.objects.get_or_create(
        name=metadata['profile'])
    inter = Interaction(client=client,
                        timestamp=timestamp,
                        state=stats.get('state', default="unknown"),
                        repo_rev_code=stats.get('revision',
                                                default="unknown"),
                        good_count=stats.get('good', default="0"),
                        total_count=stats.get('total', default="0"),
                        server=server,
                        profile=profile)
    inter.save()
    self.logger.debug("Interaction for %s at %s INSERTED into db" %
                      (client.id, timestamp))

    # FIXME - this should be more efficient
    for group_name in metadata['groups']:
        group = cache.get("GROUP_" + group_name)
        if not group:
            group, created = Group.objects.get_or_create(name=group_name)
            if created:
                self.logger.debug("Added group %s" % group)
            cache.set("GROUP_" + group_name, group)
        inter.groups.add(group)
    for bundle_name in metadata['bundles']:
        bundle = cache.get("BUNDLE_" + bundle_name)
        if not bundle:
            bundle, created = Bundle.objects.get_or_create(
                name=bundle_name)
            if created:
                self.logger.debug("Added bundle %s" % bundle)
            cache.set("BUNDLE_" + bundle_name, bundle)
        inter.bundles.add(bundle)
    inter.save()

    counter_fields = {TYPE_BAD: 0, TYPE_MODIFIED: 0, TYPE_EXTRA: 0}
    pattern = [('Bad/*', TYPE_BAD),
               ('Extra/*', TYPE_EXTRA),
               ('Modified/*', TYPE_MODIFIED)]
    updates = dict(failures=[], paths=[], packages=[], actions=[],
                   services=[])
    for (xpath, state) in pattern:
        for entry in stats.findall(xpath):
            counter_fields[state] = counter_fields[state] + 1

            entry_type = entry.tag
            name = entry.get('name')
            exists = entry.get('current_exists',
                               default="true").lower() == "true"

            # handle server failures differently
            failure = entry.get('failure', '')
            if failure:
                act_dict = dict(name=name, entry_type=entry_type,
                                message=failure)
                newact = FailureEntry.entry_get_or_create(act_dict)
                updates['failures'].append(newact)
                continue

            act_dict = dict(name=name, state=state, exists=exists)

            if entry_type == 'Action':
                act_dict['status'] = entry.get('status', default="check")
                act_dict['output'] = entry.get('rc', default=-1)
                self.logger.debug("Adding action %s" % name)
                updates['actions'].append(
                    ActionEntry.entry_get_or_create(act_dict))
            elif entry_type == 'Package':
                act_dict['target_version'] = entry.get('version', default='')
                act_dict['current_version'] = entry.get('current_version',
                                                        default='')
                # extra entries are a bit different.  They can have
                # Instance objects
                if not act_dict['target_version']:
                    for instance in entry.findall("Instance"):
                        # TODO - this probably only works for rpms
                        release = instance.get('release', '')
                        arch = instance.get('arch', '')
                        act_dict['current_version'] = instance.get(
                            'version')
                        if release:
                            act_dict['current_version'] += "-" + release
                        if arch:
                            act_dict['current_version'] += "." + arch
                    self.logger.debug(
                        "Adding package %s %s" %
                        (name, act_dict['current_version']))
                    updates['packages'].append(
                        PackageEntry.entry_get_or_create(act_dict))
                else:
                    self.logger.debug("Adding package %s %s" %
                                      (name, act_dict['target_version']))
                    # not implemented yet
                    act_dict['verification_details'] = entry.get(
                        'verification_details', '')
                    updates['packages'].append(
                        PackageEntry.entry_get_or_create(act_dict))
            elif entry_type == 'Path':
                path_type = entry.get("type").lower()
                act_dict['path_type'] = path_type

                target_dict = dict(
                    owner=entry.get('owner', default="root"),
                    group=entry.get('group', default="root"),
                    perms=entry.get('perms', default=""),
                )
                fperm, created = FilePerms.objects.get_or_create(
                    **target_dict)
                act_dict['target_perms'] = fperm

                current_dict = dict(
                    owner=entry.get('current_owner', default=""),
                    group=entry.get('current_group', default=""),
                    perms=entry.get('current_perms', default=""),
                )
                fperm, created = FilePerms.objects.get_or_create(
                    **current_dict)
                act_dict['current_perms'] = fperm

                if path_type in ('symlink', 'hardlink'):
                    act_dict['target_path'] = entry.get('to', default="")
                    act_dict['current_path'] = entry.get('current_to',
                                                         default="")
                    self.logger.debug("Adding link %s" % name)
                    updates['paths'].append(
                        LinkEntry.entry_get_or_create(act_dict))
                    continue
                elif path_type == 'device':
                    # TODO devices
                    self.logger.warn(
                        "device path types are not supported yet")
                    continue

                # TODO - vcs output
                act_dict['detail_type'] = PathEntry.DETAIL_UNUSED
                if path_type == 'directory' and entry.get(
                        'prune', 'false') == 'true':
                    unpruned_elist = [
                        e.get('path') for e in entry.findall('Prune')
                    ]
                    if unpruned_elist:
                        act_dict['detail_type'] = PathEntry.DETAIL_PRUNED
                        act_dict['details'] = "\n".join(unpruned_elist)
                elif entry.get('sensitive', 'false').lower() == 'true':
                    act_dict['detail_type'] = PathEntry.DETAIL_SENSITIVE
                else:
                    cdata = None
                    if entry.get('current_bfile', None):
                        act_dict['detail_type'] = PathEntry.DETAIL_BINARY
                        cdata = entry.get('current_bfile')
                    elif entry.get('current_bdiff', None):
                        act_dict['detail_type'] = PathEntry.DETAIL_DIFF
                        cdata = b64decode(entry.get('current_bdiff'))
                    elif entry.get('current_diff', None):
                        act_dict['detail_type'] = PathEntry.DETAIL_DIFF
                        # plain-text diffs are stored unencoded
                        cdata = entry.get('current_diff')
                    if cdata:
                        if len(cdata) > self.size_limit:
                            act_dict['detail_type'] = \
                                PathEntry.DETAIL_SIZE_LIMIT
                            act_dict['details'] = md5(cdata).hexdigest()
                        else:
                            act_dict['details'] = cdata

                self.logger.debug("Adding path %s" % name)
                updates['paths'].append(
                    PathEntry.entry_get_or_create(act_dict))

                # TODO - secontext
                # TODO - acls
            elif entry_type == 'Service':
                act_dict['target_status'] = entry.get('status', default='')
                act_dict['current_status'] = entry.get('current_status',
                                                       default='')
                self.logger.debug("Adding service %s" % name)
                updates['services'].append(
                    ServiceEntry.entry_get_or_create(act_dict))
            elif entry_type == 'SELinux':
                self.logger.info("SELinux not implemented yet")
            else:
                self.logger.error(
                    "Unknown type %s not handled by reporting yet" %
                    entry_type)

    inter.bad_count = counter_fields[TYPE_BAD]
    inter.modified_count = counter_fields[TYPE_MODIFIED]
    inter.extra_count = counter_fields[TYPE_EXTRA]
    inter.save()
    for entry_type in updates.keys():
        getattr(inter, entry_type).add(*updates[entry_type])

    # performance metrics
    for times in stats.findall('OpStamps'):
        for metric, value in list(times.items()):
            Performance(interaction=inter, metric=metric,
                        value=value).save()
def _import_Path(self, entry, state):
    name = entry.get('name')
    exists = entry.get('current_exists', default="true").lower() == "true"
    path_type = entry.get("type").lower()
    act_dict = dict(name=name, state=state, exists=exists,
                    path_type=path_type)

    target_dict = dict(owner=entry.get('owner', default="root"),
                       group=entry.get('group', default="root"),
                       mode=entry.get('mode',
                                      default=entry.get('perms',
                                                        default="")))
    fperm, created = FilePerms.objects.get_or_create(**target_dict)
    act_dict['target_perms'] = fperm

    current_dict = dict(owner=entry.get('current_owner', default=""),
                        group=entry.get('current_group', default=""),
                        mode=entry.get('current_mode',
                                       default=entry.get('current_perms',
                                                         default="")))
    fperm, created = FilePerms.objects.get_or_create(**current_dict)
    act_dict['current_perms'] = fperm

    if path_type in ('symlink', 'hardlink'):
        act_dict['target_path'] = entry.get('to', default="")
        act_dict['current_path'] = entry.get('current_to', default="")
        self.logger.debug("Adding link %s" % name)
        return LinkEntry.entry_get_or_create(act_dict)
    elif path_type == 'device':
        # TODO devices
        self.logger.warn("device path types are not supported yet")
        return

    # TODO - vcs output
    act_dict['detail_type'] = PathEntry.DETAIL_UNUSED
    if path_type == 'directory' and entry.get('prune', 'false') == 'true':
        unpruned_elist = [e.get('name') for e in entry.findall('Prune')]
        if unpruned_elist:
            act_dict['detail_type'] = PathEntry.DETAIL_PRUNED
            act_dict['details'] = "\n".join(unpruned_elist)
    elif entry.get('sensitive', 'false').lower() == 'true':
        act_dict['detail_type'] = PathEntry.DETAIL_SENSITIVE
    else:
        cdata = None
        if entry.get('current_bfile', None):
            act_dict['detail_type'] = PathEntry.DETAIL_BINARY
            cdata = entry.get('current_bfile')
        elif entry.get('current_bdiff', None):
            act_dict['detail_type'] = PathEntry.DETAIL_DIFF
            cdata = b64decode(entry.get('current_bdiff'))
        elif entry.get('current_diff', None):
            act_dict['detail_type'] = PathEntry.DETAIL_DIFF
            # plain-text diffs are stored unencoded
            cdata = entry.get('current_diff')
        if cdata:
            if len(cdata) > Bcfg2.Options.setup.file_limit:
                act_dict['detail_type'] = PathEntry.DETAIL_SIZE_LIMIT
                act_dict['details'] = md5(cdata).hexdigest()
            else:
                act_dict['details'] = cdata

    self.logger.debug("Adding path %s" % name)
    return PathEntry.entry_get_or_create(act_dict)