def ssl_decrypt(data, passwd, algorithm=None):
    """ Decrypt openssl-encrypted data.  This can decrypt data
    encrypted by :func:`ssl_encrypt`, or ``openssl enc``.  It performs
    a base64 decode first if the data is base64 encoded, and
    automatically determines the salt and initialization vector (both
    of which are embedded in the encrypted data).

    :param data: The encrypted data (either base64-encoded or raw
                 binary) to decrypt
    :type data: string
    :param passwd: The password to use to decrypt the data
    :type passwd: string
    :param algorithm: The cipher algorithm to use
    :type algorithm: string
    :returns: string - The decrypted data
    """
    raw = b64decode(data)
    # the 8-byte salt is embedded right after the "Salted__" magic
    salt = raw[8:16]
    # OpenSSL-style KDF: three chained MD5 rounds over passwd + salt
    # pylint: disable=E1101,E1121
    digests = []
    prev = ""
    for _ in range(3):
        prev = md5(prev + passwd + salt).digest()
        digests.append(prev)
    # pylint: enable=E1101,E1121
    return str_decrypt(raw[16:],
                       key=digests[0] + digests[1],
                       iv=digests[2],
                       algorithm=algorithm)
def ssl_encrypt(plaintext, passwd, algorithm=None, salt=None):
    """ Encrypt data in a format that is openssl compatible.

    :param plaintext: The plaintext data to encrypt
    :type plaintext: string
    :param passwd: The password to use to encrypt the data
    :type passwd: string
    :param algorithm: The cipher algorithm to use
    :type algorithm: string
    :param salt: The salt to use.  If none is provided, one will be
                 randomly generated.
    :type salt: bytes
    :returns: string - The base64-encoded, salted, encrypted string.
              The string includes a trailing newline to make it fully
              compatible with openssl command-line tools.
    """
    if salt is None:
        salt = Rand.rand_bytes(8)

    # OpenSSL-style KDF: three chained MD5 rounds over passwd + salt
    # pylint: disable=E1101,E1121
    digests = []
    prev = ""
    for _ in range(3):
        prev = md5(prev + passwd + salt).digest()
        digests.append(prev)
    # pylint: enable=E1101,E1121

    crypted = str_encrypt(plaintext,
                          key=digests[0] + digests[1],
                          salt=salt,
                          iv=digests[2],
                          algorithm=algorithm)
    # "Salted__" magic + salt prefix makes the output openssl-compatible
    return b64encode("Salted__" + salt + crypted) + "\n"
def _import_Path(self, entry, state):
    """ Import a single <Path/> statistics element into the reporting
    database.

    :param entry: the <Path/> element from the client statistics XML
    :param state: the entry state constant (e.g. TYPE_BAD)
    :returns: a LinkEntry or PathEntry model instance, or None for
              unsupported path types (devices)
    """
    name = entry.get("name")
    exists = entry.get("current_exists", default="true").lower() == "true"
    path_type = entry.get("type").lower()

    act_dict = dict(name=name, state=state, exists=exists,
                    path_type=path_type)

    # desired permissions; "perms" is the legacy attribute name for "mode"
    target_dict = dict(
        owner=entry.get("owner", default="root"),
        group=entry.get("group", default="root"),
        mode=entry.get("mode", default=entry.get("perms", default="")),
    )
    fperm, created = FilePerms.objects.get_or_create(**target_dict)
    act_dict["target_perms"] = fperm

    # permissions actually found on the client
    current_dict = dict(
        owner=entry.get("current_owner", default=""),
        group=entry.get("current_group", default=""),
        mode=entry.get("current_mode",
                       default=entry.get("current_perms", default="")),
    )
    fperm, created = FilePerms.objects.get_or_create(**current_dict)
    act_dict["current_perms"] = fperm

    if path_type in ("symlink", "hardlink"):
        act_dict["target_path"] = entry.get("to", default="")
        act_dict["current_path"] = entry.get("current_to", default="")
        self.logger.debug("Adding link %s" % name)
        return LinkEntry.entry_get_or_create(act_dict)
    elif path_type == "device":
        # TODO devices
        self.logger.warn("device path types are not supported yet")
        return

    # TODO - vcs output
    act_dict["detail_type"] = PathEntry.DETAIL_UNUSED
    if path_type == "directory" and entry.get("prune", "false") == "true":
        unpruned_elist = [e.get("path") for e in entry.findall("Prune")]
        if unpruned_elist:
            act_dict["detail_type"] = PathEntry.DETAIL_PRUNED
            act_dict["details"] = "\n".join(unpruned_elist)
    elif entry.get("sensitive", "false").lower() == "true":
        act_dict["detail_type"] = PathEntry.DETAIL_SENSITIVE
    else:
        cdata = None
        if entry.get("current_bfile", None):
            act_dict["detail_type"] = PathEntry.DETAIL_BINARY
            cdata = entry.get("current_bfile")
        elif entry.get("current_bdiff", None):
            act_dict["detail_type"] = PathEntry.DETAIL_DIFF
            cdata = b64decode(entry.get("current_bdiff"))
        elif entry.get("current_diff", None):
            act_dict["detail_type"] = PathEntry.DETAIL_DIFF
            # bugfix: read "current_diff" here; the original read the
            # "current_bdiff" attribute, which is known to be absent in
            # this branch, so the diff data was silently dropped
            cdata = entry.get("current_diff")
        if cdata:
            if len(cdata) > Bcfg2.Options.setup.file_limit:
                # diff too large to store verbatim; record a hash instead
                act_dict["detail_type"] = PathEntry.DETAIL_SIZE_LIMIT
                act_dict["details"] = md5(cdata).hexdigest()
            else:
                act_dict["details"] = cdata
    self.logger.debug("Adding path %s" % name)
    return PathEntry.entry_get_or_create(act_dict)
def cachekey(self):
    """ A unique key for this source that will be used to generate
    :attr:`cachefile` and other cache paths """
    # pickle the attributes that define this source's identity, then
    # hash the serialized form to get a stable filesystem-safe key
    identity = [self.version, self.components, self.url, self.rawurl,
                self.arches]
    return md5(cPickle.dumps(identity)).hexdigest()
def verify_cert_against_key(self, filename, key_filename):
    """ check that a certificate validates against its private key. """
    def _modulus_digest(cmd):
        # hash the modulus openssl reports so cert and key can be compared
        proc = Popen(cmd, stdout=PIPE, stderr=STDOUT)
        return md5(proc.stdout.read().strip()).hexdigest()

    cert = self.data + filename
    key = self.data + key_filename
    cert_md5 = _modulus_digest(
        ["openssl", "x509", "-noout", "-modulus", "-in", cert])
    key_md5 = _modulus_digest(
        ["openssl", "rsa", "-noout", "-modulus", "-in", key])

    # a cert and key match iff they share the same RSA modulus
    if cert_md5 == key_md5:
        self.debug_log("SSLCA: %s verified successfully against key %s" %
                       (filename, key_filename))
        return True
    self.logger.warning("SSLCA: %s failed verification against key %s" %
                        (filename, key_filename))
    return False
def _import_Path(self, entry, state):
    """ Import a single <Path/> statistics element into the reporting
    database.

    :param entry: the <Path/> element from the client statistics XML
    :param state: the entry state constant (e.g. TYPE_BAD)
    :returns: a LinkEntry or PathEntry model instance, or None for
              unsupported path types (devices)
    """
    name = entry.get('name')
    exists = entry.get('current_exists', default="true").lower() == "true"
    path_type = entry.get("type").lower()

    act_dict = dict(name=name, state=state, exists=exists,
                    path_type=path_type)

    # desired permissions; 'perms' is the legacy attribute name for 'mode'
    target_dict = dict(
        owner=entry.get('owner', default="root"),
        group=entry.get('group', default="root"),
        mode=entry.get('mode', default=entry.get('perms', default=""))
    )
    fperm, created = FilePerms.objects.get_or_create(**target_dict)
    act_dict['target_perms'] = fperm

    # permissions actually found on the client
    current_dict = dict(
        owner=entry.get('current_owner', default=""),
        group=entry.get('current_group', default=""),
        mode=entry.get('current_mode',
                       default=entry.get('current_perms', default=""))
    )
    fperm, created = FilePerms.objects.get_or_create(**current_dict)
    act_dict['current_perms'] = fperm

    if path_type in ('symlink', 'hardlink'):
        act_dict['target_path'] = entry.get('to', default="")
        act_dict['current_path'] = entry.get('current_to', default="")
        self.logger.debug("Adding link %s" % name)
        return LinkEntry.entry_get_or_create(act_dict)
    elif path_type == 'device':
        # TODO devices
        self.logger.warn("device path types are not supported yet")
        return

    # TODO - vcs output
    act_dict['detail_type'] = PathEntry.DETAIL_UNUSED
    if path_type == 'directory' and entry.get('prune', 'false') == 'true':
        unpruned_elist = [e.get('path') for e in entry.findall('Prune')]
        if unpruned_elist:
            act_dict['detail_type'] = PathEntry.DETAIL_PRUNED
            act_dict['details'] = "\n".join(unpruned_elist)
    elif entry.get('sensitive', 'false').lower() == 'true':
        act_dict['detail_type'] = PathEntry.DETAIL_SENSITIVE
    else:
        cdata = None
        if entry.get('current_bfile', None):
            act_dict['detail_type'] = PathEntry.DETAIL_BINARY
            cdata = entry.get('current_bfile')
        elif entry.get('current_bdiff', None):
            act_dict['detail_type'] = PathEntry.DETAIL_DIFF
            cdata = b64decode(entry.get('current_bdiff'))
        elif entry.get('current_diff', None):
            act_dict['detail_type'] = PathEntry.DETAIL_DIFF
            # bugfix: read 'current_diff' here; the original read the
            # 'current_bdiff' attribute, which is known to be absent in
            # this branch, so the diff data was silently dropped
            cdata = entry.get('current_diff')
        if cdata:
            if len(cdata) > self.size_limit:
                # diff too large to store verbatim; record a hash instead
                act_dict['detail_type'] = PathEntry.DETAIL_SIZE_LIMIT
                act_dict['details'] = md5(cdata).hexdigest()
            else:
                act_dict['details'] = cdata
    self.logger.debug("Adding path %s" % name)
    return PathEntry.entry_get_or_create(act_dict)
def cachekey(self):
    """ A unique identifier for the set of sources contained in this
    ``Collection`` object.  This is unique to a set of sources, **not**
    necessarily to the client, which lets clients with identical
    sources share cache data."""
    # hash the UTF-8 serialization of the source list so identical
    # source sets map to the same cache key
    serialized = self.sourcelist().encode('UTF-8')
    return md5(serialized).hexdigest()
def _import_Path(self, entry, state):
    """ Import a single <Path/> statistics element into the reporting
    database.

    :param entry: the <Path/> element from the client statistics XML
    :param state: the entry state constant (e.g. TYPE_BAD)
    :returns: a LinkEntry or PathEntry model instance, or None for
              unsupported path types (devices)
    """
    name = entry.get('name')
    exists = entry.get('current_exists', default="true").lower() == "true"
    path_type = entry.get("type").lower()

    act_dict = dict(name=name, state=state, exists=exists,
                    path_type=path_type)

    # desired permissions; 'perms' is the legacy attribute name for 'mode'
    target_dict = dict(owner=entry.get('owner', default="root"),
                       group=entry.get('group', default="root"),
                       mode=entry.get('mode',
                                      default=entry.get('perms',
                                                        default="")))
    fperm, created = FilePerms.objects.get_or_create(**target_dict)
    act_dict['target_perms'] = fperm

    # permissions actually found on the client
    current_dict = dict(owner=entry.get('current_owner', default=""),
                        group=entry.get('current_group', default=""),
                        mode=entry.get('current_mode',
                                       default=entry.get('current_perms',
                                                         default="")))
    fperm, created = FilePerms.objects.get_or_create(**current_dict)
    act_dict['current_perms'] = fperm

    if path_type in ('symlink', 'hardlink'):
        act_dict['target_path'] = entry.get('to', default="")
        act_dict['current_path'] = entry.get('current_to', default="")
        self.logger.debug("Adding link %s" % name)
        return LinkEntry.entry_get_or_create(act_dict)
    elif path_type == 'device':
        # TODO devices
        self.logger.warn("device path types are not supported yet")
        return

    # TODO - vcs output
    act_dict['detail_type'] = PathEntry.DETAIL_UNUSED
    if path_type == 'directory' and entry.get('prune', 'false') == 'true':
        # consistency fix: Prune elements carry a 'path' attribute (as
        # read by the sibling importers); the original read 'name' here
        unpruned_elist = [e.get('path') for e in entry.findall('Prune')]
        if unpruned_elist:
            act_dict['detail_type'] = PathEntry.DETAIL_PRUNED
            act_dict['details'] = "\n".join(unpruned_elist)
    elif entry.get('sensitive', 'false').lower() == 'true':
        act_dict['detail_type'] = PathEntry.DETAIL_SENSITIVE
    else:
        cdata = None
        if entry.get('current_bfile', None):
            act_dict['detail_type'] = PathEntry.DETAIL_BINARY
            cdata = entry.get('current_bfile')
        elif entry.get('current_bdiff', None):
            act_dict['detail_type'] = PathEntry.DETAIL_DIFF
            cdata = b64decode(entry.get('current_bdiff'))
        elif entry.get('current_diff', None):
            act_dict['detail_type'] = PathEntry.DETAIL_DIFF
            # bugfix: read 'current_diff' here; the original read the
            # 'current_bdiff' attribute, which is known to be absent in
            # this branch, so the diff data was silently dropped
            cdata = entry.get('current_diff')
        if cdata:
            if len(cdata) > Bcfg2.Options.setup.file_limit:
                # diff too large to store verbatim; record a hash instead
                act_dict['detail_type'] = PathEntry.DETAIL_SIZE_LIMIT
                act_dict['details'] = md5(cdata).hexdigest()
            else:
                act_dict['details'] = cdata
    self.logger.debug("Adding path %s" % name)
    return PathEntry.entry_get_or_create(act_dict)
def cachekey(self):
    """ A unique key for this source that will be used to generate
    :attr:`cachefile` and other cache paths """
    # serialize the identity-defining attributes and hash the result
    fingerprint = cPickle.dumps([self.version,
                                 self.components,
                                 self.url,
                                 self.rawurl,
                                 self.arches])
    return md5(fingerprint).hexdigest()
def _import_interaction(self, interaction):
    """Real import function.

    Imports one client interaction (statistics XML plus metadata) into
    the reporting database: creates/caches the Client, Group and Bundle
    rows, records the Interaction, then imports every Bad/Extra/Modified
    entry and the performance metrics.

    :param interaction: dict with 'hostname', 'stats' (XML string) and
                        'metadata' keys
    """
    hostname = interaction['hostname']
    stats = etree.fromstring(interaction['stats'])
    metadata = interaction['metadata']
    server = metadata['server']

    client = cache.get(hostname)
    if not client:
        client, created = Client.objects.get_or_create(name=hostname)
        if created:
            self.logger.debug("Client %s added to the db" % hostname)
        cache.set(hostname, client)

    timestamp = datetime(*strptime(stats.get('time'))[0:6])
    # skip duplicate imports of the same interaction
    if len(Interaction.objects.filter(client=client,
                                      timestamp=timestamp)) > 0:
        self.logger.warn("Interaction for %s at %s already exists" %
                         (hostname, timestamp))
        return

    if 'profile' in metadata:
        profile, created = Group.objects.get_or_create(
            name=metadata['profile'])
    else:
        profile = None
    inter = Interaction(client=client,
                        timestamp=timestamp,
                        state=stats.get('state', default="unknown"),
                        repo_rev_code=stats.get('revision',
                                                default="unknown"),
                        good_count=stats.get('good', default="0"),
                        total_count=stats.get('total', default="0"),
                        server=server,
                        profile=profile)
    inter.save()
    self.logger.debug("Interaction for %s at %s with INSERTED in to db" %
                      (client.id, timestamp))

    #FIXME - this should be more efficient
    for group_name in metadata['groups']:
        group = cache.get("GROUP_" + group_name)
        if not group:
            group, created = Group.objects.get_or_create(name=group_name)
            if created:
                self.logger.debug("Added group %s" % group)
            cache.set("GROUP_" + group_name, group)
        inter.groups.add(group)

    for bundle_name in metadata['bundles']:
        bundle = cache.get("BUNDLE_" + bundle_name)
        if not bundle:
            bundle, created = Bundle.objects.get_or_create(
                name=bundle_name)
            if created:
                self.logger.debug("Added bundle %s" % bundle)
            cache.set("BUNDLE_" + bundle_name, bundle)
        inter.bundles.add(bundle)
    inter.save()

    counter_fields = {TYPE_BAD: 0, TYPE_MODIFIED: 0, TYPE_EXTRA: 0}
    pattern = [('Bad/*', TYPE_BAD),
               ('Extra/*', TYPE_EXTRA),
               ('Modified/*', TYPE_MODIFIED)]
    updates = dict(failures=[], paths=[], packages=[], actions=[],
                   services=[])
    for (xpath, state) in pattern:
        for entry in stats.findall(xpath):
            counter_fields[state] = counter_fields[state] + 1
            entry_type = entry.tag
            name = entry.get('name')
            exists = entry.get('current_exists',
                               default="true").lower() == "true"

            # handle server failures differently
            failure = entry.get('failure', '')
            if failure:
                act_dict = dict(name=name, entry_type=entry_type,
                                message=failure)
                newact = FailureEntry.entry_get_or_create(act_dict)
                updates['failures'].append(newact)
                continue

            act_dict = dict(name=name, state=state, exists=exists)
            if entry_type == 'Action':
                act_dict['status'] = entry.get('status', default="check")
                act_dict['output'] = entry.get('rc', default=-1)
                self.logger.debug("Adding action %s" % name)
                updates['actions'].append(
                    ActionEntry.entry_get_or_create(act_dict))
            elif entry_type == 'Package':
                act_dict['target_version'] = entry.get('version',
                                                       default='')
                act_dict['current_version'] = entry.get('current_version',
                                                        default='')
                # extra entries are a bit different. They can have
                # Instance objects
                if not act_dict['target_version']:
                    for instance in entry.findall("Instance"):
                        #TODO - this probably only works for rpms
                        release = instance.get('release', '')
                        arch = instance.get('arch', '')
                        act_dict['current_version'] = \
                            instance.get('version')
                        if release:
                            act_dict['current_version'] += "-" + release
                        if arch:
                            act_dict['current_version'] += "." + arch
                        self.logger.debug(
                            "Adding package %s %s" %
                            (name, act_dict['current_version']))
                        updates['packages'].append(
                            PackageEntry.entry_get_or_create(act_dict))
                else:
                    self.logger.debug(
                        "Adding package %s %s" %
                        (name, act_dict['target_version']))
                    # not implemented yet
                    act_dict['verification_details'] = \
                        entry.get('verification_details', '')
                    updates['packages'].append(
                        PackageEntry.entry_get_or_create(act_dict))
            elif entry_type == 'Path':
                path_type = entry.get("type").lower()
                act_dict['path_type'] = path_type

                # desired permissions; 'perms' is the legacy name
                target_dict = dict(
                    owner=entry.get('owner', default="root"),
                    group=entry.get('group', default="root"),
                    mode=entry.get('mode',
                                   default=entry.get('perms', default=""))
                )
                fperm, created = FilePerms.objects.get_or_create(
                    **target_dict)
                act_dict['target_perms'] = fperm

                # permissions actually found on the client
                current_dict = dict(
                    owner=entry.get('current_owner', default=""),
                    group=entry.get('current_group', default=""),
                    mode=entry.get('current_mode',
                                   default=entry.get('current_perms',
                                                     default=""))
                )
                fperm, created = FilePerms.objects.get_or_create(
                    **current_dict)
                act_dict['current_perms'] = fperm

                if path_type in ('symlink', 'hardlink'):
                    act_dict['target_path'] = entry.get('to', default="")
                    act_dict['current_path'] = entry.get('current_to',
                                                         default="")
                    self.logger.debug("Adding link %s" % name)
                    updates['paths'].append(
                        LinkEntry.entry_get_or_create(act_dict))
                    continue
                elif path_type == 'device':
                    #TODO devices
                    self.logger.warn(
                        "device path types are not supported yet")
                    continue

                # TODO - vcs output
                act_dict['detail_type'] = PathEntry.DETAIL_UNUSED
                if path_type == 'directory' and \
                        entry.get('prune', 'false') == 'true':
                    unpruned_elist = [e.get('path')
                                      for e in entry.findall('Prune')]
                    if unpruned_elist:
                        act_dict['detail_type'] = PathEntry.DETAIL_PRUNED
                        act_dict['details'] = "\n".join(unpruned_elist)
                elif entry.get('sensitive', 'false').lower() == 'true':
                    act_dict['detail_type'] = PathEntry.DETAIL_SENSITIVE
                else:
                    cdata = None
                    if entry.get('current_bfile', None):
                        act_dict['detail_type'] = PathEntry.DETAIL_BINARY
                        cdata = entry.get('current_bfile')
                    elif entry.get('current_bdiff', None):
                        act_dict['detail_type'] = PathEntry.DETAIL_DIFF
                        cdata = b64decode(entry.get('current_bdiff'))
                    elif entry.get('current_diff', None):
                        act_dict['detail_type'] = PathEntry.DETAIL_DIFF
                        # bugfix: read 'current_diff' here; the original
                        # read the absent 'current_bdiff' attribute,
                        # silently dropping the diff data
                        cdata = entry.get('current_diff')
                    if cdata:
                        if len(cdata) > self.size_limit:
                            # too large to store; record a hash instead
                            act_dict['detail_type'] = \
                                PathEntry.DETAIL_SIZE_LIMIT
                            act_dict['details'] = md5(cdata).hexdigest()
                        else:
                            act_dict['details'] = cdata
                self.logger.debug("Adding path %s" % name)
                updates['paths'].append(
                    PathEntry.entry_get_or_create(act_dict))

                #TODO - secontext
                #TODO - acls
            elif entry_type == 'Service':
                act_dict['target_status'] = entry.get('status', default='')
                act_dict['current_status'] = entry.get('current_status',
                                                       default='')
                self.logger.debug("Adding service %s" % name)
                updates['services'].append(
                    ServiceEntry.entry_get_or_create(act_dict))
            elif entry_type == 'SELinux':
                self.logger.info("SELinux not implemented yet")
            else:
                self.logger.error(
                    "Unknown type %s not handled by reporting yet" %
                    entry_type)

    inter.bad_count = counter_fields[TYPE_BAD]
    inter.modified_count = counter_fields[TYPE_MODIFIED]
    inter.extra_count = counter_fields[TYPE_EXTRA]
    inter.save()
    for entry_type in updates.keys():
        getattr(inter, entry_type).add(*updates[entry_type])

    # performance metrics
    for times in stats.findall('OpStamps'):
        for metric, value in list(times.items()):
            Performance(interaction=inter, metric=metric,
                        value=value).save()
def _import_interaction(self, interaction):
    """Real import function.

    Imports one client interaction (statistics XML plus metadata) into
    the reporting database: creates/caches the Client, Group and Bundle
    rows, records the Interaction, then imports every Bad/Extra/Modified
    entry and the performance metrics.

    :param interaction: dict with 'hostname', 'stats' (XML string) and
                        'metadata' keys
    """
    hostname = interaction['hostname']
    stats = etree.fromstring(interaction['stats'])
    metadata = interaction['metadata']
    server = metadata['server']

    client = cache.get(hostname)
    if not client:
        client, created = Client.objects.get_or_create(name=hostname)
        if created:
            self.logger.debug("Client %s added to the db" % hostname)
        cache.set(hostname, client)

    timestamp = datetime(*strptime(stats.get('time'))[0:6])
    # skip duplicate imports of the same interaction
    if len(Interaction.objects.filter(client=client,
                                      timestamp=timestamp)) > 0:
        self.logger.warn("Interaction for %s at %s already exists" %
                         (hostname, timestamp))
        return

    profile, created = Group.objects.get_or_create(
        name=metadata['profile'])
    inter = Interaction(client=client,
                        timestamp=timestamp,
                        state=stats.get('state', default="unknown"),
                        repo_rev_code=stats.get('revision',
                                                default="unknown"),
                        good_count=stats.get('good', default="0"),
                        total_count=stats.get('total', default="0"),
                        server=server,
                        profile=profile)
    inter.save()
    self.logger.debug("Interaction for %s at %s with INSERTED in to db" %
                      (client.id, timestamp))

    #FIXME - this should be more efficient
    for group_name in metadata['groups']:
        group = cache.get("GROUP_" + group_name)
        if not group:
            group, created = Group.objects.get_or_create(name=group_name)
            if created:
                self.logger.debug("Added group %s" % group)
            cache.set("GROUP_" + group_name, group)
        inter.groups.add(group)

    for bundle_name in metadata['bundles']:
        bundle = cache.get("BUNDLE_" + bundle_name)
        if not bundle:
            bundle, created = Bundle.objects.get_or_create(
                name=bundle_name)
            if created:
                self.logger.debug("Added bundle %s" % bundle)
            cache.set("BUNDLE_" + bundle_name, bundle)
        inter.bundles.add(bundle)
    inter.save()

    counter_fields = {TYPE_BAD: 0, TYPE_MODIFIED: 0, TYPE_EXTRA: 0}
    pattern = [('Bad/*', TYPE_BAD),
               ('Extra/*', TYPE_EXTRA),
               ('Modified/*', TYPE_MODIFIED)]
    updates = dict(failures=[], paths=[], packages=[], actions=[],
                   services=[])
    for (xpath, state) in pattern:
        for entry in stats.findall(xpath):
            counter_fields[state] = counter_fields[state] + 1
            entry_type = entry.tag
            name = entry.get('name')
            exists = entry.get('current_exists',
                               default="true").lower() == "true"

            # handle server failures differently
            failure = entry.get('failure', '')
            if failure:
                act_dict = dict(name=name, entry_type=entry_type,
                                message=failure)
                newact = FailureEntry.entry_get_or_create(act_dict)
                updates['failures'].append(newact)
                continue

            act_dict = dict(name=name, state=state, exists=exists)
            if entry_type == 'Action':
                act_dict['status'] = entry.get('status', default="check")
                act_dict['output'] = entry.get('rc', default=-1)
                self.logger.debug("Adding action %s" % name)
                updates['actions'].append(
                    ActionEntry.entry_get_or_create(act_dict))
            elif entry_type == 'Package':
                act_dict['target_version'] = entry.get('version',
                                                       default='')
                act_dict['current_version'] = entry.get('current_version',
                                                        default='')
                # extra entries are a bit different. They can have
                # Instance objects
                if not act_dict['target_version']:
                    for instance in entry.findall("Instance"):
                        #TODO - this probably only works for rpms
                        release = instance.get('release', '')
                        arch = instance.get('arch', '')
                        act_dict['current_version'] = instance.get(
                            'version')
                        if release:
                            act_dict['current_version'] += "-" + release
                        if arch:
                            act_dict['current_version'] += "." + arch
                        self.logger.debug(
                            "Adding package %s %s" %
                            (name, act_dict['current_version']))
                        updates['packages'].append(
                            PackageEntry.entry_get_or_create(act_dict))
                else:
                    self.logger.debug("Adding package %s %s" %
                                      (name, act_dict['target_version']))
                    # not implemented yet
                    act_dict['verification_details'] = entry.get(
                        'verification_details', '')
                    updates['packages'].append(
                        PackageEntry.entry_get_or_create(act_dict))
            elif entry_type == 'Path':
                path_type = entry.get("type").lower()
                act_dict['path_type'] = path_type

                # desired permissions
                target_dict = dict(
                    owner=entry.get('owner', default="root"),
                    group=entry.get('group', default="root"),
                    perms=entry.get('perms', default=""),
                )
                fperm, created = FilePerms.objects.get_or_create(
                    **target_dict)
                act_dict['target_perms'] = fperm

                # permissions actually found on the client
                current_dict = dict(
                    owner=entry.get('current_owner', default=""),
                    group=entry.get('current_group', default=""),
                    perms=entry.get('current_perms', default=""),
                )
                fperm, created = FilePerms.objects.get_or_create(
                    **current_dict)
                act_dict['current_perms'] = fperm

                if path_type in ('symlink', 'hardlink'):
                    act_dict['target_path'] = entry.get('to', default="")
                    act_dict['current_path'] = entry.get('current_to',
                                                         default="")
                    self.logger.debug("Adding link %s" % name)
                    updates['paths'].append(
                        LinkEntry.entry_get_or_create(act_dict))
                    continue
                elif path_type == 'device':
                    #TODO devices
                    self.logger.warn(
                        "device path types are not supported yet")
                    continue

                # TODO - vcs output
                act_dict['detail_type'] = PathEntry.DETAIL_UNUSED
                if path_type == 'directory' and entry.get(
                        'prune', 'false') == 'true':
                    unpruned_elist = [
                        e.get('path') for e in entry.findall('Prune')
                    ]
                    if unpruned_elist:
                        act_dict['detail_type'] = PathEntry.DETAIL_PRUNED
                        act_dict['details'] = "\n".join(unpruned_elist)
                elif entry.get('sensitive', 'false').lower() == 'true':
                    act_dict['detail_type'] = PathEntry.DETAIL_SENSITIVE
                else:
                    cdata = None
                    if entry.get('current_bfile', None):
                        act_dict['detail_type'] = PathEntry.DETAIL_BINARY
                        cdata = entry.get('current_bfile')
                    elif entry.get('current_bdiff', None):
                        act_dict['detail_type'] = PathEntry.DETAIL_DIFF
                        cdata = b64decode(entry.get('current_bdiff'))
                    elif entry.get('current_diff', None):
                        act_dict['detail_type'] = PathEntry.DETAIL_DIFF
                        # bugfix: read 'current_diff' here; the original
                        # read the absent 'current_bdiff' attribute,
                        # silently dropping the diff data
                        cdata = entry.get('current_diff')
                    if cdata:
                        if len(cdata) > self.size_limit:
                            # too large to store; record a hash instead
                            act_dict[
                                'detail_type'] = PathEntry.DETAIL_SIZE_LIMIT
                            act_dict['details'] = md5(cdata).hexdigest()
                        else:
                            act_dict['details'] = cdata
                self.logger.debug("Adding path %s" % name)
                updates['paths'].append(
                    PathEntry.entry_get_or_create(act_dict))

                #TODO - secontext
                #TODO - acls
            elif entry_type == 'Service':
                act_dict['target_status'] = entry.get('status', default='')
                act_dict['current_status'] = entry.get('current_status',
                                                       default='')
                self.logger.debug("Adding service %s" % name)
                updates['services'].append(
                    ServiceEntry.entry_get_or_create(act_dict))
            elif entry_type == 'SELinux':
                self.logger.info("SELinux not implemented yet")
            else:
                self.logger.error(
                    "Unknown type %s not handled by reporting yet" %
                    entry_type)

    inter.bad_count = counter_fields[TYPE_BAD]
    inter.modified_count = counter_fields[TYPE_MODIFIED]
    inter.extra_count = counter_fields[TYPE_EXTRA]
    inter.save()
    for entry_type in updates.keys():
        getattr(inter, entry_type).add(*updates[entry_type])

    # performance metrics
    for times in stats.findall('OpStamps'):
        for metric, value in list(times.items()):
            Performance(interaction=inter, metric=metric,
                        value=value).save()