def load_content(self, evidence, ignore_ttl=False, evidence_dt=None):
    """
    Populate the content of the evidence from the locker.

    :param evidence: an evidence object.
    :param ignore_ttl: Boolean for TTL validation. Defaults to False.
    :param evidence_dt: The date of the evidence content to load.

    :returns: The evidence object with content.
    """
    self._validate_evidence(evidence, ignore_ttl)
    if not getattr(evidence, 'is_partitioned', False):
        # Unpartitioned evidence lives in a single file at evidence.path.
        evidence.set_content(
            self._get_file_content(evidence.path, evidence_dt))
        return evidence
    # Partitioned evidence: stitch all partition files back into one
    # document.  Partition file names are '<part_hash>_<evidence name>'.
    metadata = self.get_evidence_metadata(evidence.path, evidence_dt)
    merged = None
    target = None  # the list inside `merged` that partitions extend
    for part_hash in metadata['partitions']:
        raw = self._get_file_content(
            f'{evidence.dir_path}/{part_hash}_{evidence.name}',
            evidence_dt)
        part_data = json.loads(raw)
        if merged is None:
            # The first partition seeds the merged document; locate the
            # list under part_root (if any) that later partitions append to.
            merged = part_data
            target = merged
            if evidence.part_root:
                target = parse_dot_key(target, evidence.part_root)
        elif evidence.part_root:
            target.extend(parse_dot_key(part_data, evidence.part_root))
        else:
            target.extend(part_data)
    evidence.set_content(format_json(merged))
    return evidence
def filtered_content(self):
    """
    Provide evidence content minus the ignored fields as JSON.

    The filtered content is computed once and cached on the instance as
    ``_filtered_content``.  Ignored fields are looked up from
    ``IGNORE_REPO_METADATA`` keyed by the first two characters of the
    evidence name.

    :returns: the filtered content as a JSON string, or None when the
        evidence has no content.
    """
    if self.content:
        if not hasattr(self, '_filtered_content'):
            metadata = json.loads(self.content)
            for field in IGNORE_REPO_METADATA[self.name[:2]]:
                # pop with a default instead of try/except KeyError —
                # absent fields are simply skipped.
                metadata.pop(field, None)
            self._filtered_content = str(format_json(metadata))
        return self._filtered_content
def generate_check_results(self, rpt_metadata):
    """
    Combine the check execution results with associated reports metadata.

    This method combines check results with details about associated
    reports and evidences used, found in the report metadata.  The
    combined results, keyed by check class dot path, are written to the
    locker as ``check_results.json``.

    :param rpt_metadata: Metadata from all report evidence index.json files
    """
    chk_results = {}
    for rpt, meta in rpt_metadata.items():
        check_methods = {}
        # Skip reports that have no associated checks.
        if not meta.get('checks'):
            continue
        for check in meta['checks']:
            # `check` is a dotted path: '<check class path>.<method name>'.
            check_class, check_method = check.rsplit('.', 1)
            # Default to an empty result for checks with no recorded run.
            check_methods[check_method] = {}
            if self.results.get(check):
                test = self.results[check]['test'].test
                check_methods[check_method] = {
                    'status': self.results[check]['status'],
                    'timestamp': self.results[check]['timestamp'],
                    'warnings': test.warnings,
                    'failures': test.failures,
                    'successes': test.successes,
                    'warnings_count': test.warnings_count(),
                    'failures_count': test.failures_count(),
                    'successes_count': test.successes_count()
                }
        # NOTE(review): `check_class` below is the value left over from
        # the last loop iteration — this assumes all checks of a report
        # belong to the same check class; confirm with the producers of
        # the report metadata.
        if not chk_results.get(check_class):
            chk_results[check_class] = {
                'checks': check_methods,
                'reports': {
                    rpt: meta['description']
                },
                'evidence': meta['evidence'],
                'accreditations': list(
                    self.controls.get_accreditations(check_class))
            }
        else:
            # Class already seen via another report: just record the
            # additional report's description.
            chk_results[check_class]['reports'][rpt] = meta['description']
    # skipkeys/default=str keep serialization from failing on non-JSON
    # keys/values in the aggregated results.
    self.locker.add_content_to_locker(
        format_json(chk_results, skipkeys=True, default=str),
        filename='check_results.json')
def index(self, evidence, checks=None, evidence_used=None):
    """
    Add external evidence to the git index.

    Overrides the base Locker index method called by add_evidence.

    :param evidence: the evidence object to index.
    :param checks: unused here; accepted for base interface compatibility.
    :param evidence_used: unused here; accepted for base interface
        compatibility.
    """
    with self.lock:
        index_file = self.get_index_file(evidence)
        metadata = {}
        if os.path.exists(index_file):
            # Use a context manager so the file handle is closed
            # promptly instead of being leaked.
            with open(index_file) as f:
                metadata = json.loads(f.read())
        planter = self.repo.config_reader().get_value('user', 'email')
        metadata[evidence.name] = {
            'last_update': self.commit_date,
            'ttl': evidence.ttl,
            'planted_by': planter,
            'description': evidence.description
        }
        with open(index_file, 'w') as f:
            f.write(format_json(metadata))
        self.repo.index.add([index_file, self.get_file(evidence.path)])
        self.planted.append(evidence.path)
def get_partition(self, key):
    """
    Provide a slice of content based on the supplied partition key.

    Return a JSON document that is a slice of the original evidence
    content based on the list of key values provided.  Key values are
    expected to match the key fields provided during evidence
    instantiation and are relative to the root partition provided
    during evidence instantiation.

    :param key: A list of key values to partition by

    :returns: A JSON document filtered by the key values provided
    """
    document = json.loads(self._content)
    if self.part_root:
        # Walk down to the parent of the partition root, then replace
        # the root element in place with its filtered slice.
        path = self.part_root.split('.')
        parent = document
        for segment in path[:-1]:
            parent = parent[segment]
        leaf = path[-1]
        parent[leaf] = self._partition(parent[leaf], key)
    else:
        document = self._partition(document, key)
    return format_json(document)
def set_content(self, str_content):
    """
    Set the evidence content.

    :param str_content: the raw evidence content as a string.  JSON
        evidence (extension 'json') is parsed and re-serialized through
        format_json so stored content has a consistent format.
    """
    # Store the raw string first; for JSON evidence it is immediately
    # replaced by the canonically formatted version below.
    self._content = str_content
    if self.extension == 'json':
        self._content = format_json(json.loads(str_content))
def index(self, evidence, checks=None, evidence_used=None):
    """
    Add an evidence to the git index.

    Handles both partitioned and unpartitioned evidence, tombstoning
    files that the new state of the evidence makes obsolete.

    :param evidence: the evidence object.
    :param checks: A list of checks used to generate report content.
        Only applicable for check generated ReportEvidence.
    :param evidence_used: metadata for evidence used by a check.
        Only applicable for check generated ReportEvidence.
    """
    with self.lock:
        index_file = self.get_index_file(evidence)
        repo_files = [index_file]
        metadata = {}
        if os.path.exists(index_file):
            # Use a context manager so the file handle is closed
            # promptly instead of being leaked.
            with open(index_file) as f:
                metadata = json.loads(f.read())
        ev_meta = metadata.get(evidence.name, {})
        old_parts = ev_meta.get('partitions', {}).keys()
        metadata[evidence.name] = {
            'last_update': self.commit_date,
            'ttl': evidence.ttl,
            'description': evidence.description
        }
        tombstones = None
        if getattr(evidence, 'is_partitioned', False):
            unpartitioned = self.get_file(evidence.path)
            if os.path.isfile(unpartitioned):
                # Remove/tombstone unpartitioned evidence file
                # replaced by partitioned evidence files
                self.repo.index.remove([unpartitioned], working_tree=True)
                tombstones = self.create_tombstone_metadata(
                    evidence.name, ev_meta, 'Evidence is partitioned')
            parts = {}
            for key in evidence.partition_keys:
                sha256_hash = get_sha256_hash(key, 10)
                parts[sha256_hash] = key
                repo_file = self.get_file(
                    f'{evidence.dir_path}/{sha256_hash}_{evidence.name}')
                repo_files.append(repo_file)
            dead_parts = set(old_parts) - set(parts.keys())
            if dead_parts:
                # Remove/tombstone partitioned evidence files
                # no longer part of the evidence content
                # NOTE(review): this assignment overwrites any tombstones
                # created above for the unpartitioned file when both cases
                # occur in the same pass — confirm that is intended.
                self.remove_partitions(evidence, dead_parts)
                tombstones = self.create_tombstone_metadata(
                    dead_parts, ev_meta,
                    'Partition no longer part of evidence')
            metadata[evidence.name].update({
                'partition_fields': evidence.part_fields,
                'partition_root': evidence.part_root,
                'partitions': parts
            })
            if tombstones is None:
                # Preserve prior tombstones
                tombstones = ev_meta.get('tombstones')
        else:
            # Remove/tombstone partitioned evidence files
            # replaced by unpartitioned evidence file
            self.remove_partitions(evidence, old_parts)
            tombstones = self.create_tombstone_metadata(
                old_parts, ev_meta, 'Evidence no longer partitioned')
            repo_files.append(self.get_file(evidence.path))
        if tombstones:
            metadata[evidence.name]['tombstones'] = tombstones
        if checks is not None:
            metadata[evidence.name]['checks'] = checks
        if evidence_used is not None:
            metadata[evidence.name]['evidence'] = evidence_used
        with open(index_file, 'w') as f:
            f.write(format_json(metadata))
        self.repo.index.add(repo_files)