class Suite(Profile):
    # File-system locations that are scanned for test cases.
    paths = related.SequenceField(str, default=None)
    # Cases selected to run, keyed by their file_path.
    queued = related.MappingField(Case, "file_path", default={})
    # Cases filtered out by include/exclude rules, keyed by file_path.
    skipped = related.MappingField(Case, "file_path", default={})

    def __attrs_post_init__(self):
        # Collect cases from `paths` immediately after attrs construction.
        from . import collect
        collect(self)
        get_logger().debug("suite constructed", host=self.host,
                           paths=self.paths, prefixes=self.prefixes,
                           extensions=self.extensions, includes=self.includes,
                           excludes=self.excludes,
                           concurrency=self.concurrency)

    def get_case(self, path, filename=None):
        """Look up a case by directory + filename, or by a full path.

        Checks `queued` first, then `skipped`; returns None when absent.
        """
        file_path = os.path.join(path, filename) if filename else path
        return self.queued.get(file_path) or self.skipped.get(file_path)

    def add_case(self, case):
        """Queue `case` when it passes the include/exclude filters."""
        if case.is_active(self.includes, self.excludes):
            self.queued.add(case)
            get_logger().debug("case queued", case=case.file_path)
        else:
            # NOTE(review): SOURCE is truncated here -- the else-branch body
            # is missing (presumably it adds the case to `skipped`); confirm
            # against the complete original file before relying on this block.
class Suite(Profile):
    # File-system locations that are scanned for test cases.
    paths = related.SequenceField(str, default=None)
    # Cases selected to run, keyed by their file_path.
    queued = related.MappingField(Case, "file_path", default={})
    # Cases filtered out by include/exclude rules, keyed by file_path.
    skipped = related.MappingField(Case, "file_path", default={})
    # One Semaphore per semaphore name referenced by queued cases.
    semaphores = related.MappingField(Semaphore, "semaphore", default={})
    # Optional application object attached to the suite (opaque here).
    app = related.ChildField(object, default=None)

    def __attrs_post_init__(self):
        # Collect cases from `paths` immediately after attrs construction.
        from . import collect
        collect(self)
        get_logger().debug(
            "suite constructed",
            host=self.host,
            paths=self.paths,
            prefixes=self.prefixes,
            extensions=self.extensions,
            includes=self.includes,
            excludes=self.excludes,
            concurrency=self.concurrency,
            retry_failed=self.retry_failed,
        )

    def get_case(self, path, filename=None):
        """Look up a case by directory + filename, or by a full path.

        Checks `queued` first, then `skipped`; returns None when absent.
        """
        file_path = os.path.join(path, filename) if filename else path
        return self.queued.get(file_path) or self.skipped.get(file_path)

    def add_case(self, case):
        """Queue `case` if active, lazily creating its named semaphore."""
        if case.is_active(self.includes, self.excludes):
            self.queued.add(case)
            # First time this semaphore name is seen, create the shared
            # Semaphore so concurrently running cases can coordinate.
            if (case.semaphore is not None and
                    case.semaphore not in self.semaphores.keys()):
                self.semaphores[case.semaphore] = Semaphore()
            get_logger().debug("case queued", case=case.file_path)
class PathReport(object):
    """Coverage bucket for one schema URL: its Path object plus per-method
    reports, created lazily as methods are requested."""
    url = related.StringField()
    obj = related.ChildField(Path)
    methods = related.MappingField(MethodReport, "method", default={})

    def get_method_report(self, method):
        """Return the MethodReport for `method`, creating it on first use."""
        report = self.methods.get(method)
        if report is None:
            report = MethodReport(self.url, method)
            self.methods[method] = report
        return report
class DataLoaderDescription(RelatedLoadSaveMixin):
    """Class representation of dataloader.yaml
    """
    type = related.StringField()
    defined_as = related.StringField()
    args = related.MappingField(DataLoaderArgument, "name")
    info = related.ChildField(Info)
    output_schema = related.ChildField(DataLoaderSchema)
    dependencies = related.ChildField(Dependencies,
                                      default=Dependencies(),
                                      required=False)
    path = related.StringField(required=False)
    postprocessing = related.ChildField(dict, default=OrderedDict(),
                                        required=False)

    def get_example_kwargs(self):
        """Return example keyword arguments derived from `args`."""
        return example_kwargs(self.args)

    def print_kwargs(self, format_examples_json=False):
        """Print every keyword argument with its doc/type/optional/example.

        Args:
            format_examples_json: if True, render example kwargs as JSON.
        """
        from kipoi.external.related.fields import UNSPECIFIED
        # BUG FIX: the guard read `if hasattr(self, "args")`, warning and
        # returning exactly when arguments WERE defined (and crashing with
        # AttributeError when they were not).  The condition must be negated
        # (matching the sibling kipoi_utils implementation).
        if not hasattr(self, "args"):
            logger.warn(
                "No keyword arguments defined for the given dataloader.")
            return None
        for k in self.args:
            print("Keyword argument: `{0}`".format(k))
            for elm in ["doc", "type", "optional", "example"]:
                if hasattr(self.args[k], elm) and \
                        (not isinstance(getattr(self.args[k], elm),
                                        UNSPECIFIED)):
                    print(" {0}: {1}".format(elm, getattr(self.args[k], elm)))
        print("-" * 80)
        if hasattr(self, "example_kwargs"):
            # BUG FIX: `self.example_kwargs` was read unconditionally BEFORE
            # the hasattr() guard, defeating it; read only once we know the
            # attribute exists.
            example_kwargs = self.example_kwargs
            if format_examples_json:
                import json
                example_kwargs = json.dumps(example_kwargs)
            print("Example keyword arguments are: {0}".format(
                str(example_kwargs)))

    def __attrs_post_init__(self):
        # load additional objects declared under `postprocessing`
        for k in self.postprocessing:
            k_observed = k
            if k == 'variant_effects':
                k = 'kipoi_veff'
            if is_installed(k):
                # Load the config properly if the plugin is installed
                try:
                    parser = get_dataloader_yaml_parser(k)
                    self.postprocessing[k_observed] = parser.from_config(
                        self.postprocessing[k_observed])
                    # attrs class may be frozen; bypass __setattr__
                    object.__setattr__(self, "postprocessing",
                                       self.postprocessing)
                except Exception:
                    # FIX: log-message typo "filed" -> "field"
                    logger.warn(
                        "Unable to parse {} field in DataLoaderDescription: {}"
                        .format(k_observed, self))
class TestConfig(RelatedConfigMixin):
    """Models config.yaml in the model root
    """
    # Named test constraints, keyed by each entry's `name` attribute.
    constraints = related.MappingField(TestModelConfig, "name",
                                      required=False, repr=True)
class MyModel(object):
    # Attribute names differ from their serialized keys via `key=`:
    # "for" and "not" are Python keywords, hence the is_* renames.
    is_for = related.StringField(key="for")
    # NOTE(review): `criss` serializes to "cross" and `cross` to "criss" --
    # presumably a deliberate criss-cross to exercise key remapping in tests;
    # confirm before "fixing".
    criss = related.StringField(key="cross")
    cross = related.StringField(key="criss")
    is_not = related.BooleanField(key="not")
    is_list = related.SequenceField(str, key="list")
    is_type = related.ChildField(DataType, key="type")
    is_dict = related.MappingField(MyChild, "int", key="dict", required=False)
class Model(object):
    # non-child fields
    # NOTE(review): the defaults appear swapped (SequenceField defaults to a
    # set, SetField to a list) -- possibly intentional to exercise the
    # library's type coercion; confirm before changing.  All three are shared
    # mutable defaults evaluated once at class-definition time.
    sequence_field = related.SequenceField(str, default=set())
    set_field = related.SetField(str, default=[])
    mapping_field = related.MappingField(Child, "name", default={})

    # child fields (callables/classes passed as default factories)
    child_list = related.ChildField(list, default=list)
    child_set = related.ChildField(set, default=set)
    child_dict = related.ChildField(dict, default=dict)
    child_obj = related.ChildField(Child, default=Child)
class MethodReport(object):
    """Pass/fail tallies for one URL + HTTP method, bucketed by parameter
    (with the synthetic CASE/SCENARIO/STEP buckets for the three levels)."""
    url = related.StringField()
    method = related.StringField()
    counts = related.MappingField(Counts, "key", default={})

    def add(self, case_pass, scenario_pass, step_pass, params):
        """Record one step outcome at case, scenario, step and param level."""
        for bucket, passed in ((CASE, case_pass),
                               (SCENARIO, scenario_pass),
                               (STEP, step_pass)):
            self.param_counts(bucket).add(passed)
        # a parameter only counts as passing when every level passed
        all_pass = case_pass and scenario_pass and step_pass
        for param in params:
            self.param_counts(param).add(all_pass)

    def param_counts(self, param):
        """Return the Counts bucket for `param`, creating it on first use."""
        if param not in self.counts:
            self.counts[param] = Counts(param)
        return self.counts[param]

    @property
    def case_counts(self):
        return self.param_counts(CASE)

    @property
    def scenario_counts(self):
        return self.param_counts(SCENARIO)

    @property
    def step_counts(self):
        return self.param_counts(STEP)

    @property
    def cases_passed(self):
        return self.param_counts(CASE).passed

    @property
    def cases_failed(self):
        return self.param_counts(CASE).failed

    @property
    def scenarios_passed(self):
        return self.param_counts(SCENARIO).passed

    @property
    def scenarios_failed(self):
        return self.param_counts(SCENARIO).failed

    @property
    def steps_passed(self):
        return self.param_counts(STEP).passed

    @property
    def steps_failed(self):
        return self.param_counts(STEP).failed
class DataLoaderDescription(RelatedLoadSaveMixin):
    """Class representation of dataloader.yaml
    """
    defined_as = related.StringField()
    args = related.MappingField(DataLoaderArgument, "name")
    output_schema = related.ChildField(DataLoaderSchema)
    type = related.StringField(required=False)
    info = related.ChildField(Info, default=Info(), required=False)
    dependencies = related.ChildField(Dependencies,
                                      default=Dependencies(),
                                      required=False)
    # Original path of the loaded dataloader.yaml (set when loaded from disk).
    path = related.StringField(required=False)
    writers = related.ChildField(dict, default=OrderedDict(), required=False)

    def get_example_kwargs(self):
        """Return example kwargs, resolving example files relative to `path`."""
        # return self.download_example()
        if self.path is None:
            path = "."
        else:
            path = self.path
        return example_kwargs(
            self.args,
            os.path.join(os.path.dirname(path), "downloaded/example_files"))

    def download_example(self, output_dir, absolute_path=False, dry_run=False):
        """Download the example files referenced by `args` into `output_dir`."""
        return example_kwargs(self.args, output_dir,
                              absolute_path=absolute_path,
                              dry_run=dry_run)

    def print_kwargs(self, format_examples_json=False):
        """Print every keyword argument with its doc/type/optional/example.

        Note: `format_examples_json` is currently unused -- the example
        printing below is commented out.
        """
        from kipoi_utils.external.related.fields import UNSPECIFIED
        if not hasattr(self, "args"):
            logger.warning(
                "No keyword arguments defined for the given dataloader.")
            return None

        args = self.args
        for k in args:
            print("{0}:".format(k))
            for elm in ["doc", "type", "optional", "example"]:
                if hasattr(args[k], elm) and \
                        (not isinstance(getattr(args[k], elm), UNSPECIFIED)):
                    print(" {0}: {1}".format(elm, getattr(args[k], elm)))
        # example_kwargs = self.get_example_kwargs()
        # if format_examples_json:
        #     import json
        #     example_kwargs = json.dumps(example_kwargs)
        # print("Example keyword arguments are: {0}".format(str(example_kwargs)))

    # Backwards-compatible alias for print_kwargs.
    print_args = print_kwargs
class Operation(object):
    # Models one HTTP operation (method on a path); the field names mirror a
    # Swagger/OpenAPI 2.0 operation object.
    responses = related.MappingField(Response, "code", required=True)
    tags = related.SequenceField(str, required=False)
    summary = related.StringField(required=False)
    description = related.StringField(required=False)
    operationId = related.StringField(required=False)
    parameters = related.SequenceField(Parameter, required=False)
    consumes = related.SequenceField(MIMEType, required=False)
    produces = related.SequenceField(MIMEType, required=False)
    externalDocs = related.ChildField(dict, required=False)
    schemes = related.SequenceField(Scheme, required=False)
    deprecated = related.BooleanField(required=False)
    security = related.ChildField(dict, required=False)
class DashboardSpecification:
    """Top-level dashboard document: layout items, queries, filters and
    visualization settings, each keyed by id."""
    version = related.StringField()
    date_ranges = related.MappingField(
        DateRangeSpecification, 'id', {}, key='dateRanges'
    )
    filters = related.MappingField(FilterDefinition, 'id', {})
    items = related.MappingField(LayoutItem, 'id', {})
    queries = related.MappingField(QueryDefinition, 'id', {})
    settings = related.MappingField(VisualizationSettings, 'id', {})
    sizes = related.MappingField(LayoutSize, 'id', {})
    # NOTE(review): DashboardOptions() is evaluated once at class-definition
    # time -- verify `related` copies defaults per instance, otherwise this
    # options object is shared across all specifications.
    options = related.ChildField(DashboardOptions, DashboardOptions())
class DataLoaderDescription(RelatedLoadSaveMixin):
    """Class representation of dataloader.yaml
    """
    type = related.StringField()
    defined_as = related.StringField()
    args = related.MappingField(DataLoaderArgument, "name")
    info = related.ChildField(Info)
    output_schema = related.ChildField(DataLoaderSchema)
    dependencies = related.ChildField(Dependencies,
                                      default=Dependencies(),
                                      required=False)
    path = related.StringField(required=False)
    postprocessing = related.ChildField(PostProcDataLoaderStruct,
                                        default=PostProcDataLoaderStruct(),
                                        required=False)

    def get_example_kwargs(self):
        """Return example keyword arguments derived from `args`."""
        return example_kwargs(self.args)

    def print_kwargs(self, format_examples_json=False):
        """Print every keyword argument with its doc/type/optional/example.

        Args:
            format_examples_json: if True, render example kwargs as JSON.
        """
        from kipoi.external.related.fields import UNSPECIFIED
        # BUG FIX: the guard read `if hasattr(self, "args")`, warning and
        # returning exactly when arguments WERE defined (and crashing with
        # AttributeError when they were not).  The condition must be negated.
        if not hasattr(self, "args"):
            logger.warn(
                "No keyword arguments defined for the given dataloader.")
            return None
        for k in self.args:
            print("Keyword argument: `{0}`".format(k))
            for elm in ["doc", "type", "optional", "example"]:
                if hasattr(self.args[k], elm) and \
                        (not isinstance(getattr(self.args[k], elm),
                                        UNSPECIFIED)):
                    print(" {0}: {1}".format(elm, getattr(self.args[k], elm)))
        print("-" * 80)
        if hasattr(self, "example_kwargs"):
            # BUG FIX: `self.example_kwargs` was read unconditionally BEFORE
            # the hasattr() guard, defeating it; read only once we know the
            # attribute exists.
            example_kwargs = self.example_kwargs
            if format_examples_json:
                import json
                example_kwargs = json.dumps(example_kwargs)
            print("Example keyword arguments are: {0}".format(
                str(example_kwargs)))
class DataLoaderDescription(RelatedLoadSaveMixin):
    """Class representation of dataloader.yaml
    """
    defined_as = related.StringField()
    args = related.MappingField(DataLoaderArgument, "name")
    output_schema = related.ChildField(DataLoaderSchema)
    type = related.StringField(required=False)
    info = related.ChildField(Info, default=Info(), required=False)
    dependencies = related.ChildField(Dependencies,
                                      default=Dependencies(),
                                      required=False)
    # Original path of the loaded dataloader.yaml (set when loaded from disk).
    path = related.StringField(required=False)
    postprocessing = related.ChildField(dict, default=OrderedDict(),
                                        required=False)

    def get_example_kwargs(self):
        """Return example kwargs, resolving example files relative to `path`."""
        # return self.download_example()
        if self.path is None:
            path = "."
        else:
            path = self.path
        return example_kwargs(self.args,
                              os.path.join(os.path.dirname(path),
                                           "downloaded/example_files"))

    def download_example(self, output_dir, absolute_path=False, dry_run=False):
        """Download the example files referenced by `args` into `output_dir`."""
        return example_kwargs(self.args, output_dir,
                              absolute_path=absolute_path,
                              dry_run=dry_run)

    def print_kwargs(self, format_examples_json=False):
        """Print every keyword argument with its doc/type/optional/example.

        Note: `format_examples_json` is currently unused -- the example
        printing below is commented out.
        """
        from kipoi_utils.external.related.fields import UNSPECIFIED
        if not hasattr(self, "args"):
            logger.warning("No keyword arguments defined for the given dataloader.")
            return None

        args = self.args
        for k in args:
            print("{0}:".format(k))
            for elm in ["doc", "type", "optional", "example"]:
                if hasattr(args[k], elm) and \
                        (not isinstance(getattr(args[k], elm), UNSPECIFIED)):
                    print(" {0}: {1}".format(elm, getattr(args[k], elm)))
        # example_kwargs = self.get_example_kwargs()
        # if format_examples_json:
        #     import json
        #     example_kwargs = json.dumps(example_kwargs)
        # print("Example keyword arguments are: {0}".format(str(example_kwargs)))

    # Backwards-compatible alias for print_kwargs.
    print_args = print_kwargs

    def __attrs_post_init__(self):
        # load additional objects declared under `postprocessing`
        for k in self.postprocessing:
            k_observed = k
            if k == 'variant_effects':
                k = 'kipoi_veff'
            if is_installed(k):
                # Load the config properly if the plugin is installed
                try:
                    parser = get_dataloader_yaml_parser(k)
                    self.postprocessing[k_observed] = parser.from_config(self.postprocessing[k_observed])
                    # attrs class may be frozen; bypass __setattr__
                    object.__setattr__(self, "postprocessing", self.postprocessing)
                except Exception:
                    # "filed" [sic] -- typo for "field" in the log message.
                    logger.warning("Unable to parse {} filed in DataLoaderDescription: {}".format(k_observed, self))
class Node(object):
    """Recursive node: a name plus an optional child, list and map of nodes
    (node_cls is resolved elsewhere, enabling the self-reference)."""
    name = related.StringField()
    node_child = related.ChildField(node_cls, required=False)
    node_list = related.SequenceField(node_cls, required=False)
    node_map = related.MappingField(node_cls, "name", required=False)
class DataSpec(RelatedLoadSaveMixin):
    """Dataset specification
    """
    # Dictionary of different bigwig files
    task_specs = related.MappingField(TaskSpec, "task",
                                     required=True, repr=True)
    # Path to the reference genome fasta file
    fasta_file = related.StringField(required=True)
    # Bias track specification
    bias_specs = related.MappingField(BiasSpec, "task",
                                     required=False, repr=True)
    # # Set of peaks to consider
    # peaks = related.StringField(required=True)
    # Original path to the file
    path = related.StringField(required=False)

    def abspath(self):
        """Return a copy of this spec with all file paths made absolute."""
        return DataSpec(
            task_specs={k: v.abspath() for k, v in self.task_specs.items()},
            fasta_file=os.path.abspath(self.fasta_file),
            # peaks=os.path.abspath(self.peaks),
            path=self.path)

    def task2idx(self, task, dtype='counts'):
        """Get the index output

        Args:
          task: task name
          dtype: 'counts' or 'profile'

        Returns:
          index for the list of predicted arrays
        """
        # TODO - this is not the right location for the code
        # it should be next to the model
        # insertion order of task_specs fixes each task's position
        n2idx = {k: i for i, k in enumerate(self.task_specs)}
        if dtype == "counts":
            # count outputs are laid out after all profile outputs
            return len(self.task_specs) + n2idx[task]
        elif dtype == "profile":
            return n2idx[task]
        else:
            raise ValueError("type is not from 'counts' or 'profile'")

    def get_bws(self):
        """Return an ordered {task: bigwig-dict} mapping."""
        return OrderedDict([(task, task_spec.get_bw_dict())
                            for task, task_spec in self.task_specs.items()])

    def touch_all_files(self, verbose=True):
        """Touch every referenced file (fasta, task tracks, bias counts)."""
        from basepair.utils import touch_file
        touch_file(self.fasta_file, verbose)

        for ts in self.task_specs.values():
            ts.touch_all_files(verbose=verbose)

        for ts in self.bias_specs.values():
            touch_file(ts.pos_counts, verbose)
            touch_file(ts.neg_counts, verbose)
            # touch_file(ts.peaks)

    def load_counts(self, intervals, use_strand=True, progbar=False):
        """Load count tracks for `intervals` for every task."""
        return {
            task: ts.load_counts(intervals, use_strand=use_strand,
                                 progbar=progbar)
            for task, ts in self.task_specs.items()
        }

    def load_bias_counts(self, intervals, use_strand=True, progbar=False):
        """Load count tracks for `intervals` for every bias task."""
        return {
            task: ts.load_counts(intervals, use_strand=use_strand,
                                 progbar=progbar)
            for task, ts in self.bias_specs.items()
        }
class GroupBySettings(object):
    # Grouping definitions keyed by id; serialized under "groupings".
    groupings = related.MappingField(GroupByObject, 'id', {}, key='groupings')
class SeriesSettings(object):
    # Per-series settings keyed by id; serialized under "seriesObjects".
    series_objects = related.MappingField(SeriesSettingsObject, 'id', {},
                                          key='seriesObjects')
    # Display order of series ids; serialized under "seriesOrder".
    series_order = related.SequenceField(str, [], key='seriesOrder')
class DataSpec(RelatedLoadSaveMixin):
    """Dataset specification
    """
    # Dictionary of different bigwig files
    task_specs = related.MappingField(TaskSpec, "task",
                                     required=True, repr=True)
    # Path to the reference genome fasta file
    fasta_file = related.StringField(required=True)
    # Bias track specification
    bias_specs = related.MappingField(BiasSpec, "task",
                                     required=False, repr=True)
    # # Set of peaks to consider
    # peaks = related.StringField(required=True)
    # Original path to the file
    path = related.StringField(required=False)

    def abspath(self):
        """Return a copy of this spec with all file paths made absolute."""
        return DataSpec(
            task_specs={k: v.abspath() for k, v in self.task_specs.items()},
            fasta_file=os.path.abspath(self.fasta_file),
            # peaks=os.path.abspath(self.peaks),
            path=self.path)

    def get_bws(self):
        """Return an ordered {task: tracks} mapping."""
        return OrderedDict([(task, task_spec.tracks)
                            for task, task_spec in self.task_specs.items()])

    def list_all_files(self, include_peaks=False):
        """List all file paths specified
        """
        files = []
        files.append(self.fasta_file)
        for ts in self.task_specs.values():
            files += ts.list_all_files(include_peaks=include_peaks)
        for ts in self.bias_specs.values():
            files += ts.list_all_files(include_peaks=include_peaks)
        return files

    def touch_all_files(self, verbose=True):
        """Touch every referenced file (fasta, task and bias tracks)."""
        from bpnet.utils import touch_file
        touch_file(self.fasta_file, verbose)

        for ts in self.task_specs.values():
            ts.touch_all_files(verbose=verbose)

        for ts in self.bias_specs.values():
            ts.touch_all_files(verbose=verbose)

    def load_counts(self, intervals, use_strand=True, progbar=False):
        """Load count tracks for `intervals` for every task."""
        return {task: ts.load_counts(intervals, use_strand=use_strand,
                                     progbar=progbar)
                for task, ts in self.task_specs.items()}

    def load_bias_counts(self, intervals, use_strand=True, progbar=False):
        """Load count tracks for `intervals` for every bias task."""
        return {task: ts.load_counts(intervals, use_strand=use_strand,
                                     progbar=progbar)
                for task, ts in self.bias_specs.items()}

    def get_all_regions(self):
        """Get all the regions
        """
        from pybedtools import BedTool
        regions = []
        for task, task_spec in self.task_specs.items():
            if task_spec.peaks is not None:
                regions += list(BedTool(task_spec.peaks))
        return regions
class Services:
    # Service definitions keyed by each entry's `name` attribute.
    services = related.MappingField(Service, "name")
class CoverageReport(object):
    # Result of the whole suite run.
    suite_result = related.ChildField(SuiteResult)
    # Swagger schemas discovered for the suite.
    schemas = related.SequenceField(Swagger)
    # Per-URL path reports, keyed by url.
    paths = related.MappingField(PathReport, "url")

    def prepare(self):
        """iterate through all scenario-steps and record the counts."""
        for scenario_result, scenario_pass, case_pass in self.iterate():
            for step_result in scenario_result.step_results:
                step_pass = step_result.success
                url, method = step_result.fetch.url, step_result.fetch.method
                params = step_result.fetch.kwargs.get("params")
                method_report = self.get_method_report(url, method)
                if method_report:
                    method_report.add(case_pass, scenario_pass, step_pass,
                                      params)

    def get_method_report(self, url, method):
        """return method report for a given url & method combination"""
        path_obj = None

        # scan schemas
        for schema in self.schemas:
            path_obj = schema.resolve(url)
            if path_obj is not None:
                break

        # hard fail
        # assert path_obj, "No path report for: %s" % url

        # resolve path report and then method report
        if path_obj:
            # NOTE(review): paths.get(...) may return None for a path that
            # was never registered, which would raise AttributeError below;
            # presumably create() pre-populates every schema path -- confirm.
            path_report = self.paths.get(path_obj.path)
            return path_report.get_method_report(method)

    def iterate(self):  # pragma: no mccabe
        """yields scenario_result, scenario_pass, case_pass"""
        for case_result in self.suite_result.passed:
            for scenario_result in case_result.passed:
                yield scenario_result, True, True
            for scenario_result in case_result.failed:
                yield scenario_result, False, True  # pragma: no cover

        for case_result in self.suite_result.failed:
            for scenario_result in case_result.passed:
                yield scenario_result, True, False  # pragma: no cover
            for scenario_result in case_result.failed:
                yield scenario_result, False, False

    def generate(self, output_path):
        """Write the coverage workbook (.xls) and return its file path."""
        output_path = output_path or tempfile.mkdtemp()
        timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
        output_path = os.path.join(output_path, "coverage-%s.xls" % timestamp)

        book = xlwt.Workbook()
        sheet = book.add_sheet("coverage")
        # bold header row, then one row per (url, method) report
        self.write_row(sheet.row(0), HEADERS, True)

        index = 1
        for url, path_report in self.paths.items():
            for method, method_report in path_report.methods.items():
                values = [getattr(method_report, column)
                          for column in COLUMNS]
                self.write_row(sheet.row(index), values)
                index += 1

        for index, width in enumerate(WIDTHS):
            sheet.col(index).width = width

        book.save(output_path)
        return output_path

    def write_row(self, row, values, bolded=False):
        """Write one spreadsheet row, optionally with a bold font."""
        style = xlwt.XFStyle()
        if bolded:
            style.font = xlwt.Font()
            style.font.bold = True
        for index, value in enumerate(values):
            row.write(index, value, style=style)

    @classmethod
    def create(cls, suite_result):
        """Build a report pre-seeded with zero counts, then tally results."""
        schemas = Swagger.gather_schemas(suite=suite_result.suite)

        # populate report with 0 counts for all discovered path/methods
        paths = {}
        for schema in schemas:
            for url, obj in schema.paths.items():
                report = PathReport(obj=obj, url=url)
                paths[url] = report
                for method in obj.methods:
                    report.get_method_report(method)

        # create and prepare report
        report = CoverageReport(suite_result=suite_result, schemas=schemas,
                                paths=paths)
        report.prepare()
        return report
class Swagger(object):
    # Swagger/OpenAPI 2.0 root document fields.
    swagger = related.StringField(required=True)
    info = related.ChildField(Info, required=True)
    paths = related.MappingField(Path, "path", required=True)
    host = related.StringField(required=False)
    basePath = related.StringField(required=False)
    schemes = related.SequenceField(Scheme, required=False)
    consumes = related.SequenceField(MIMEType, required=False)
    produces = related.SequenceField(MIMEType, required=False)
    definitions = related.MappingField(Definition, "key", required=False)
    # Nested dict mapping URL segments (with VAR wildcards) to Path objects.
    _lookup = related.ChildField(dict, required=False)

    def __attrs_post_init__(self):
        """ populate the nested _lookup dictionary."""
        self.populate_lookup()

    @classmethod
    def loads(cls, content):
        """Parse a schema document string into a Swagger model."""
        as_dict = related.from_yaml(content, object_pairs_hook=dict)
        return related.to_model(cls, as_dict)

    def resolve(self, url):
        """ Resolve to Path object based on the URL path provided. """
        return self.descend_lookup(url, try_var=True).get(OBJ)

    def populate_lookup(self):
        """ Populate the lookup nested dictionary with paths. """
        # todo: use basePath to ensure mapping is correct?
        self._lookup = {}
        for path_url, path_obj in self.paths.items():
            nested = self.descend_lookup(path_url)
            nested[OBJ] = path_obj

    def descend_lookup(self, url, try_var=False):
        """ Descend lookup nested dictionary, finding bottom dict and tuple."""
        as_tuple = self.path_as_tuple(url)
        sub_lookup = self._lookup
        for item in as_tuple:
            # if in resolve mode, and VAR is only option, return var
            if try_var and (item not in sub_lookup) and (VAR in sub_lookup):
                sub_lookup = sub_lookup.get(VAR)

            # if build mode or item exists, return that.
            else:
                sub_lookup = sub_lookup.setdefault(item, {})

        return sub_lookup

    @classmethod
    def path_as_tuple(cls, url):
        """ Convert URL path into a set of tuples replacing variables. """
        # remove leading and trailing blanks
        items = hyperlink.URL.from_text(url).path
        items = items[1:] if items and not items[0] else items
        items = items[:-1] if items and not items[-1] else items

        # replace variables with VAR indicator for resolve
        return tuple([VAR if cls.is_var(item) else item for item in items])

    @staticmethod
    def is_var(s):
        """ Determines if follows path variable format of {pk}. """
        return isinstance(s, str) and s.startswith("{") and s.endswith("}")

    @classmethod
    def gather_schemas(cls, suite):
        """Download and parse every schema referenced by the suite."""
        from . import utils
        schemas = []

        for name, path in suite.schemas.items():
            json = utils.download_json_with_headers(suite, path)
            schema = related.to_model(cls, json)
            schemas.append(schema)

        return schemas
class Compose(object):
    # Compose-style document: optional version plus services keyed by name.
    version = related.StringField(required=False, default=None)
    services = related.MappingField(Service, "name", required=False)
class DescriptorSet:
    """Iterable collection of AgentDescriptor objects, keyed by name."""
    agents = related.MappingField(AgentDescriptor, "name")

    def __iter__(self):
        # Iterating the set yields the descriptor values, not the names.
        return iter(self.agents.values())
class Config(Profile):
    # Named profiles, each a Profile overlaying the root settings.
    profiles = related.MappingField(Profile, "name", default={})
    # Path the config was loaded from (None when defaulted).
    file_path = related.StringField(required=False, default=None)

    def get_profile(self, name):
        """Return the profile `name`, falling back to this root config."""
        # todo: determine if name is valid, rather than just returning default.
        # todo: handle case insensitivity?
        return self.profiles.get(name, self)  # return default (self) if fail

    @classmethod
    def load(cls, paths):
        """Find rigor.yml file and load it into a Config object."""
        file_path = cls.find_file_path(paths)

        if file_path and os.path.exists(file_path):
            content = open(file_path).read()
            config = cls.loads(content, file_path)
            get_logger().info("config file", file_path=file_path)
            get_logger().debug("config details", **related.to_dict(config))
        else:
            config = cls()
            # BUG FIX: message was a double negative ("no config file not
            # found"); this branch runs when no config file was found.
            get_logger().info("no config file found", paths=paths)

        return config

    @classmethod
    def find_file_path(cls, paths):
        """Return valid 'rigor.yml' file in the paths or parents of paths."""
        for path in paths:
            path = os.path.abspath(path)
            file_path = os.path.join(path, const.RIGOR_YML)
            reached_top = path == "/"

            # ascend until finding a rigor.yml file or reaching the top.
            # BUG FIX: the original rebuilt file_path for the directory it
            # had just tested before ascending, re-checking it needlessly;
            # ascend first, then build the candidate path.
            while not os.path.exists(file_path) and not reached_top:
                path = os.path.dirname(path)
                file_path = os.path.join(path, const.RIGOR_YML)
                reached_top = path == "/"

            if os.path.exists(file_path):
                return file_path

    @classmethod
    def loads(cls, content, file_path=None):
        """Load a YAML string into a Config object.

        Root-level settings are deep-copied into each profile, profile
        overrides are layered on top, and RIGOR_* environment variables are
        exposed for evaluation under the `env` namespace.
        """
        vals = related.from_yaml(content, file_path=file_path,
                                 object_pairs_hook=dict)

        # environment namespace (RIGOR_)
        env_ns = Namespace(env={
            k[6:]: v
            for k, v in os.environ.items()
            if k.startswith("RIGOR_")
        })

        # pop profiles and file_path from root config
        profiles = vals.pop("profiles", {})
        file_path = vals.pop("file_path")

        # iterate and construct profile sub-dictionaries with root info
        for name, profile in profiles.items():
            from_root_profile = copy.deepcopy(vals)
            profile = utils.nested_update(from_root_profile, profile)
            eval_update_ns(profile, env_ns)
            profiles[name] = profile

        # construct root config profile
        vals["name"] = "__root__"
        vals["file_path"] = file_path
        vals["profiles"] = profiles
        eval_update_ns(vals, env_ns)

        return related.to_model(cls, vals)
class ProblemSet:
    """Stores all question metadata and defaults"""
    uuid = related.UUIDField(required=False)
    title = related.StringField('')
    questions = related.MappingField(ExamQuestionMeta, 'id', required=False)
    question_defaults = related.ChildField(ExamQuestionMeta,
                                           ExamQuestionMeta.from_defaults(),
                                           required=False)

    def create_question(self, **kwargs) -> ExamQuestionMeta:
        """Creates a new Question from defaults"""
        return ExamQuestionMeta.from_defaults(self.question_defaults.__dict__,
                                              kwargs)

    def insert_question(self, allow_edit: bool, id: str,
                        **kwargs) -> ExamQuestionMeta:
        """Insert the question `id`, or merge-update it when allow_edit.

        Raises:
            KeyError: if `id` already exists and allow_edit is False.
        """
        found_question = self.find_question(id)
        if found_question:
            if not allow_edit:
                raise KeyError("Question with specified `id` already exists")
            # merge the existing question's fields with the overrides
            sum_dict = combine_dictionaries(found_question.__dict__, kwargs)
            new_question = self.create_question(**sum_dict)
        else:
            new_question = self.create_question(**kwargs)

        self.questions[id] = new_question
        return new_question

    def find_question(self, id: str) -> ExamQuestionMeta:
        """
        Returns the question with specified `id` if it exists; otherwise None
        """
        return self.questions.get(id, None)

    def generate_exam(self, num_questions: int, description: str,
                      title: str = None) -> Exam:
        """
        Creates an Exam instance from randomly selected questions and answers

        Args:
            num_questions: the number of questions to include
            description: exam description shown to user
            title: short exam title
        """
        # sample questions weighted by their declared likelihood
        questions_meta = weighted_random(self.questions.items(),
                                         lambda q: q[1].likelihood,
                                         num_questions)
        questions = [
            ExamQuestion.from_meta(id, meta) for id, meta in questions_meta
        ]
        now = datetime.now()
        return Exam(meta_uuid=self.uuid,
                    generated_at=now,
                    title=title or self.title,
                    questions=questions,
                    description=description)

    def ensure_exam_compatibility(self, exam: Exam) -> None:
        """
        Checks whether the `exam` was created from this problem set.

        Raises SchemeError: if the `exam` isn't compatible
        """
        def fail(reason):
            # BUG FIX: the message formatted `self.title` twice; the second
            # placeholder must show the exam's own title.
            raise SchemeError(
                f"Problem set `{self.title}` does not match the exam "
                f"`{exam.title}` ({reason})"
            )

        if self.uuid != exam.meta_uuid:
            fail('UUID')

        # check for missing questions
        for question in exam.questions:
            if self.find_question(question.id) is None:
                fail(f"missing question: {question.id}")

    def save(self, path: str) -> None:
        """
        Saves this object instance as YAML to the specified path
        """
        self.validate()
        with open(path, "w") as file:
            related.to_yaml(self, file,
                            suppress_empty_values=True,
                            suppress_map_key_values=True)

    def validate(self):
        """Validates object state"""
        for key, question in self.questions.items():
            question.validate()

    @staticmethod
    def from_file(path: str):
        """Load, validate and return a ProblemSet from a YAML file."""
        with open(path, "r") as file:
            obj = related.from_yaml(file, ProblemSet)
            obj.validate()
            return obj