Example #1
    def make_simulation_docs(path: Path):
        instantiations = ReflectionHandler.all_nonabstract_subclasses(
            MotifInstantiationStrategy, "Instantiation",
            "motif_instantiation_strategy/")
        instantiations = [
            DocumentationFormat(inst,
                                inst.__name__.replace('Instantiation', ""),
                                DocumentationFormat.LEVELS[2])
            for inst in instantiations
        ]

        implanting_strategies = ReflectionHandler.all_nonabstract_subclasses(
            SignalImplantingStrategy, 'Implanting',
            'signal_implanting_strategy/')
        implanting_strategies = [
            DocumentationFormat(implanting,
                                implanting.__name__.replace('Implanting', ""),
                                DocumentationFormat.LEVELS[2])
            for implanting in implanting_strategies
        ]

        classes_to_document = [DocumentationFormat(Motif, Motif.__name__, DocumentationFormat.LEVELS[1])] + instantiations + \
                              [DocumentationFormat(Signal, Signal.__name__, DocumentationFormat.LEVELS[1])] + implanting_strategies + \
                              [DocumentationFormat(Implanting, Implanting.__name__, DocumentationFormat.LEVELS[1])]

        file_path = path / "simulation.rst"
        with file_path.open("w") as file:
            for doc_format in classes_to_document:
                write_class_docs(doc_format, file)
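The DocumentationFormat and write_class_docs helpers come from immuneML and are not shown here. As a rough orientation only, a minimal sketch of what such helpers could look like follows; the names ending in _sketch, the dataclass layout, and the exact RST output are assumptions, not the project's actual implementation.

from dataclasses import dataclass

# Hypothetical stand-ins, assuming DocumentationFormat is a simple container of
# (class, display name, RST underline character) and write_class_docs emits an
# RST heading followed by the class docstring.
@dataclass
class DocumentationFormatSketch:
    cls: type
    cls_name: str
    level_heading: str  # e.g. "=" or "-", as DocumentationFormat.LEVELS presumably holds

def write_class_docs_sketch(doc_format, file):
    file.write(f"{doc_format.cls_name}\n")
    file.write(f"{doc_format.level_heading * len(doc_format.cls_name)}\n\n")
    file.write((doc_format.cls.__doc__ or "") + "\n\n")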
Example #2
    def generate_docs(path: Path):
        inst_path = PathBuilder.build(path / "instructions")
        instructions = sorted(ReflectionHandler.all_nonabstract_subclasses(
            Instruction, "Instruction", subdirectory='instructions/'),
                              key=lambda x: x.__name__)

        inst_paths = {}

        for instruction in instructions:
            # strip the "Instruction" suffix (11 characters) from the class name
            instruction_name = instruction.__name__[:-11]
            if hasattr(InstructionParser,
                       f"make_{instruction_name.lower()}_docs"):
                fn = getattr(InstructionParser,
                             f"make_{instruction_name.lower()}_docs")
                file_path = fn(inst_path)
            else:
                file_path = InstructionParser.make_docs(
                    instruction, instruction_name, inst_path)

            inst_paths[instruction_name] = file_path

        inst_file_path = inst_path / "instructions.rst"
        with inst_file_path.open('w') as file:
            for key, item in inst_paths.items():
                lines = f"{key}\n---------------------------\n.. include:: {os.path.relpath(item, EnvironmentSettings.source_docs_path)}\n"
                file.write(lines)
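InstructionParser.make_docs is also not shown. A hedged sketch of what such a per-instruction docs writer could look like (returning the file path that ends up in inst_paths and is later referenced by the ".. include::" directive) might be:

# Hypothetical sketch only; the real InstructionParser.make_docs may differ.
def make_docs_sketch(instruction: type, instruction_name: str, inst_path: Path) -> Path:
    # write the instruction's docstring to its own .rst file and return that path
    file_path = inst_path / f"{instruction_name.lower()}.rst"
    with file_path.open("w") as file:
        file.write((instruction.__doc__ or "") + "\n")
    return file_path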
Example #3
    def make_reports_docs(path: Path):
        filename = "reports.rst"
        file_path = path / filename

        with file_path.open("w") as file:
            pass

        for report_type_class in [
                DataReport, EncodingReport, MLReport, TrainMLModelReport,
                MultiDatasetReport
        ]:
            with file_path.open("a") as file:
                doc_format = DocumentationFormat(
                    cls=report_type_class,
                    cls_name=f"**{report_type_class.get_title()}**",
                    level_heading=DocumentationFormat.LEVELS[1])
                write_class_docs(doc_format, file)

            subdir = DefaultParamsLoader.convert_to_snake_case(
                report_type_class.__name__) + "s"

            classes = ReflectionHandler.all_nonabstract_subclasses(
                report_type_class, "", f"reports/{subdir}/")
            make_docs(path, classes, filename, "", "a")
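DefaultParamsLoader.convert_to_snake_case presumably turns a report class name such as DataReport into data_report so the matching reports/data_reports/ subdirectory can be scanned. A generic CamelCase-to-snake_case conversion along those lines (a sketch, not the project's exact implementation) is:

import re

def camel_to_snake(name: str) -> str:
    # "DataReport" -> "data_report", "TrainMLModelReport" -> "train_ml_model_report"
    partial = re.sub(r"(.)([A-Z][a-z]+)", r"\1_\2", name)
    return re.sub(r"([a-z0-9])([A-Z])", r"\1_\2", partial).lower()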
Example #4
def parse_commandline_arguments(args):
    # discover the classes under ml_methods/ (the return value is unused; this
    # presumably warms ReflectionHandler's class cache for the subclass lookup below)
    ReflectionHandler.get_classes_by_partial_name("", "ml_methods/")
    ml_method_names = [
        cl.__name__
        for cl in ReflectionHandler.all_nonabstract_subclasses(MLMethod)
    ] + ["SimpleLogisticRegression"]

    parser = argparse.ArgumentParser(
        description="tool for building immuneML Galaxy YAML from arguments")
    parser.add_argument(
        "-o",
        "--output_path",
        required=True,
        help="Output location for the generated yaml file (directiory).")
    parser.add_argument(
        "-f",
        "--file_name",
        default="specs.yaml",
        help=
        "Output file name for the yaml file. Default name is 'specs.yaml' if not specified."
    )
    parser.add_argument(
        "-l",
        "--labels",
        required=True,
        help=
        "Which metadata labels should be predicted for the dataset (separated by comma)."
    )
    parser.add_argument(
        "-m",
        "--ml_methods",
        nargs="+",
        choices=ml_method_names,
        required=True,
        help="Which machine learning methods should be applied.")
    parser.add_argument("-t",
                        "--training_percentage",
                        type=float,
                        required=True,
                        help="The percentage of data used for training.")
    parser.add_argument(
        "-c",
        "--split_count",
        type=int,
        required=True,
        help=
        "The number of times to repeat the training process with a different random split of the data."
    )
    parser.add_argument(
        "-s",
        "--sequence_type",
        choices=["complete", "subsequence"],
        default=["subsequence"],
        nargs="+",
        help="Whether complete CDR3 sequences are used, or k-mer subsequences."
    )
    parser.add_argument(
        "-p",
        "--position_type",
        choices=["invariant", "positional"],
        nargs="+",
        help=
        "Whether IMGT-positional information is used for k-mers, or the k-mer positions are position-invariant."
    )
    parser.add_argument("-g",
                        "--gap_type",
                        choices=["gapped", "ungapped"],
                        nargs="+",
                        help="Whether the k-mers contain gaps.")
    parser.add_argument("-k", "--k", type=int, nargs="+", help="K-mer size.")
    parser.add_argument("-kl",
                        "--k_left",
                        type=int,
                        nargs="+",
                        help="Length before gap when k-mers are used.")
    parser.add_argument("-kr",
                        "--k_right",
                        type=int,
                        nargs="+",
                        help="Length after gap when k-mers are used.")
    parser.add_argument("-gi",
                        "--min_gap",
                        type=int,
                        nargs="+",
                        help="Minimal gap length when gapped k-mers are used.")
    parser.add_argument("-ga",
                        "--max_gap",
                        type=int,
                        nargs="+",
                        help="Maximal gap length when gapped k-mers are used.")
    parser.add_argument(
        "-r",
        "--reads",
        choices=[ReadsType.UNIQUE.value, ReadsType.ALL.value],
        nargs="+",
        default=[ReadsType.UNIQUE.value],
        help=
        "Whether k-mer counts should be scaled by unique clonotypes or all observed receptor sequences"
    )

    return parser.parse_args(args)
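A usage sketch for this parser; the output directory and label name below are made-up values, and only the required options are passed.

# Typically called with the real command line:
#   args = parse_commandline_arguments(sys.argv[1:])
# For illustration, an explicit argument list:
args = parse_commandline_arguments([
    "-o", "galaxy_output", "-l", "disease_status",
    "-m", "SimpleLogisticRegression", "-t", "0.7", "-c", "5",
])
print(args.output_path, args.labels, args.ml_methods, args.training_percentage)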
Example #5
def make_preprocessing_docs(path: Path):
    classes = ReflectionHandler.all_nonabstract_subclasses(
        Preprocessor, "", "preprocessing/")
    make_docs(path, classes, "preprocessings.rst", "")
Example #6
def make_ml_methods_docs(path: Path):
    classes = ReflectionHandler.all_nonabstract_subclasses(
        MLMethod, "", "ml_methods/")
    make_docs(path, classes, "ml_methods.rst", "")
Example #7
def make_dataset_docs(path: Path):
    import_classes = ReflectionHandler.all_nonabstract_subclasses(
        DataImport, "Import", "dataset_import/")
    make_docs(path, import_classes, "datasets.rst", "Import")
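Examples #5-#7 all delegate to a make_docs helper. A sketch of what it could look like, reusing DocumentationFormat and write_class_docs from Example #1, is shown below; the body is an assumption, not the project's actual code.

def make_docs_sketch(path: Path, classes, filename: str, drop_name_part: str, mode: str = "w"):
    # Write one RST section per concrete class, stripping an optional suffix
    # (e.g. "Import") from the displayed class name. Example #3 passes mode "a"
    # to append below the report-type headings it has already written.
    with (path / filename).open(mode) as file:
        for cls in sorted(classes, key=lambda c: c.__name__):
            cls_name = cls.__name__.replace(drop_name_part, "") if drop_name_part else cls.__name__
            write_class_docs(DocumentationFormat(cls, cls_name, DocumentationFormat.LEVELS[2]), file)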