Example #1
0
def generateReport(tableGenerator):
    """Build a PDF report showcasing every combination of table styling options.

    Iterates over all combinations of ``useBooktabs`` x ``horizontalBorder`` x
    ``verticalBorder`` and embeds one generated table per combination, grouped
    into nested sections.

    :param tableGenerator: callable accepting the keyword arguments
        ``verticalBorder``, ``horizontalBorder`` and ``useBooktabs`` and
        returning a report block (e.g. a table) to embed.
    :return: a ``reporting.ReportPDF`` with one subsection per combination.
    """
    report = reporting.ReportPDF()
    report.add(reporting.color_scheme_gray_light.toBlockLatex())

    verticalBorder_list = [0, 1, 2]
    horizontalBorder_list = [0, 1, 2]
    useBooktabs_list = [False, True]

    for ub in useBooktabs_list:
        sec1 = reporting.SectionRelative("useBooktabs={0}".format(ub))
        report.add(sec1)
        for hb in horizontalBorder_list:
            sec2 = reporting.SectionRelative("horizontalBorder={0}".format(hb))
            sec1.add(sec2)
            report.add(reporting.BlockLatex(r"\bigskip\bigskip"))
            for vb in verticalBorder_list:
                # Fix: the original passed (ub, hb, vb) and selected only
                # index {2}; pass vb alone (identical rendered text).
                subsec = reporting.SectionRelative(
                    "verticalBorder={0}".format(vb))
                subsec.add(
                    tableGenerator(verticalBorder=vb,
                                   horizontalBorder=hb,
                                   useBooktabs=ub))
                subsec.add(reporting.BlockLatex(r"\bigskip"))
                sec2.add(subsec)
    return report
Example #2
0
def prepare_report_for_dims(props,
                            dim_rows,
                            dim_cols,
                            sects,
                            fname,
                            exp_prefix,
                            print_status_matrix=True,
                            paperwidth=75,
                            include_all_row=True,
                            dim_cols_listings=None):
    """Create a LaTeX report of the results, where in each table data are
    presented along the same dimensions.

    :param props: list of property dicts, one per experiment run.
    :param dim_rows: Dim describing table rows.
    :param dim_cols: Dim describing table columns.
    :param sects: iterable of (title, desc, folders, subs, figures) tuples.
    :param fname: output .tex file name (compiled inside "results/").
    :param exp_prefix: experiment prefix forwarded to section creation.
    :param print_status_matrix: if True, print and save a run-status matrix.
    :param paperwidth: page width in cm of the generated PDF.
    :param include_all_row: if True, append dim_true as an aggregate row.
    :param dim_cols_listings: optional Dim; when given, save per-config
        listings of the props.
    """
    report = reporting.ReportPDF(
        geometry_params="[paperwidth={0}cm, paperheight=40cm, margin=0.3cm]".
        format(paperwidth))

    if include_all_row:
        # NOTE(review): `+=` may mutate the caller's dim_rows in place if Dim
        # defines __iadd__ — confirm against the Dim implementation.
        dim_rows += dim_true

    latex_sects = []
    for title, desc, folders, subs, figures in sects:
        if print_status_matrix:
            # NOTE: d does not depend on the loop variables, so with several
            # sections the identical matrix is recomputed and re-saved.
            d = dim_rows * dim_cols

            matrix = produce_status_matrix(d, props)
            print("\n****** Status matrix:")
            print(matrix + "\n")
            print("Saving status matrix to file: {0}".format(STATUS_FILE_NAME))
            utils.save_to_file(STATUS_FILE_NAME, matrix)

        if dim_cols_listings is not None:
            save_listings(props, dim_rows, dim_cols_listings)

        subsects = []
        for fun, args in subs:
            if args[0] is None:  # no dimensions for rows, take benchmarks as the default
                args[0] = dim_rows
            args2 = [props] + args
            subsects.append(fun(*args2))

        s = create_section_and_plots(title, desc, props, subsects, figures,
                                     exp_prefix)
        latex_sects.append(s)

    for s in latex_sects:
        if s is not None:
            report.add(s)
    print("\n\nGenerating PDF report ...")
    # Fix: restore the working directory even if compilation raises; the
    # original left the process chdir'ed into results/ on error.
    cwd = os.getcwd()
    os.chdir("results/")
    try:
        report.save_and_compile(fname)
    finally:
        os.chdir(cwd)
Example #3
0
if __name__ == "__main__":
    # Result folders of experiment 3, including all fix/variant reruns.
    exp3_folders = ["exp3", "exp3_fix1", "exp3_fix2", "exp3_fix3", 'rsconf', "exp3gpr",
                    "exp3gpr_fix1", "exp3gpr_fix2", "exp3gpr_fix3", "exp3gpr_fix4", "exp3formal"]
    exp3_title = "Experiments for parametrized CDGP (stop: number of iterations)"
    exp3_desc = r"""
    Important information:
    \begin{itemize}
    \item All configurations, unless specified otherwise, has \textbf{population size 500}, and \textbf{number of iterations 100}.
    \item GPR uses range [-100, 100] and a population size 1000.
    \item SteadyState configurations has population\_size * 100 (number of iterations), so that the total number
     of generated solutions is the same.
    \item SteadyState configurations use always Tournament ($k=7$) deselection. Selection may be Tournament ($k=7$) or Lexicase.
    \end{itemize}
    """

    # print_time_bounds_for_benchmarks(props_expEvalsFINAL)

    # Assemble the LaTeX report for the experiment results and compile it.
    report = reporting.ReportPDF(geometry_params="[paperwidth=75cm, paperheight=40cm, margin=0.3cm]")
    sections = [
        create_section_with_results(exp3_title, exp3_desc, exp3_folders, numRuns=10),
    ]
    for section in sections:
        if section is not None:
            report.add(section)
    print("\n\nGenerating PDF report ...")
    report.save_and_compile("cdgp_results2.tex")
Example #4
0
def prepare_report(sects,
                   filename,
                   exp_prefix,
                   simple_bench_names=True,
                   print_status_matrix=True,
                   reuse_props=False,
                   paperwidth=75,
                   include_all_row=True,
                   dim_cols_listings=None):
    """Create a LaTeX report of the results.

    :param sects: iterable of (title, desc, folders, subs, figures) tuples.
    :param filename: output .tex file name (compiled inside "results/").
    :param exp_prefix: experiment prefix forwarded to section creation.
    :param simple_bench_names: if True, use simplified benchmark names.
    :param print_status_matrix: if True, print and save a run-status matrix.
    :param reuse_props: if True, reuse props loaded by a previous call.
    :param paperwidth: page width in cm of the generated PDF.
    :param include_all_row: if True, append dim_true as an aggregate row.
    :param dim_cols_listings: optional Dim; when given, save per-config
        listings of the props.
    """
    global _prev_props  # used in case reuse_props was set to True
    # Runs whose test MSE exceeds this threshold are flagged as suspicious.
    mse_alert_threshold = 2432971527315918274461803655258245399.0
    # Fix: raw string — the original non-raw literal contained the invalid
    # escape "\d" (deprecated, a future SyntaxError); content is unchanged.
    user_declarations = r"""\definecolor{darkred}{rgb}{0.56, 0.05, 0.0}
\definecolor{darkgreen}{rgb}{0.0, 0.5, 0.0}
\definecolor{darkblue}{rgb}{0.0, 0.0, 0.55}
\definecolor{darkorange}{rgb}{0.93, 0.53, 0.18}"""
    report = reporting.ReportPDF(
        geometry_params="[paperwidth={0}cm, paperheight=40cm, margin=0.3cm]".
        format(paperwidth),
        packages=["pbox", "makecell"],
        user_declarations=user_declarations)
    latex_sects = []
    for title, desc, folders, subs, figures in sects:
        print("\nLoading props for: " + title)
        print("Scanned folders:")
        for f in folders:
            print(f)

        # Load props (reuse the previously loaded ones when requested)
        if reuse_props:
            props = _prev_props
        else:
            props = load_correct_props(folders)
            _prev_props = props

        print("\nFiltered Info:")
        for p in props:
            if float(p["result.best.testMSE"]) > mse_alert_threshold:
                print("file: {0}".format(p["thisFileName"]))

        # Automatically detect benchmarks used
        dim_benchmarks = get_benchmarks_from_props(
            props, simple_names=simple_bench_names)

        if print_status_matrix:
            d = dim_benchmarks * (dim_methodGP * dim_sel * dim_evoMode +
                                  (dim_methodCDGP + dim_methodCDGPprops) *
                                  dim_sel * dim_evoMode * dim_testsRatio)

            matrix = produce_status_matrix(d, props)
            print("\n****** Status matrix:")
            print(matrix + "\n")
            print("Saving status matrix to file: {0}".format(STATUS_FILE_NAME))
            utils.save_to_file(STATUS_FILE_NAME, matrix)

        dim_rows = dim_benchmarks  #.sort()
        if include_all_row:
            dim_rows += dim_true

        if dim_cols_listings is not None:
            save_listings(props, dim_rows, dim_cols_listings)

        subsects = []
        for fun, args in subs:
            if args[0] is None:  # no dimensions for rows, take benchmarks as the default
                args[0] = dim_rows
            args2 = [props] + args
            subsects.append(fun(*args2))

        s = create_section(title, desc, subsects, figures, exp_prefix)
        latex_sects.append(s)

    for s in latex_sects:
        if s is not None:
            report.add(s)
    print("\n\nGenerating PDF report ...")
    # Fix: restore the working directory even if compilation raises; the
    # original left the process chdir'ed into results/ on error.
    cwd = os.getcwd()
    os.chdir("results/")
    try:
        report.save_and_compile(filename)
    finally:
        os.chdir(cwd)
Example #5
0
def prepare_report(sects,
                   fname,
                   exp_prefix,
                   simple_bench_names=True,
                   print_status_matrix=True,
                   reuse_props=False,
                   paperwidth=75,
                   include_all_row=True,
                   dim_cols_listings=None):
    """Create a LaTeX report of the results.

    :param sects: iterable of (title, desc, folders, subs, figures) tuples.
    :param fname: output .tex file name (compiled inside "results/").
    :param exp_prefix: experiment prefix forwarded to section creation.
    :param simple_bench_names: if True, use simplified benchmark names.
    :param print_status_matrix: if True, print and save a run-status matrix.
    :param reuse_props: if True, reuse props loaded by a previous call.
    :param paperwidth: page width in cm of the generated PDF.
    :param include_all_row: if True, append dim_true as an aggregate row.
    :param dim_cols_listings: optional Dim; when given, save per-config
        listings of the props.
    """
    global _prev_props  # used in case reuse_props was set to True
    report = reporting.ReportPDF(
        geometry_params="[paperwidth={0}cm, paperheight=40cm, margin=0.3cm]".
        format(paperwidth))
    latex_sects = []
    for title, desc, folders, subs, figures in sects:
        print("\nLoading props for: " + title)
        print("Scanned folders:")
        for f in folders:
            print(f)

        # Load props (reuse the previously loaded ones when requested)
        if reuse_props:
            props = _prev_props
        else:
            props = load_correct_props(folders)
            _prev_props = props

        print("\nFiltered Info:")
        for p in props:
            # Flag runs that never produced a verification decision.
            if "result.best.verificationDecision" not in p:
                print("file: {0}".format(p["thisFileName"]))

        # Automatically detect benchmarks used
        dim_benchmarks = get_benchmarks_from_props(
            props, simple_names=simple_bench_names)

        if print_status_matrix:
            d = dim_benchmarks * (
                dim_methodGP * dim_sel * dim_evoMode +
                dim_methodCDGP * dim_sel * dim_evoMode * dim_testsRatio)

            matrix = produce_status_matrix(d, props)
            print("\n****** Status matrix:")
            print(matrix + "\n")
            print("Saving status matrix to file: {0}".format(STATUS_FILE_NAME))
            utils.save_to_file(STATUS_FILE_NAME, matrix)

        dim_rows = dim_benchmarks  #.sort()
        if include_all_row:
            dim_rows += dim_true

        if dim_cols_listings is not None:
            save_listings(props, dim_rows, dim_cols_listings)

        subsects = []
        for fun, args in subs:
            if args[0] is None:  # no dimensions for rows, take benchmarks as the default
                args[0] = dim_rows
            args2 = [props] + args
            subsects.append(fun(*args2))

        s = create_section_and_plots(title, desc, props, subsects, figures,
                                     exp_prefix)
        latex_sects.append(s)

    for s in latex_sects:
        if s is not None:
            report.add(s)
    print("\n\nGenerating PDF report ...")
    # Fix: restore the working directory even if compilation raises; the
    # original left the process chdir'ed into results/ on error.
    cwd = os.getcwd()
    os.chdir("results/")
    try:
        report.save_and_compile(fname)
    finally:
        os.chdir(cwd)
Example #6
0
def print_table(props, dim_rows, dim_cols):
    """Print a set of LaTeX result tables (run status, optimal-solution
    counts, average fitness, average runtime, solver-unknown ratio) and
    compile them all into an "eps_results.tex" PDF report."""
    def status_fun(filtered):
        # Number of correctly finished runs in this cell.
        return str(len(filtered))
    status_text = printer.latex_table(props, dim_rows, dim_cols, status_fun,
                                      latexize_underscores=False)
    status_text = printer.table_color_map(status_text, 0.0, 50.0, 100.0)
    print(status_text)
    print("\n\n")

    def optimal_fun(filtered):
        if not filtered:
            return "-"
        return str(get_num_optimal(filtered))
    optimal_text = printer.latex_table(props, dim_rows, dim_cols, optimal_fun,
                                       latexize_underscores=False)
    optimal_text = printer.table_color_map(optimal_text, 0.0, 50.0, 100.0)
    print(optimal_text)
    print("\n\n")

    def fitness_fun(filtered):
        if not filtered:
            return "-"
        return str(round(get_stats_fitness(filtered)[0], 2))
    fitness_text = printer.latex_table(props, dim_rows, dim_cols, fitness_fun,
                                       latexize_underscores=False)
    fitness_text = printer.table_color_map(fitness_text, 0.0, 25.0, 50.0)
    print(fitness_text)
    print("\n\n")

    def runtime_fun(filtered):
        if not filtered:
            return "-"
        return str(round(get_stats_duration(filtered)[0], 1))
    runtime_text = printer.latex_table(props, dim_rows, dim_cols, runtime_fun,
                                       latexize_underscores=False)
    runtime_text = printer.table_color_map(runtime_text, 0.0, 1000, 10000)
    print(runtime_text)
    print("\n\n")

    def unknowns_fun(filtered):
        if not filtered:
            return "-"
        total = get_sum(filtered, "result.stats.evaluatedSolver")
        unknown = get_sum(filtered, "result.stats.evaluatedSolverUnknown")
        timeout = get_sum(filtered, "result.stats.evaluatedSolverTimeout")
        if total > 0:
            # Fraction of solver calls that timed out or returned "unknown".
            return str(round(float(timeout + unknown) / float(total), 3))
        return "-"
    unknowns_text = printer.latex_table(props, dim_rows, dim_cols, unknowns_fun,
                                        latexize_underscores=False)
    unknowns_text = printer.table_color_map(unknowns_text, 0.0, 0.3, 0.6)
    print(unknowns_text)
    print("\n\n")

    # Assemble all tables into one report section and compile the PDF.
    report = reporting.ReportPDF()
    experiments = reporting.Section("Experiments", [])
    table_specs = [("Status (correctly finished processes)", status_text, reversed(reporting.color_scheme_red)),
                   ("Number of optimal solutions (max=100)", optimal_text, reporting.color_scheme_green),
                   ("Average fitness", fitness_text, reporting.color_scheme_green),
                   ("Average runtime", runtime_text, reporting.color_scheme_blue),
                   ("Ratio of unknowns", unknowns_text, reporting.color_scheme_yellow)]
    for title, table, scheme in table_specs:
        if isinstance(scheme, reporting.ColorScheme3):
            scheme = scheme.toBlockLatex()
        experiments.add(
            reporting.Subsection(title, [scheme, reporting.BlockLatex(table + "\n")]))
    report.add(experiments)
    report.save_and_compile("eps_results.tex")
Example #7
0
def prepare_report(sects,
                   fname,
                   use_bench_simple_names=True,
                   print_status_matrix=True,
                   reuse_props=False,
                   paperwidth=75,
                   include_all_row=True):
    """Creating nice LaTeX report of the results.

    :param sects: iterable of (title, desc, folders, subs, figures) tuples.
    :param fname: output .tex file name passed to report.save_and_compile.
    :param use_bench_simple_names: if True, replace benchmark captions with
        simplified names.
    :param print_status_matrix: if True, print and save a run-status matrix.
    :param reuse_props: if True, reuse props loaded by a previous call
        instead of reloading from folders.
    :param paperwidth: page width in cm of the generated PDF.
    :param include_all_row: if True, append dim_true as an aggregate row.
    """
    global _prev_props  # cache so reuse_props=True can skip reloading
    report = reporting.ReportPDF(
        geometry_params="[paperwidth={0}cm, paperheight=40cm, margin=0.3cm]".
        format(paperwidth))
    latex_sects = []
    for title, desc, folders, subs, figures in sects:
        print("\nLoading props for: " + title)
        print("Scanned folders:")
        for f in folders:
            print(f)

        # Load props (reuse the previously loaded ones when requested)
        if reuse_props:
            props = _prev_props
        else:
            props = load_correct_props(folders)
            _prev_props = props
        dim_benchmarks = Dim.from_dict(props, "benchmark")

        # Diagnostic dump for one benchmark of interest.
        print("\nFiltered Info:")
        for p in props:
            if p["method"] in {
                    "CDGP"
            } and p["benchmark"].endswith("resistance_par2_25.sl"):
                # print(p["thisFileName"] + "   --->  " + "{0}, best={1}".format(p["result.best"], p["result.best.mse"]))
                print("isOptimal: {0};  finalVer={3};  mse={1};  program={2}".
                      format(is_optimal_solution(p), p["result.best.mse"],
                             p["result.best"],
                             p["result.best.verificationDecision"]))
            # Print file names of certain config
            # if "fg_array_search_2" in p["benchmark"] and "searchAlgorithm" in p and\
            #    p["searchAlgorithm"] == "Lexicase" and p["method"] == "CDGP" and\
            #    p["CDGPtestsRatio"] == "0.0":
            #     print(p["thisFileName"] + "   --->  " + str(float(p["result.totalTimeSystem"]) / 1000.0))
            #     print("BEST: " + p["result.best.smtlib"])
            # Print bests
            # if p["method"] in {"GPR", "CDGP"} and is_optimal_solution(p) and\
            #    p["benchmark"] == "benchmarks/SLIA/cdgp_ecj1/initials.sl":
            #     print(p["thisFileName"] + "   --->  " + str(float(p["result.totalTimeSystem"]) / 1000.0))
            #     print("BEST: " + p["result.best.smtlib"])

        # Rebuild the benchmark dimension with simplified captions, keeping
        # the original caption in the "benchmark" attribute.
        if use_bench_simple_names:
            configs = [
                Config(simplify_benchmark_name(c.get_caption()),
                       c.filters[0][1],
                       benchmark=c.get_caption())
                for c in dim_benchmarks.configs
            ]
            dim_benchmarks = Dim(configs)
            dim_benchmarks.sort()
        if print_status_matrix:
            d = dim_benchmarks * dim_methodGP  * dim_sel * dim_evoMode +\
                dim_benchmarks * dim_methodCDGP * dim_sel * dim_evoMode * dim_testsRatio

            matrix = produce_status_matrix(d, props)
            print("\n****** Status matrix:")
            print(matrix + "\n")
            print("Saving status matrix to file: {0}".format(STATUS_FILE_NAME))
            # NOTE(review): plain save_to_file here, not utils.save_to_file as
            # in sibling variants — confirm both are in scope.
            save_to_file(STATUS_FILE_NAME, matrix)

        # NOTE(review): relies on Dim.sort() returning the Dim (unlike
        # list.sort, which returns None) — confirm in the Dim implementation.
        dim_rows = dim_benchmarks.sort()
        if include_all_row:
            dim_rows += dim_true
        subsects = []
        for fun, args in subs:
            if args[0] is None:  # no dimensions for rows, take benchmarks as the default
                args[0] = dim_rows
            args2 = [props] + args
            subsects.append(fun(*args2))

        s = create_section_and_plots(title, desc, props, subsects, figures)
        latex_sects.append(s)

    for s in latex_sects:
        if s is not None:
            report.add(s)
    print("\n\nGenerating PDF report ...")
    report.save_and_compile(fname)
Example #8
0
def prepare_report(sects,
                   fname,
                   use_bench_simple_names=True,
                   print_status_matrix=True,
                   reuse_props=False):
    """Creating nice LaTeX report of the results.

    :param sects: iterable of (title, desc, folders, subs, figures) tuples.
    :param fname: output .tex file name passed to report.save_and_compile.
    :param use_bench_simple_names: if True, replace benchmark captions using
        the benchmarks_simple_names mapping.
    :param print_status_matrix: if True, print a run-status matrix.
    :param reuse_props: if True, reuse props loaded by a previous call
        instead of reloading from folders.
    """
    global _prev_props  # cache so reuse_props=True can skip reloading
    report = reporting.ReportPDF(
        geometry_params="[paperwidth=75cm, paperheight=40cm, margin=0.3cm]")
    latex_sects = []
    for title, desc, folders, subs, figures in sects:
        print("\nLoading props for: " + title)
        print("Scanned folders:")
        for f in folders:
            print(f)

        # Load props (reuse the previously loaded ones when requested)
        if reuse_props:
            props = _prev_props
        else:
            props = load_correct_props(folders)
            _prev_props = props
        dim_benchmarks = Dim.from_dict(props, "benchmark")

        # Diagnostic dump: runs that apparently hit the generation limit.
        print("\nFiltered Info:")
        for p in props:
            if p["method"] in {"GPR", "CDGP"} and p["searchAlgorithm"] in {"GP", "Lexicase"} and\
                    int(p["result.best.generation"]) >= 99990:
                print(p["thisFileName"] + "   --->  " +
                      str(p["result.best.generation"]))
            # Print file names of certain config
            # if "fg_array_search_2" in p["benchmark"] and "searchAlgorithm" in p and\
            #    p["searchAlgorithm"] == "Lexicase" and p["method"] == "CDGP" and\
            #    p["CDGPtestsRatio"] == "0.0":
            #     print(p["thisFileName"] + "   --->  " + str(float(p["result.totalTimeSystem"]) / 1000.0))
            #     print("BEST: " + p["result.best.smtlib"])
            # Print bests
            # if p["method"] in {"GPR", "CDGP"} and is_optimal_solution(p) and\
            #    p["benchmark"] == "benchmarks/SLIA/cdgp_ecj1/initials.sl":
            #     print(p["thisFileName"] + "   --->  " + str(float(p["result.totalTimeSystem"]) / 1000.0))
            #     print("BEST: " + p["result.best.smtlib"])

        # Rebuild the benchmark dimension with simplified captions, keeping
        # the original caption in the "benchmark" attribute.
        if use_bench_simple_names:
            configs = [
                Config(benchmarks_simple_names.get(c.get_caption(),
                                                   c.get_caption()),
                       c.filters[0][1],
                       benchmark=c.get_caption())
                for c in dim_benchmarks.configs
            ]
            dim_benchmarks = Dim(configs)
            dim_benchmarks.sort()
        if print_status_matrix:
            # Corrections for whole non-formal LIA
            #d = dim_benchmarks * dim_methodCDGP * dim_testsRatio * dim_sa +\
            #    dim_benchmarks * dim_methodGPR * dim_testsRatioGPR * dim_sa

            # Corrections for Lexicase
            d = dim_benchmarks * dim_methodCDGP * dim_testsRatio * dim_lexicase +\
                dim_benchmarks * dim_methodGPR * dim_testsRatioGPR * dim_lexicase

            # Corrections for Strings
            # d = dim_benchmarks * dim_methodCDGP * dim_testsRatio * dim_sa

            matrix = produce_status_matrix(d, props)
            print("\n****** Status matrix:")
            print(matrix + "\n")

        # NOTE(review): relies on Dim.sort() returning the Dim (unlike
        # list.sort, which returns None) — confirm in the Dim implementation.
        dim_rows = dim_benchmarks.sort() + dim_true
        subsects = []
        for fun, args in subs:
            if args[0] is None:  # no dimensions for rows, take benchmarks as the default
                args[0] = dim_rows
            args2 = [props] + args
            subsects.append(fun(*args2))

        s = create_section_and_plots(title, desc, props, subsects, figures)
        latex_sects.append(s)

    for s in latex_sects:
        if s is not None:
            report.add(s)
    print("\n\nGenerating PDF report ...")
    report.save_and_compile(fname)