Example #1
def main():
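    # Assumed context not shown in this snippet: `import networkx as nx`,
    # `GrFN` as an alias for delphi's GroundedFunctionNetwork, and the
    # project-local helpers `get_basename`, `pairwise_LCAs`, and
    # `lambda_levenshtein_dist`.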
    PNO_GrFN = GrFN.from_fortran_file(
        "../tests/data/program_analysis/PETPNO.for"
    )
    PEN_GrFN = GrFN.from_fortran_file(
        "../tests/data/program_analysis/PETPEN.for"
    )

    # Use basenames for variable comparison because the two GrFNs will have those in common
    PNO_nodes = [
        d["basename"]
        for n, d in PNO_GrFN.nodes(data=True)
        if d["type"] == "variable"
    ]
    PEN_nodes = [
        d["basename"]
        for n, d in PEN_GrFN.nodes(data=True)
        if d["type"] == "variable"
    ]

    shared_nodes = list(set(PNO_nodes).intersection(set(PEN_nodes)))
    # Make a map so we can access the original variable names from the basenames
    PNO_input_map = {get_basename(node): node for node in PNO_GrFN.inputs}
    PEN_input_map = {get_basename(node): node for node in PEN_GrFN.inputs}

    PNO_inputs = list(PNO_input_map.keys())
    PEN_inputs = list(PEN_input_map.keys())

    # Reverse the graph so that LCA analysis will work
    mock_PNO_GrFN = nx.DiGraph()
    mock_PNO_GrFN.add_edges_from([(dst, src) for src, dst in PNO_GrFN.edges])

    mock_PEN_GrFN = nx.DiGraph()
    mock_PEN_GrFN.add_edges_from([(dst, src) for src, dst in PEN_GrFN.edges])

    # Restrict the shared basenames to those that are model inputs
    shared_input_nodes = list(set(PNO_inputs).intersection(set(shared_nodes)))

    for i, v1 in enumerate(shared_input_nodes):
        for v2 in shared_input_nodes[i + 1 :]:
            (L1, L2) = pairwise_LCAs(
                mock_PNO_GrFN,
                mock_PEN_GrFN,
                PNO_input_map,
                PEN_input_map,
                v1,
                v2,
            )
            if L1 is None and L2 is None:
                print(f"SHARED: {v1}, {v2}\t\tFAILED\n\n")
                continue
            ((L1, L2), LD) = lambda_levenshtein_dist(
                PNO_GrFN, PEN_GrFN, L1, L2
            )
            print(f"SHARED: {v1}, {v2}\tLev Dist: {LD}")
            print(f"LAMBDAS:\n\t{v1}: {L1}\n\t{v2}: {L2}\n\n")
Example #2
def test_petasce_torch_execution():
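    # Assumes `importlib`, `json`, `numpy as np`, and `torch` are imported and
    # that `data_dir` points at the directory containing the test JSON.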
    lambdas = importlib.__import__("PETASCE_simple_torch_lambdas")
    pgm = json.load(open(data_dir + "PETASCE_simple_torch.json", "r"))
    G = GroundedFunctionNetwork.from_dict(pgm, lambdas)

    N = 100
    samples = {
        "petasce::doy_0": np.random.randint(1, 100, N),
        "petasce::meevp_0": np.where(np.random.rand(N) >= 0.5, 'A', 'W'),
        "petasce::msalb_0": np.random.uniform(0, 1, N),
        "petasce::srad_0": np.random.uniform(1, 30, N),
        "petasce::tmax_0": np.random.uniform(-30, 60, N),
        "petasce::tmin_0": np.random.uniform(-30, 60, N),
        "petasce::xhlai_0": np.random.uniform(0, 20, N),
        "petasce::tdew_0": np.random.uniform(-30, 60, N),
        "petasce::windht_0": np.random.uniform(0, 10, N),
        "petasce::windrun_0": np.random.uniform(0, 900, N),
        "petasce::xlat_0": np.random.uniform(0, 90, N),
        "petasce::xelev_0": np.random.uniform(0, 6000, N),
        "petasce::canht_0": np.random.uniform(0.001, 3, N),
    }

    values = {
        k: torch.tensor(v, dtype=torch.double) if v.dtype != "<U1" else v
        for k, v in samples.items()
    }

    res = G.run(values, torch_size=N)
    assert res.size()[0] == N
Example #3
def test_PETPT_with_torch():
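    # Assumes `importlib` and `json` are imported, along with the
    # FAST_analysis and RBD_FAST_analysis sensitivity routines used below.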
    lambdas = importlib.__import__("PETPT_torch_lambdas")
    pgm = json.load(open("tests/data/program_analysis/PETPT.json", "r"))
    G = GroundedFunctionNetwork.from_dict(pgm, lambdas)

    args = G.inputs
    bounds = {
        "petpt::msalb_-1": [0, 1],
        "petpt::srad_-1": [1, 20],
        "petpt::tmax_-1": [-30, 60],
        "petpt::tmin_-1": [-30, 60],
        "petpt::xhlai_-1": [0, 20],
    }

    problem = {
        'num_vars': len(args),
        'names': args,
        'bounds': [bounds[arg] for arg in args]
    }

    Ns = 1000                      # TODO: Khan, experiment with this value
    Si = G.sobol_analysis(Ns, problem, use_torch=True)
    assert len(Si.keys()) == 6
    assert len(Si["S1"]) == len(args)

    Si = FAST_analysis(G, Ns, problem)
    assert len(Si.keys()) == 3
    assert len(Si["S1"]) == len(args)

    Si = RBD_FAST_analysis(G, Ns, problem)
    assert len(Si.keys()) == 2
    assert len(Si["S1"]) == len(args)
Example #4
def modelAnalysis():
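    # Flask view; assumes `render_template`, the `to_cyjs_cag`/`to_cyjs_fib`
    # converters, and the THIS_FOLDER and TMPDIR constants are defined
    # elsewhere in the module.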
    PETPT_GrFN = GroundedFunctionNetwork.from_fortran_file(
        THIS_FOLDER + "/static/example_programs/petPT.f", tmpdir=TMPDIR
    )
    PETASCE_GrFN = GroundedFunctionNetwork.from_fortran_file(
        THIS_FOLDER + "/static/example_programs/petASCE.f", tmpdir=TMPDIR
    )

    PETPT_FIB = PETPT_GrFN.to_FIB(PETASCE_GrFN)
    PETASCE_FIB = PETASCE_GrFN.to_FIB(PETPT_GrFN)

    asce_inputs = {
        "petasce::msalb_-1": 0.5,
        "petasce::srad_-1": 15,
        "petasce::tmax_-1": 10,
        "petasce::tmin_-1": -10,
        "petasce::xhlai_-1": 10,
    }
    asce_covers = {
        "petasce::canht_-1": 2,
        "petasce::meevp_-1": "A",
        "petasce::cht_0": 0.001,
        "petasce::cn_4": 1600.0,
        "petasce::cd_4": 0.38,
        "petasce::rso_0": 0.062320,
        "petasce::ea_0": 7007.82,
        "petasce::wind2m_0": 3.5,
        "petasce::psycon_0": 0.0665,
        "petasce::wnd_0": 3.5,
    }
    # graphJSON, layout = get_fib_surface_plot(PETASCE_FIB, asce_covers, 10)
    return render_template(
        "modelAnalysis.html",
        petpt_elementsJSON=to_cyjs_cag(PETPT_GrFN.to_CAG()),
        petasce_elementsJSON=to_cyjs_cag(PETASCE_GrFN.to_CAG()),
        fib_elementsJSON=to_cyjs_fib(PETASCE_FIB.to_CAG()),
        # layout=layout,
        # graphJSON=graphJSON,
    )
Example #5
def main():
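    # Assumes `sys` and `importlib` are imported; the generated
    # `SIR-simple_lambdas` module is loaded from `data_dir`, and
    # `to_wiring_diagram`/`write_files` are project-local helpers.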
    data_dir = "scripts/SIR_Demo/"
    sys.path.insert(0, data_dir)
    model_file = "SIR-simple"
    json_file = f"{model_file}_GrFN.json"
    lambdas = importlib.__import__(f"{model_file}_lambdas")

    grfn = GroundedFunctionNetwork.from_json_and_lambdas(json_file, lambdas)
    agraph = grfn.to_agraph()
    agraph.draw('SIR-simple.pdf', prog='dot')
    (D, I, S, F) = to_wiring_diagram(grfn, lambdas)
    write_files(D, I, S, F, model_file)
Example #6
def sensitivity_visualizer():
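    # Assumes `numpy as np`, `pandas as pd`, and the project classes GrFN,
    # SensitivityAnalyzer, and SensitivityVisualizer are imported; the `yield`
    # suggests this runs as a pytest fixture.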

    N = [10, 100, 1000, 10000]
    tG = GrFN.from_fortran_file("tests/data/program_analysis/PETPT.for")
    var_bounds = {
        "tmax": [-30.0, 60.0],
        "tmin": [-30.0, 60.0],
        "srad": [0.0, 30.0],
        "msalb": [0.0, 1.0],
        "xhlai": [0.0, 20.0],
    }

    sensitivity_indices_lst = []

    var_names = var_bounds.keys()

    for i in range(len(N)):
        (Si, timing_data) = SensitivityAnalyzer.Si_from_Sobol(
            N[i], tG, var_bounds, save_time=True
        )
        (sample_time, exec_time, analysis_time) = timing_data
        sobol_dict = Si.__dict__
        S1_dict = dict(zip(var_names, sobol_dict["O1_indices"].tolist()))

        # Mirror the upper triangle of the second-order indices into the lower triangle
        for k in range(sobol_dict["O2_indices"].shape[0]):
            for l in range(k + 1, sobol_dict["O2_indices"].shape[1]):
                sobol_dict["O2_indices"][l][k] = sobol_dict["O2_indices"][k][l]

        sobol_dict["O2_indices"] = np.nan_to_num(
            sobol_dict["O2_indices"]
        ).tolist()

        S2_dataframe = pd.DataFrame(
            data=sobol_dict["O2_indices"], columns=var_names
        )

        sobol_dict_visualizer = {
            "sample size": np.log10(N[i]),
            "S1": S1_dict,
            "S2": S2_dataframe,
            "sampling time": sample_time,
            "execution time": exec_time,
            "analysis time": analysis_time,
        }

        sensitivity_indices_lst.append(sobol_dict_visualizer)

    yield SensitivityVisualizer(sensitivity_indices_lst)
Example #7
def test_PETASCE_with_torch():
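    # Assumes `sys`, `importlib`, and `json` are imported; the keys of
    # `bounds` must cover every name in `tG.inputs`.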
    # Torch model
    sys.path.insert(0, "tests/data/GrFN")
    lambdas = importlib.__import__("PETASCE_simple_torch_lambdas")
    pgm = json.load(open("tests/data/GrFN/PETASCE_simple_torch.json", "r"))
    tG = GroundedFunctionNetwork.from_dict(pgm, lambdas)

    bounds = {
        "petasce::doy_0": [1, 365],
        "petasce::meevp_0": [0, 1],
        "petasce::msalb_0": [0, 1],
        "petasce::srad_0": [1, 30],
        "petasce::tmax_0": [-30, 60],
        "petasce::tmin_0": [-30, 60],
        "petasce::xhlai_0": [0, 20],
        "petasce::tdew_0": [-30, 60],
        "petasce::windht_0":
        [0.1,
         10],  # HACK: has a hole in 0 < x < 1 for petasce__assign__wind2m_1
        "petasce::windrun_0": [0, 900],
        "petasce::xlat_0": [3, 12],  # HACK: south sudan lats
        "petasce::xelev_0": [0, 6000],
        "petasce::canht_0": [0.001, 3],
    }

    type_info = {
        "petasce::doy_0": (int, list(range(1, 366))),
        "petasce::meevp_0": (str, ["A", "W"]),
        "petasce::msalb_0": (float, [0.0]),
        "petasce::srad_0": (float, [0.0]),
        "petasce::tmax_0": (float, [0.0]),
        "petasce::tmin_0": (float, [0.0]),
        "petasce::xhlai_0": (float, [0.0]),
        "petasce::tdew_0": (float, [0.0]),
        "petasce::windht_0": (float, [0.0]),
        "petasce::windrun_0": (float, [0.0]),
        "petasce::xlat_0": (float, [0.0]),
        "petasce::xelev_0": (float, [0.0]),
        "petasce::canht_0": (float, [0.0]),
    }

    args = tG.inputs
    problem = {
        'num_vars': len(args),
        'names': args,
        'bounds': [bounds[arg] for arg in args]
    }

    tSi = tG.sobol_analysis(1000, problem, var_types=type_info, use_torch=True)
    assert len(tSi["S1"]) == len(tG.inputs)
    assert len(tSi["S2"][0]) == len(tG.inputs)
Example #8
def process_text_and_code():
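    # Flask endpoint; assumes `request`/`jsonify`, `os`, `json`,
    # `subprocess as sp`, the SOURCE_FILES constant, and the
    # get_conf_file/make_link_tables helpers are available.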
    fortran_file = request.form["source_code"]
    basename = fortran_file[:-2]
    pdf_file = request.form["document"]
    conf_file = get_conf_file(pdf_file)

    fortran_path = os.path.join(SOURCE_FILES, "code", fortran_file)
    norm_json_path = os.path.join(SOURCE_FILES, "code", f"{basename}.json")
    if os.path.isfile(norm_json_path):
        os.remove(norm_json_path)

    GroundedFunctionNetwork.from_fortran_file(fortran_path)
    cur_dir = os.getcwd()
    os.chdir(os.path.join(os.environ["AUTOMATES_LOC"], "text_reading/"))
    sp.run([
        "sbt",
        "-Dconfig.file=" + os.path.join(SOURCE_FILES, "configs", conf_file),
        'runMain org.clulab.aske.automates.apps.ExtractAndAlign'
    ])
    os.chdir(cur_dir)
    tr_json_path = os.path.join(SOURCE_FILES, "models",
                                f"{basename}_with_groundings.json")
    norm_json_path = os.path.join(SOURCE_FILES, "models", f"{basename}.json")
    if os.path.isfile(norm_json_path):
        os.remove(norm_json_path)
    if os.path.isfile(tr_json_path):
        os.rename(tr_json_path, norm_json_path)
    grfn = json.load(open(norm_json_path, "r"))
    return jsonify({
        "link_data": {str(k): v
                      for k, v in make_link_tables(grfn).items()},
        "models": [
            f for f in os.listdir(os.path.join(SOURCE_FILES, "models"))
            if f.endswith(".json")
        ]
    })
Example #9
def processCode():
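    # Flask view; assumes MyForm, `uuid4`, `os`, the Fortran `preprocessor`,
    # and the get_grfn_surface_plot/to_cyjs_* helpers are imported, and that
    # the /tmp/automates/ directory already exists.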
    form = MyForm()
    code = form.source_code.data
    app.code = code
    if code == "":
        return render_template("index.html", form=form)
    lines = [
        line.replace("\r", "") + "\n"
        for line in code.split("\n")
        if line != ""
    ]

    # dir_name = str(uuid4())
    # os.mkdir(f"/tmp/automates/input_code/{dir_name}")
    # input_code_tmpfile = f"/tmp/automates/input_code/{dir_name}/{orig_file}.f"
    filename = f"input_code_{str(uuid4()).replace('-', '_')}"
    input_code_tmpfile = f"/tmp/automates/{filename}.f"
    with open(input_code_tmpfile, "w") as f:
        f.write(preprocessor.process(lines))

    lambdas = f"{filename}_lambdas"
    lambdas_path = f"/tmp/automates/{lambdas}.py"
    G = GroundedFunctionNetwork.from_fortran_file(input_code_tmpfile,
                                                  tmpdir="/tmp/automates/")

    graphJSON, layout = get_grfn_surface_plot(G)

    scopeTree_elementsJSON = to_cyjs_grfn(G)
    CAG = G.to_CAG()
    program_analysis_graph_elementsJSON = to_cyjs_cag(CAG)

    os.remove(input_code_tmpfile)
    os.remove(f"/tmp/automates/{lambdas}.py")

    return render_template(
        "index.html",
        form=form,
        code=app.code,
        scopeTree_elementsJSON=scopeTree_elementsJSON,
        graphJSON=graphJSON,
        layout=layout,
        program_analysis_graph_elementsJSON=program_analysis_graph_elementsJSON,
    )
Example #10
def processCode():
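    # Variant of the view above that invokes the `fortran.ofp.FrontEnd` parser
    # directly; assumes `subprocess as sp`, `xml.etree.ElementTree as ET`, and
    # the get_comments/translate/pyTranslate program-analysis modules.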
    form = MyForm()
    code = form.source_code.data
    app.code = code
    if code == "":
        return render_template("index.html", form=form)
    lines = [
        line.replace("\r", "") + "\n"
        for line in code.split("\n")
        if line != ""
    ]
    filename = f"input_code_{str(uuid4())}"
    input_code_tmpfile = f"/tmp/automates/{filename}.f"

    with open(input_code_tmpfile, "w") as f:
        f.write(preprocessor.process(lines))

    xml_string = sp.run(
        [
            "java",
            "fortran.ofp.FrontEnd",
            "--class",
            "fortran.ofp.XMLPrinter",
            "--verbosity",
            "0",
            input_code_tmpfile,
        ],
        stdout=sp.PIPE,
    ).stdout

    trees = [ET.fromstring(xml_string)]
    comments = get_comments.get_comments(input_code_tmpfile)
    outputDict = translate.XMLToJSONTranslator().analyze(trees, comments)
    pySrc = pyTranslate.create_python_source_list(outputDict)[0][0]

    lambdas = f"{filename}_lambdas"
    lambdas_path = f"/tmp/automates/{lambdas}.py"
    G = GroundedFunctionNetwork.from_python_src(pySrc,
                                                lambdas_path,
                                                f"{filename}.json",
                                                filename,
                                                save_file=False)

    graphJSON, layout = get_grfn_surface_plot(G)

    scopeTree_elementsJSON = to_cyjs_grfn(G)
    CAG = G.to_CAG()
    program_analysis_graph_elementsJSON = to_cyjs_cag(CAG)

    os.remove(input_code_tmpfile)
    os.remove(f"/tmp/automates/{lambdas}.py")

    return render_template(
        "index.html",
        form=form,
        code=app.code,
        python_code=highlight(pySrc, PYTHON_LEXER, PYTHON_FORMATTER),
        scopeTree_elementsJSON=scopeTree_elementsJSON,
        graphJSON=graphJSON,
        layout=layout,
        program_analysis_graph_elementsJSON=program_analysis_graph_elementsJSON,
    )
Example #11
from delphi.GrFN.networks import GroundedFunctionNetwork

# -----------------------------------------------------------------------------
#
# -----------------------------------------------------------------------------

print('Running demo_generate_grfn.py')

source_fortran_file = 'DiscreteSIR-noarrays.f'

print(f'    source_fortran_file: {source_fortran_file}')

grfn = GroundedFunctionNetwork.from_fortran_file(source_fortran_file)
agraph = grfn.to_agraph()
agraph.draw('graph.pdf', prog='dot')

# -----------------------------------------------------------------------------
Example #12
def sir_simple_grfn():
    return GroundedFunctionNetwork.from_fortran_file(
        "tests/data/program_analysis/SIR-simple.f")
Example #13
def petasce_grfn():
    return GroundedFunctionNetwork.from_fortran_file(
        "tests/data/program_analysis/PETASCE_simple.for")
Example #14
def petpt_grfn():
    return GroundedFunctionNetwork.from_fortran_file(
        "tests/data/program_analysis/PETPT.for")
Example #15
def crop_yield_grfn():
    return GroundedFunctionNetwork.from_fortran_file(
        "tests/data/program_analysis/crop_yield.f")
Example #16
def sir_gillespie_ms_grfn():
    return GroundedFunctionNetwork.from_fortran_file(
        "tests/data/program_analysis/SIR-Gillespie-MS.f")
Example #17
def crop_yield_grfn():
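    # Likely a pytest fixture: yields the GrFN, then cleans up the PDFs the
    # test renders; requires `import os`.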
    yield GroundedFunctionNetwork.from_fortran_file(
        "tests/data/program_analysis/crop_yield.f"
    )
    os.remove("crop_yield--GrFN.pdf")
    os.remove("crop_yield--CAG.pdf")
Example #18
def petasce_grfn():
    yield GroundedFunctionNetwork.from_fortran_file(
        "tests/data/program_analysis/PETASCE_simple.for"
    )
    os.remove("PETASCE--GrFN.pdf")
    os.remove("PETASCE--CAG.pdf")
Example #19
from delphi.GrFN.networks import GroundedFunctionNetwork

G = GroundedFunctionNetwork.from_fortran_src("""\
      subroutine relativistic_energy(e, m, c, p)

      implicit none

      real e, m, c, p
      e = sqrt((p**2)*(c**2) + (m**2)*(c**4))

      return
      end subroutine relativistic_energy""")
A = G.to_agraph()
A.draw("relativistic_energy_grfn.png", prog="dot")
Example #20
def sir_gillespie_ms_grfn():
    yield GroundedFunctionNetwork.from_fortran_file(
        "tests/data/program_analysis/SIR-Gillespie-MS.f"
    )
    os.remove("SIR-Gillespie_ms--CAG.pdf")
    os.remove("SIR-Gillespie_ms--GrFN.pdf")
Example #21
def sir_simple_grfn():
    yield GroundedFunctionNetwork.from_fortran_file(
        "tests/data/program_analysis/SIR-simple.f"
    )
    os.remove("SIR-simple--GrFN.pdf")
    os.remove("SIR-simple--CAG.pdf")