Example #1
def processCode():
    form = MyForm()
    code = form.source_code.data
    app.code = code
    if code == "":
        return render_template("index.html", form=form)
    lines = [
        line.replace("\r", "") + "\n"
        for line in code.split("\n")
        if line != ""
    ]

    # dir_name = str(uuid4())
    # os.mkdir(f"/tmp/automates/input_code/{dir_name}")
    # input_code_tmpfile = f"/tmp/automates/input_code/{dir_name}/{orig_file}.f"
    filename = f"input_code_{str(uuid4()).replace('-', '_')}"
    input_code_tmpfile = f"/tmp/automates/{filename}.f"
    with open(input_code_tmpfile, "w") as f:
        f.write(preprocessor.process(lines))

    lambdas = f"{filename}_lambdas"
    lambdas_path = f"/tmp/automates/{lambdas}.py"
    G = GroundedFunctionNetwork.from_fortran_file(input_code_tmpfile,
                                                  tmpdir="/tmp/automates/")

    graphJSON, layout = get_grfn_surface_plot(G)

    scopeTree_elementsJSON = to_cyjs_grfn(G)
    CAG = G.to_CAG()
    program_analysis_graph_elementsJSON = to_cyjs_cag(CAG)

    os.remove(input_code_tmpfile)
    os.remove(f"/tmp/automates/{lambdas}.py")

    return render_template(
        "index.html",
        form=form,
        code=app.code,
        scopeTree_elementsJSON=scopeTree_elementsJSON,
        graphJSON=graphJSON,
        layout=layout,
        program_analysis_graph_elementsJSON=program_analysis_graph_elementsJSON,
    )
Example #2
def get_python_source(
        original_fortran_file) -> Tuple[str, str, str, str, Dict]:
    stem = original_fortran_file.stem
    preprocessed_fortran_file = stem + "_preprocessed.f"
    lambdas_filename = stem + "_lambdas.py"
    json_filename = stem + ".json"
    python_filename = stem + ".py"

    with open(original_fortran_file, "r") as f:
        inputLines = f.readlines()

    with open(preprocessed_fortran_file, "w") as f:
        f.write(preprocessor.process(inputLines))

    # Run the Open Fortran Parser (OFP) front end to produce an XML parse tree
    xml_string = sp.run(
        [
            "java",
            "fortran.ofp.FrontEnd",
            "--class",
            "fortran.ofp.XMLPrinter",
            "--verbosity",
            "0",
            preprocessed_fortran_file,
        ],
        stdout=sp.PIPE,
    ).stdout

    trees = [ET.fromstring(xml_string)]
    comments = get_comments.get_comments(preprocessed_fortran_file)
    os.remove(preprocessed_fortran_file)
    xml_to_json_translator = translate.XMLToJSONTranslator()
    mode_mapper_tree = ET.fromstring(xml_string)
    generator = mod_index_generator.moduleGenerator()
    mode_mapper_dict = generator.analyze(mode_mapper_tree)
    outputDict = xml_to_json_translator.analyze(trees, comments)
    pySrc = pyTranslate.create_python_source_list(outputDict)[0][0]
    return pySrc, lambdas_filename, json_filename, python_filename, mode_mapper_dict
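The tuple returned above lines up with the GroundedFunctionNetwork.from_python_src calls shown in Examples #3 and #4. A minimal, hypothetical sketch of consuming it (the input path and the save_file flag are assumptions drawn from Example #4):

from pathlib import Path

pySrc, lambdas_file, json_file, python_file, mode_mapper = get_python_source(
    Path("/tmp/automates/PETPT.f")  # hypothetical input file
)
G = GroundedFunctionNetwork.from_python_src(
    pySrc,
    lambdas_file,
    json_file,
    Path(python_file).stem,  # stem argument, as in Example #3
    save_file=False,         # as in Example #4
)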
Example #3
    @classmethod
    def from_fortran_file(cls, fortran_file: str, tmpdir: str = "."):
        """Builds a GrFN object from a Fortran program."""
        stem = Path(fortran_file).stem
        if tmpdir == "." and "/" in fortran_file:
            tmpdir = Path(fortran_file).parent
        preprocessed_fortran_file = f"{tmpdir}/{stem}_preprocessed.f"
        lambdas_path = f"{tmpdir}/{stem}_lambdas.py"
        json_filename = stem + ".json"

        with open(fortran_file, "r") as f:
            inputLines = f.readlines()

        with open(preprocessed_fortran_file, "w") as f:
            f.write(preprocessor.process(inputLines))

        xml_string = sp.run(
            [
                "java",
                "fortran.ofp.FrontEnd",
                "--class",
                "fortran.ofp.XMLPrinter",
                "--verbosity",
                "0",
                preprocessed_fortran_file,
            ],
            stdout=sp.PIPE,
        ).stdout
        trees = [ET.fromstring(xml_string)]
        comments = get_comments.get_comments(preprocessed_fortran_file)
        os.remove(preprocessed_fortran_file)
        xml_to_json_translator = translate.XMLToJSONTranslator()
        outputDict = xml_to_json_translator.analyze(trees, comments)
        pySrc = pyTranslate.create_python_source_list(outputDict)[0][0]

        G = cls.from_python_src(pySrc, lambdas_path, json_filename, stem)
        return G
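A hedged usage sketch of the from_fortran_file classmethod above; the file path is hypothetical and the import location of GroundedFunctionNetwork is an assumption that may vary across versions of the codebase:

from delphi.GrFN.networks import GroundedFunctionNetwork  # assumed import path

G = GroundedFunctionNetwork.from_fortran_file(
    "/tmp/automates/PETPT.f",  # hypothetical Fortran source file
    tmpdir="/tmp/automates/",  # intermediate files are written here
)
CAG = G.to_CAG()  # causal analysis graph, as used in Examples #1 and #4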
Example #4
def processCode():
    form = MyForm()
    code = form.source_code.data
    app.code = code
    if code == "":
        return render_template("index.html", form=form)
    lines = [
        line.replace("\r", "") + "\n"
        for line in code.split("\n") if line != ""
    ]
    filename = f"input_code_{str(uuid4())}"
    input_code_tmpfile = f"/tmp/automates/{filename}.f"

    with open(input_code_tmpfile, "w") as f:
        f.write(preprocessor.process(lines))

    xml_string = sp.run(
        [
            "java",
            "fortran.ofp.FrontEnd",
            "--class",
            "fortran.ofp.XMLPrinter",
            "--verbosity",
            "0",
            input_code_tmpfile,
        ],
        stdout=sp.PIPE,
    ).stdout

    trees = [ET.fromstring(xml_string)]
    comments = get_comments.get_comments(input_code_tmpfile)
    outputDict = translate.XMLToJSONTranslator().analyze(trees, comments)
    pySrc = pyTranslate.create_python_source_list(outputDict)[0][0]

    lambdas = f"{filename}_lambdas"
    lambdas_path = f"/tmp/automates/{lambdas}.py"
    G = GroundedFunctionNetwork.from_python_src(pySrc,
                                                lambdas_path,
                                                f"{filename}.json",
                                                filename,
                                                save_file=False)

    graphJSON, layout = get_grfn_surface_plot(G)

    scopeTree_elementsJSON = to_cyjs_grfn(G)
    CAG = G.to_CAG()
    program_analysis_graph_elementsJSON = to_cyjs_cag(CAG)

    os.remove(input_code_tmpfile)
    os.remove(f"/tmp/automates/{lambdas}.py")

    return render_template(
        "index.html",
        form=form,
        code=app.code,
        python_code=highlight(pySrc, PYTHON_LEXER, PYTHON_FORMATTER),
        scopeTree_elementsJSON=scopeTree_elementsJSON,
        graphJSON=graphJSON,
        layout=layout,
        program_analysis_graph_elementsJSON=program_analysis_graph_elementsJSON,
    )
Example #5
def fortran_to_grfn(
    original_fortran=None,
    tester_call=False,
    network_test=False,
    temp_dir=None,
):
    """
        This function invokes other appropriate functions
        to process and generate objects to translate fortran
        to python IR. This function will either be invoked by
        local main function or the outer tester functions,
        such as test_program_analysis.py or network.py.

        Args:
            original_fortran (str): A file name of original fortran script.
            tester_call (bool): A boolean condition that will indicate
            whether the program was invoked standalone (False) or
            by tester scripts (True).
            network_test (bool): A boolean condition that will indicate
            whether the script was invoked by network.py or not.
            temp_dir (str): A default temporary directory where output
            files will be stored.

        Returns:
            str {
                'python_src': A string of python code,
                'python_file': A file name of generated python script,
                'lambdas_file': A file name where lambdas will be,
                'json_file': A file name where JSON will be written to,

            }
            dict: mode_mapper_dict, mapper of file info (i.e. filename,
            module, and exports, etc).
    """
    current_dir = "."
    check_classpath()

    # If for2py is run manually by the user, the path to the file
    # is received via command-line arguments
    if not tester_call:
        (fortran_file_path, temp_out_dir) = parse_args()
    # Otherwise, for2py was invoked by a tester program and is
    # passed the original Fortran file path as an argument
    else:
        fortran_file_path = original_fortran
        temp_out_dir = "tmp"

    (original_fortran_file, base) = get_original_file(fortran_file_path)

    # If temp_dir is None, the output directory was not set by the
    # caller, so generate the temporary output directory from the
    # user input or the default path "tmp".
    if temp_dir is None:
        temp_dir = current_dir + "/" + temp_out_dir
    else:
        temp_dir = current_dir + "/" + temp_dir

    # If "tmp" directory does not exist already,
    # simply create one.
    if not os.path.isdir(temp_dir):
        os.mkdir(temp_dir)
    else:
        assert os.access(temp_dir, os.W_OK), (
            f"Directory {temp_dir} is not writable.\n"
            "Please provide a writable directory to hold the output files."
        )

    print(f"*** ALL OUTPUT FILES LIVE IN [{temp_dir}]")

    # Output files
    preprocessed_fortran_file = temp_dir + "/" + base + "_preprocessed.f"
    ofp_file = temp_dir + "/" + base + ".xml"
    rectified_xml_file = temp_dir + "/" + "rectified_" + base + ".xml"
    pickle_file = temp_dir + "/" + base + "_pickle"
    translated_python_file = temp_dir + "/" + base + ".py"
    output_file = temp_dir + "/" + base + "_outputList.txt"
    json_suffix = temp_dir + "/" + base + ".json"
    lambdas_suffix = temp_dir + "/" + base + "_lambdas.py"

    # Open and read original fortran file
    try:
        with open(fortran_file_path, "r") as f:
            input_lines = f.readlines()
    except IOError:
        sys.stderr.write(f"Fortran file: {fortran_file_path} Not " f"Found")
        sys.exit(1)

    # Pre-process the read in fortran file
    if not tester_call:
        print("+Generating preprocessed fortran file:\
                Func: <process>, Script: <preprocessor.py>")
    try:
        with open(preprocessed_fortran_file, "w") as f:
            f.write(preprocessor.process(input_lines))
    except IOError:
        assert False, "Unable to write tofile: {preprocessed_fortran_file}"

    # Generate OFP XML from preprocessed fortran
    ofp_xml = generate_ofp_xml(preprocessed_fortran_file, ofp_file,
                               tester_call)

    # Rectify and generate a new xml from OFP XML
    rectified_tree = generate_rectified_xml(ofp_xml, rectified_xml_file,
                                            tester_call)

    if not network_test:
        # Generate separate list of modules file
        mode_mapper_tree = rectified_tree
        generator = mod_index_generator.ModuleGenerator()
        mode_mapper_dictionary = generator.analyze(mode_mapper_tree)
    else:
        # This is a HACK derived from `networks.py`
        mode_mapper_dictionary = {"file_name": f"{base}.py"}

    # Creates a pickle file
    output_dict = generate_outputdict(rectified_tree,
                                      preprocessed_fortran_file, pickle_file,
                                      tester_call)

    # Create a python source file
    python_source = generate_python_src(output_dict, translated_python_file,
                                        output_file, temp_dir, tester_call)

    if tester_call:
        os.remove(preprocessed_fortran_file)

    if not network_test:
        return (
            [src[0] for src in python_source],
            lambdas_suffix,
            json_suffix,
            translated_python_file,
            original_fortran,
            mode_mapper_dictionary,
        )
    else:
        # TODO: This is related to networks.py and subsequent GrFN
        #  generation. Change the python_src index from [0][0] to incorporate
        #  all modules after all GrFN features have been added
        return (python_source[0][0], lambdas_suffix, json_suffix, base,
                original_fortran)
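Finally, a hedged sketch of calling fortran_to_grfn the way a tester script would, unpacking the documented return tuple (the input path is hypothetical):

(python_sources, lambdas_file, json_file, python_file,
 original_fortran, mode_mapper) = fortran_to_grfn(
    original_fortran="tests/data/PETPT.for",  # hypothetical path
    tester_call=True,    # skip command-line parsing; outputs go under ./tmp
    network_test=False,  # also build the module index (mode mapper) dictionary
)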