def make_grfn_dict(original_fortran_file) -> Dict:
    """Build a GrFN (program-graph) dict from a Fortran source file.

    Pipeline: preprocess the Fortran source, run the Open Fortran Parser
    (OFP) front end to obtain the AST as XML, translate XML -> JSON ->
    Python source, parse that source, and generate the PGM dictionary.

    Args:
        original_fortran_file: Path to the Fortran source.  Assumed to be a
            ``pathlib.Path`` -- it must expose ``.stem`` (TODO confirm with
            callers).

    Returns:
        The PGM dictionary produced by ``genPGM.create_pgm_dict``.

    Raises:
        subprocess.CalledProcessError: If the Java OFP front end fails.

    Side effects:
        Writes ``<stem>_lambdas.py`` and ``<stem>.json`` (via
        ``genPGM.create_pgm_dict``); creates and always deletes
        ``<stem>_preprocessed.f``.
    """
    stem = original_fortran_file.stem
    preprocessed_fortran_file = stem + "_preprocessed.f"
    lambdas_filename = stem + "_lambdas.py"
    json_filename = stem + ".json"

    with open(original_fortran_file, "r") as f:
        input_lines = f.readlines()

    with open(preprocessed_fortran_file, "w") as f:
        f.write(f2py_pp.process(input_lines))

    try:
        # check=True so a Java/OFP failure surfaces here as a clear
        # CalledProcessError instead of as an empty-XML ParseError later.
        xml_string = sp.run(
            [
                "java",
                "fortran.ofp.FrontEnd",
                "--class",
                "fortran.ofp.XMLPrinter",
                "--verbosity",
                "0",
                preprocessed_fortran_file,
            ],
            stdout=sp.PIPE,
            check=True,
        ).stdout
        trees = [ET.fromstring(xml_string)]
        comments = get_comments.get_comments(preprocessed_fortran_file)
    finally:
        # Always clean up the temporary preprocessed file, even if the
        # parser or XML parsing fails (the original leaked it on error).
        os.remove(preprocessed_fortran_file)

    xml_to_json_translator = translate.XMLToJSONTranslator()
    output_dict = xml_to_json_translator.analyze(trees, comments)
    py_src = pyTranslate.create_python_string(output_dict)
    asts = [ast.parse(py_src)]
    return genPGM.create_pgm_dict(lambdas_filename, asts, json_filename)
def processCode():
    """Flask view: compile submitted Fortran code and render its graph.

    Reads Fortran source from the submitted form, runs the same
    preprocess -> OFP XML -> JSON -> Python -> PGM pipeline as
    ``make_grfn_dict``, then renders the program graph as Cytoscape.js
    elements embedded in the index template.

    Returns:
        A rendered ``index.html`` template; without ``elementsJSON`` when
        no source code was submitted.
    """
    form = MyForm()
    code = form.source_code.data
    # `not code` also covers None form data, which the original
    # `code == ""` check missed (it would crash on None.split below).
    if not code:
        return render_template("index.html", form=form)

    # Normalize line endings and drop empty lines; each kept line gets a
    # trailing newline for the preprocessor.  (The original wrapped
    # code.split() in a redundant inner list comprehension.)
    lines = [
        line.replace("\r", "") + "\n"
        for line in code.split("\n")
        if line != ""
    ]

    preprocessed_fortran_file = "/tmp/preprocessed_code.f"
    with open(preprocessed_fortran_file, "w") as f:
        f.write(f2py_pp.process(lines))

    try:
        # check=True: surface OFP failures directly rather than as a
        # confusing empty-XML parse error (consistent with make_grfn_dict).
        xml_string = sp.run(
            [
                "java",
                "fortran.ofp.FrontEnd",
                "--class",
                "fortran.ofp.XMLPrinter",
                "--verbosity",
                "0",
                preprocessed_fortran_file,
            ],
            stdout=sp.PIPE,
            check=True,
        ).stdout
        trees = [ET.fromstring(xml_string)]
        comments = get_comments.get_comments(preprocessed_fortran_file)
    finally:
        # Remove the temp file even on failure (the original never did).
        os.remove(preprocessed_fortran_file)

    translator = translate.XMLToJSONTranslator()
    outputDict = translator.analyze(trees, comments)
    pySrc = pyTranslate.create_python_string(outputDict)
    asts = [ast.parse(pySrc)]
    pgm_dict = genPGM.create_pgm_dict("/tmp/lambdas.py", asts, "pgm.json")

    root = Scope.from_dict(pgm_dict)
    A = root.to_agraph()
    elements = to_cyjs_elements_json_str(A)
    return render_template("index.html", form=form, elementsJSON=elements)
# --- Command-line interface --------------------------------------------
# NOTE(review): `parser` is created above this chunk — assumed to be an
# argparse.ArgumentParser.
parser.add_argument(
    "-g",
    "--gen",
    nargs="*",
    help=
    "Pickled version of routines for which dependency graphs should be generated",
)
parser.add_argument(
    "-f",
    "--files",
    nargs="+",
    required=True,
    help="A list of AST files in XML format to analyze",
)
parser.add_argument(
    "-i",
    "--input",
    nargs="*",
    help="Original Fortran Source code file.",
)
args = parser.parse_args(sys.argv[1:])

# -i and -g use nargs="*" and are not marked required, so argparse will
# happily leave them as None (or []).  Fail with a usage message here
# instead of an opaque TypeError/IndexError at the subscripts below.
if not args.input:
    parser.error("-i/--input: an original Fortran source file is required")
if not args.gen:
    parser.error("-g/--gen: an output pickle file is required")

fortranFile = args.input[0]
pickleFile = args.gen[0]

trees = get_trees(args.files)
comments = get_comments(fortranFile)

translator = XMLToJSONTranslator()
outputDict = translator.analyze(trees, comments)

# Persist the analysis result for later dependency-graph generation.
with open(pickleFile, "wb") as f:
    pickle.dump(outputDict, f)