Example #1
    def testmethod(testcase):
        """this is the test method invoked for each example."""

        dirpath_before = joinpath(BEFORE_DIR, fpath)
        dirpath_after = joinpath(AFTER_DIR, fpath)
        if not os.path.exists(dirpath_after):
            os.makedirs(dirpath_after)

        example_before = joinpath(dirpath_before, ffile)
        example_after = joinpath(dirpath_after, ffile)

        if os.path.isfile(example_after):
            os.remove(example_after)

        def test_result(path, info):
            return [os.path.relpath(path, BEFORE_DIR), info]

        with io.open(example_before, 'r', encoding='utf-8') as infile:

            outstring = io.StringIO()

            try:
                fprettify.reformat_ffile(infile, outstring)
                m = hashlib.sha256()
                m.update(outstring.getvalue().encode('utf-8'))

                test_info = "checksum"
                test_content = test_result(example_before, m.hexdigest())

                with io.open(example_after, 'w', encoding='utf-8') as outfile:
                    outfile.write(outstring.getvalue())
                FPrettifyTestCase.n_success += 1
            except FprettifyParseException as e:
                test_info = "parse error"
                fprettify.log_exception(e, test_info)
                test_content = test_result(example_before, test_info)
                FPrettifyTestCase.n_parsefail += 1
            except FprettifyInternalException as e:
                test_info = "internal error"
                fprettify.log_exception(e, test_info)
                test_content = test_result(example_before, test_info)
                FPrettifyTestCase.n_internalfail += 1
            except:  # pragma: no cover
                FPrettifyTestCase.n_unexpectedfail += 1
                raise

        after_exists = os.path.isfile(example_after)
        if after_exists:
            # Compare input and formatted output case- and whitespace-
            # insensitively; the input additionally has tabs removed and runs
            # of blank lines collapsed.
            with io.open(example_before, 'r', encoding='utf-8') as infile:
                before_content = infile.read()
                before_nosp = re.sub(
                    r'\n{3,}', r'\n\n',
                    before_content.lower().replace(' ', '').replace('\t', ''))

            with io.open(example_after, 'r', encoding='utf-8') as outfile:
                after_content = outfile.read()
                after_nosp = after_content.lower().replace(' ', '')

            testcase.assertMultiLineEqual(before_nosp, after_nosp)

        sep_str = ' : '
        # Compare the new checksum / error status against the recorded
        # reference results.
        with io.open(RESULT_FILE, 'r', encoding='utf-8') as infile:
            found = False
            for line in infile:
                line_content = line.strip().split(sep_str)
                if line_content[0] == test_content[0]:
                    found = True
                    eprint(test_info, end=" ")
                    msg = '{} (old) != {} (new)'.format(
                        line_content[1], test_content[1])
                    if test_info == "checksum" and after_exists and after_content.count(
                            '\n') < 10000:
                        # difflib cannot handle very large files, hence the
                        # line-count guard above
                        result = list(
                            difflib.unified_diff(
                                before_content.splitlines(True),
                                after_content.splitlines(True),
                                fromfile=test_content[0],
                                tofile=line_content[0]))
                        msg += '\n' + ''.join(result)
                    try:
                        testcase.assertEqual(line_content[1], test_content[1],
                                             msg)
                    except AssertionError:  # pragma: no cover
                        FPrettifyTestCase.write_result(FAILED_FILE,
                                                       test_content, sep_str)
                        raise
                    break

        if not found:  # pragma: no cover
            eprint(test_info + " new", end=" ")
            FPrettifyTestCase.write_result(RESULT_FILE, test_content, sep_str)
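
The method above relies on `fpath` and `ffile` being captured from an enclosing scope. The following is a minimal, self-contained sketch (not fprettify's actual harness) of how such per-example methods can be generated and attached to a unittest.TestCase; the directory name and naming scheme are assumptions for illustration.

import os
import unittest

BEFORE_DIR = "examples/before"  # hypothetical example directory


class ExampleTestCase(unittest.TestCase):
    """Test case that receives one generated method per example file."""


def make_testmethod(fpath, ffile):
    # fpath and ffile are captured by the closure, exactly like the free
    # variables used by the test method above.
    def testmethod(testcase):
        example = os.path.join(BEFORE_DIR, fpath, ffile)
        testcase.assertTrue(example.endswith(".f90"))
    return testmethod


def attach_example_tests():
    for dirpath, _, filenames in os.walk(BEFORE_DIR):
        for ffile in filenames:
            if not ffile.endswith(".f90"):
                continue
            fpath = os.path.relpath(dirpath, BEFORE_DIR)
            name = "test_" + ffile.replace(".", "_")
            setattr(ExampleTestCase, name, make_testmethod(fpath, ffile))


if __name__ == "__main__":
    attach_example_tests()
    unittest.main()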
Example #2
def prettifyFile(
    infile,
    filename,
    normalize_use,
    decl_linelength,
    decl_offset,
    reformat,
    indent,
    whitespace,
    upcase_keywords,
    upcase_omp,
    replace,
):
    """prettifyes the fortran source in infile into a temporary file that is
    returned. It can be the same as infile.
    if normalize_use normalizes the use statements (defaults to true)
    if upcase_keywords upcases the keywords (defaults to true)
    if replace does the replacements contained in replacer.py (defaults
    to false)

    does not close the input file"""
    max_pretty_iter = 5

    logger = logging.getLogger("prettify-logger")

    if is_fypp(infile):
        logger.warning(
            "fypp directives not fully supported, running only fprettify",
            extra={"ffilename": filename},
        )
        replace = False
        normalize_use = False
        upcase_keywords = False

    # copy the whole input into an in-memory buffer; the passes below
    # repeatedly replace this buffer with their output
    inbuf = StringIO(infile.read())

    hash_prev = md5(inbuf.getvalue().encode("utf8"))

    for _ in range(max_pretty_iter):
        if replace:
            outbuf = StringIO()
            replacer.replaceWords(inbuf, outbuf)
            outbuf.seek(0)
            inbuf.close()
            inbuf = outbuf

        if reformat:  # reformat needs to be done first
            outbuf = StringIO()
            try:
                reformat_ffile(
                    inbuf,
                    outbuf,
                    indent_size=indent,
                    whitespace=whitespace,
                    orig_filename=filename,
                )
            except fparse_utils.FprettifyParseException as e:
                log_exception(
                    e,
                    "fprettify could not parse file, file is not prettified")
                outbuf.close()
                inbuf.seek(0)
            else:
                outbuf.seek(0)
                inbuf.close()
                inbuf = outbuf

        normalize_use_succeeded = True

        if normalize_use:
            outbuf = StringIO()
            try:
                normalizeFortranFile.rewriteFortranFile(
                    inbuf,
                    outbuf,
                    indent,
                    decl_linelength,
                    decl_offset,
                    orig_filename=filename,
                )
            except normalizeFortranFile.InputStreamError as exc:
                logger.exception(
                    "normalizeFortranFile could not parse file, file is not normalized",
                    extra={"ffilename": filename},
                )
                outbuf.close()
                inbuf.seek(0)
                normalize_use_succeeded = False
            else:
                outbuf.seek(0)
                inbuf.close()
                inbuf = outbuf

        if upcase_keywords and normalize_use_succeeded:
            outbuf = StringIO()
            upcaseKeywords(inbuf, outbuf, upcase_omp)
            outbuf.seek(0)
            inbuf.close()
            inbuf = outbuf

        hash_new = md5(inbuf.getvalue().encode("utf8"))

        if hash_prev.digest() == hash_new.digest():
            return inbuf

        hash_prev = hash_new

    else:
        raise RuntimeError(
            "Prettify did not converge in {} steps.".format(max_pretty_iter))
Example #3
def prettifyFile(infile, filename, normalize_use, decl_linelength, decl_offset,
                 reformat, indent, whitespace, upcase_keywords,
                 upcase_omp, replace):
    """prettifyes the fortran source in infile into a temporary file that is
    returned. It can be the same as infile.
    if normalize_use normalizes the use statements (defaults to true)
    if upcase_keywords upcases the keywords (defaults to true)
    if replace does the replacements contained in replacer.py (defaults
    to false)

    does not close the input file"""
    ifile = infile
    orig_filename = filename
    tmpfile = None
    max_pretty_iter = 5
    n_pretty_iter = 0

    if is_fypp(ifile):
        logger = logging.getLogger('fprettify-logger')
        logger.error(orig_filename + ": fypp directives not supported.\n")
        return ifile

    while True:
        n_pretty_iter += 1
        hash_prev = md5()
        hash_prev.update(ifile.read().encode("utf8"))
        ifile.seek(0)
        try:
            if replace:
                tmpfile2 = tempfile.TemporaryFile(mode="w+")
                replacer.replaceWords(ifile, tmpfile2)
                tmpfile2.seek(0)
                if tmpfile:
                    tmpfile.close()
                tmpfile = tmpfile2
                ifile = tmpfile
            if reformat:  # reformat needs to be done first
                tmpfile2 = tempfile.TemporaryFile(mode="w+")
                try:
                    reformat_ffile(ifile, tmpfile2,
                                   indent_size=indent, whitespace=whitespace,
                                   orig_filename=orig_filename)
                except fparse_utils.FprettifyParseException as e:
                    log_exception(e, "fprettify could not parse file, file is not prettified")
                    tmpfile2.write(ifile.read())

                tmpfile2.seek(0)
                if tmpfile:
                    tmpfile.close()
                tmpfile = tmpfile2
                ifile = tmpfile
            if normalize_use:
                tmpfile2 = tempfile.TemporaryFile(mode="w+")
                normalizeFortranFile.rewriteFortranFile(ifile, tmpfile2, indent,
                                                        decl_linelength, decl_offset,
                                                        orig_filename=orig_filename)
                tmpfile2.seek(0)
                if tmpfile:
                    tmpfile.close()
                tmpfile = tmpfile2
                ifile = tmpfile
            if upcase_keywords:
                tmpfile2 = tempfile.TemporaryFile(mode="w+")
                upcaseKeywords(ifile, tmpfile2, upcase_omp)
                tmpfile2.seek(0)
                if tmpfile:
                    tmpfile.close()
                tmpfile = tmpfile2
                ifile = tmpfile
            hash_next = md5()
            hash_next.update(ifile.read().encode("utf8"))
            ifile.seek(0)
            if hash_prev.digest() == hash_next.digest():
                return ifile
            elif n_pretty_iter >= max_pretty_iter:
                raise RuntimeError(
                    "Prettify did not converge in {} steps.".format(max_pretty_iter))
        except:
            logger = logging.getLogger('fprettify-logger')
            logger.critical("error processing file '" + infile.name + "'\n")
            raise
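
The while loop above implements a fixed-point iteration: rerun the formatting passes until the content hash stops changing, or give up after max_pretty_iter rounds. Below is a self-contained sketch of just that pattern, with an illustrative transform standing in for the real passes.

from hashlib import md5


def run_until_stable(text, transform, max_iter=5):
    # Re-apply `transform` until the MD5 of the result stops changing,
    # mirroring the hash_prev / hash_next comparison above.
    hash_prev = md5(text.encode("utf8")).digest()
    for _ in range(max_iter):
        text = transform(text)
        hash_new = md5(text.encode("utf8")).digest()
        if hash_new == hash_prev:
            return text
        hash_prev = hash_new
    raise RuntimeError(
        "Transformation did not converge in {} steps.".format(max_iter))


if __name__ == "__main__":
    # Collapsing whitespace converges after one confirming pass.
    print(run_until_stable("a  b   c", lambda s: " ".join(s.split())))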
Example #4
def prettifyFile(infile, filename, srcDir, normalize_use, decl_linelength,
                 decl_offset, reformat, indent, whitespace, upcase_keywords,
                 upcase_omp, replace):
    """prettifyes the fortran source in infile into a temporary file that is
    returned. It can be the same as infile.
    if normalize_use normalizes the use statements (defaults to true)
    if upcase_keywords upcases the keywords (defaults to true)
    if replace does the replacements contained in replacer.py (defaults
    to false)

    does not close the input file"""
    ifile = infile
    orig_filename = filename
    tmpfile = None
    max_pretty_iter = 5
    n_pretty_iter = 0

    if is_fypp(ifile):
        logger = logging.getLogger('fprettify-logger')
        logger.error(orig_filename + ": fypp directives not supported.\n")
        return ifile

    while True:
        n_pretty_iter += 1
        hash_prev = md5()
        hash_prev.update(ifile.read().encode("utf8"))
        ifile.seek(0)
        try:
            if replace:
                tmpfile2 = tempfile.TemporaryFile(mode="w+")
                replacer.replaceWords(ifile, tmpfile2)
                tmpfile2.seek(0)
                if tmpfile:
                    tmpfile.close()
                tmpfile = tmpfile2
                ifile = tmpfile
            if reformat:  # reformat needs to be done first
                tmpfile2 = tempfile.TemporaryFile(mode="w+")
                try:
                    reformat_ffile(ifile,
                                   tmpfile2,
                                   indent_size=indent,
                                   whitespace=whitespace,
                                   orig_filename=orig_filename)
                except fparse_utils.FprettifyParseException as e:
                    log_error(
                        e,
                        "fprettify could not parse file, file is not prettified"
                    )
                    tmpfile2.write(ifile.read())

                tmpfile2.seek(0)
                if tmpfile:
                    tmpfile.close()
                tmpfile = tmpfile2
                ifile = tmpfile
            if normalize_use:
                tmpfile2 = tempfile.TemporaryFile(mode="w+")
                normalizeFortranFile.rewriteFortranFile(
                    ifile,
                    tmpfile2,
                    srcDir,
                    indent,
                    decl_linelength,
                    decl_offset,
                    orig_filename=orig_filename)
                tmpfile2.seek(0)
                if tmpfile:
                    tmpfile.close()
                tmpfile = tmpfile2
                ifile = tmpfile
            if upcase_keywords:
                tmpfile2 = tempfile.TemporaryFile(mode="w+")
                upcaseKeywords(ifile, tmpfile2, upcase_omp)
                tmpfile2.seek(0)
                if tmpfile:
                    tmpfile.close()
                tmpfile = tmpfile2
                ifile = tmpfile
            hash_next = md5()
            hash_next.update(ifile.read().encode("utf8"))
            ifile.seek(0)
            if hash_prev.digest() == hash_next.digest():
                return ifile
            elif n_pretty_iter >= max_pretty_iter:
                raise RuntimeError("Prettify did not converge in {} steps."
                                   .format(max_pretty_iter))
        except:
            logger = logging.getLogger('fprettify-logger')
            logger.critical("error processing file '" + infile.name + "'\n")
            raise
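
Each pass above funnels its output through a fresh TemporaryFile that replaces (and closes) the previous intermediate file. Below is a minimal sketch of that chaining pattern in isolation; the uppercasing stage is a stand-in for the real passes, and unlike the original, which never closes the caller's infile, this sketch closes every file it is handed.

import tempfile


def run_stage(ifile, stage):
    # Write the stage's output into a fresh temporary file, rewind it and
    # let it replace the previous file, which is closed.
    tmpfile2 = tempfile.TemporaryFile(mode="w+")
    stage(ifile, tmpfile2)
    tmpfile2.seek(0)
    ifile.close()
    return tmpfile2


def upcase_stage(src, dst):
    # Stand-in for replaceWords / reformat_ffile / upcaseKeywords.
    dst.write(src.read().upper())


if __name__ == "__main__":
    first = tempfile.TemporaryFile(mode="w+")
    first.write("program demo\nend program\n")
    first.seek(0)
    result = run_stage(first, upcase_stage)
    print(result.read())  # -> "PROGRAM DEMO\nEND PROGRAM\n"
    result.close()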