Example #1
def main_python(program_to_run, original_name, faster_than_server, save=False):

    fh = StringIO() if save else sys.stdout

    print('Checking {} for EC602 submission.\n'.format(original_name), file=fh)

    Grade = {'specs': 0, 'style': 0, 'elegance': 0, 'efficiency': 0}

    the_program = ec602lib.read_file(program_to_run)
    authors = ec602lib.get_authors(the_program, 'py')
    imported = ec602lib.get_python_imports(the_program)

    passed_short, short_report = wordplayer_tester(program_to_run,
                                                   'short_wordlist.txt',
                                                   Tests_Short)
    if not passed_short:
        print(short_report, file=fh)
    passed_big, big_report = wordplayer_tester(program_to_run,
                                               'big_wordlist.txt', Tests_Big)
    if not passed_big:
        print(big_report, file=fh)

    if not passed_big or not passed_short:
        if save:
            return Grade, fh.getvalue()
        return

    pep8_errors, pep8_report = ec602lib.pep8_check(program_to_run)

    pylint_score, pylint_report = ec602lib.pylint_check(program_to_run)

    code_metrics = ec602lib.code_analysis_py(the_program)

    complexity = code_metrics['lines'] + code_metrics[
        'words'] + 20 * code_metrics['words'] / code_metrics['lines']
    rel_times = test_speed(program_to_run, faster_than_server, fh)

    eff_grade = 0
    for ratio, scale in zip(rel_times,
                            [0.5, 0.2, 0.1, 0.05, 0.05, 0.05, 0.05]):
        eff_grade += scale / ratio

    Grade['specs'] = 3
    Grade['style'] = max(0, (10 - pep8_errors) / 20) + pylint_score / 20

    Grade['elegance'] = min(1.5, 300 / complexity)  # 0.5 bonus point possible
    Grade['efficiency'] = min(2.0, eff_grade)  # 1.0 bonus point possible

    print('---- analysis of your code structure ----\n', file=fh)

    print('authors          : {}'.format(
        " ".join(authors) if authors else ec602lib.AUTHWARN),
          file=fh)

    print('imported modules : {}'.format(" ".join(imported)), file=fh)
    print(ec602lib.code_size_report(code_metrics, {'lines': 49, 'words': 159}),
          file=fh)

    print('pep8 check       : {} problems.'.format(pep8_errors), file=fh)
    if pep8_errors:
        print('pep8 report', file=fh)
        print(pep8_report, file=fh)

    print('pylint score     : {}/10'.format(pylint_score), file=fh)
    print(file=fh)
    print('---- grading ----\n', file=fh)

    print('grades           :', Grade, file=fh)
    print('grade total      : {:.2f} / 6'.format(sum(Grade[x] for x in Grade)),
          file=fh)

    if save:
        res = fh.getvalue()
        return Grade, res
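
A minimal driver sketch for the function above, assuming ec602lib, the test tables and the wordlist fixtures it references are importable and on disk; the submission filenames here are hypothetical.

# Hypothetical invocation of main_python as defined above.
grade, report = main_python('submission.py', 'wordplayer.py',
                            faster_than_server=1, save=True)
print(report)                                    # the captured grading report
print('total: {:.2f} / 6'.format(sum(grade.values())))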
Example #2
def main_cpp(source_file,
             program_to_run,
             original_name,
             faster_than_server=1,
             save=False):
    Grade = {'specs': 0, 'style': 0, 'elegance': 0, 'efficiency': 0}

    the_program = ec602lib.read_file(source_file)
    authors = ec602lib.get_authors(the_program, 'cpp')
    included = ec602lib.get_includes(the_program)

    fh = StringIO() if save else sys.stdout

    # run the specification tests

    passed_short, short_report = wordplayer_tester(program_to_run,
                                                   'words680.txt', Tests_680)
    if not passed_short:
        print(short_report, file=fh)
    passed_big, big_report = wordplayer_tester(program_to_run, 'words100k.txt',
                                               Tests_100k)
    if not passed_big:
        print(big_report, file=fh)

    if not passed_big or not passed_short:
        if save:
            return Grade, fh.getvalue()
        return

    code_metrics = ec602lib.code_analysis_cpp(source_file)

    if code_metrics['astyle'] == "error":
        print('astyle is reporting a problem.', file=fh)
        code_metrics['astyle'] = 0

    D = code_metrics['errors']
    cpplint_count = sum(len(D[x]) for x in D)

    complexity = code_metrics['lines'] + code_metrics[
        'words'] + 20 * code_metrics['words'] / code_metrics['lines']

    rel_times = test_speed(program_to_run, faster_than_server, fh)

    eff_grade = []
    for ratio, scale in zip(rel_times,
                            [0.5, 0.2, 0.1, 0.05, 0.05, 0.05, 0.05]):
        eff_grade.append(scale / ratio)

    Grade['specs'] = 3

    Grade['style'] = max(
        0, (10 - cpplint_count) / 20) + code_metrics['astyle'] / 2.0

    Grade['elegance'] = min(1.5, 500 / complexity)  # 0.5 bonus point possible
    Grade['efficiency'] = min(2.0, sum(eff_grade))  # 1.0 bonus point possible

    print('Checking {} for EC602 submission.\n'.format(original_name), file=fh)
    print('---- analysis of your code structure ----\n', file=fh)

    print('authors          : {}'.format(
        " ".join(authors) if authors else ec602lib.AUTHWARN),
          file=fh)

    print('included libs    : {}'.format(" ".join(included)), file=fh)
    print(ec602lib.code_size_report(code_metrics, {
        'lines': 91,
        'words': 332
    }),
          file=fh)

    print("cpplint          : {}".format(
        "{} problems".format(cpplint_count) if cpplint_count else "ok"),
          file=fh)
    for e in code_metrics['errors']:
        for x in code_metrics['errors'][e][:3]:
            print('line {} ({}): {}'.format(*x), file=fh)
    print("astyle           : {:.1%} code unchanged.\n".format(
        code_metrics['astyle']),
          file=fh)

    print('---- grading ----\n', file=fh)

    print('grades           :', Grade, file=fh)
    print('grade total      : {:.2f} / 6'.format(sum(Grade[x] for x in Grade)),
          file=fh)

    if save:
        return Grade, fh.getvalue()
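
The timing weights in both functions above sum to 1.0, so a submission that exactly matches the server target on every timed test earns an efficiency grade of 1.0, with faster runs earning proportionally more up to the 2.0 cap. A small sketch of that arithmetic, under the assumption that each entry of rel_times is the submission's runtime divided by the server target:

rel_times = [1.0, 0.8, 1.25, 1.0, 1.0, 1.0, 1.0]   # hypothetical timing ratios
weights = [0.5, 0.2, 0.1, 0.05, 0.05, 0.05, 0.05]  # weights used above; sum to 1.0
eff_grade = sum(w / r for r, w in zip(rel_times, weights))
print(round(min(2.0, eff_grade), 2))               # 1.03, slightly better than parity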
Example #3
def main_cpp(source_file,
             program_to_run,
             original_name,
             faster_than_server=1,
             save=False):
    fh = StringIO() if save else sys.stdout
    Grade = {'specs': 0, 'style': 0, 'elegance': 0, 'efficiency': 0}
    print('Checking {} for EC602 submission.\n'.format(original_name), file=fh)

    the_program = ec602lib.read_file(source_file)
    authors = ec602lib.get_authors(the_program, 'cpp')
    included = ec602lib.get_includes(the_program)

    # no include tests

    # run the specification tests

    all_passed = True
    for test_suite in [tests_one, tests_two, tests_partial]:
        s1 = time.time()
        passed, report = wordbrainsolver_tester(program_to_run, test_suite)
        if not passed:
            print(report, file=fh)
            all_passed = False
        logging.info('%s %f', test_suite['name'], time.time() - s1)

    if not all_passed:
        print('')
        if save:
            return Grade, fh.getvalue()
        return

    print('Specification test results', file=fh)
    print('==========================', file=fh)
    print(' all specification tests passed.', file=fh)

    code_metrics = ec602lib.code_analysis_cpp(source_file)

    if code_metrics['astyle'] == "error":
        print('astyle is reporting a problem.', file=fh)
        code_metrics['astyle'] = 0

    D = code_metrics['errors']
    cpplint_count = sum(len(D[x]) for x in D)

    s1 = time.time()
    rel_times = test_speed(program_to_run, faster_than_server, fh)
    logging.info('c++ speed %f', time.time() - s1)

    if rel_times:
        avg_log_yourtime_over_target = sum(rel_times) / len(rel_times)
        # 0.3 means twice as slow, 1 means 10x as slow.

        Grade['efficiency'] = max(0, 3.6 - 2 * avg_log_yourtime_over_target)

        print(efficiency_message.format(
            score_vec=", ".join('{:.2f}'.format(x) for x in rel_times),
            eff_grade=Grade['efficiency']))
    else:
        Grade['efficiency'] = 0

    Grade['specs'] = 4.0

    Grade['style'] = max(
        0, (10 - cpplint_count) / 20) + code_metrics['astyle'] / 2.0

    Grade['elegance'] = 2.0 * min(1.0, 1000 / code_metrics['words'])

    print('---- analysis of your code structure ----\n', file=fh)

    print('authors          : {}'.format(
        " ".join(authors) if authors else ec602lib.AUTHWARN),
          file=fh)

    print('included libs    : {}'.format(" ".join(included)), file=fh)
    print(ec602lib.code_size_report(code_metrics, {
        'lines': 267,
        'words': 901
    }),
          file=fh)

    print("cpplint          : {}".format(
        "{} problems".format(cpplint_count) if cpplint_count else "ok"),
          file=fh)
    for e in code_metrics['errors']:
        for x in code_metrics['errors'][e][:3]:
            print('line {} ({}): {}'.format(*x), file=fh)
    print("astyle           : {:.1%} code unchanged.\n".format(
        code_metrics['astyle']),
          file=fh)

    print('---- grading ----\n', file=fh)

    print('grades           :', Grade, file=fh)
    print('grade total      : {:.2f} / 10'.format(sum(Grade[x]
                                                      for x in Grade)),
          file=fh)

    if save:
        return Grade, fh.getvalue()
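
This variant grades efficiency differently: per the inline comment, each rel_times entry appears to be log10 of the submission's time over the target (0.3 ≈ twice as slow, 1 ≈ ten times as slow), and the grade decays linearly from 3.6 at parity. A hedged sketch of that mapping with made-up measurements:

rel_times = [0.3, 0.0, 0.3]                # hypothetical log10 ratios: 2x, 1x, 2x slower
avg_log = sum(rel_times) / len(rel_times)  # 0.2
efficiency = max(0, 3.6 - 2 * avg_log)     # 3.2
print(round(efficiency, 2))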
Example #4
def main_cpp():
    Grade = {'specs': 0, 'style': 0, 'elegance': 0, 'efficiency': 0}

    the_program = ec602lib.read_file(PD['fname'])
    authors = ec602lib.get_authors(the_program, testing)
    included = ec602lib.get_includes(the_program)

    # run the specification tests
    test_results = []
    for spec_test in [wordplayerTestCase, wordplayerBigTestCase]:
        results = unittest.result.TestResult()
        unittest.loader.TestLoader().loadTestsFromTestCase(spec_test).run(
            results)
        test_results.append(results.wasSuccessful())
    if test_results != [True, True]:
        print('Initial spec tests failed. Running unittest.')
        unittest.main()

    code_metrics = ec602lib.code_analysis_cpp(PD['fname'])

    if code_metrics['astyle'] == "error":
        print('astyle is reporting a problem.')
        code_metrics['astyle'] = 0

    D = code_metrics['errors']
    cpplint_count = sum(len(D[x]) for x in D)

    complexity = code_metrics['lines'] + code_metrics[
        'words'] + 20 * code_metrics['words'] / code_metrics['lines']

    rel_times = test_speed()

    eff_grade = 0
    for ratio, scale in zip(rel_times,
                            [0.5, 0.2, 0.1, 0.05, 0.05, 0.05, 0.05]):
        eff_grade += scale / ratio

    Grade['specs'] = 3

    Grade['style'] = max(
        0, (10 - cpplint_count) / 20) + code_metrics['astyle'] / 2.0

    Grade['elegance'] = min(1, 500 / complexity)
    Grade['efficiency'] = eff_grade

    print('Checking {} for EC602 submission.\n'.format(PD['original']))
    print('---- analysis of your code structure ----\n')

    print('authors          : {}'.format(
        " ".join(authors) if authors else ec602lib.AUTHWARN))

    print('included libs    : {}'.format(" ".join(included)))
    print(ec602lib.code_size_report(code_metrics, {'lines': 91, 'words': 297}))

    print("cpplint          : {}".format(
        "{} problems".format(cpplint_count) if cpplint_count else "ok"))
    for e in code_metrics['errors']:
        for x in code_metrics['errors'][e][:3]:
            print('line {} ({}): {}'.format(*x))
    print("astyle           : {:.1%} code unchanged.\n".format(
        code_metrics['astyle']))

    print('---- grading ----\n')

    print('grades           :', Grade)
    print('grade total      : {} / 6'.format(sum(Grade[x] for x in Grade)))
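
Plugging the sizes printed by this function into its complexity metric shows how the elegance credit behaves (assuming {'lines': 91, 'words': 297} are the instructor's reference solution sizes):

lines, words = 91, 297                            # reference sizes used above
complexity = lines + words + 20 * words / lines   # ~453.3
elegance = min(1, 500 / complexity)               # capped at 1: full elegance credit
print(round(complexity, 1), elegance)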
Example #5
def main_python(program_to_run, original_name, faster_than_server, save=False):

    fh = StringIO() if save else sys.stdout
    Grade = {'specs': 0, 'style': 0, 'elegance': 0, 'efficiency': 0}

    print('Checking {} for EC602 submission.\n'.format(original_name), file=fh)

    s1 = time.time()
    the_program = ec602lib.read_file(program_to_run)
    authors = ec602lib.get_authors(the_program, 'py')
    imported = ec602lib.get_python_imports(the_program)

    # include tests
    if 'sys' not in imported:
        print('you will need to import sys for this assignment.', file=fh)
        return  #Grade,fh.getvalue()
    else:
        if the_program.count('sys') > 1 or the_program.count(
                'from sys import argv') != 1:
            print(
                'you must import sys once using "from sys import argv". Please correct',
                file=fh)
            return  #Grade,fh.getvalue()

    logging.info('py init %f', time.time() - s1)

    # specification tests

    all_passed = True
    for test_suite in [tests_one, tests_two, tests_partial]:
        s1 = time.time()
        passed, report = wordbrainsolver_tester(program_to_run, test_suite)
        if not passed:
            print(report, file=fh)
            all_passed = False
        logging.info('%s %f', test_suite['name'], time.time() - s1)

    if not all_passed:
        print('')
        if save:
            return Grade, fh.getvalue()
        return

    print('Specification test results', file=fh)
    print('==========================', file=fh)
    print(' all specification tests passed.', file=fh)

    print(
        '\n...running pep8 and pylint. goal is 0 pep8 problems and pylint >9.5',
        file=fh)
    s1 = time.time()
    pep8_errors, pep8_report = ec602lib.pep8_check(program_to_run)
    logging.info('pep8 %f', time.time() - s1)

    s1 = time.time()
    pylint_score, pylint_report = ec602lib.pylint_check(program_to_run)
    logging.info('pylint %f', time.time() - s1)

    s1 = time.time()
    code_metrics = ec602lib.code_analysis_py(the_program)
    logging.info('analysis %f', time.time() - s1)

    s1 = time.time()
    rel_times = test_speed(program_to_run, faster_than_server, fh)
    logging.info('speed %f', time.time() - s1)

    if rel_times:
        avg_log_yourtime_over_target = sum(rel_times) / len(rel_times)
        # 0.3 means twice as slow, 1 means 10x as slow.

        Grade['efficiency'] = max(0, 3.6 - 2 * avg_log_yourtime_over_target)

        print(efficiency_message.format(
            score_vec=", ".join('{:.2f}'.format(x) for x in rel_times),
            eff_grade=Grade['efficiency']))
    else:
        Grade['efficiency'] = 0

    Grade['specs'] = 4.0
    Grade['style'] = max(0, (10 - pep8_errors) / 20) + min(
        0.5, (0.5 + pylint_score) / 20)

    Grade['elegance'] = 2.0 * min(1.0, 550 / code_metrics['words'])

    print('---- analysis of your code structure ----\n', file=fh)

    print('authors          : {}'.format(
        " ".join(authors) if authors else ec602lib.AUTHWARN),
          file=fh)

    print('imported modules : {}'.format(" ".join(imported)), file=fh)
    print(ec602lib.code_size_report(code_metrics, {
        'lines': 150,
        'words': 521
    }),
          file=fh)

    print('pep8 check       : {} problems.'.format(pep8_errors), file=fh)
    if pep8_errors:
        print('pep8 report', file=fh)
        print(pep8_report, file=fh)

    print('pylint score     : {}/10'.format(pylint_score), file=fh)
    print(file=fh)
    print('---- grading ----\n', file=fh)

    print('grades           :', Grade, file=fh)
    print('grade total      : {:.2f} / 10'.format(sum(Grade[x]
                                                      for x in Grade)),
          file=fh)

    if save:
        res = fh.getvalue()
        return Grade, res
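
As a quick check of the style formula in this last variant, the stated goal of 0 pep8 problems and a pylint score of 9.5 or better does yield the full style credit (hypothetical scores below):

pep8_errors, pylint_score = 0, 9.5                # hypothetical clean submission
style = max(0, (10 - pep8_errors) / 20) + min(0.5, (0.5 + pylint_score) / 20)
print(style)                                      # 1.0, the maximum style credit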