Example #1
0
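# Copies the per-suite 'passcnt' hashes and the per-tool 'fail' sets from the
# source Redis (SRC) into the destination Redis (DST), translating test-method
# names into integer indexes via tn_i_s and dropping the PRE_SRC key prefix.
# Relies on module-level helpers (SRC, DST, PRE_SRC, iter_versions, tn_i_s).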
def mk_additional_test_info():
    s, d = StrictRedis.from_url(SRC), StrictRedis.from_url(DST)

    for suite in ['evosuite-branch.{0}'.format(i) for i in xrange(0, 10)
                  ] + ['randoop.{0}'.format(i + 1) for i in xrange(0, 10)]:
        for project, version in iter_versions():
            for KN in ['passcnt']:
                fail_key = ':'.join(
                    [PRE_SRC, KN, project,
                     str(version), suite])
                dst_fail_key = ':'.join([KN, project, str(version), suite])

                fail_members = list(s.hkeys(fail_key))
                if len(fail_members) > 0:
                    fail_idxes = tn_i_s(d, fail_members, suite)
                    results = s.hmget(fail_key, *fail_members)
                    mapping = dict(zip(fail_idxes, results))
                    d.hmset(dst_fail_key, mapping)

            for tool in ['cobertura', 'codecover', 'jmockit', 'exec']:
                for KN in ['fail']:
                    fail_key = ':'.join(
                        [PRE_SRC, KN, tool, project,
                         str(version), suite])
                    print fail_key
                    dst_fail_key = ':'.join(
                        [KN, tool, project,
                         str(version), suite])

                    fail_members = list(s.smembers(fail_key))
                    if len(fail_members) > 0:
                        # translate names to indexes and add them as individual set members
                        fail_idxes = tn_i_s(d, fail_members, suite)
                        d.sadd(dst_fail_key, *fail_idxes)
Example #2
0
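# For every generated suite and coverage tool, reads the executed-bundle hash
# from the source Redis, maps bundle (test) names to integer indexes, pulls the
# matching 'cvg' results (substituting JSON null for missing entries), and
# writes them into an 'exec:<tool>:<project>:<version>:<suite>' hash in DST.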
def mk_bundles():
    s, d = StrictRedis.from_url(SRC), StrictRedis.from_url(DST)

    for suite in ['evosuite-branch.{0}'.format(i) for i in xrange(0, 10)
                  ] + ['randoop.{0}'.format(i + 1) for i in xrange(0, 10)]:
        for project, version in iter_versions():
            for tool in ['cobertura', 'codecover', 'jmockit']:
                key = ':'.join([
                    PRE_SRC, 'test-methods-exec', tool, project,
                    str(version), suite, 'bundles'
                ])
                result_key = ':'.join(
                    [PRE_SRC, 'cvg', tool, project,
                     str(version), suite])
                print key
                execed_bundles = s.hkeys(key)
                if len(execed_bundles) == 0:
                    continue
                idxes = tn_i_s(d, execed_bundles, suite)
                results = s.hmget(result_key, *execed_bundles)
                results_ = [
                    result if result is not None else json.dumps(None)
                    for result in results
                ]

                dst_key = ':'.join(
                    ['exec', tool, project,
                     str(version), suite])
                mapping = dict(zip(idxes, results_))
                if len(mapping) > 0:
                    d.hmset(dst_key, mapping)
Example #3
0
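# Closure that sums the recorded runtimes of the given test indexes, grouped
# by suite. For the 'dev' suite it also adds the times of the enclosing test
# classes. Per-test timing samples are averaged; a negative timing raises an
# error. Free variables (r, project, version, mk_key, i_tn_s, tn_i_s, idx_tp)
# come from the enclosing scope.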
def timing(tests, idx_tp=idx_tp):
    tps = map(lambda t: idx_tp[t], tests)
    tps_sorted = sorted(tps, key=lambda (suite, i): suite)
    total_time = 0
    for suite, i_it in groupby(tps_sorted, key=lambda (suite, i): suite):
        i_s = map(lambda (suite, i): i, i_it)
        if suite == 'dev':
            tns = i_tn_s(r, i_s, suite)
            tc_ns = set(tn.partition('::')[0] for tn in tns)
            tc_is = tn_i_s(r, list(tc_ns), suite, allow_create=False)
            all_is = tc_is + i_s
        else:
            all_is = i_s
        method_times = [msgpack.unpackb(b) for b in r.hmget(mk_key('time', [project, version, suite]), all_is)]
        bad_timings = [(time, i) for (time, i) in zip(method_times, all_is) if any(t < 0 for t in time)]
        if bad_timings:
            raise Exception('bad timing for tests: {project}, {version}, {suite} {idxs}'.format(
                project=project, version=version, suite=suite, idxs=' '.join(map(str, [i for (time, i) in bad_timings]))))
        def aggr(l):
            if any(x == -1 for x in l):
                raise Exception('bad timing for tests: {project}, {version}, {suite}'.format(
                    project=project, version=version, suite=suite))
            # let's go with average
            return reduce(lambda a, b: a+b, l)/len(l)
        method_times_aggregate = [aggr(timings) for timings in method_times]
        suite_time = reduce(lambda a, b: a+b, method_times_aggregate, 0)
        total_time += suite_time
    return total_time
Example #4
0
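# Migrates the per-tool 'results:test-methods-run-cvg' hashes from one Redis
# instance (r_from) to another (r_to), re-keying each entry from its
# test-method name to the corresponding 'dev'-suite integer index. The
# projects/versions arguments filter which defects4j versions are processed.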
def migrate_test_classes(r_from, r_to, projects=[], versions=[]):
    for project, version in iter_versions(projects, versions):
        print project, version
        tm_key = 'tms:{project}:{version}:dev'.format(project=project, version=version)
        tm_is = r_to.lrange(tm_key, 0, -1)
        tms = i_tn_s(r_to, tm_is, 'dev')

        unique_set = set([])
        def is_unique(tt):
            retval = tt not in unique_set
            unique_set.add(tt)
            return retval
        tc_tns = [tc for tc, _, _ in [tm.partition('::') for tm in tms] if is_unique(tc)]
        tc_tis = tn_i_s(r_to, tc_tns, 'dev', allow_create=False)

#        for tool in ['codecover', 'jmockit']:
#            class_key = 'results:test-classes-cvg:{tool}:{project}:{version}'.format(
#                    tool=tool,project=project,version=version
#            )
#            class_cvgs = r_from.hmget(class_key, *tc_tns)
#            assert all(lambda x: x is not None for x in class_cvgs)
#            assert len(tc_tis) == len(class_cvgs)
#            to_key_class = 'exec:{tool}:{project}:{version}:dev'.format(
#                    tool=tool, project=project, version=version
#            )
#            r_to.hmset(to_key_class, {ck: cv for (ck, cv) in zip(tc_tis, class_cvgs)})

        for tool in ['cobertura', 'codecover', 'jmockit', 'major']:
            method_key = 'results:test-methods-run-cvg:{tool}:{project}:{version}'.format(
                    tool=tool, project=project, version=version
            )
            res_dict = r_from.hgetall(method_key)
            assert isinstance(res_dict, dict)

            res_list = res_dict.items()
            res_idxs  = tn_i_s(r_to, [k for (k, v) in res_list], 'dev')
            res_vals  = [v for (_, v) in res_list]
            assert len(res_vals) == len(res_idxs)
            res_map = {ki: v for (ki, v) in zip(res_idxs, res_vals)}
            to_key = 'exec:{tool}:{project}:{version}:dev'.format(
                    tool=tool, project=project, version=version
            )
            if res_map:
                r_to.hmset(to_key, res_map)
Example #5
0
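# Walks every (project, version, suite) test-method list in the source Redis
# and feeds it through tn_i_s against the destination Redis to build the
# name-to-index mapping, asserting that integer indexes come back.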
def mk_index():
    s, d = StrictRedis.from_url(SRC), StrictRedis.from_url(DST)

    for suite in ['dev'
                  ] + ['evosuite-branch.{0}'.format(i) for i in xrange(0, 10)]:
        for project, version in iter_versions():
            key = ':'.join(
                [PRE_SRC, 'test-methods', project,
                 str(version), suite])
            print key
            tm_list = s.lrange(key, 0, -1)
            idxes = tn_i_s(d, tm_list, suite)
            assert all(isinstance(idx, int) for idx in idxes)
Example #6
0
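# Mirrors each source test-method list into a 'tms:<project>:<version>:<suite>'
# list of integer indexes in the destination Redis, pushing in chunks of 100.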
def mk_tms():
    s, d = StrictRedis.from_url(SRC), StrictRedis.from_url(DST)

    for suite in ['dev'] + [
            'evosuite-branch.{0}'.format(i) for i in xrange(0, 10)
    ] + ['randoop.{0}'.format(i + 1) for i in xrange(0, 10)]:
        for project, version in iter_versions():
            key = ':'.join(
                [PRE_SRC, 'test-methods', project,
                 str(version), suite])
            print key
            tm_list = s.lrange(key, 0, -1)
            idxes = tn_i_s(d, tm_list, suite)
            dst_key = ':'.join(['tms', project, str(version), suite])
            assert (len(idxes) == len(tm_list))
            for chunk in chunks(idxes, 100):
                if len(chunk) == 0:
                    continue
                d.rpush(dst_key, *chunk)
Example #7
0
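# Worker that tabulates test goals for each test in the worklist: fetches the
# per-tool coverage artifacts from a local cache or S3, runs the test-goal jar
# over them (dropping tools with empty results where allowed), records the
# covered goal indexes as a msgpack blob via the progress callback, and
# returns a summary of the totals.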
def tabulate_tgs(r, rr, work_dir, input):
    project = input['project']
    version = input['version']
    redo = input.get('redo', False)
    suite = input['suite']
    tests = input['tests']
    generated = not (suite == 'dev')

    bundle = [project, version, suite]
    with filter_key_list(
            rr,
            key='tgs',
            bundle=bundle,
            list=tests,
            redo=redo,
            other_keys=[],
            worklist_map=lambda tns: tn_i_s(r, tns, suite)) as worklist:
        total = {'t': 0, 'c': 0, 'b': 0}
        count = 0
        for (tc, tc_idx), progress_callback in worklist:

            def handle_single():
                with refresh_dir(work_dir / tc_idx, cleanup=True):
                    print tc_idx, tc

                    map_file_name = '{project}:{version}'.format(
                        project=project, version=version)
                    get_file_from_cache_or_s3(
                        'darioush-map-files', map_file_name,
                        str(work_dir / tc_idx / 'map.txt'))
                    # - prep the tmp dir
                    call_tgs = ALL_TGS
                    for tool in ['cobertura', 'codecover', 'jmockit', 'major']:
                        try:
                            get_files(work_dir / tc_idx, tool, project,
                                      version, suite, tc)
                        except NoFileOnS3:
                            exec_result = json.loads(
                                r.hget(mk_key('exec', [tool] + bundle),
                                       tc_idx))
                            print exec_result, tool
                            if exec_result is None:
                                has_failed = r.sismember(
                                    mk_key('fail', ['exec'] + bundle), tc_idx)
                                if has_failed:
                                    print "-- Has failed"
                                    return [], []
                            is_it_empty = is_empty(tool, exec_result)
                            if is_it_empty:
                                if tool in ('major', 'codecover', 'jmockit'):
                                    print "-> Empty results for {0} noticed, ignoring this tool".format(
                                        tool)
                                    call_tgs = [
                                        tg for tg in call_tgs
                                        if not tg.endswith(tool)
                                    ]
                                else:
                                    raise
                            else:
                                raise

                    result = jar()[work_dir / tc_idx](*call_tgs)
                    all_tgs = result.strip().split('\n')
                    tgs = [
                        tg for covered, _, tg in
                        [s.partition(' ') for s in all_tgs] if covered == '+'
                    ]

                    return all_tgs, tgs

            all_tgs, tgs = handle_single()
            # bandaid
            tgs_jmockit = [tg for tg in tgs if tg.find('jmockit') != -1]
            tg_i_s(rr, tgs_jmockit, project, version, allow_create=True)
            # end bandaid

            tg_idxs = tg_i_s(rr, tgs, project, version, allow_create=False)
            assert len(tg_idxs) == len(tgs)
            result = msgpack.packb(tg_idxs, use_bin_type=True)
            results_readable = {
                't': len(all_tgs),
                'c': len(tgs),
                'b': len(result)
            }
            for key in total:
                total[key] += results_readable[key]
            count += 1
            print '{r[c]}/{r[t]} packed: {r[b]}'.format(r=results_readable)
            progress_callback(result)
    return "Success ({r[c]}/{r[t]} packed: {r[b]} totals, count={count})".format(
        r=total, count=count)
Example #8
0
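# Consistency check for the coverage results of one (tool, project, version,
# suite): raises a Straggler, with a suggested fix command, for tests whose
# coverage entry is missing (any missing entry for cobertura; for other tools,
# only when cobertura reports coverage for the test), and verifies that every
# non-empty coverage result has a matching file in the 'cvg-files' S3 bucket.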
def check_cvg(r, tool, project, v, suite, t_idxs, ts):
    key = mk_key('exec', [tool, project, v, suite])
    cvg_infos = r.hmget(key, *t_idxs)
    assert len(cvg_infos) == len(t_idxs)

    nils = [(t_idx, t) for (t_idx, t, cvg_info) in zip(t_idxs, ts, cvg_infos)
            if cvg_info is None]
    print len(nils), len(ts)
    if suite == 'dev':
        check_for_nil_classes = [
            tc for tc, _, _ in [t.partition('::') for (t_idx, t) in nils]
        ]
        check_for_nil_classes = list(set(check_for_nil_classes))
        check_for_nil_classes_idxes = tn_i_s(r, check_for_nil_classes, 'dev')
        non_nil_classes_idxes = set(
            cobertura_covers(r, project, v, suite,
                             check_for_nil_classes_idxes))
        nil_class_dict = {
            class_name: idx not in non_nil_classes_idxes
            for (
                class_name,
                idx) in zip(check_for_nil_classes, check_for_nil_classes_idxes)
        }
        nil_idxes = [(t_idx_, tc, tm)
                     for (t_idx_, (tc, _, tm)) in [(t_idx, t.partition('::'))
                                                   for (t_idx, t) in nils]
                     if nil_class_dict.get(tc) is False]
        # really only need the idxes
        nil_idxes = [t_idx for (t_idx, _, _) in nil_idxes]

    else:
        nil_idxes = [t_idx for (t_idx, _) in nils]

    if tool == 'cobertura' and nil_idxes:
        raise Straggler('COBERTURA_NOT_RUN', [tool, project, v, suite],
                        idxes=nil_idxes,
                        fix=('cvgmeasure.cvg.do_cvg',
                             lambda bundle: bundle[1:],
                             lambda bundle: b_pvs(bundle) +
                             " -K cvg_tool -a {tool}".format(tool=tool)))

    cc = cobertura_covers(r, project, v, suite, nil_idxes)
    if cc != []:
        raise Straggler('CVG_NOT_RUN_COBERTURA_NONEMPTY',
                        [tool, project, v, suite],
                        idxes=cc,
                        fix=('cvgmeasure.cvg.do_cvg',
                             lambda bundle: bundle[1:],
                             lambda bundle: b_pvs(bundle) +
                             " -K cvg_tool -a {tool}".format(tool=tool)))

    # time to check s3
    non_nils = [
        (t_idx, t) for (t_idx, t, cvg_info) in zip(t_idxs, ts, cvg_infos)
        if cvg_info is not None and not is_empty(tool, json.loads(cvg_info))
    ]
    print '- non-nil len: {0}'.format(len(non_nils))
    if non_nils:
        s3_list = list_from_s3('cvg-files', [tool, project, v, suite])
        s3_tname_list = set(
            [key.name.rpartition('/')[2] for key in s3_list if key.size > 0])
        non_nils_missing_from_s3 = [(t_idx, t) for (t_idx, t) in non_nils
                                    if t not in s3_tname_list]
        if len(non_nils_missing_from_s3) != 0:
            raise Straggler(
                'NON_NIL_CVG_BUT_NO_S3', [tool, project, v, suite],
                idxes=[t_idx for (t_idx, _) in non_nils_missing_from_s3],
                fix=('cvgmeasure.cvg.do_cvg', lambda bundle: bundle[1:],
                     lambda bundle: b_pvs(bundle) +
                     " -K cvg_tool -a {tool} -j '{{\"redo\": true}}'".format(
                         tool=tool)))
        return "Cvg for in s3 : {0}".format(len(non_nils))