Example #1
0
def jobs_tables_by_algo_rows_sample_groups(context, samples_groups, tables):
    """ For each (algorithm, stats table) pair, schedules a table whose
        rows are the sample groups. """
    source_descs = comp_store(Stats.all_descriptions())

    # Create a new store that flattens the grouped samples; each entry
    # additionally records its originating group under the key "id_group".
    allstats = StoreResults()
    for id_group, group_samples in samples_groups.items():
        for sample_key, sample_value in group_samples.items():
            combined = dict(id_group=id_group, **sample_key)
            allstats[combined] = sample_value

    for id_statstable, stats in tables.items():
        for id_algo, algo_samples in allstats.groups_by_field_value('id_algo'):
            job_id = 'byalgo-%s-%s' % (id_algo, id_statstable)
            table_name = ("byalgo-rows-sample-groups-%s-%s"
                          % (id_algo, id_statstable))

            r = context.comp(table_by_rows,
                             table_name,
                             samples=algo_samples,
                             rows_field='id_group',  # one row per group
                             cols_fields=stats,  # one column per statistic
                             source_descs=source_descs,
                             job_id=job_id)

            report_attrs = dict(id_statstable=id_statstable)
            report_attrs.update(algo_samples.fields_with_unique_values())
            assert report_attrs['id_algo'] == id_algo

            context.add_report(r, 'byalgo-rows-sample-groups', **report_attrs)
Example #2
0
def jobs_tables_by_sample_groups(context, samples_groups, tables):
    """ One table per (sample group, stats table) pair; rows are
        algorithms.

        Fix: removed the stray Python-2 debug statement ``print tables``
        (debug leftover; also a syntax error under Python 3) and the
        dead commented-out print lines.
    """
    source_descs = comp_store(Stats.all_descriptions())
    # Tables grouping by algorithm
    for g, s in product(samples_groups.items(), tables.items()):
        id_sample_group, samples = g
        id_statstable, stats = s

        r = context.comp(
            table_by_rows,
            "bysamplegroups-%s-%s" %
            (sanitize(id_sample_group), id_statstable),
            samples=samples,
            rows_field='id_algo',  # group by algorithm
            cols_fields=stats,  # which statistics for each col
            source_descs=source_descs)

        report_attrs = dict(id_sample_group=id_sample_group,
                            id_stats_table=id_statstable)

        # NOTE(review): the unique-values update done by the sibling
        # functions is intentionally skipped here (see warning).
        warnings.warn('not sure what to do')
        context.add_report(r, 'bysamplegroups-%s' % id_statstable,
                           **report_attrs)
Example #3
0
def jobs_tables_by_algo_rows_sample_groups(context, samples_groups, tables):
    """ Per-algorithm tables where each row corresponds to one sample
        group. """
    source_descs = comp_store(Stats.all_descriptions())

    # Create a merged store: every sample is re-keyed with an extra
    # "id_group" field naming the group it came from.
    allstats = StoreResults()
    for gid, group in samples_groups.items():
        for k, v in group.items():
            allstats[dict(id_group=gid, **k)] = v

    for id_statstable, stats in tables.items():
        for id_algo, per_algo in allstats.groups_by_field_value('id_algo'):
            job_id = 'byalgo-%s-%s' % (id_algo, id_statstable)

            r = context.comp(
                table_by_rows,
                "byalgo-rows-sample-groups-%s-%s" % (id_algo, id_statstable),
                samples=per_algo,
                rows_field='id_group',  # one row per sample group
                cols_fields=stats,  # one column per statistic
                source_descs=source_descs,
                job_id=job_id)

            report_attrs = dict(id_statstable=id_statstable)
            report_attrs.update(per_algo.fields_with_unique_values())
            assert report_attrs['id_algo'] == id_algo

            context.add_report(r, 'byalgo-rows-sample-groups', **report_attrs)
Example #4
0
def jobs_tables_by_sample_rows_algo(context, allstats, tables):
    """ One table per (test case, stats table) pair; rows are
        algorithms. """
    source_descs = comp_store(Stats.all_descriptions())

    for id_statstable, stats in tables.items():
        for id_tc, tc_runs in allstats.groups_by_field_value('id_tc'):
            # The compmake job id doubles as the table's name.
            name = 'bysample-%s-%s' % (id_tc, id_statstable)

            r = context.comp(table_by_rows,
                             name,
                             samples=tc_runs,
                             rows_field='id_algo',  # one row per algorithm
                             cols_fields=stats,  # one column per statistic
                             source_descs=source_descs,
                             job_id=name)

            report_attrs = dict(id_statstable=id_statstable)
            report_attrs.update(tc_runs.fields_with_unique_values())

            context.add_report(r, 'bysample', **report_attrs)
Example #5
0
def jobs_tables_by_sample_rows_algo(allstats, rm, tables):
    """ One table per (test case, stats table) pair; rows are algorithms.
        Registers each resulting report with *rm*. """
    source_descs = comp_store(Stats.all_descriptions())

    for id_statstable, stats in tables.items():
        for id_tc, runs in allstats.groups_by_field_value('id_tc'):
            # Job id and table name are the same string.
            name = 'bysample-%s-%s' % (id_tc, id_statstable)

            r = comp(table_by_rows,
                     name,
                     samples=runs,
                     rows_field='id_algo',  # one row per algorithm
                     cols_fields=stats,  # one column per statistic
                     source_descs=source_descs,
                     job_id=name)

            report_attrs = dict(id_statstable=id_statstable)
            report_attrs.update(runs.fields_with_unique_values())

            rm.add(r, 'bysample', **report_attrs)
Example #6
0
def jobs_tables_by_algo_rows_samples(context, allstats, tables):
    """ One table for each algo, where rows are test cases. """
    source_descs = comp_store(Stats.all_descriptions())
    for id_statstable, stats in tables.items():
        for id_algo, algo_samples in allstats.groups_by_field_value('id_algo'):
            job_id = 'byalgo-%s-%s' % (id_algo, id_statstable)
            table_name = "byalgo-rows-sample-%s-%s" % (id_algo, id_statstable)

            r = context.comp(table_by_rows,
                             table_name,
                             samples=algo_samples,
                             rows_field='id_tc',  # one row per test case
                             cols_fields=stats,  # one column per statistic
                             source_descs=source_descs,
                             job_id=job_id)

            report_attrs = dict(id_statstable=id_statstable)
            report_attrs.update(algo_samples.fields_with_unique_values())
            assert report_attrs['id_algo'] == id_algo

            context.add_report(r, 'byalgo-rows-sample', **report_attrs)
Example #7
0
def jobs_tables_by_sample_groups(samples_groups, rm, tables):
    """ One table per (sample group, stats table) pair; rows are
        algorithms. Registers reports with *rm*. """
    source_descs = comp_store(Stats.all_descriptions())
    # Cartesian product: every sample group crossed with every table spec.
    for (id_sample_group, samples), (id_statstable, stats) in \
            product(samples_groups.items(), tables.items()):
        table_name = "bysamplegroups-%s-%s" % (sanitize(id_sample_group),
                                               id_statstable)

        r = comp(table_by_rows,
                 table_name,
                 samples=samples,
                 rows_field='id_algo',  # one row per algorithm
                 cols_fields=stats,  # one column per statistic
                 source_descs=source_descs)

        report_attrs = dict(id_sample_group=id_sample_group,
                            id_stats_table=id_statstable)
        report_attrs.update(samples.fields_with_unique_values())

        rm.add(r, 'bysamplegroups', **report_attrs)
Example #8
0
def jobs_tables_by_algo_rows_samples(allstats, rm, tables):
    """ One table for each algo, where rows are test cases. Registers
        reports with *rm*. """
    source_descs = comp_store(Stats.all_descriptions())
    for id_statstable, stats in tables.items():
        for id_algo, per_algo in allstats.groups_by_field_value('id_algo'):
            job_id = 'byalgo-%s-%s' % (id_algo, id_statstable)

            r = comp(table_by_rows,
                     "byalgo-rows-sample-%s-%s" % (id_algo, id_statstable),
                     samples=per_algo,
                     rows_field='id_tc',  # one row per test case
                     cols_fields=stats,  # one column per statistic
                     source_descs=source_descs,
                     job_id=job_id)

            report_attrs = dict(id_statstable=id_statstable)
            report_attrs.update(per_algo.fields_with_unique_values())
            assert report_attrs['id_algo'] == id_algo

            rm.add(r, 'byalgo-rows-sample', **report_attrs)
Example #9
0
def jobs_tables_by_sample_groups(context, samples_groups, tables):
    """ One table per (sample group, stats table) pair; rows are
        algorithms.

        Fix: removed the stray Python-2 debug statement ``print tables``
        (debug leftover; also a syntax error under Python 3) and the
        dead commented-out print lines.
    """
    source_descs = comp_store(Stats.all_descriptions())
    # Tables grouping by algorithm
    for g, s in product(samples_groups.items(), tables.items()):
        id_sample_group, samples = g
        id_statstable, stats = s

        r = context.comp(table_by_rows,
                 "bysamplegroups-%s-%s" % (sanitize(id_sample_group), id_statstable),
                 samples=samples,
                 rows_field='id_algo',  # group by algorithm
                 cols_fields=stats,  # which statistics for each col
                 source_descs=source_descs)

        report_attrs = dict(id_sample_group=id_sample_group,
                            id_stats_table=id_statstable)

        # NOTE(review): the unique-values update done by the sibling
        # functions is intentionally skipped here (see warning).
        warnings.warn('not sure what to do')
        context.add_report(r, 'bysamplegroups-%s' % id_statstable, **report_attrs)
Example #10
0
    def create_index_job(self):
        """ Schedules one compmake job per stored report that writes the
            report and updates the index.

            May be called at most once per instance (guarded by
            self.index_job_created); raises ValueError on a second call.
            Returns early (scheduling nothing) when no reports exist.
        """
        if self.index_job_created:
            msg = 'create_index_job() was already called once'
            raise ValueError(msg)
        self.index_job_created = True
        
        
        if not self.allreports:
            # no reports necessary
            return
        
        # Imported here, not at module level — presumably to avoid a
        # hard dependency on compmake until jobs are actually created.
        from compmake import comp
        
        # Do not pass as argument, it will take lots of memory!
        # XXX FIXME: there should be a way to make this update or not
        # otherwise new reports do not appear
        optimize_space = False
        if optimize_space and len(self.allreports_filename) > 100:
            allreports_filename = comp_store(self.allreports_filename, 'allfilenames')
        else:
            allreports_filename = self.allreports_filename
        
        # Map report type -> StoreResults of that type's reports, with the
        # 'report' field stripped from the keys.
        type2reports = {}    
        for report_type, xs in self.allreports_filename.groups_by_field_value('report'):
            type2reports[report_type] = StoreResults(**xs.remove_field('report'))
        
            
        for key in self.allreports: 
            job_report = self.allreports[key]
            filename = self.allreports_filename[key] 

            # Derived job id: one '-write' job per report job.
            write_job_id = job_report.job_id + '-write'
            
            # Create the links to reports of the same type
            report_type = key['report']
            other_reports_same_type = type2reports[report_type]
            
            # Copy before mutating: the original key stays intact in
            # self.allreports; the copy loses its 'report' field so it
            # matches the stripped keys in type2reports.
            key = dict(**key)
            del key['report']

            # find the closest report for different type
            others = []
            for other_type, other_type_reports in type2reports.items():
                if other_type == report_type:
                    continue
                best = get_most_similar(other_type_reports, key)
                if best is not None:
#                     print('Best match:\n-%s %s\n- %s %s' % (report_type, key,
#                                                             other_type, best))
                    others.append((other_type, best, other_type_reports[best]))
            
            # Build the report node id: prefix + type (underscores removed),
            # plus a key-derived basename when the key is non-empty.
            report_type_sane = report_type.replace('_', '')
            report_nid = self.html_resources_prefix + report_type_sane
            if key: 
                report_nid += '-' + basename_from_key(key) 
            
            # Schedule the actual write-and-index-update job.
            comp(write_report_and_update,
                 report=job_report, report_nid=report_nid,
                report_html=filename, all_reports=allreports_filename,
                index_filename=self.index_filename,
                 write_pickle=False,
                 this_report=key,
                 other_reports_same_type=other_reports_same_type,
                 most_similar_other_type=others,
                 job_id=write_job_id)