def protobuf_ensemble_1() -> SystemBuilder:
    """Build an ensemble that merges every classifier found in the Caltech-256 metadata folder."""
    classifier_metadata_path = "../../Definitions/Classifiers/sota_models_caltech256-32-dev_validation"
    files = [os.path.join(classifier_metadata_path, f) for f in os.listdir(classifier_metadata_path)
             if os.path.isfile(os.path.join(classifier_metadata_path, f))]

    ensemble = SystemBuilder(False)
    classifiers = [make.make_classifier(os.path.basename(f), f) for f in files]
    merger = make.make_merger("Merger", [c.id for c in classifiers], merge_type=0)

    # Register every classifier, then the merger, and start evaluation at the merger
    for c in classifiers:
        ensemble.add_classifier(c)
    ensemble.add_merger(merger)
    ensemble.set_start("Merger")
    return ensemble
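A minimal usage sketch, assuming the same builder/make/evaluator aliases used in the later examples (their import paths are not shown in these snippets):

# Sketch only: SystemBuilder, make and eval are assumed to be imported as in
# the other examples; the exact module paths are not shown here.
ensemble = protobuf_ensemble_1()

# Evaluate the whole ensemble starting from its merger component
results = eval.evaluate(ensemble, ensemble.get_start())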
Example #2
def create_ensemble():
    """Build a single-classifier system (ResNet18 on CIFAR-10) behind an averaging merger."""
    ensemble = sb.SystemBuilder()
    c_id = 'ResNet18'
    c_file = os.path.join(os.environ['FCM'], 'Definitions', 'Classifiers', 'sota_models_cifar10-32-dev_validation',
                          'V001_ResNet18_ref_0.pkl')
    classifier = mutils.make_classifier(c_id, c_file)
    merger_id = 'Merger'
    merger = mutils.make_merger(merger_id, [c_id], merge_type=fcm.Merger.AVERAGE)
    ensemble.add_classifier(classifier)
    ensemble.add_merger(merger)
    ensemble.set_start('Merger')
    return ensemble
Example #3
def create_evaluate_system(work, results):
    """Worker loop: pull (merge protocol, classifier subset) tasks from the work
    queue, build the merged system, evaluate it and store the result in results."""
    lock = Lock()  # note: created but never acquired in this worker
    while True:
        protocol, subset = work.get()

        # Build a system with one classifier component per model in the subset
        sys = sb.SystemBuilder(verbose=False)
        classifiers_ids = []
        for m in subset:
            model = make.make_classifier(m, m)  # the model name doubles as id and metadata file
            sys.add_classifier(model)
            classifiers_ids.append(model.id)

        # Merge all classifiers with the requested protocol and evaluate from the merger
        merger = make.make_merger("MERGER", classifiers_ids, merge_type=protocol)
        sys.add_merger(merger)
        r = eval.evaluate(sys, merger.id)
        # if results: results = io.read_pickle('./results/R_'+str(protocol)+'_'+str(n_models))
        results['system_' + '_'.join(classifiers_ids) + '_protocol' + str(protocol)] = r

        work.task_done()
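A minimal sketch of how this worker might be wired up, assuming a standard queue.Queue of (merge protocol, classifier subset) tasks and a few daemon threads; the thread count and the models / merge_protocols placeholders are illustrative, not taken from the original script:

# Sketch only: illustrative producer side for the worker above.
import itertools
from queue import Queue
from threading import Thread

work = Queue()
results = {}                 # shared dict the workers write into
models = [...]               # placeholder: classifier metadata files
merge_protocols = [...]      # placeholder: merge protocol values (e.g. fcm.Merger enum)

# Start a few daemon workers running the loop above
for _ in range(4):
    Thread(target=create_evaluate_system, args=(work, results), daemon=True).start()

# Enqueue one task per (merge protocol, pair of classifiers)
for protocol in merge_protocols:
    for subset in itertools.combinations(models, 2):
        work.put((protocol, subset))

work.join()                  # block until every queued task has been processed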
def merge_two_chains(a, b):
    """
    Merge two chains of classifiers into a single system whose start point is an
    averaging merger over the two chain entry points. Every component coming from
    b is renamed (prefixed with "1_") so its ids cannot collide with those of a.
    :param a: Chain of classifiers
    :param b: Chain of classifiers
    :return: Merged chains a and b
    """
    m_system = a.copy()

    # 1) Get b's protobuf structure
    b_proto_message = b.get_message()

    # 2) Add each of b's components (Classifier, Trigger, Data) to m_system under a new name
    for classifier in b_proto_message.classifier:
        classifier_ = copy.deepcopy(classifier)
        classifier_.id = "1_" + classifier_.id
        id_parts = classifier_.component_id.split('_')
        classifier_.component_id = "_".join(id_parts[0:3] + ['1'] + id_parts[3:]) \
            if classifier_.component_id != "" else ""
        classifier_.data_id = "1_" + classifier_.data_id if classifier_.data_id != "" else ""
        m_system.add_classifier(classifier_)

    for trigger in b_proto_message.trigger:
        trigger_ = copy.deepcopy(trigger)
        id_parts = trigger_.id.split('_')
        trigger_.id = '_'.join(id_parts[0:3] + ['1'] + id_parts[3:])
        for i in range(len(trigger_.component_ids)):
            trigger_.component_ids[i] = "1_" + trigger_.component_ids[i]
        trigger_.classifier.data_id = "1_" + trigger_.classifier.data_id if trigger_.classifier.data_id != "" else ""
        m_system.add_trigger(trigger_)

    for data in b_proto_message.data:
        data_ = copy.deepcopy(data)
        data_.id = "1_" + data.id
        m_system.add_data(data_)

    # 3) Finally, add the averaging merger that joins the two chains and make it the start
    import Source.protobuf.FastComposedModels_pb2 as fcm
    merger = make.make_merger('Merger', [a.get_start(), "1_" + b.get_start()],
                              merge_type=fcm.Merger.AVERAGE)
    m_system.set_start('Merger')
    m_system.add_merger(merger)

    return m_system
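A usage sketch, assuming chain_a and chain_b are SystemBuilder systems whose start components are already set (e.g. classifier/trigger chains built elsewhere, without a merger of their own), and the eval alias from the other examples:

# Sketch only: chain_a and chain_b are assumed to be SystemBuilder chains
# (no merger named 'Merger' of their own) with their start components set.
merged = merge_two_chains(chain_a, chain_b)

# All of chain_b's components now carry the "1_" prefix, so ids from the two
# chains cannot collide; evaluation starts at the new averaging 'Merger'.
r = eval.evaluate(merged, merged.get_start())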
Example #5
        for n_models in num_merged_models:
            print("N_MODELS:", n_models)
            for protocol in merge_protocols:
                print("MERGE:", protocol)
                model_subsets = itertools.combinations(models, n_models)
                for subset in model_subsets:
                    print(subset)
                    sys = sb.SystemBuilder(verbose=False)
                    classifiers_ids = []
                    for m in subset:
                        file = m
                        model = make.make_classifier(m, file)
                        sys.add_classifier(model)
                        classifiers_ids.append(model.id)

                    merger = make.make_merger("MERGER", classifiers_ids, merge_type=protocol)
                    sys.add_merger(merger)
                    sys.set_start(merger.id)
                    r = eval.evaluate(sys, sys.get_start(), phases=["test"])
                    results[generate_system_id(sys)] = r

                # Save the evaluation results
                import Examples.metadata_manager_results as manager_results

                meta_data_file = os.path.join(os.environ['FCM'],
                                              'Examples',
                                              'compute',
                                              'merger_combinations',
                                              'results',
                                              'metadata.json')
import os
import Source.protobuf.FastComposedModels_pb2 as fcm
# sb, make, evaluate and pretty_print come from the project's builder, make-util
# and evaluator modules (their import paths are not shown in this snippet).

classifiers = [
    os.path.join(
        os.environ['FCM'],
        'Definitions/Classifiers/sota_models_cifar10-32-dev_validation',
        'V001_ResNet18_ref_0.pkl'),
    os.path.join(
        os.environ['FCM'],
        'Definitions/Classifiers/sota_models_cifar10-32-dev_validation',
        'V001_ResNet152_ref_0.pkl')
]

# Build the system
merged_classifiers = sb.SystemBuilder()
merger = make.make_merger('Merger', classifiers, fcm.Merger.AVERAGE)
merged_classifiers.add_merger(merger)
for classifier in classifiers:
    # The metadata file path doubles as the classifier id, matching the ids passed to the merger
    c = make.make_classifier(classifier, classifier)
    merged_classifiers.add_classifier(c)

merged_classifiers.set_start('Merger')
R = evaluate(merged_classifiers, merged_classifiers.get_start())
pretty_print(R)

# Manual check
import Source.io_util as io
import numpy as np

c_dict_0 = io.read_pickle(classifiers[0])
c_dict_1 = io.read_pickle(classifiers[1])
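The snippet stops after loading the two metadata dicts. A hedged sketch of how the manual check might continue, assuming (hypothetically, the real key layout is not shown in these examples) that each pickle stores test-split logits and ground-truth labels under 'test' -> 'logits' / 'gt':

# Sketch only: 'test', 'logits' and 'gt' are hypothetical keys; adapt them to
# the actual layout of the classifier metadata dicts.
logits_avg = (np.array(c_dict_0['test']['logits']) + np.array(c_dict_1['test']['logits'])) / 2.0
gt = np.array(c_dict_0['test']['gt'])

manual_accuracy = np.mean(np.argmax(logits_avg, axis=1) == gt)
print("Manual averaged accuracy:", manual_accuracy)
# This value should match the merged system's test accuracy reported by pretty_print(R).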