Example #1
0
def replace_classifier(i, c_id, c_id_new, c_file = None, trigger_name = None):

    """
    Replaces a classifier in the chain for a new classifier.

    If the replaced classifier was connected to a trigger, that trigger (and
    its training-data component) is rebuilt for the new classifier, reusing a
    cached trigger pickle from $FCM/$TMP when one already exists on disk.

    :param i: Ensemble of models (system) representing an individual
    :param c_id: Classifier's id to be replaced
    :param c_id_new: Classifier's id that replaces
    :param c_file: Pickle file of the replacing classifier; defaults to c_id_new
    :param trigger_name: Unused on entry — always recomputed below when needed
    :return: Nothing
    """

    c_file = c_id_new if c_file is None else c_file
    existing_classifier_id = [c.id for c in i.get_message().classifier]

    # Nothing to do if a classifier with the new id already exists in the ensemble
    if c_id_new not in existing_classifier_id:
        classifier = make.make_classifier(c_id_new, c_file)
        if i.get(c_id).component_id != '':

            # Check it is a trigger
            trigger_id = i.get(c_id).component_id
            assert i.get(trigger_id).DESCRIPTOR.name == "Trigger", \
                "ERROR: Classifier in chain should be connected to trigger"

            # Replace previous data component on the system for the new one
            trigger_classifier_old = i.get(trigger_id).classifier
            old_data_id = trigger_classifier_old.data_id
            # Threshold is encoded in the trigger id: trigger_classifier_<th>_<id>
            th = float(trigger_id.split("_")[2])

            trigger_name = "trigger_classifier_" + str(th) + "_" + c_id_new
            trigger_classifier_file = os.path.join(os.environ['FCM'], os.environ['TMP'], trigger_name+'.pkl')

            # Train a fresh trigger if no cached pickle exists; otherwise load
            # the pre-trained trigger classifier from disk
            if not os.path.exists(trigger_classifier_file):
                data = __make_data_and_dataset(i, trigger_name, c_file, th)
                i.replace(old_data_id, data)
                trigger = make.make_trigger(trigger_name, make.make_empty_classifier(id="", data_id=data.id),
                                            component_ids=i.get(trigger_id).component_ids, model="probability")
            else:
                trigger = make.make_trigger(trigger_name,
                                            make.make_classifier('', classifier_file=trigger_classifier_file),
                                            component_ids=i.get(trigger_id).component_ids)
            i.replace(trigger_id, trigger)

            # Link replacing classifier to trigger
            classifier.component_id = trigger_name

        # Replace classifier
        i.replace(c_id, classifier)

        # If first classifier, point at it
        if i.get_start() == c_id:
            i.set_start(c_id_new)

        # Get trigger connected to the old classifier
        trigger_names = i.get_message().trigger  # All triggers from the ensemble
        for trigger in trigger_names:
            trigger = i.get(trigger.id)
            for ic, c in enumerate(trigger.component_ids):
                if c_id == c:
                    # Re-route any trigger that pointed at the old classifier
                    trigger.component_ids[ic] = c_id_new
                    i.replace(trigger.id, trigger)
Example #2
0
def evaluate(work, records, tid):
    """
    Worker loop: consume (small_model, threshold, big_model) jobs from *work*,
    build a small->trigger->big chain and store its evaluation in *records*.

    :param work: Queue of (m, th, m_) tuples; m = small model file,
        th = trigger threshold, m_ = big model file
    :param records: Shared dict results are written into
    :param tid: Worker id, used to give each worker its own dataset files
    """
    # NOTE(review): this Lock is created per call, so each worker thread locks
    # its own object — it provides no mutual exclusion between workers. A
    # module-level lock would be needed for that; confirm intent with callers.
    lock = Lock()
    while True:

        m, th, m_ = work.get()
        print(m, th, m_)

        small_dict = io.read_pickle(m)
        test_images = len(small_dict['test']['gt'])
        train_images = len(small_dict['train']['gt'])
        data_path = "../../Data/"
        # Per-worker trigger dataset paths, suffixed with the worker id
        trigger_train_dataset = os.path.join(data_path,
                                             "train_trigger_" + str(tid))
        test_train_dataset = os.path.join(data_path,
                                          "test_trigger_" + str(tid))

        sys = sb.SystemBuilder(verbose=False)

        # Complex classifier
        bigClassifier = make.make_classifier("big", m_)
        sys.add_classifier(bigClassifier)

        # Data component backing the trigger's classifier
        source = make.make_source(trigger_train_dataset, test_train_dataset, 2)
        data = make.make_data("trigger_data",
                              train_images,
                              test_images,
                              source=source)
        sys.add_data(data)
        update_dataset(m, th, trigger_train_dataset, test_train_dataset)

        # Trigger
        trigger = make.make_trigger(
            "trigger",
            make.make_empty_classifier(data_id="trigger_data"), ["big"],
            model="probability")
        sys.add_trigger(trigger)

        # Simple classifier
        smallClassifier = make.make_classifier("small", m, "trigger")
        sys.add_classifier(smallClassifier)

        results = eval.evaluate(sys, "small", check_classifiers=False)
        records["system_" + m + ';' + m_ + ';' + str(th)] = results.test

        lock.acquire()
        if m_ not in records:  # Evaluate individual models in order to plot
            records[m_] = eval.evaluate(sys, 'big').test
        lock.release()

        work.task_done()
def build_chain(classifiers, id_classifiers, thresholds, id_triggers, data_id):
    """
    Build a chain ensemble: classifier[i] --trigger[i]--> classifier[i+1].

    :param classifiers: Classifier pickle files, in chain order
    :param id_classifiers: One classifier id per classifier file
    :param thresholds: One threshold per trigger
    :param id_triggers: Trigger ids (one fewer than classifiers)
    :param data_id: Directory name under $FCM/Data for the trigger datasets
    :return: The assembled SystemBuilder instance
    """

    assert len(classifiers) == len(
        id_triggers
    ) + 1, "ERROR: Number of triggers in the chain is not consistent"
    assert len(id_triggers) == len(
        thresholds), "ERROR: Each trigger should be assigned a threshold"
    assert len(classifiers) == len(
        id_classifiers
    ), "ERROR: Each classifier file should be assigned a classifier id"

    data_path = os.path.join(os.environ['FCM'], 'Data', data_id)

    if not os.path.exists(data_path):
        os.makedirs(data_path)

    sys = sb.SystemBuilder(verbose=False)
    for i in range(len(classifiers) - 1):

        # Create data for the trigger
        # FIX: the "_train"/"_test" suffixes were swapped (train_path ended in
        # "_test.pkl" and vice versa). Behavior is equivalent because both
        # update_dataset and make_source used the same variables, but the file
        # names on disk were misleading; they now match their role.
        train_path = os.path.join(data_path, id_triggers[i] + "_train.pkl")
        test_path = os.path.join(data_path, id_triggers[i] + "_test.pkl")
        val_path = os.path.join(data_path, id_triggers[i] + "_val.pkl")
        source = make.make_source(train_path, test_path, 2, val_path=val_path)
        data = make.make_data("data_" + id_triggers[i], 1, 1, source=source)
        update_dataset(classifiers[i], train_path, test_path, val_path,
                       thresholds[i])
        sys.add_data(data)

        # Build trigger attached to classifier, forwarding to the next one
        trigger = make.make_trigger(id_triggers[i],
                                    make.make_empty_classifier(
                                        id='',
                                        data_id="data_" + id_triggers[i]),
                                    [id_classifiers[i + 1]],
                                    model="probability")
        sys.add_trigger(trigger)

        # Build classifier connected to its trigger
        c_file = classifiers[i]
        classifier = make.make_classifier(id_classifiers[i],
                                          c_file,
                                          component_id=id_triggers[i])
        sys.add_classifier(classifier)

        # The first classifier is the chain's entry point
        if i == 0:
            sys.set_start(id_classifiers[i])

    # Tail classifier has no trigger attached
    classifier = make.make_classifier(id_classifiers[-1], classifiers[-1])
    sys.add_classifier(classifier)
    return sys
Example #4
0
def set_threshold(i, c_id, th_val):
    """
        Sets the threshold of the Trigger attached to classifier c_id to th_val.

        A new trigger whose id encodes the threshold replaces the old one,
        reusing a cached trigger classifier pickle when one exists on disk.

        :param i: Ensemble of models (system) representing an individual
        :param c_id: Classifier affected
        :param th_val: New threshold value for the trigger
        :return: Nothing
        """

    c_file = i.get(c_id).classifier_file
    # Only meaningful when the classifier is connected to a trigger
    if i.get(c_id).component_id != '':

        trigger_id = i.get(c_id).component_id
        trigger_old = i.get(trigger_id)
        assert trigger_old.DESCRIPTOR.name == "Trigger", "Classifiers should be connected to triggers in the chain"

        # New trigger id encodes the threshold value
        trigger_name = "trigger_classifier_" + str(th_val) + "_" + c_id
        trigger_classifier_file = os.path.join(os.environ['FCM'], os.environ['TMP'], trigger_name + '.pkl')

        # Rebuild data + trigger when no cached pickle exists; otherwise load
        # the pre-trained trigger classifier and drop the stale data component
        if not os.path.exists(trigger_classifier_file):
            data = __make_data_and_dataset(i, trigger_name, c_file, th_val)
            i.replace(trigger_old.classifier.data_id, data)
            trigger = make.make_trigger(trigger_name,
                                        make.make_empty_classifier(id="", data_id=data.id),
                                        component_ids=trigger_old.component_ids, model="probability")
        else:
            i.remove(trigger_old.classifier.data_id)  # Remove old data if exists
            trigger = make.make_trigger(trigger_name,
                                        make.make_classifier('', classifier_file=trigger_classifier_file),
                                        component_ids=trigger_old.component_ids)
        i.replace(trigger_old.id, trigger)
        # Re-point the classifier at the renamed trigger
        c = i.get(c_id)
        c.component_id = trigger_name
        i.replace(c_id, c)
Example #5
0
def evaluate_single_models(models, results):
    """Evaluate every model on its own as a one-classifier system.

    Each result is stored in *results*, keyed by the generated system id.
    """
    for model_file in models:
        ensemble = sb.SystemBuilder()
        classifier = make.make_classifier(model_file, model_file)
        ensemble.add_classifier(classifier)
        ensemble.set_start(model_file)
        key = generate_system_id(ensemble)
        results[key] = eval.evaluate(ensemble, ensemble.get_start(), phases=["test"])
Example #6
0
def add_classifier_to_merger(i, merger_id, classifier_id, classifier_file):
    """Add a classifier to the ensemble and register it in an existing merger."""
    # Register the classifier itself in the system
    new_classifier = make.make_classifier(classifier_id, classifier_file)
    i.add_classifier(new_classifier)

    # Append its id to the merger's merged set, then write the merger back
    merger = i.get(merger_id)
    merger.merged_ids.append(classifier_id)
    i.replace(merger_id, merger)
 def __create_ensemble(self):
     """Build a single-classifier ensemble that starts at a ResNet18 model."""
     model_id = 'ResNet18'
     model_file = os.path.join(os.environ['FCM'], 'Definitions', 'Classifiers',
                               'sota_models_cifar10-32-dev_validation',
                               'V001_ResNet18_ref_0.pkl')
     system = sb.SystemBuilder()
     system.add_classifier(mutils.make_classifier(model_id, model_file))
     system.set_start(model_id)
     return system
Example #8
0
def extend_chain_pt(i, c_id, th=None, c_file = None, trigger_name=None):

    """
    Extends the chain with a probability trigger and a classifier.

    The current tail of the chain (a classifier with empty component_id) gets
    a new trigger that forwards to the newly added classifier.

    :param i: Ensemble of models (system) representing an individual
    :param c_id: Classifier to attach at the end of the chain
    :param th: Threshold of the trigger (defaults to 0.5)
    :param c_file: Pickle file of the new classifier; defaults to c_id
    :param trigger_name: Unused on entry — always recomputed below
    :return: Nothing
    """

    th = 0.5 if th is None else th
    c_file = c_id if c_file is None else c_file
    existing_classifier_id = [c.id for c in i.get_message().classifier]

    if c_id not in existing_classifier_id:  # Add classifier if not present previously in the chain

        # Get the last model in the chain
        # NOTE(review): assumes at least one classifier has component_id == "";
        # otherwise last_id would be unbound below — confirm with callers
        for c in i.get_message().classifier:
            if c.component_id == "":
                last_id = c.id

        # Create dataset for new trigger if trigger's classifier not existing
        trigger_name = "trigger_classifier_" + str(th) + "_" + last_id
        trigger_classifier_file = os.path.join(os.environ['FCM'], os.environ['TMP'], trigger_name+'.pkl')

        # Train a new trigger if no cached pickle exists; otherwise reuse it
        if not os.path.exists(trigger_classifier_file):
            data = __make_data_and_dataset(i, trigger_name, i.get(last_id).classifier_file, th)
            i.add_data(data)
            trigger = make.make_trigger(trigger_name, make.make_empty_classifier(id="", data_id=data.id), component_ids=[c_id], model="probability")
        else:
            trigger = make.make_trigger(trigger_name, make.make_classifier('', trigger_classifier_file), component_ids=[c_id])

        i.add_trigger(trigger)

        # Create new mutated classifier
        classifier = make.make_classifier(c_id, c_file)
        i.add_classifier(classifier)

        # Update last classifier to connect to trigger
        last_classifier = i.get(last_id)
        last_classifier.component_id = trigger_name
        i.replace(last_id, last_classifier)
def protobuf_ensemble_1() -> SystemBuilder:
    """Build an ensemble merging every classifier file in the caltech256 metadata dir."""
    classifier_metadata_path = "../../Definitions/Classifiers/sota_models_caltech256-32-dev_validation"

    # Collect regular files only (skip subdirectories)
    files = []
    for entry in os.listdir(classifier_metadata_path):
        full_path = os.path.join(classifier_metadata_path, entry)
        if os.path.isfile(full_path):
            files.append(full_path)

    ensemble = SystemBuilder(False)
    classifiers = []
    for path in files:
        classifier = make.make_classifier(os.path.basename(path), path)
        classifiers.append(classifier)
        ensemble.add_classifier(classifier)

    # Merge all classifiers; merge_type=0 preserved from the original
    ensemble.add_merger(make.make_merger("Merger", [c.id for c in classifiers], merge_type=0))
    ensemble.set_start("Merger")
    return ensemble
Example #10
0
def create_ensemble():
    """Create an ensemble with one ResNet18 classifier behind an AVERAGE merger."""
    model_id = 'ResNet18'
    model_file = os.path.join(os.environ['FCM'], 'Definitions', 'Classifiers', 'sota_models_cifar10-32-dev_validation',
                              'V001_ResNet18_ref_0.pkl')

    ensemble = sb.SystemBuilder()
    ensemble.add_classifier(mutils.make_classifier(model_id, model_file))
    ensemble.add_merger(mutils.make_merger('Merger', [model_id], merge_type=fcm.Merger.AVERAGE))
    ensemble.set_start('Merger')
    return ensemble
Example #11
0
def extend_merged_chain(i, c_id_tail, c_id_new, th, c_file_new=None):

    """
    Appends a new classifier after the current tail of the chain, connected
    through a probability trigger whose id encodes the threshold.

    :param i: Individual
    :param c_id_tail: Last classifier at the chain
    :param c_id_new: Id of new last classifier at the chain
    :param th: Threshold value of the new trigger
    :param c_file_new: File location of the new classifier; defaults to c_id_new
    :return: Nothing
    """

    existing_classifier_id = [c.id for c in i.get_message().classifier]

    # Skip if a classifier with the new id already exists in the ensemble
    if c_id_new not in existing_classifier_id:

        # Add new classifier to the ensemble
        c_file = c_id_new if c_file_new is None else c_file_new
        classifier = make.make_classifier(c_id_new, c_file)
        i.add_classifier(classifier)

        # Build the trigger
        trigger_name = "trigger_classifier_" + str(th) + "_" + c_id_tail
        trigger_classifier_file = os.path.join(os.environ['FCM'], os.environ['TMP'], trigger_name + '.pkl')

        # Train a new trigger if no cached pickle exists; otherwise load it
        if not os.path.exists(trigger_classifier_file):
            data = __make_data_and_dataset(i, trigger_name, i.get(c_id_tail).classifier_file, th)
            i.add_data(data)
            trigger = make.make_trigger(trigger_name, make.make_empty_classifier(id="", data_id=data.id),
                                        component_ids=[c_id_new], model="probability")
        else:
            trigger = make.make_trigger(trigger_name, make.make_classifier('', trigger_classifier_file),
                                        component_ids=[c_id_new])

        i.add_trigger(trigger)

        # Connect the (old) last classifier to the trigger
        last_classifier = i.get(c_id_tail)
        last_classifier.component_id = trigger_name
        i.replace(c_id_tail, last_classifier)
Example #12
0
def build_evaluate_chain(files: list, ths: list):
    """
    Build a classifier chain from *files* joined by probability triggers with
    thresholds *ths*, then evaluate the assembled system.

    FIX: the original annotations `[str]` / `[float]` are list *literals*
    (e.g. a one-element list containing the str type), not valid type
    annotations; they are replaced with plain `list`.

    :param files: Classifier pickle files in chain order (len == len(ths) + 1)
    :param ths: One trigger threshold per link of the chain
    :return: Evaluation result of the assembled chain
    """
    # A chain of n classifiers needs exactly n - 1 thresholds
    assert len(files) > 0 and len(files) == len(ths) + 1

    sys = sb.SystemBuilder(verbose=False)
    classifier = make.make_classifier(os.path.basename(files[0]), files[0])
    sys.add_classifier(classifier)
    sys.set_start(classifier.id)

    # Grow the chain link by link with the written mutation operation
    for i, file in enumerate(files[:-1]):
        extend_merged_chain(sys, os.path.basename(file), os.path.basename(files[i + 1]), ths[i], files[i + 1])

    result = eval.evaluate(sys)
    return result
Example #13
0
def build_train_trigger(model1_dict, th):
    """
    Build a trigger classifier from a model's logits: an input "fires" (asks
    for the next model) when the margin between its two highest softmax
    probabilities falls below *th*.

    FIX: the train and test splits were processed by two identical copies of
    the same code; the computation is factored into one helper. The dead
    initial `classifier_trigger_dict = {}` assignment is removed.

    :param model1_dict: Classifier dict with 'train'/'test' splits holding
        'logits', 'gt' and 'id' arrays
    :param th: Confidence-margin threshold
    :return: Classifier message pointing at the saved trigger pickle
    """

    def _split_raw_data(split):
        # Turn one split's logits into trigger raw data (fire / don't fire).
        L = split['logits']
        # Softmax over the logits (NOTE(review): plain exp may overflow for
        # very large logits — original behavior kept)
        dividend = np.sum(np.exp(L), axis=1)
        P = np.exp(L) / dividend[:, None]
        sort = np.sort(P, axis=1)  # Sort class probabilities
        diff = sort[:, -1] - sort[:, -2]  # Margin between top-2 classes
        logits_trigger = np.empty((diff.shape[0], 2))
        logits_trigger[:, 0] = diff < th
        logits_trigger[:, 1] = diff >= th
        pred = np.argmax(L, axis=1)
        return make.make_classifier_raw_data(logits_trigger,
                                             (pred == split['gt']),
                                             np.copy(split['id']))

    performance = make.make_performance_metrics(**{})
    train = _split_raw_data(model1_dict['train'])
    test = _split_raw_data(model1_dict['test'])

    classifier_trigger_dict = make.make_classifier_dict(
        "trigger_classifier", "cifar10", train, test, performance)
    io.save_pickle(
        '../../Definitions/Classifiers/tmp/trigger_random_threshold.pkl',
        classifier_trigger_dict)
    classifier = make.make_classifier(
        "trigger_classifier",
        "../../Definitions/Classifiers/tmp/trigger_random_threshold.pkl")
    return classifier
Example #14
0
def generate_initial_population():
    """Create one single-classifier system per .pkl file in the dataset's classifier dir."""
    classifier_path = os.path.join(os.environ['FCM'], 'Definitions',
                                   'Classifiers', args.dataset)

    population = []
    for entry in os.listdir(classifier_path):
        if ".pkl" not in entry:
            continue
        c_file = os.path.join(classifier_path, entry)
        system = sb.SystemBuilder(verbose=False)
        c_id = get_classifier_name(c_file)
        system.add_classifier(make.make_classifier(c_id, c_file))
        system.set_start(c_id)
        population.append(system)
    return population
Example #15
0
def build_train_trigger2(model1_dict, th):
    """
    Build a trigger classifier from a model's logits: an input "fires" when
    the maximum softmax probability falls below *th*.

    FIX: the train and test splits were processed by two identical copies of
    the same code; the computation is factored into one helper. The dead
    initial `classifier_trigger_dict = {}` assignment is removed.

    :param model1_dict: Classifier dict with 'train'/'test' splits holding
        'logits', 'gt' and 'id' arrays
    :param th: Confidence threshold on the max softmax probability
    :return: Classifier message pointing at the saved trigger pickle
    """

    def _split_raw_data(split):
        # Turn one split's logits into trigger raw data (fire / don't fire).
        L = split['logits']
        dividend = np.sum(np.exp(L), axis=1)
        P = np.exp(L) / dividend[:, None]
        max_P = np.max(P, axis=1)
        logits_trigger = np.empty((max_P.shape[0], 2))
        logits_trigger[:, 0] = max_P < th
        logits_trigger[:, 1] = max_P >= th
        pred = np.argmax(L, axis=1)
        return make.make_classifier_raw_data(logits_trigger,
                                             (pred == split['gt']),
                                             np.copy(split['id']))

    performance = make.make_performance_metrics(**{})
    train = _split_raw_data(model1_dict['train'])
    test = _split_raw_data(model1_dict['test'])

    classifier_trigger_dict = make.make_classifier_dict(
        "trigger_classifier", "cifar10", train, test, performance)
    # NOTE(review): unlike build_train_trigger, this path has no ".pkl"
    # suffix — kept byte-identical; confirm io.save_pickle handles it.
    io.save_pickle(
        '../../Definitions/Classifiers/tmp/trigger_random_threshold',
        classifier_trigger_dict)
    classifier = make.make_classifier(
        "trigger_classifier",
        "../../Definitions/Classifiers/tmp/trigger_random_threshold")
    return classifier
Example #16
0
def create_evaluate_system(work, results):
    """
    Worker loop: consume (protocol, subset) jobs from *work*, build an
    ensemble merging the subset's classifiers and store its evaluation
    in *results*.

    FIX: removed `lock = Lock()` — the lock was created per call and never
    acquired, so it provided no synchronization and only suggested a safety
    that did not exist. Also removed the redundant `file = m` alias.

    :param work: Queue of (protocol, subset) tuples
    :param results: Shared dict the evaluations are written into
    """
    while True:
        protocol, subset = work.get()

        # Assemble a system that merges every classifier of the subset
        sys = sb.SystemBuilder(verbose=False)
        classifiers_ids = []
        for m in subset:
            model = make.make_classifier(m, m)
            sys.add_classifier(model)
            classifiers_ids.append(model.id)

        merger = make.make_merger("MERGER", classifiers_ids, merge_type=protocol)
        sys.add_merger(merger)
        r = eval.evaluate(sys, merger.id)
        results['system_' + '_'.join(classifiers_ids) + '_protocol' + str(protocol)] = r

        work.task_done()
Example #17
0
def build_train_trigger3(model1_dict, p):
    """
    Build a random baseline trigger: each input "fires" with probability *p*,
    independently of the model's confidence.

    FIX: the train and test splits were processed by two identical copies of
    the same code; the computation is factored into one helper (call order —
    train then test — is preserved, so the np.random stream is unchanged).
    The dead initial `classifier_trigger_dict = {}` assignment is removed.

    :param model1_dict: Classifier dict with 'train'/'test' splits holding
        'logits', 'gt' and 'id' arrays
    :param p: Probability that an input is forwarded
    :return: Classifier message pointing at the saved trigger pickle
    """

    def _split_raw_data(split):
        # Turn one split into trigger raw data with Bernoulli(p) firing.
        L = split['logits']
        logits_trigger = np.empty((L.shape[0], 2))
        logits_trigger[:, 0] = np.random.binomial(1, p, L.shape[0])
        logits_trigger[:, 1] = 1 - logits_trigger[:, 0]
        pred = np.argmax(L, axis=1)
        return make.make_classifier_raw_data(logits_trigger,
                                             (pred == split['gt']),
                                             np.copy(split['id']))

    performance = make.make_performance_metrics(**{})
    train = _split_raw_data(model1_dict['train'])
    test = _split_raw_data(model1_dict['test'])

    classifier_trigger_dict = make.make_classifier_dict(
        "trigger_classifier", "cifar10", train, test, performance)
    # NOTE(review): path has no ".pkl" suffix — kept byte-identical.
    io.save_pickle(
        '../../Definitions/Classifiers/tmp/trigger_random_threshold',
        classifier_trigger_dict)
    classifier = make.make_classifier(
        "trigger_classifier",
        "../../Definitions/Classifiers/tmp/trigger_random_threshold")
    return classifier
Example #18
0
        Classifier_Path = "../../Definitions/Classifiers/" + dataset + "/"
        models = [
            Classifier_Path + f for f in os.listdir(Classifier_Path)
            if ".pkl" in f
        ]
        out_dir = os.path.join("./results/", dataset)
        data_path = "../../Data/"
        if not os.path.exists(out_dir):
            os.mkdir(out_dir)
        #########################################################################
        import Examples.study.paretto_front as paretto

        R_models = {}
        for model in models:
            sys = sb.SystemBuilder(verbose=False)
            c = make.make_classifier("classifier", model)
            sys.add_classifier(c)
            R_models[model] = eval.evaluate(sys, c.id).test

        io.save_pickle(os.path.join(out_dir, "models"), R_models)

        front_sorted = paretto.sort_results_by_accuracy(R_models)
        models = [val[0] for val in front_sorted]
        print(models)

        sys = sb.SystemBuilder(verbose=False)
        trigger_train_dataset = os.path.join(data_path, "train_trigger_0")
        test_train_dataset = os.path.join(data_path, "test_trigger_0")

        # Model 1
        for im, m in enumerate(models):
Example #19
0
        #########################################################################
        Classifier_Path = os.environ['FCM']+"/Definitions/Classifiers/" + dataset + "/"
        model_paths = [Classifier_Path + f for f in os.listdir(Classifier_Path) if ".pkl" in f]
        out_dir = os.path.join("./results/", dataset)
        data_path = os.environ['FCM']+"/Datasets/"
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        #########################################################################

        import Examples.study.paretto_front as paretto
        R_models = {}
        for mpath in model_paths:
            sys = sb.SystemBuilder(verbose=False)
            classifier_id = os.path.basename(mpath)
            c = make.make_classifier(classifier_id, mpath)
            sys.add_classifier(c)
            sys.set_start(classifier_id)
            R_models[classifier_id] = eval.evaluate(sys, phases=["test", "val"])

        #front = paretto.get_front_time_accuracy(R_models, phase="test")
        #front_sorted = paretto.sort_results_by_accuracy(front, phase="test")
        models_paths = [Classifier_Path + k for k, v in paretto.sort_results_by_accuracy(R_models, phase="val")]
        records = R_models

        # Combinations
        for ic0 in range(len(models_paths)):
            c0 = models_paths[ic0]
            for th0 in np.arange(0, 1+step_th, step_th):
                for ic1 in range(ic0+1, len(models_paths)):
                    c1 = models_paths[ic1]
Example #20
0
    for i, c_simple_file in enumerate(classifiers_simple):

        c_complex_file = classifiers_complex[i]
        print(c_simple_file, c_complex_file)

        r = {
            'max_class': [],
            'random_chain': [],
            'random_tree': [],
        }

        for th in np.arange(0, 1.01, 0.01):
            print(th)

            # Simple Classifier
            simple = make.make_classifier("small", c_simple_file, trigger.id)
            sys.replace("small", simple)
            simple_dict = io.read_pickle(c_simple_file)

            # Complex Classifier
            complex = make.make_classifier("big", c_complex_file)
            sys.replace("big", complex)
            """
            # Trigger 2 max class
            classifier_trigger = build_train_trigger(simple_dict, th)
            trigger = make.make_trigger("probability_threshold_trigger", classifier_trigger, ["big"], model="probability")
            trigger.id = "probability_threshold_trigger"
            sys.replace(trigger.id, trigger)

            results = eval.evaluate(sys, "small", check_classifiers=False).test
            r['2max_class'].append([results])
Example #21
0

def get_dummy_ClassifierRawData(num_c=3, n=5):
    """Return a ClassifierRawData message filled with random logits and labels.

    :param num_c: Number of classes
    :param n: Number of samples
    """
    logits = np.random.rand(n, num_c)
    ground_truth = np.random.randint(0, num_c, n)
    sample_ids = np.arange(n)

    # Construct and return the protobuf message
    return make_util.make_classifier_raw_data(logits, ground_truth, sample_ids)


if __name__ == '__main__':
    # Build a fake classifier message out of random raw data
    train_raw = get_dummy_ClassifierRawData(n=5)
    test_raw = get_dummy_ClassifierRawData(n=4)

    classifier_id = "SimpleClassifier001"
    dataset_id = "SimpleFakeDataset"

    # Synthetic performance numbers for the metrics message
    metrics = make_util.make_performance_metrics(0.01012, int(1e5), int(1e15))

    # def make_classifier(id, train, test, data, metrics=None):
    message = make_util.make_classifier(classifier_id, train_raw, test_raw,
                                        dataset_id, metrics)

    print(message)
    # Save as demo file
    io_util.save_message("demo_classifier", message)
Example #22
0
    args = argument_parse(sys.argv[1:])
    os.environ['TMP'] = 'Definitions/Classifiers/tmp/' + args.dataset[12:-15]

    # Load 32 DNNs
    S_initial = []
    S_eval_dict = {}
    limits = make_limits_dict()

    classifier_path = os.path.join(os.environ['FCM'], 'Definitions',
                                   'Classifiers', args.dataset)
    classifier_files = [f for f in os.listdir(classifier_path) if ".pkl" in f]
    for c_id in classifier_files:
        sys = sb.SystemBuilder(verbose=False)
        c_file = os.path.join(classifier_path, c_id)
        sys.add_classifier(mutil.make_classifier(c_id, c_file))
        sys.set_start(c_id)
        S_initial.append(sys)
        S_eval_dict[c_id] = evaluate(sys, sys.get_start(), phases=["val"])
    update_limit_dict(limits, S_eval_dict, phase="val")

    # Initialize Q-Learning table
    Qtable = {}

    # Start Q-loop
    bar = ProgressBar(args.steps)
    R_episodes = []
    Acc_episodes = []

    for episode in range(args.episodes):
        print("EPISODE %d" % episode)
Example #23
0
        models = [os.path.join(classifiers_path, f) for f in os.listdir(classifiers_path) if ".pkl" in f]
        evaluate_single_models(models, results)

        for n_models in num_merged_models:
            print("N_MODELS:", n_models)
            for protocol in merge_protocols:
                print("MERGE:", protocol)
                model_subsets = itertools.combinations(models, n_models)
                for subset in model_subsets:
                    print(subset)
                    sys = sb.SystemBuilder(verbose=False)
                    classifiers_ids = []
                    for m in subset:
                        file = m
                        model = make.make_classifier(m, file)
                        sys.add_classifier(model)
                        classifiers_ids.append(model.id)

                    merger = make.make_merger("MERGER", classifiers_ids, merge_type=protocol)
                    sys.add_merger(merger)
                    sys.set_start(merger.id)
                    r = eval.evaluate(sys, sys.get_start(), phases=["test"])
                    results[generate_system_id(sys)] = r

                # Save the evaluation results
                import Examples.metadata_manager_results as manager_results

                meta_data_file = os.path.join(os.environ['FCM'],
                                              'Examples',
                                              'compute',
    os.path.join(
        os.environ['FCM'],
        'Definitions/Classifiers/sota_models_cifar10-32-dev_validation',
        'V001_ResNet18_ref_0.pkl'),
    os.path.join(
        os.environ['FCM'],
        'Definitions/Classifiers/sota_models_cifar10-32-dev_validation',
        'V001_ResNet152_ref_0.pkl')
]

# Build the system: one AVERAGE merger over every classifier in `classifiers`
merged_classifiers = sb.SystemBuilder()
merger = make.make_merger('Merger', classifiers, fcm.Merger.AVERAGE)
merged_classifiers.add_merger(merger)
for classifier in classifiers:
    # Each entry of `classifiers` is a pickle path, reused as the classifier id
    c = make.make_classifier(classifier, classifier)
    merged_classifiers.add_classifier(c)

merged_classifiers.set_start('Merger')
R = evaluate(merged_classifiers, merged_classifiers.get_start())
pretty_print(R)

# Manual check
import Source.io_util as io
import numpy as np

# Load the raw classifier dicts to verify the merger's output by hand
c_dict_0 = io.read_pickle(classifiers[0])
c_dict_1 = io.read_pickle(classifiers[1])

gt = c_dict_0['test']['gt']
logits_0 = c_dict_0['test']['logits']

# Build and set up a small->trigger->big chain system for evaluation
if __name__ == "__main__":

    # Dict with the results of the evaluations
    records = {}
    path = os.environ['PYTHONPATH']
    # Dataset files the trigger trains/tests on (written by update_dataset below)
    train_path = path + '/Data/train_trigger_threshold.pkl'
    test_path = path + '/Data/test_trigger_threshold.pkl'
    small_cfile = "../Definitions/Classifiers/DenseNet121_cifar10"
    big_cfile = "../Definitions/Classifiers/DenseNet201_cifar10"
    th = 0.9  # Trigger threshold

    sys = sb.SystemBuilder(verbose=False)

    # Complex (big) classifier the trigger can forward to
    bigClassifier = make.make_classifier("big", big_cfile)
    sys.add_classifier(bigClassifier)

    # Data component backing the trigger's classifier
    source = make.make_source(train_path, test_path, fcm.Data.Source.NUMPY)
    data = make.make_data("trigger_data", int(5e4), int(1e4), source=source)
    sys.add_data(data)
    update_dataset(small_cfile, th, train_path, test_path)

    # Probability trigger that decides when to call the big model
    trigger = make.make_trigger(
        "trigger",
        make.make_empty_classifier(data_id="trigger_data"), ["big"],
        model="probability")
    sys.add_trigger(trigger)

    # Simple (small) classifier, entry point of the chain
    smallClassifier = make.make_classifier("small", small_cfile, "trigger")
    sys.add_classifier(smallClassifier)
import Source.protobuf.system_builder_serializable as sb
import Source.protobuf.make_util as make
import Source.io_util as io
import os

if __name__ == "__main__":
    # Build one single-classifier system per model file and pickle the population
    dset = "sota_models_cifar10-32-dev_validation"
    classifier_dir = os.path.join(os.environ['FCM'], 'Definitions',
                                  'Classifiers', dset)

    population = []
    for model_name in os.listdir(classifier_dir):
        system = sb.SystemBuilder(verbose=False)
        classifier = make.make_classifier(model_name,
                                          os.path.join(classifier_dir, model_name))
        system.add_classifier(classifier)
        population.append(system)

    io.save_pickle('initial_population', population)
Example #27
0
import Source.system_builder as sb
import Source.protobuf.make_util as make
import Source.system_evaluator as eval

if __name__ == "__main__":
    # Evaluate a single DenseNet121 classifier as a one-component system
    classifier_dir = "../../Definitions/Classifiers/"
    classifier_file = "DenseNet121_cifar10.pkl"

    system = sb.SystemBuilder(verbose=False)
    classifier = make.make_classifier("Classifier", classifier_dir + classifier_file)
    system.add_classifier(classifier)

    results = eval.evaluate(system, "Classifier", check_classifiers=True)
    eval.pretty_print(results)

if __name__ == "__main__":

    data_path = "../Data/"

    # Create skeleton
    sys = sb.SystemBuilder(verbose=False)
    trigger0_train_dataset = os.path.join(data_path, "train_trigger0")
    trigger0_test_dataset = os.path.join(data_path, "test_trigger0")
    trigger1_train_dataset = os.path.join(data_path, "train_trigger1")
    trigger1_test_dataset = os.path.join(data_path, "test_trigger1")
    c0_file = "../Definitions/Classifiers/sota_models_gtsrb-32-dev_validation/V001_VGG13_ref_0.pkl"
    c1_file = "../Definitions/Classifiers/sota_models_gtsrb-32-dev_validation/V001_ResNet34_ref_0.pkl"
    c2_file = "../Definitions/Classifiers/sota_models_gtsrb-32-dev_validation/V001_DenseNet121_ref_0.pkl"

    # Classifier/Trigger 0
    c0 = make.make_classifier("c0", c0_file, component_id="trigger0")
    sys.add_classifier(c0)
    source = make.make_source(trigger0_train_dataset, trigger0_test_dataset, 2)
    data0 = make.make_data("trigger0_data", int(5e4), int(1e4), source=source)
    sys.add_data(data0)
    trigger0 = make.make_trigger(
        "trigger0",
        make.make_empty_classifier(data_id="trigger0_data"), ["c1", "c2"],
        model="probability_multiple_classifiers")
    sys.add_trigger(trigger0)

    # Classifier/Trigger 1
    source = make.make_source(trigger1_train_dataset, trigger1_test_dataset, 2)
    data1 = make.make_data("trigger1_data", int(5e4), int(1e4), source=source)
    sys.add_data(data1)
    update_dataset(c1_file, [0], trigger1_train_dataset, trigger1_test_dataset)