Example 1
def set_threshold(i, c_id, th_val):
    """
        Sets the thresold of a Trigger to th_val
        :param i: Ensemble of models (system) representing an individual
        :param c_id: Classifier affected
        :param step: Value to which increment or decrement the threshold
        :return: Nothing
        """

    c_file = i.get(c_id).classifier_file
    if i.get(c_id).component_id != '':

        trigger_id = i.get(c_id).component_id
        trigger_old = i.get(trigger_id)
        assert trigger_old.DESCRIPTOR.name == "Trigger", "Classifiers should be connected to triggers in the chain"

        trigger_name = "trigger_classifier_" + str(th_val) + "_" + c_id
        trigger_classifier_file = os.path.join(os.environ['FCM'], os.environ['TMP'], trigger_name + '.pkl')

        if not os.path.exists(trigger_classifier_file):
            data = __make_data_and_dataset(i, trigger_name, c_file, th_val)
            i.replace(trigger_old.classifier.data_id, data)
            trigger = make.make_trigger(trigger_name,
                                        make.make_empty_classifier(id="", data_id=data.id),
                                        component_ids=trigger_old.component_ids, model="probability")
        else:
            i.remove(trigger_old.classifier.data_id)  # Remove old data if exists
            trigger = make.make_trigger(trigger_name,
                                        make.make_classifier('', classifier_file=trigger_classifier_file),
                                        component_ids=trigger_old.component_ids)
        i.replace(trigger_old.id, trigger)
        c = i.get(c_id)
        c.component_id = trigger_name
        i.replace(c_id, c)
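
A minimal usage sketch (not part of the original example), assuming an ensemble i already assembled (e.g. by build_chain in Example 3) and the FCM and TMP environment variables set; "c0" is a hypothetical classifier id:

# Hypothetical: move c0's trigger to threshold 0.7; the old trigger is replaced
# by "trigger_classifier_0.7_c0" and c0 is re-linked to it.
set_threshold(i, "c0", 0.7)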
Example 2
def replace_classifier(i, c_id, c_id_new, c_file=None, trigger_name=None):

    """
    Replaces a classifier in the chain for a new classifier
    :param i: Ensemble of models (system) representing an individual
    :param c_id: Classifier's id to be replaced
    :param c_id_new: Classifier's id that replaces
    :return: Nothing
    """

    c_file = c_id_new if c_file is None else c_file
    existing_classifier_id = [c.id for c in i.get_message().classifier]

    if c_id_new not in existing_classifier_id:
        classifier = make.make_classifier(c_id_new, c_file)
        if i.get(c_id).component_id != '':

            # Check it is a trigger
            trigger_id = i.get(c_id).component_id
            assert i.get(trigger_id).DESCRIPTOR.name == "Trigger", \
                "ERROR: Classifier in chain should be connected to trigger"

            # Replace previous data component on the system for the new one
            trigger_classifier_old = i.get(trigger_id).classifier
            old_data_id = trigger_classifier_old.data_id
            th = float(trigger_id.split("_")[2])

            trigger_name = "trigger_classifier_" + str(th) + "_" + c_id_new
            trigger_classifier_file = os.path.join(os.environ['FCM'], os.environ['TMP'], trigger_name+'.pkl')

            if not os.path.exists(trigger_classifier_file):
                data = __make_data_and_dataset(i, trigger_name, c_file, th)
                i.replace(old_data_id, data)
                trigger = make.make_trigger(trigger_name, make.make_empty_classifier(id="", data_id=data.id),
                                            component_ids=i.get(trigger_id).component_ids, model="probability")
            else:
                trigger = make.make_trigger(trigger_name,
                                            make.make_classifier('', classifier_file=trigger_classifier_file),
                                            component_ids=i.get(trigger_id).component_ids)
            i.replace(trigger_id, trigger)

            # Link replacing classifier to trigger
            classifier.component_id = trigger_name

        # Replace classifier
        i.replace(c_id, classifier)

        # If first classifier, point at it
        if i.get_start() == c_id:
            i.set_start(c_id_new)

        # Get trigger connected to the old classifier
        trigger_names = i.get_message().trigger  # All triggers from the ensemble
        for trigger in trigger_names:
            trigger = i.get(trigger.id)
            for ic, c in enumerate(trigger.component_ids):
                if c_id == c:
                    trigger.component_ids[ic] = c_id_new
                    i.replace(trigger.id, trigger)
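
A hedged usage sketch: with the same ensemble i, this would swap a hypothetical classifier "c1" for a new model file. Note the threshold is recovered from the old trigger's id, so the trigger must follow the "trigger_classifier_<th>_<c_id>" naming convention used throughout these examples:

# Hypothetical ids/files; the new classifier takes over c1's trigger links,
# and the chain's start pointer if c1 was the first classifier.
replace_classifier(i, "c1", "V001_ResNet34_ref_0.pkl")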
Example 3
def build_chain(classifiers, id_classifiers, thresholds, id_triggers, data_id):

    """
    Builds a chain ensemble: every classifier except the last is attached to a
    probability trigger that can forward the input to the next classifier.
    :param classifiers: Classifier files, in chain order
    :param id_classifiers: One id per classifier
    :param thresholds: One threshold per trigger
    :param id_triggers: One trigger id per consecutive pair of classifiers
    :param data_id: Directory name under $FCM/Data for the trigger datasets
    :return: The assembled SystemBuilder ensemble
    """
    assert len(classifiers) == len(
        id_triggers
    ) + 1, "ERROR: Number of triggers in the chain is not consistent"
    assert len(id_triggers) == len(
        thresholds), "ERROR: Each trigger should be assigned a threshold"
    assert len(classifiers) == len(
        id_classifiers
    ), "ERROR: Each classifier file should be assigned a classifier id"

    data_path = os.path.join(os.environ['FCM'], 'Data', data_id)

    if not os.path.exists(data_path):
        os.makedirs(data_path)

    sys = sb.SystemBuilder(verbose=False)
    for i in range(len(classifiers) - 1):

        # Create data for the trigger
        train_path = os.path.join(data_path, id_triggers[i] + "_train.pkl")
        test_path = os.path.join(data_path, id_triggers[i] + "_test.pkl")
        val_path = os.path.join(data_path, id_triggers[i] + "_val.pkl")
        source = make.make_source(train_path, test_path, 2, val_path=val_path)
        data = make.make_data("data_" + id_triggers[i], 1, 1, source=source)
        update_dataset(classifiers[i], train_path, test_path, val_path,
                       thresholds[i])
        sys.add_data(data)

        # Build trigger attached to classifier
        trigger = make.make_trigger(id_triggers[i],
                                    make.make_empty_classifier(
                                        id='',
                                        data_id="data_" + id_triggers[i]),
                                    [id_classifiers[i + 1]],
                                    model="probability")
        sys.add_trigger(trigger)

        # Build classifier
        c_file = classifiers[i]
        classifier = make.make_classifier(id_classifiers[i],
                                          c_file,
                                          component_id=id_triggers[i])
        sys.add_classifier(classifier)

        if i == 0:
            sys.set_start(id_classifiers[i])

    classifier = make.make_classifier(id_classifiers[-1], classifiers[-1])
    sys.add_classifier(classifier)
    return sys
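
A sketch of a consistent call with hypothetical file names; the assertions above require exactly one trigger and one threshold per consecutive pair of classifiers:

classifiers = ["V001_VGG13_ref_0.pkl", "V001_ResNet34_ref_0.pkl", "V001_DenseNet121_ref_0.pkl"]
id_classifiers = ["c0", "c1", "c2"]
id_triggers = ["trigger0", "trigger1"]  # one fewer trigger than classifiers
thresholds = [0.7, 0.8]                 # one threshold per trigger
chain = build_chain(classifiers, id_classifiers, thresholds, id_triggers, "chain_demo")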
Example 4
def evaluate(work, records, tid):
    lock = Lock()  # NOTE: created per worker, so it protects nothing across threads; a shared lock would be needed
    while True:

        m, th, m_ = work.get()
        print(m, th, m_)

        small_dict = io.read_pickle(m)
        test_images = len(small_dict['test']['gt'])
        train_images = len(small_dict['train']['gt'])
        data_path = "../../Data/"
        trigger_train_dataset = os.path.join(data_path,
                                             "train_trigger_" + str(tid))
        test_train_dataset = os.path.join(data_path,
                                          "test_trigger_" + str(tid))

        sys = sb.SystemBuilder(verbose=False)

        # Complex classifier
        bigClassifier = make.make_classifier("big", m_)
        sys.add_classifier(bigClassifier)

        # Data
        source = make.make_source(trigger_train_dataset, test_train_dataset, 2)
        data = make.make_data("trigger_data",
                              train_images,
                              test_images,
                              source=source)
        sys.add_data(data)
        update_dataset(m, th, trigger_train_dataset, test_train_dataset)

        # Trigger
        trigger = make.make_trigger(
            "trigger",
            make.make_empty_classifier(data_id="trigger_data"), ["big"],
            model="probability")
        sys.add_trigger(trigger)

        # Simple classifier
        smallClassifier = make.make_classifier("small", m, "trigger")
        sys.add_classifier(smallClassifier)

        results = eval.evaluate(sys, "small", check_classifiers=False)
        records["system_" + m + ';' + m_ + ';' + str(th)] = results.test

        lock.acquire()
        if m_ not in records:  # Evaluate individual models in order to plot
            records[m_] = eval.evaluate(sys, 'big').test
        lock.release()

        work.task_done()
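
A driver sketch for this worker (hypothetical model files and thresholds): evaluate blocks on work.get() forever, so it is meant to run as a daemon thread fed by a shared Queue; since the Lock above is per worker, a genuinely shared lock (or a thread-safe records structure) would be needed in practice:

from queue import Queue
from threading import Thread

work = Queue()
records = {}

for th in (0.5, 0.7, 0.9):  # hypothetical thresholds
    work.put(("small_model.pkl", th, "big_model.pkl"))

for tid in range(4):
    Thread(target=evaluate, args=(work, records, tid), daemon=True).start()

work.join()  # returns once every job has been marked task_done()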
Example 5
def extend_chain_pt(i, c_id, th=None, c_file=None, trigger_name=None):

    """
    Extends the chain with a probability trigger and a classifier
    :param i: Ensemble of models (system) representing an individual
    :param c_id: Classifier to attach at the end fo the chain
    :param th: Threshold of the trigger
    :return: Nothing
    """

    th = 0.5 if th is None else th
    c_file = c_id if c_file is None else c_file
    existing_classifier_id = [c.id for c in i.get_message().classifier]

    if c_id not in existing_classifier_id:  # Add classifier only if not already in the chain

        # Get the last model in the chain
        for c in i.get_message().classifier:
            if c.component_id == "":
                last_id = c.id

        # Create dataset for new trigger if trigger's classifier not existing
        trigger_name = "trigger_classifier_" + str(th) + "_" + last_id
        trigger_classifier_file = os.path.join(os.environ['FCM'], os.environ['TMP'], trigger_name+'.pkl')

        if not os.path.exists(trigger_classifier_file):
            data = __make_data_and_dataset(i, trigger_name, i.get(last_id).classifier_file, th)
            i.add_data(data)
            trigger = make.make_trigger(trigger_name, make.make_empty_classifier(id="", data_id=data.id), component_ids=[c_id], model="probability")
        else:
            trigger = make.make_trigger(trigger_name, make.make_classifier('', trigger_classifier_file), component_ids=[c_id])

        i.add_trigger(trigger)

        # Create new mutated classifier
        classifier = make.make_classifier(c_id, c_file)
        i.add_classifier(classifier)

        # Update last classifier to connect to trigger
        last_classifier = i.get(last_id)
        last_classifier.component_id = trigger_name
        i.replace(last_id, last_classifier)
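
A usage sketch with a hypothetical model file; since c_file defaults to c_id, the id doubles as the file here, and with th omitted the default of 0.5 would be used:

# Appends the classifier behind a probability trigger firing at 0.6; the
# previous tail of the chain is re-linked to the new trigger.
extend_chain_pt(i, "V001_DenseNet121_ref_0.pkl", th=0.6)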
Example 6
def extend_merged_chain(i, c_id_tail, c_id_new, th, c_file_new=None):

    """
    :param i: Individual
    :param c_id_tail: Last classifier at the chain
    :param c_id_new: Id of new last classifier at the chain
    :param th: Threshold value of the new trigger
    :param c_file_new: File location of the classifier at the chain
    :param t_id: Id of the new trigger
    :return: Nothing
    """

    existing_classifier_id = [c.id for c in i.get_message().classifier]

    if c_id_new not in existing_classifier_id:

        # Add new classifier to the ensemble
        c_file = c_id_new if c_file_new is None else c_file_new
        classifier = make.make_classifier(c_id_new, c_file)
        i.add_classifier(classifier)

        # Build the trigger
        trigger_name = "trigger_classifier_" + str(th) + "_" + c_id_tail
        trigger_classifier_file = os.path.join(os.environ['FCM'], os.environ['TMP'], trigger_name + '.pkl')

        if not os.path.exists(trigger_classifier_file):
            data = __make_data_and_dataset(i, trigger_name, i.get(c_id_tail).classifier_file, th)
            i.add_data(data)
            trigger = make.make_trigger(trigger_name, make.make_empty_classifier(id="", data_id=data.id),
                                        component_ids=[c_id_new], model="probability")
        else:
            trigger = make.make_trigger(trigger_name, make.make_classifier('', trigger_classifier_file),
                                        component_ids=[c_id_new])

        i.add_trigger(trigger)

        # Connect the (old) last classifier to the trigger
        last_classifier = i.get(c_id_tail)
        last_classifier.component_id = trigger_name
        i.replace(c_id_tail, last_classifier)
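
A sketch, assuming the chain currently ends at a hypothetical classifier "c2":

# Adds "c3" behind a new trigger with threshold 0.8 and re-links "c2" to it.
extend_merged_chain(i, c_id_tail="c2", c_id_new="c3", th=0.8,
                    c_file_new="V001_GoogleNet_ref_0.pkl")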
Example 7
                   classifier_trigger_dict)
    classifier = make.make_classifier("trigger_classifier",
                                      "../Definitions/Triggers/Tmp/" + name)
    return classifier


net1 = 'system_DenseNet201_cifar10.pkl_ResNet152_cifar10.pklth=0.7'
net2 = 'GoogleNet_cifar10.pkl'
m = Classifier_Path + net1
m_ = Classifier_Path + net2
th = 0.7

sys = sb.SystemBuilder(verbose=True)

name2 = m_
bigClassifier = make.make_empty_classifier("big")
bigClassifier.classifier_file = name2
sys.add_classifier(bigClassifier)

trigger = make.make_trigger("probability_threshold_trigger",
                            make.make_empty_classifier(), ["big"])
sys.add_trigger(trigger)

name1 = m
smallClassifier = make.make_empty_classifier("small",
                                             "probability_threshold_trigger")
model1_dict = io.read_pickle(name1, suffix="", verbose=False)
smallClassifier.classifier_file = name1
sys.add_classifier(smallClassifier)

classifier_trigger = build_train_trigger(model1_dict, th)
#       show: 13.458 secs
#       save: 4.609 secs

# LOAD TIME of this script:
# PROTO PERFORMANCE:
#  load: 8.132 secs
#  show: 13.598 secs

if __name__ == '__main__':
    # Load classifier
    name = "demo_classifier"

    name = "/dataT/eid/GIT/tpml/ml_experiments/001interns/V001_DPN26_ref_0_old"
    name = "/dataT/eid/GIT/tpml/ml_experiments/001interns/V001_DPN26_ref_0"

    model = make_util.make_empty_classifier()

    print("Load ...")
    t_start = time.time()
    io_util.read_message(name, model)
    t_duration_load = time.time() - t_start

    t_start = time.time()
    print(model)
    t_duration_show = time.time() - t_start

    print("PROTO PERFROMANCE:")
    print(" load: {:.3f} secs".format(t_duration_load))
    print(" show: {:.3f} secs".format(t_duration_show))
    print("")
    print("ALL DONE")
    big_cfile = "../Definitions/Classifiers/DenseNet201_cifar10"
    th = 0.9

    sys = sb.SystemBuilder(verbose=False)

    bigClassifier = make.make_classifier("big", big_cfile)
    sys.add_classifier(bigClassifier)

    source = make.make_source(train_path, test_path, fcm.Data.Source.NUMPY)
    data = make.make_data("trigger_data", int(5e4), int(1e4), source=source)
    sys.add_data(data)
    update_dataset(small_cfile, th, train_path, test_path)

    trigger = make.make_trigger(
        "trigger",
        make.make_empty_classifier(data_id="trigger_data"), ["big"],
        model="probability")
    sys.add_trigger(trigger)

    smallClassifier = make.make_classifier("small", small_cfile, "trigger")
    sys.add_classifier(smallClassifier)

    r = eval.evaluate(sys, "small")
    eval.pretty_print(r)
    print(r.test['system'].instance_model)

    total_time = 0  # accumulated inference time; "time" would shadow the time module
    time_small = io.read_pickle(small_cfile)['metrics']['time'] / 128
    time_big = io.read_pickle(big_cfile)['metrics']['time'] / 128
    for model_id, model in r.test['system'].instance_model.items():
        total_time += time_small if len(model) == 1 else time_small + time_big
if __name__ == "__main__":
    # Get the models from the Classifiers dir
    import os
    Dataset_Path = "../../Definitions/Classifiers/"
    dsets = ["sota_models_caltech256-32-dev_validation"]

    records = {}

    for d in dsets:
        Classifier_Path = Dataset_Path + d + '/'
        models = [f for f in os.listdir(Classifier_Path)]
        print(models)

        # Creating system
        sys = sb.SystemBuilder(verbose=False)
        smallClassifier = make.make_empty_classifier("Classifier")
        sys.add_classifier(smallClassifier)

        for m_ in models:

            # Model 2
            name2 = Classifier_Path + m_
            model2 = make.make_empty_classifier()
            model2.id = "Classifier"
            model2.classifier_file = name2
            sys.replace(model2.id, model2)

            evaluate_time_start = time.time()
            results = eval.evaluate(sys, model2.id)
            eval_time = time.time() - evaluate_time_start
    trigger0_train_dataset = os.path.join(data_path, "train_trigger0")
    trigger0_test_dataset = os.path.join(data_path, "test_trigger0")
    trigger1_train_dataset = os.path.join(data_path, "train_trigger1")
    trigger1_test_dataset = os.path.join(data_path, "test_trigger1")
    c0_file = "../Definitions/Classifiers/sota_models_gtsrb-32-dev_validation/V001_VGG13_ref_0.pkl"
    c1_file = "../Definitions/Classifiers/sota_models_gtsrb-32-dev_validation/V001_ResNet34_ref_0.pkl"
    c2_file = "../Definitions/Classifiers/sota_models_gtsrb-32-dev_validation/V001_DenseNet121_ref_0.pkl"

    # Classifier/Trigger 0
    c0 = make.make_classifier("c0", c0_file, component_id="trigger0")
    sys.add_classifier(c0)
    source = make.make_source(trigger0_train_dataset, trigger0_test_dataset, 2)
    data0 = make.make_data("trigger0_data", int(5e4), int(1e4), source=source)
    sys.add_data(data0)
    trigger0 = make.make_trigger(
        "trigger0",
        make.make_empty_classifier(data_id="trigger0_data"), ["c1", "c2"],
        model="probability_multiple_classifiers")
    sys.add_trigger(trigger0)

    # Classifier/Trigger 1
    source = make.make_source(trigger1_train_dataset, trigger1_test_dataset, 2)
    data1 = make.make_data("trigger1_data", int(5e4), int(1e4), source=source)
    sys.add_data(data1)
    update_dataset(c1_file, [0], trigger1_train_dataset, trigger1_test_dataset)
    trigger1 = make.make_trigger(
        "trigger1",
        make.make_empty_classifier(data_id="trigger1_data"), ["c2"],
        model="probability_multiple_classifiers")
    c1 = make.make_classifier("c1", c1_file, component_id="trigger1")
    sys.add_classifier(c1)
    sys.add_trigger(trigger1)
Example 12
from google.protobuf.json_format import MessageToJson
import Source.protobuf.make_util as make
import Source.io_util as io
import time

protoClassifier = make.make_empty_classifier()
start = time.time()
io.read_message("../Definitions/Classifiers/V001_SIMPLENET_CIFAR100",
                protoClassifier)
print("Protobuf reading time:", time.time() - start)

import json

start = time.time()
with open('JSON.txt') as f:
    data = json.load(f)
print("Json reading time:", time.time() - start)
"""

jsonClassifier = MessageToJson(protoClassifier)
with open('./JSON.txt', 'w') as json_file:
    json.dump(jsonClassifier, json_file)


"""