Example #1
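These snippets are excerpts from the FastComposedModels (FCM) codebase and rely on imports made elsewhere in their modules. A minimal assumed preamble (only the standard-library and numpy imports are certain; the project-local aliases are sketched as comments because their import paths are not visible in the excerpts):

import os
import numpy as np
from threading import Lock
# Project-local aliases used below (import paths assumed, not shown here):
#   make - component constructors (make_source, make_data, make_trigger, make_classifier)
#   sb   - SystemBuilder
#   eval - evaluation helpers (evaluate, pretty_print)
#   io   - pickle helpers (read_pickle)
#   fcm  - FastComposedModels protobuf definitions (e.g. fcm.Data.Source.NUMPY)
#   update_dataset / __update_dataset - trigger-dataset builders defined elsewhere
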
def __make_data_and_dataset(i, id, c_file, th):
    """
    This routine creates the dataset to later train the triggers in the chain. This function returns a
    (Protobuf) Data component of the ensemble. Trigger component will look at the Data component to find the dataset for training.
    Data component specifies location of dataset, format of dataset, size of dataset, ...
    :param i: Individual (ensemble)
    :param id: Id of the component in the ensenmble who will use the Data/Dataset
    :param c_file: Classifier file
    :param th: Desired threshold value of the trigger
    :return: Protobuf Data component (in FastComposedModels.pb)
    """

    # individual_id = i.get_sysid()
    # number_individual_id = str(int.from_bytes(individual_id.encode('utf-8'), byteorder='big'))

    # Create dataset for new trigger
    train_path = os.path.join(os.environ['FCM'], 'Data', 'DATASET_' + id + '_train')
    test_path = os.path.join(os.environ['FCM'], 'Data', 'DATASET_' + id + '_test')
    val_path = os.path.join(os.environ['FCM'], 'Data', 'DATASET_' + id + '_val')
    __update_dataset(c_file, train_path, test_path, val_path, th)

    # Define Data (FastComposedModels.pb Data)
    data_id = 'data_' + id
    source = make.make_source(train_path, test_path, 2, val_path)
    data = make.make_data(data_id, int(5e3), int(5e3), None, source)  # TODO: Correct size of dataset

    return data


def build_chain(classifiers, id_classifiers, thresholds, id_triggers, data_id):
    assert len(classifiers) == len(
        id_triggers
    ) + 1, "ERROR: Number of triggers in the chain is not consistent"
    assert len(id_triggers) == len(
        thresholds), "ERROR: Each trigger should be assigned a threshold"
    assert len(classifiers) == len(
        id_classifiers
    ), "ERROR: Each classifier file should be assigned a classifier id"

    data_path = os.path.join(os.environ['FCM'], 'Data', data_id)

    if not os.path.exists(data_path):
        os.makedirs(data_path)

    sys = sb.SystemBuilder(verbose=False)
    for i in range(len(classifiers) - 1):

        # Create data for the trigger
        train_path = os.path.join(data_path, id_triggers[i] + "_train.pkl")
        test_path = os.path.join(data_path, id_triggers[i] + "_test.pkl")
        val_path = os.path.join(data_path, id_triggers[i] + "_val.pkl")
        source = make.make_source(train_path, test_path, 2, val_path=val_path)
        data = make.make_data("data_" + id_triggers[i], 1, 1, source=source)
        update_dataset(classifiers[i], train_path, test_path, val_path,
                       thresholds[i])
        sys.add_data(data)

        # Build trigger attached to classifier
        trigger = make.make_trigger(id_triggers[i],
                                    make.make_empty_classifier(
                                        id='',
                                        data_id="data_" + id_triggers[i]),
                                    [id_classifiers[i + 1]],
                                    model="probability")
        sys.add_trigger(trigger)

        # Build classifier
        c_file = classifiers[i]
        classifier = make.make_classifier(id_classifiers[i],
                                          c_file,
                                          component_id=id_triggers[i])
        sys.add_classifier(classifier)

        if i == 0:
            sys.set_start(id_classifiers[i])

    classifier = make.make_classifier(id_classifiers[-1], classifiers[-1])
    sys.add_classifier(classifier)
    return sys
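
A minimal usage sketch for build_chain. The classifier files below are hypothetical placeholders, and it assumes the FCM environment variable points at the project root (build_chain writes the trigger datasets under $FCM/Data/<data_id>):

# Hypothetical three-stage chain: each trigger decides whether to escalate
# to the next, more expensive classifier.
classifiers = [
    "../Definitions/Classifiers/small.pkl",   # placeholder paths
    "../Definitions/Classifiers/medium.pkl",
    "../Definitions/Classifiers/large.pkl",
]
id_classifiers = ["c0", "c1", "c2"]
id_triggers = ["trigger0", "trigger1"]        # always len(classifiers) - 1 triggers
thresholds = [0.7, 0.9]                       # one confidence threshold per trigger

sys = build_chain(classifiers, id_classifiers, thresholds, id_triggers, "chain_demo")
r = eval.evaluate(sys, "c0", check_classifiers=False)  # evaluation starts at the chain head
eval.pretty_print(r)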
Example #3
lock = Lock()  # shared across all workers; a lock created inside evaluate()
               # would be per-thread and would synchronize nothing


def evaluate(work, records, tid):
    while True:

        m, th, m_ = work.get()
        print(m, th, m_)

        small_dict = io.read_pickle(m)
        test_images = len(small_dict['test']['gt'])
        train_images = len(small_dict['train']['gt'])
        data_path = "../../Data/"
        trigger_train_dataset = os.path.join(data_path,
                                             "train_trigger_" + str(tid))
        test_train_dataset = os.path.join(data_path,
                                          "test_trigger_" + str(tid))

        sys = sb.SystemBuilder(verbose=False)

        # Complex classifier
        bigClassifier = make.make_classifier("big", m_)
        sys.add_classifier(bigClassifier)

        # Data
        source = make.make_source(trigger_train_dataset, test_train_dataset, 2)
        data = make.make_data("trigger_data",
                              train_images,
                              test_images,
                              source=source)
        sys.add_data(data)
        update_dataset(m, th, trigger_train_dataset, test_train_dataset)

        # Trigger
        trigger = make.make_trigger(
            "trigger",
            make.make_empty_classifier(data_id="trigger_data"), ["big"],
            model="probability")
        sys.add_trigger(trigger)

        # Simple classifier
        smallClassifier = make.make_classifier("small", m, "trigger")
        sys.add_classifier(smallClassifier)

        results = eval.evaluate(sys, "small", check_classifiers=False)
        records["system_" + m + ';' + m_ + ';' + str(th)] = results.test

        lock.acquire()
        if m_ not in records:  # Evaluate individual models in order to plot
            records[m_] = eval.evaluate(sys, 'big').test
        lock.release()

        work.task_done()
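
evaluate() is written as a queue worker: it blocks on work.get() and never returns, so it has to run in daemon threads coordinated through Queue.join(). A sketch of the driving code this implies (thread count, threshold, and model files are assumptions):

from queue import Queue
from threading import Thread

work = Queue()
records = {}                                   # shared results dict
models = ["modelA.pkl", "modelB.pkl"]          # hypothetical classifier files

for tid in range(4):                           # assumed worker count
    Thread(target=evaluate, args=(work, records, tid), daemon=True).start()

for i, m in enumerate(models):                 # one job per (small, th, big) triple
    for m_ in models[i + 1:]:
        work.put((m, 0.9, m_))                 # assumed fixed threshold
work.join()                                    # returns once every job is task_done()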

# Driver fragment: build and evaluate one small/big pair at a fixed threshold.
# Dict with the results of the evaluations
records = {}
path = os.environ['PYTHONPATH']
train_path = path + '/Data/train_trigger_threshold.pkl'
test_path = path + '/Data/test_trigger_threshold.pkl'
small_cfile = "../Definitions/Classifiers/DenseNet121_cifar10"
big_cfile = "../Definitions/Classifiers/DenseNet201_cifar10"
th = 0.9

sys = sb.SystemBuilder(verbose=False)

bigClassifier = make.make_classifier("big", big_cfile)
sys.add_classifier(bigClassifier)

source = make.make_source(train_path, test_path, fcm.Data.Source.NUMPY)
data = make.make_data("trigger_data", int(5e4), int(1e4), source=source)
sys.add_data(data)
update_dataset(small_cfile, th, train_path, test_path)

trigger = make.make_trigger(
    "trigger",
    make.make_empty_classifier(data_id="trigger_data"), ["big"],
    model="probability")
sys.add_trigger(trigger)

smallClassifier = make.make_classifier("small", small_cfile, "trigger")
sys.add_classifier(smallClassifier)

r = eval.evaluate(sys, "small")
eval.pretty_print(r)
print(r.test['system'].instance_model)

# Skeleton fragment: wire a two-trigger chain (c0 -> c1/c2) over the GTSRB models.
data_path = "../../Data/"  # same data directory as in evaluate() above
# Create skeleton
sys = sb.SystemBuilder(verbose=False)
trigger0_train_dataset = os.path.join(data_path, "train_trigger0")
trigger0_test_dataset = os.path.join(data_path, "test_trigger0")
trigger1_train_dataset = os.path.join(data_path, "train_trigger1")
trigger1_test_dataset = os.path.join(data_path, "test_trigger1")
c0_file = "../Definitions/Classifiers/sota_models_gtsrb-32-dev_validation/V001_VGG13_ref_0.pkl"
c1_file = "../Definitions/Classifiers/sota_models_gtsrb-32-dev_validation/V001_ResNet34_ref_0.pkl"
c2_file = "../Definitions/Classifiers/sota_models_gtsrb-32-dev_validation/V001_DenseNet121_ref_0.pkl"

# Classifier/Trigger 0
c0 = make.make_classifier("c0", c0_file, component_id="trigger0")
sys.add_classifier(c0)
source = make.make_source(trigger0_train_dataset, trigger0_test_dataset, 2)
data0 = make.make_data("trigger0_data", int(5e4), int(1e4), source=source)
sys.add_data(data0)
trigger0 = make.make_trigger(
    "trigger0",
    make.make_empty_classifier(data_id="trigger0_data"), ["c1", "c2"],
    model="probability_multiple_classifiers")
sys.add_trigger(trigger0)

# Classifier/Trigger 1
source = make.make_source(trigger1_train_dataset, trigger1_test_dataset, 2)
data1 = make.make_data("trigger1_data", int(5e4), int(1e4), source=source)
sys.add_data(data1)
update_dataset(c1_file, [0], trigger1_train_dataset, trigger1_test_dataset)
trigger1 = make.make_trigger(
    "trigger1",
    make.make_empty_classifier(data_id="trigger1_data"), ["c2"],
    model="probability")
sys.add_trigger(trigger1)
Example #6
            smallClassifier = make.make_classifier("small", m, "trigger")
            sys.replace("small", smallClassifier)

            small_dict = io.read_pickle(m)
            test_images = len(small_dict['test']['gt'])
            train_images = len(small_dict['train']['gt'])

            # For different thresholds
            for th in np.arange(step_th, 1, step_th):

                # Data
                source = make.make_source(trigger_train_dataset,
                                          test_train_dataset, 2)
                data = make.make_data("trigger_data",
                                      train_images,
                                      test_images,
                                      source=source)
                sys.replace("trigger_data", data)
                update_dataset(m, th, trigger_train_dataset,
                               test_train_dataset)

                trigger = make.make_trigger(
                    "trigger",
                    make.make_empty_classifier(data_id="trigger_data"),
                    ["big"],
                    model="probability")
                sys.replace("trigger", trigger)

                # Model 2
                for im_, m_ in enumerate(models[im + 1:]):
                    print(m, th, m_)
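
Example #6 is a fragment from inside a nested model sweep; the outer structure it implies, with every name not visible in the excerpt treated as an assumption:

step_th = 0.1                                        # assumed sweep granularity
models = ["modelA.pkl", "modelB.pkl", "modelC.pkl"]  # hypothetical classifier files
for im, m in enumerate(models):                      # Model 1: candidate small classifier
    # The fragment above runs here: it swaps m in as "small", sweeps th over
    # (step_th, 1), and pairs it against every later (bigger) model m_.
    pass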