# Example #1
# 0
def extend_chain(chain):
    """Grow the given chain by attaching a DenseNet161 classifier at 'ResNet18'.

    The new classifier is registered with a trigger threshold of 0.0 and is
    loaded from the CIFAR-10 SOTA model pool under the $FCM directory.
    """
    new_id = 'DenseNet161'
    new_pickle = os.path.join(os.environ['FCM'], 'Definitions', 'Classifiers',
                              'sota_models_cifar10-32-dev_validation',
                              'V001_DenseNet161_ref_0.pkl')
    extend_merged_chain(chain, 'ResNet18', new_id, 0.0, new_pickle)
# Example #2
# 0
def build_evaluate_chain(files: 'list[str]', ths: 'list[float]'):
    """Build a linear classifier chain from pickled models and evaluate it.

    Args:
        files: Classifier definition files in chain order; the first one
            becomes the start component of the system.
        ths: Trigger thresholds, one per link between consecutive
            classifiers, so ``len(ths) == len(files) - 1``.

    Returns:
        The result object produced by ``eval.evaluate`` for the built system.
    """
    # BUGFIX: annotations were the runtime list literals [str]/[float], which
    # are not valid type hints; string hints keep compatibility everywhere.
    assert len(files) > 0 and len(files) == len(ths) + 1, \
        "need one threshold per pair of consecutive classifiers"

    # Renamed local from `sys` to avoid shadowing the stdlib module name.
    system = sb.SystemBuilder(verbose=False)
    first = make.make_classifier(os.path.basename(files[0]), files[0])
    system.add_classifier(first)
    system.set_start(first.id)

    # Automatically build the chain with written mutation operations
    for i, current in enumerate(files[:-1]):
        extend_merged_chain(system, os.path.basename(current),
                            os.path.basename(files[i + 1]), ths[i],
                            files[i + 1])

    # NOTE: `eval` is a project evaluation module here, shadowing the builtin.
    return eval.evaluate(system)
    def test_threshold_1(self):
        """Extend at ResNet18 with threshold 1.1 (above any probability, so
        presumably every sample cascades to the new net — confirm against the
        trigger semantics): accuracy must match the extended classifier while
        time, ops and params accumulate over both networks."""
        chain = self.__create_ensemble()
        extend_id = 'DenseNet-161'
        extend_file = os.path.join(os.environ['FCM'], 'Definitions',
                                   'Classifiers',
                                   'sota_models_cifar10-32-dev_validation',
                                   'V001_DenseNet161_ref_0.pkl')
        extend_merged_chain(chain, 'ResNet18', extend_id, 1.1, extend_file)

        R = evaluate(chain, chain.get_start())
        system = R.test['system']

        def metrics_by_hand(raw):
            # Recompute test accuracy from the raw logits and pull the
            # stored time/params/ops metrics.
            gt = raw['test']['gt']
            acc = np.sum(np.argmax(raw['test']['logits'], 1) == gt) / len(gt)
            return (acc, raw['metrics']['time'], raw['metrics']['params'],
                    raw['metrics']['ops'])

        acc_net0, time_net0, params_net0, ops_net0 = metrics_by_hand(
            io.read_pickle(
                os.path.join(os.environ['FCM'], 'Definitions', 'Classifiers',
                             'sota_models_cifar10-32-dev_validation',
                             'V001_ResNet18_ref_0.pkl')))
        acc_net1, time_net1, params_net1, ops_net1 = metrics_by_hand(
            io.read_pickle(extend_file))

        # NOTE(review): 128 looks like a batch size and 5e3 like the test-set
        # size — confirm against the evaluation pipeline.
        correct = system.accuracy == acc_net1 and \
            math.isclose(time_net0/128 + time_net1/128, system.time/5e3) and \
            system.ops/5e3 == ops_net0 + ops_net1 and \
            system.params == params_net0 + params_net1 + 1

        self.assertEqual(correct, True)
    def test_structure(self):
        """After extending at ResNet18 with threshold 1.1, the chain must start
        at the new trigger, and that trigger must route to exactly the one
        newly added classifier."""
        chain = self.__create_ensemble()
        extend_id = 'DenseNet-161'
        extend_file = os.path.join(os.environ['FCM'], 'Definitions',
                                   'Classifiers',
                                   'sota_models_cifar10-32-dev_validation',
                                   'V001_DenseNet161_ref_0.pkl')
        extend_merged_chain(chain, 'ResNet18', extend_id, 1.1, extend_file)

        # Short-circuiting `and`: later lookups only run once the earlier
        # structural checks have passed.
        correct_structure = \
            chain.get(chain.get_start()).component_id == "trigger_classifier_1.1_ResNet18" and \
            len(chain.get("trigger_classifier_1.1_ResNet18").component_ids) == 1 and \
            chain.get("trigger_classifier_1.1_ResNet18").component_ids[0] == extend_id

        self.assertEqual(correct_structure, True)
# Example #5
# 0
def mutation_operation(P):
    """Apply one randomly drawn mutation to a random individual of ``P``.

    The candidate mutations are: extend a chain (0), replace a classifier
    (1), nudge one trigger threshold (2), or — only when the individual has
    exactly one merger — attach an extra chain to that merger (3).

    Returns:
        A list holding the single mutated copy, or an empty list when the
        drawn mutation was not applicable (merger already at capacity).
    """
    offspring = []
    parent = P[random.randint(0, len(P) - 1)]

    # Operation 3 is only offered when the individual has a single merger.
    single_merger = len(parent.get_message().merger) == 1
    operation = random.randint(0, 3) if single_merger else random.randint(0, 2)

    if operation == 0:
        # --- Extend a chain with a freshly drawn classifier ---
        mutant = parent.copy()
        c_file_new = utils.pick_random_classifier(args)
        c_id_new = get_classifier_name(c_file_new)

        # A chain tail is a classifier whose component_id is empty.
        merger = None if 'Merger' not in mutant.get_start() \
            else mutant.get_message().merger[0]
        n_chains = 1 if merger is None else len(merger.merged_ids)
        tails = [c.id for c in mutant.get_message().classifier
                 if c.component_id == ""]
        c_id_extend = tails[random.randint(0, n_chains - 1)]

        # Tails of merged chains carry a '<digit>_' prefix; propagate it.
        # NOTE(review): the test skips '0' — confirm chain numbering starts at 1.
        head = c_id_extend[0]
        if '0' < head <= '9':
            c_id_new = head + '_' + c_id_new

        om.extend_merged_chain(mutant,
                               c_id_extend,
                               c_id_new,
                               th=utils.pick_random_threshold(args),
                               c_file_new=c_file_new)
        offspring.append(mutant)

    if operation == 1:
        # --- Swap an existing classifier for a randomly drawn new one ---
        mutant = parent.copy()
        c_id_existing = utils.pick_random_classifier(args, mutant)
        c_file_new = utils.pick_random_classifier(args)
        head = c_id_existing[0]
        prefix = head + '_' if '0' < head <= '9' else ''
        c_id_new = prefix + get_classifier_name(c_file_new)
        om.replace_classifier_merger(mutant,
                                     c_id_existing,
                                     c_id_new,
                                     c_file=c_file_new)
        offspring.append(mutant)

    if operation == 2:
        # --- Move one trigger threshold up or down by one step ---
        mutant = parent.copy()
        direction = 1 if random.random() > 0.5 else -1
        om.update_threshold(mutant, utils.pick_random_classifier(args, mutant),
                            direction * args.step_th)
        offspring.append(mutant)

    if operation == 3:
        # --- Add a whole new chain into the (single) merger ---
        merger = parent.get_message().merger[0]
        n_merged = len(merger.merged_ids)

        # The merger is capped at three merged chains.
        if n_merged < 3:
            mutant = parent.copy()
            c_file_new = utils.pick_random_classifier(args)
            c_id_new = str(n_merged) + "_" + get_classifier_name(c_file_new)
            om.add_classifier_to_merger(mutant, merger.id, c_id_new, c_file_new)
            offspring.append(mutant)

    return offspring
]
classifiers_id = ['VGG13', 'ResNeXt29_32x4d', 'VGG11']
thresholds = [0.9, 0.8]

trigger_ids = [
    'trigger_classifier_0.1_VGG13', 'trigger_classifier_0.4_ResNeXt29_32x4d'
]

# Build a three-classifier chain to exercise the extend operation on.
chain = build_chain(classifiers, classifiers_id, thresholds, trigger_ids,
                    'chain_extend_operation')

# Test automatic id generator
from Examples.compute.chain_genetic_algorithm.utils import generate_system_id
chain_id = generate_system_id(chain)

# Test new extend chain operation
from Source.genetic_algorithm.operations_mutation import extend_merged_chain
c_id_new = 'DenseNet-161'
# BUGFIX: the classifier definition is a pickle — the '.pkl' suffix was
# missing here, unlike every other reference to V001_DenseNet161_ref_0.pkl.
c_file_new = os.path.join(os.environ['FCM'], 'Definitions', 'Classifiers',
                          'sota_models_cifar10-32-dev_validation',
                          'V001_DenseNet161_ref_0.pkl')
# Extend at the current tail (last id in classifiers_id) with threshold 0.9.
extend_merged_chain(chain,
                    classifiers_id[-1],
                    c_id_new,
                    0.9,
                    c_file_new=c_file_new)
chain.set_sysid(generate_system_id(chain))

# Evaluate the mutated chain from its start component.
R = evaluate(chain, chain.get_start())