def train_fit(sys, id_trigger):
    """Assemble a trigger-classifier dict for the trigger component ``id_trigger``.

    Looks up the trigger in ``sys``, loads the dataset it references, pulls the
    raw train/test (and optional validation) splits, and packages them together
    with freshly measured performance metrics.

    NOTE(review): ``sys`` shadows the stdlib module name; kept because callers
    may pass it by keyword.
    """
    trigger = sys.get(id_trigger)
    data_id = trigger.classifier.data_id
    dataset = sys.get(data_id)

    # Raw splits; validation only exists when the source declares a val_path.
    test_split = __get_trigger_raw_data(dataset, "test")
    train_split = __get_trigger_raw_data(dataset, "train")
    val_split = (__get_trigger_raw_data(dataset, "val")
                 if dataset.source.HasField('val_path') else None)

    # Measured cost of the trigger (time / operations / parameter count).
    metrics = make.make_performance_metrics(
        time=__time(), ops=__ops(), params=__parameters())

    return make.make_classifier_dict(
        "trigger_classifier", data_id, train_split, test_split, metrics,
        val_data=val_split)
def build_train_trigger(model1_dict, th):
    """Build a confidence-margin trigger classifier from model1's outputs.

    For each sample, the softmax probabilities of model1 are computed and the
    margin between the two most likely classes is thresholded: a margin below
    ``th`` fires the trigger (column 0 of the trigger logits), otherwise the
    sample is kept on model1 (column 1). The trigger's "ground truth" is
    whether model1 classified the sample correctly.

    Args:
        model1_dict: classifier dict with 'train'/'test' splits, each holding
            'logits', 'gt' and 'id' arrays.
        th: margin threshold (probabilities, so effectively in [0, 1]).

    Returns:
        A classifier (via ``make.make_classifier``) pointing at the pickled
        trigger dict saved under Definitions/Classifiers/tmp/.
    """
    performance = make.make_performance_metrics(**{})
    raw = {}
    # Same computation for both splits — original duplicated this inline.
    for split in ('train', 'test'):
        L = model1_dict[split]['logits']
        # Numerically stable softmax: subtracting the per-row max prevents
        # overflow in np.exp for large logits (plain np.exp(L) could overflow
        # to inf and yield NaN probabilities).
        E = np.exp(L - np.max(L, axis=1, keepdims=True))
        P = E / np.sum(E, axis=1, keepdims=True)
        sorted_P = np.sort(P, axis=1)
        margin = sorted_P[:, -1] - sorted_P[:, -2]  # top-1 minus top-2 probability
        logits_trigger = np.empty((margin.shape[0], 2))
        logits_trigger[:, 0] = margin < th   # low confidence -> trigger fires
        logits_trigger[:, 1] = margin >= th  # confident -> stay on model1
        pred = np.argmax(L, axis=1)
        correct = pred == model1_dict[split]['gt']
        raw[split] = make.make_classifier_raw_data(
            logits_trigger, correct, np.copy(model1_dict[split]['id']))

    classifier_trigger_dict = make.make_classifier_dict(
        "trigger_classifier", "cifar10", raw['train'], raw['test'], performance)
    path = '../../Definitions/Classifiers/tmp/trigger_random_threshold.pkl'
    io.save_pickle(path, classifier_trigger_dict)
    return make.make_classifier("trigger_classifier", path)
def build_train_trigger2(model1_dict, th):
    """Build a max-probability trigger classifier from model1's outputs.

    Unlike ``build_train_trigger`` (top-2 margin), this variant thresholds the
    maximum softmax probability: max P < ``th`` fires the trigger (column 0),
    otherwise the sample stays on model1 (column 1). The trigger's "ground
    truth" is whether model1 classified the sample correctly.

    Args:
        model1_dict: classifier dict with 'train'/'test' splits, each holding
            'logits', 'gt' and 'id' arrays.
        th: probability threshold in [0, 1].

    Returns:
        A classifier (via ``make.make_classifier``) pointing at the pickled
        trigger dict.
    """
    performance = make.make_performance_metrics(**{})
    raw = {}
    # Same computation for both splits — original duplicated this inline.
    for split in ('train', 'test'):
        L = model1_dict[split]['logits']
        # Numerically stable softmax: subtracting the per-row max prevents
        # overflow in np.exp for large logits.
        E = np.exp(L - np.max(L, axis=1, keepdims=True))
        P = E / np.sum(E, axis=1, keepdims=True)
        max_P = np.max(P, axis=1)
        logits_trigger = np.empty((max_P.shape[0], 2))
        logits_trigger[:, 0] = max_P < th   # low confidence -> trigger fires
        logits_trigger[:, 1] = max_P >= th  # confident -> stay on model1
        pred = np.argmax(L, axis=1)
        correct = pred == model1_dict[split]['gt']
        raw[split] = make.make_classifier_raw_data(
            logits_trigger, correct, np.copy(model1_dict[split]['id']))

    classifier_trigger_dict = make.make_classifier_dict(
        "trigger_classifier", "cifar10", raw['train'], raw['test'], performance)
    # NOTE(review): path lacks the '.pkl' extension used by build_train_trigger;
    # kept as-is because save and load are self-consistent here.
    path = '../../Definitions/Classifiers/tmp/trigger_random_threshold'
    io.save_pickle(path, classifier_trigger_dict)
    return make.make_classifier("trigger_classifier", path)
def build_train_trigger3(model1_dict, p):
    """Build a random trigger classifier (baseline).

    Each sample fires the trigger with probability ``p`` (a Bernoulli draw),
    independent of model1's confidence; column 1 is the complement. The
    trigger's "ground truth" is whether model1 classified the sample
    correctly. Useful as a random-routing baseline.
    """
    performance = make.make_performance_metrics(**{})
    raw_splits = {}
    # Train first, then test — preserves the order of the RNG draws.
    for split in ('train', 'test'):
        logits = model1_dict[split]['logits']
        n_samples = logits.shape[0]
        trigger_logits = np.empty((n_samples, 2))
        trigger_logits[:, 0] = np.random.binomial(1, p, n_samples)
        trigger_logits[:, 1] = 1 - trigger_logits[:, 0]
        model1_correct = np.argmax(logits, axis=1) == model1_dict[split]['gt']
        raw_splits[split] = make.make_classifier_raw_data(
            trigger_logits, model1_correct, np.copy(model1_dict[split]['id']))

    trigger_dict = make.make_classifier_dict(
        "trigger_classifier", "cifar10",
        raw_splits['train'], raw_splits['test'], performance)
    io.save_pickle(
        '../../Definitions/Classifiers/tmp/trigger_random_threshold',
        trigger_dict)
    return make.make_classifier(
        "trigger_classifier",
        '../../Definitions/Classifiers/tmp/trigger_random_threshold')
def fill_classifier(eval, classifier_dict, contribution, contribution_train, contribution_val):
    """Fill ``classifier_dict`` in place from per-sample contribution dicts.

    Converts the test/train/val contribution dicts (each mapping sample key ->
    'logits' / 'gt' / 'time_instance') into classifier raw-data records and
    merges them, plus system-level metrics taken from ``eval``, into
    ``classifier_dict`` via ``dict.update``.

    NOTE(review): the parameter name ``eval`` shadows the builtin; kept
    because callers may pass it by keyword.

    Args:
        eval: evaluation object with ``test['system']`` time/ops/params.
        classifier_dict: classifier dict to mutate (must carry 'name', and
            'test'/'train'/'val' sub-dicts for the splits that are present).
        contribution: test-split contribution dict, or None.
        contribution_train: train-split contribution dict, or None.
        contribution_val: val-split contribution dict, or None.
    """
    def _extract(contrib, split):
        # Build raw data for one split; empty raw data when the split is absent.
        # Also stores the per-instance times on classifier_dict[split].
        if contrib is None:
            return make.make_classifier_raw_data([], [], [])
        keys = list(contrib['logits'])
        logits = [contrib['logits'][k] for k in keys]
        gt = [contrib['gt'][k] for k in keys]
        classifier_dict[split]['time_instance'] = np.array(
            [contrib['time_instance'][k] for k in keys])
        return make.make_classifier_raw_data(logits, gt, keys)

    # System-level metrics. NOTE(review): this dereferences contribution['gt']
    # before the None checks below, so a None test contribution raises here —
    # behavior kept from the original. The 128 factor presumably scales
    # per-batch time (batch size 128) — TODO confirm.
    n_test = len(contribution['gt'])
    metrics = make.make_performance_metrics(**{
        "time": eval.test['system'].time / n_test * 128,
        "ops": eval.test['system'].ops / n_test,
        "params": eval.test['system'].params})

    test_raw = _extract(contribution, 'test')
    train_raw = _extract(contribution_train, 'train')
    val_raw = _extract(contribution_val, 'val')

    # Merge the assembled splits and metrics into the existing dict.
    classifier_dict.update(make.make_classifier_dict(
        classifier_dict['name'], "", train_raw, test_raw, metrics,
        val_data=val_raw))