def main():
    random_seed = af.get_random_seed()
    af.set_random_seeds()
    print('Random Seed: {}'.format(random_seed))
    device = af.get_pytorch_device()
    models_path = 'networks/{}'.format(random_seed)
    af.create_path(models_path)
    af.set_logger('outputs/train_models')

    train_models(models_path, device)
Example #2
def main():
    random_seed = af.get_random_seed()
    torch.manual_seed(random_seed)    # make the run reproducible
    np.random.seed(random_seed)
    device = af.get_pytorch_device()
    trained_models_path = 'networks/{}'.format(random_seed)

    wasteful_overthinking_experiment(trained_models_path, device)
    get_simple_complex(trained_models_path, device)

    destructive_overthinking_experiment(trained_models_path, device)
    get_destructive_overthinking_samples(trained_models_path, device)
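A note on the helper used above: af.get_pytorch_device() is project-specific and its body is not shown in these snippets; a minimal sketch of the device-selection logic such a helper typically wraps (plain PyTorch, assumed rather than taken from the project) is:

import torch

def get_pytorch_device():
    # prefer the GPU when CUDA is available, otherwise fall back to the CPU
    return torch.device('cuda' if torch.cuda.is_available() else 'cpu')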
Example #3
def main(mode, load):
    random_seed = af.get_random_seed()
    models_path = 'networks/{}'.format(random_seed)
    device = af.get_pytorch_device()
    create_params = [
        # type, training, (prune?, keep_ratio for ics, batch size)
        # ('dense', '0', (True, [0.75, 0.66, 0.58, 0.46], 128), [0, 0, 1, 0, 0, 1, 0, 1, 0])
        ('dense', '0', (False, [0.66, 0.46, 0.36, 0.36], 128), [1, 1, 1]),
        ('dense', '0', (False, [0.66, 0.46, 0.36, 0.36], 128), [1, 1, 1]),
        ('dense', '0', (False, [0.66, 0.46, 0.36, 0.36], 128), [1, 1, 1]),
        ('dense', '0', (False, [0.66, 0.46, 0.36, 0.36], 128), [1, 1, 1]),
        ('dense', '0', (False, [0.66, 0.46, 0.36, 0.36], 128), [1, 1, 1]),
    ]
    create_bool = [1] * len(create_params)  # one creation flag per configuration
    if load is not None:
        model, param = arcs.load_model(models_path, load, -1)
        arr = [(model, param)]
    else:
        arr = list(
            multi_experiments(models_path, zip(create_params, create_bool),
                              device))
    #af.print_acc(arr, groups=[5], extend=True)
    af.print_acc(arr, extend=True)
    #af.print_acc(arr, extend=False)
    #af.plot_acc([m[1] for m in arr])
    # print the FLOPs of each model (CIFAR-sized 3x32x32 input)
    for m, _ in arr:
        print("")
        print("flops: {}".format(af.calculate_flops(m, (3, 32, 32))))
    for m, p in arr:
        arcs.save_model(m, p, models_path, p['name'], -1)
    print("model: {}".format(arr[0][0]))
Example #4
def main(confusion, model_name):
    random_seed = af.get_random_seed()
    model = None
    models_path = 'networks/{}'.format(random_seed)
    device = af.get_pytorch_device()

    if model_name == "":
        # af.create_path(models_path)
        # af.set_logger('outputs/train_models')

        model, dataset = train_model(models_path, device)
    else:
        model, model_params = arcs.load_model(models_path,
                                              model_name,
                                              epoch=-1)

    if confusion:
        confusion_matrix, confusion_correct = af.calculate_confusion(
            model, 'cifar10', device)
        confusion_df = pd.DataFrame(confusion_matrix)
        correct_df = pd.DataFrame(confusion_correct)
        confusion_df.to_csv("confusion.csv")
        correct_df.to_csv("confusion_correct.csv")
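af.calculate_confusion is a project helper whose internals are not shown; for reference, a minimal sketch of how a class-confusion matrix is typically accumulated from true and predicted labels (plain NumPy, independent of the helper) is:

import numpy as np

def confusion_counts(true_labels, predicted_labels, num_classes=10):
    # counts[i, j] = number of samples of true class i that were predicted as class j
    counts = np.zeros((num_classes, num_classes), dtype=np.int64)
    for t, p in zip(true_labels, predicted_labels):
        counts[t, p] += 1
    return counts

# the result can be written out exactly as in the example above:
# pd.DataFrame(confusion_counts(y_true, y_pred)).to_csv("confusion.csv")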
Example #5
import aux_funcs as af
import network_architectures as arcs


def train_model(models_path, params, device):
    # create a ResNet-56 baseline and an iteratively built dense network for CIFAR-10
    res56_model, res56_params = arcs.create_resnet56(models_path,
                                                     'cifar10',
                                                     'd',
                                                     return_model=True)
    dense_model, dense_params = arcs.create_dense_iterative(
        models_path, params)


if __name__ == "__main__":
    random_seed = af.get_random_seed()
    models_path = 'networks/{}'.format(random_seed)
    device = af.get_pytorch_device()
    create_params = [
        # keep_ratio, min_ratio, pruning mode
        ([0.46, 0.46, 0.46, 0.46], [0.1, 0.1, 0.1, 0.1], "2")
    ]
    arr = [train_model(models_path, param, device) for param in create_params]
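As excerpted, train_model builds the two networks but does not return them, so arr ends up holding None entries; if the intent is to keep and persist the created pairs, the same arcs.save_model call used in Example #3 is the natural follow-up (a sketch, assuming train_model is amended to return dense_model, dense_params):

    arr = [train_model(models_path, param, device) for param in create_params]
    for model, model_params in arr:
        # mirrors the save call from Example #3
        arcs.save_model(model, model_params, models_path, model_params['name'], -1)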
Example #6
        cam_mask[i, :, :] = grad_cam_mask

    # Guided Grad-CAM: element-wise product of the Grad-CAM mask and the guided-backprop map
    cam_gb = np.multiply(cam_mask, guided_backprop_mask)
    return cam_gb


if __name__ == '__main__':
    images_path = 'only_first'
    output_file_path = 'gradcam_output'
    af.create_path(output_file_path)

    # load the model
    target_layers = ['12,0']
    output_id = -1
    save_name = 'final'

    models_path = 'networks/{}'.format(af.get_random_seed())
    sdn_name = 'tinyimagenet_vgg16bn_sdn_ic_only'
    sdn_model, sdn_params = arcs.load_model(models_path, sdn_name, epoch=-1)
    sdn_model.eval()

    # uncomment this line to visualize the first internal classifier
    #output_id = 0; save_name = 'first'; sdn_model, sdn_params = af.sdn_prune(models_path, sdn_name, prune_after_output=output_id); target_layers = ['1,0']
    
    converted_cnn, converted_cnn_params = af.sdn_to_cnn(
        None, None, epoch=-1, preloaded=(sdn_model, sdn_params))

    for file_id, file_name in enumerate(sorted(os.listdir(images_path))):
        input_img_path = os.path.join(images_path, file_name)
        print('Image: {}'.format(input_img_path))

        img = cv2.imread(input_img_path, 1)
        img = np.float32(img) / 255
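The excerpt stops mid-preprocessing; a sketch of the usual continuation in Grad-CAM style pipelines (illustrative values and a hypothetical helper name, not the project's exact code) converts the scaled OpenCV image into a normalized CHW tensor:

import numpy as np
import torch

def to_input_tensor(img):
    # img: float32 HxWx3 BGR image already scaled to [0, 1] (as in the loop above)
    img = img[:, :, ::-1]            # BGR (OpenCV) -> RGB
    img = (img - 0.5) / 0.5          # illustrative normalization; real mean/std depend on the dataset
    chw = img.transpose(2, 0, 1).copy()
    return torch.from_numpy(chw).unsqueeze(0).float()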