def get_custom_activations_dict(filepath=None):
    """
    Import all implemented custom activation functions so they can be used
    when loading a Keras model.

    Parameters
    ----------

    filepath : Optional[str]
        Path to json file containing additional custom objects.
    """

    import keras
    from snntoolbox.utils.utils import binary_sigmoid, binary_tanh, \
        ClampedReLU

    # Todo: We should be able to load a different activation for each layer.
    # Need to remove this hack:
    activation_str = 'relu_Q1.4'
    activation = get_quantized_activation_function_from_string(activation_str)

    # ``precision`` and ``get_quantized_activation_function_from_string`` are
    # expected to be defined elsewhere in this module.
    return {'binary_sigmoid': binary_sigmoid,
            'binary_tanh': binary_tanh,
            # Todo: This should work regardless of the specific attributes of
            # the ClampedReLU class used during training.
            'clamped_relu': ClampedReLU(),
            activation_str: activation,
            'precision': precision,
            'activity_regularizer': keras.regularizers.l1}
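# Usage sketch (illustrative, not part of the original module): the returned
# dictionary is meant for the ``custom_objects`` argument of
# ``keras.models.load_model``; the model path below is a placeholder.
def _demo_load_with_custom_objects():
    from keras.models import load_model

    custom_objects = get_custom_activations_dict()
    # ``custom_objects`` maps the serialized names back to the callables.
    return load_model('path/to/model.h5', custom_objects=custom_objects)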
def get_clamped_relu_from_string(activation_str):
    """Construct a ``ClampedReLU`` from a string such as
    ``'clamped_relu_0.1_1.0'``, where the last two underscore-separated
    fields are the threshold and the max value."""

    from snntoolbox.utils.utils import ClampedReLU

    # Parse the trailing two fields. ``float`` is safer than ``eval`` here.
    threshold, max_value = map(float, activation_str.split('_')[-2:])

    return ClampedReLU(threshold, max_value)
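# Usage sketch (illustrative): reconstructing the ``custom_objects`` entry
# that the experiment scripts below pass to ``load_model``. The model path
# is a placeholder.
def _demo_load_clamped_relu_model(model_path):
    from keras.models import load_model

    activation_str = 'clamped_relu_0.1_1.0'
    nonlinearity = get_clamped_relu_from_string(activation_str)
    return load_model(model_path, {activation_str: nonlinearity},
                      compile=False)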
def get_custom_activations_dict():
    """
    Import all implemented custom activation functions so they can be used
    when loading a Keras model.
    """

    from snntoolbox.utils.utils import binary_sigmoid, binary_tanh, ClampedReLU

    # Todo: We should be able to load a different activation for each layer.
    # Need to remove this hack:
    activation_str = 'relu_Q1.4'
    activation = get_quantized_activation_function_from_string(activation_str)

    return {'binary_sigmoid': binary_sigmoid,
            'binary_tanh': binary_tanh,
            # Todo: This should work regardless of the specific attributes
            # of the ClampedReLU class used during training.
            'clamped_relu': ClampedReLU(),
            activation_str: activation}
def get_custom_activations_dict(filepath=None):
    """
    Import all implemented custom activation functions so they can be used
    when loading a Keras model.

    Parameters
    ----------

    filepath : Optional[str]
        Path to json file containing additional custom objects.
    """

    import json
    import keras
    import keras_metrics as km
    from snntoolbox.utils.utils import binary_sigmoid, binary_tanh, \
        ClampedReLU, LimitedReLU, NoisySoftplus

    # Todo: We should be able to load a different activation for each layer.
    # Need to remove this hack:
    activation_str = 'relu_Q1.4'
    activation = get_quantized_activation_function_from_string(activation_str)

    custom_objects = {
        'binary_sigmoid': binary_sigmoid,
        'binary_tanh': binary_tanh,
        # Todo: This should work regardless of the specific attributes of the
        # ClampedReLU class used during training.
        'clamped_relu': ClampedReLU(),
        'LimitedReLU': LimitedReLU,
        'relu6': LimitedReLU({'max_value': 6}),
        activation_str: activation,
        'Noisy_Softplus': NoisySoftplus,
        'precision': precision,
        'binary_precision': km.binary_precision(label=0),
        'binary_recall': km.binary_recall(label=0),
        'activity_regularizer': keras.regularizers.l1}

    if filepath is not None and filepath != '':
        with open(filepath) as f:
            kwargs = json.load(f)

        # Additional LimitedReLU variants may be defined in the json file as
        # a mapping from object name to constructor kwargs.
        for key in kwargs:
            if 'LimitedReLU' in key:
                custom_objects[key] = LimitedReLU(kwargs[key])

    return custom_objects
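# Usage sketch (illustrative): the json file passed via ``filepath`` maps an
# object name to constructor kwargs; only keys containing 'LimitedReLU' are
# picked up, mirroring the 'relu6' entry above. The file name and kwargs
# below are placeholders.
def _demo_custom_objects_from_json():
    import json

    with open('custom_objects.json', 'w') as f:
        json.dump({'LimitedReLU_2': {'max_value': 2}}, f)

    custom_objects = get_custom_activations_dict('custom_objects.json')
    return custom_objects['LimitedReLU_2']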
#     model_list[-2]), compile=False)

# # Experiment 3
# label = 'relu_0.1_1_bias_regularizer5'
# nonlinearity = ClampedReLU(0.1, 1.0)
# bias_regularizer = [l2(0.05), l2(0.9), l2(0.05), l2(0.5), l2(0.5), l2(0.5),
#                     l2(0.5), l2(0.01), l2(0.01)]
# model_list = os.listdir(os.path.join(tensorboard_path, 'relu_0.1_1'))
# print("Initializing with model {}.".format(model_list[-2]))
# model_init = load_model(os.path.join(
#     tensorboard_path, 'relu_0.1_1', model_list[-2]),
#     {'clamped_relu_0.1_1.0': nonlinearity}, False)

# Experiment 4
label = 'relu_0.1_1_bias_regularizer7'
nonlinearity = ClampedReLU(0.1, 1.0)
bias_regularizer = [l2(0.01), l2(2.0), l2(0.05), l2(3.0), l2(0.5), l2(1.0),
                    l2(1.0), l2(0.001), l2(0.01)]
model_list = os.listdir(os.path.join(tensorboard_path,
                                     'relu_0.1_1_bias_regularizer6'))
print("Initializing with model {}.".format(model_list[-2]))
model_init = load_model(os.path.join(
    tensorboard_path, 'relu_0.1_1_bias_regularizer6', model_list[-2]),
    {'clamped_relu_0.1_1.0': nonlinearity}, False)

model = Sequential()
model.add(Conv2D(128, (3, 3), padding='same', activation=nonlinearity,
                 input_shape=(3, 32, 32),
                 bias_regularizer=bias_regularizer[0]))
model.add(Conv2D(128, (3, 3), padding='same', activation=nonlinearity,
                 bias_regularizer=bias_regularizer[1]))
import os

from keras.preprocessing.image import ImageDataGenerator
from keras.callbacks import ModelCheckpoint, TensorBoard
from keras.optimizers import adam
from keras.regularizers import l2
from snntoolbox.utils.utils import ClampedReLU

path = '/home/rbodo/.snntoolbox/data/cifar10/binaryconnect'
dataset_path = '/home/rbodo/.snntoolbox/Datasets/cifar10/pylearn2_gcn_whitened'
tensorboard_path = os.path.join(path, 'training')

batch_size = 64
nb_epoch = 50

# Experiment 1
label = 'relu_schedule_bias_reg'
# One ClampedReLU per layer, with threshold 1 / (l + 1) and max value 1 / l
# for layer index l = 1..8.
nonlinearity = [ClampedReLU(1 / (l + 1), 1 / l) for l in range(1, 9)]
bias_regularizer = [l2(0.05), l2(0.9), l2(0.05), l2(0.5), l2(0.5), l2(0.5),
                    l2(0.5), l2(0.01), l2(0.01)]
model_list = os.listdir(
    os.path.join(tensorboard_path, 'relu_0.1_1_bias_regularizer3'))
print("Initializing with model {}.".format(model_list[-2]))
print(dict({(n.__name__, n) for n in nonlinearity}))
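# Sketch (illustrative, assuming ClampedReLU names itself
# 'clamped_relu_<threshold>_<max_value>', matching the keys used in the
# experiments above, e.g. 'clamped_relu_0.5_1.0' for layer 1): the dict
# printed above is exactly the ``custom_objects`` mapping needed to reload a
# checkpoint trained with this layer-wise schedule.
def _demo_load_with_schedule(model_path):
    from keras.models import load_model

    custom_objects = {n.__name__: n for n in nonlinearity}
    return load_model(model_path, custom_objects, compile=False)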