def random_conv_layer_plus(size, num, prob=0.8):
    """Spec node for a conv layer that randomly factorizes its kernel.

    Merges a deferred uniform draw with ``size`` and ``num``: when the
    draw exceeds ``prob`` the node resolves to the separable pair of
    kernels [size, 1, num] + [1, size, num]; otherwise it resolves to a
    single square [size, size, num] kernel.  (Assuming spec.uniform()
    samples [0, 1), the separable form occurs with probability
    ~(1 - prob) — verify against the spec library.)
    """
    def _pick_shape(switch, size, num):
        # `switch` is the resolved uniform sample; factorize past the threshold.
        separable = [[size, 1, num], [1, size, num]]
        square = [[size, size, num]]
        return separable if switch > prob else square

    return spec.merge([spec.uniform(), size, num], _pick_shape)
def uniform_snap(start, end):
    """Uniform spec node over [start, end] whose resolved value snaps to
    an endpoint when it falls within a tenth of the range of that end.

    The snap is applied at resolution time by wrapping the underlying
    uniform node with a post-processing callback.
    """
    tolerance = (end - start) / 10.0

    def _snap(value):
        # Pull near-boundary samples exactly onto the boundary;
        # start is checked before end, matching the original ordering.
        for endpoint in (start, end):
            if abs(value - endpoint) < tolerance:
                return endpoint
        return value

    return spec.wrap(spec.uniform(start, end), _snap)
return [[size, size, num]] def random_conv_layer_plus(size, num, prob=0.8): def if_cond(switch, size, num): if switch > prob: return [[size, 1, num], [1, size, num]] return [[size, size, num]] return spec.merge([spec.uniform(), size, num], if_cond) activations = ['relu', 'relu6', 'elu', 'prelu', 'leaky_relu'] hyper_params_spec = { 'init_stdev': spec.uniform(0.04, 0.06), 'augment': { 'crop_size': spec.choice(range(2)), }, 'optimizer': { 'learning_rate': 10**spec.uniform(-2.5, -3.5), 'beta1': 0.9, 'beta2': 0.999, 'epsilon': 1e-8, }, 'conv': { 'layers_num': 3, 1: { 'filters': random_conv_layer(size=spec.choice(range(3, 8)), num=spec.choice(range(24, 41))),
return [[size, 1, num], [1, size, num]] def random_conv_layer_plus(size, num, prob=0.8): def if_cond(switch, size, num): if switch > prob: return [[size, 1, num], [1, size, num]] return [[size, size, num]] return spec.merge([spec.uniform(), size, num], if_cond) activations = ['relu', 'relu6', 'elu', 'prelu', 'leaky_relu'] hyper_params_spec_2_0 = { 'init_stdev': 10**spec.uniform(-1.5, -1), 'optimizer': { 'learning_rate': 10**spec.uniform(-3.2, -3), 'beta1': 0.9, 'beta2': 0.999, 'epsilon': 1e-8, }, 'conv': { 'layers_num': 3, 1: { 'filters': random_conv_layer_plus(size=spec.choice(range(3, 8)), num=spec.choice(range(24, 41))), 'pools': [2, 2], 'activation': spec.choice(activations),