Example #1
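This snippet sets up a noise-to-image regression: 100-dimensional uniform noise is mapped by a two-layer tanh MLP (512 units per layer) onto target points sampled from a greyscale heatmap of an image.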
import numpy as np
import tensorflow as tf
from scipy.misc import imread  # deprecated in modern SciPy; imageio.imread is the usual replacement

# Project-local modules from the surrounding repo (import paths assumed).
import batching_functions
import models
import noise_as_targets
import utils

# Invert the source image and turn it into a square greyscale heatmap.
im = 255 - imread('data/images/ali-cropped.jpg', mode='I')
heatmap = utils.image_to_square_greyscale_array(im)

seed = 1337
np.random.seed(seed)
train_size = 64_000 * 2

# data_points = np.random.normal(size=(train_size, 3))
# # l2 normalize the points
# data_points /= np.linalg.norm(data_points, axis=1, ord=2).reshape((-1, 1))
# Use 100-dimensional uniform noise as the network inputs.
input_noise_fn = lambda size: np.random.uniform(size=(size, 100))  # NOQA
data_points = input_noise_fn(train_size)

# Sample target points from the image heatmap, one per training example.
targets = noise_as_targets.sample_from_heatmap(
    heatmap,
    train_size,
    sampling_method='even',
)

# Random batching; the commented-out line is a locality-aware alternative.
# batching_function = batching_functions.progressive_local_search(targets)
batching_function = batching_functions.random_batching(targets)

config = {
    'dataset_fn': lambda: (data_points, targets),
    'model_fn': lambda input_t, output_size: models.multi_layer_mlp(
        input_t, output_size, hidden_dims=[512, 512], activation_fn=tf.tanh),
    'batch_size': 128,
    # The snippet was cut off at this key; batching_function (defined above)
    # is the natural value.
    'batching_fn': batching_function,
}
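For orientation, here is a minimal sketch of a driver loop that could consume such a config. It is an assumption-laden illustration, not the repo's trainer: the squared-error loss, the Adam optimizer, and the `batching_fn(step, batch_size) -> indices` signature are all guesses; only the config keys and the `model_fn` signature come from the snippet itself.

def train(config, n_steps=10_000):
    data_points, targets = config['dataset_fn']()
    batch_size = config['batch_size']

    input_t = tf.placeholder(tf.float32, [None, data_points.shape[1]])
    target_t = tf.placeholder(tf.float32, [None, targets.shape[1]])
    output_t = config['model_fn'](input_t, targets.shape[1])

    # Plain squared-error regression onto the fixed targets (assumed loss).
    loss_t = tf.reduce_mean(tf.square(output_t - target_t))
    train_op = tf.train.AdamOptimizer().minimize(loss_t)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        for step in range(n_steps):
            # Assumed signature: map a step index to a batch of example indices.
            batch_idx = config['batching_fn'](step, batch_size)
            sess.run(train_op, feed_dict={
                input_t: data_points[batch_idx],
                target_t: targets[batch_idx],
            })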
Example #2
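This variant keeps the heatmap targets but uses flattened MNIST images as inputs, fed through a smaller sigmoid MLP (128 units per layer).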
import numpy as np
import tensorflow as tf
from scipy.misc import imread  # deprecated in modern SciPy
from tensorflow.examples.tutorials.mnist import input_data

# Project-local modules from the surrounding repo (import paths assumed).
import batching_functions
import models
import noise_as_targets
import utils

seed = 1337
np.random.seed(seed)

dataset = input_data.read_data_sets("data/MNIST/",
                                    one_hot=False,
                                    reshape=False)
data_points = np.concatenate(
    [x.images for x in [dataset.train, dataset.validation, dataset.test]])
batch_size = 128
# Flatten and trim to a whole number of batches: 70,000 % 128 == 112, so
# 112 of the 70,000 MNIST images are dropped. The original slice
# `[:-(n % batch_size)]` would yield an empty array when n % batch_size == 0.
data_points = data_points.reshape((len(data_points), -1))
if len(data_points) % batch_size:
    data_points = data_points[:-(len(data_points) % batch_size)]
np.random.shuffle(data_points)

# `heatmap` is not defined in this snippet; assuming it is built as in
# Example #1 above:
im = 255 - imread('data/images/ali-cropped.jpg', mode='I')
heatmap = utils.image_to_square_greyscale_array(im)

# Sample target points from the image heatmap, one per training example.
targets = noise_as_targets.sample_from_heatmap(
    heatmap,
    len(data_points),
    sampling_method='even',
)

batching_function = batching_functions.random_batching(targets)
# batching_function = batching_functions.progressive_local_search(targets)

config = {
    'dataset_fn': lambda: (data_points, targets),
    'model_fn': lambda input_t, output_size: models.multi_layer_mlp(
        input_t, output_size, hidden_dims=[128, 128], activation_fn=tf.sigmoid),
    'batch_size': batch_size,
    # The snippet was cut off after 'batch_size'; as in Example #1, the
    # batching function defined above is the natural next entry.
    'batching_fn': batching_function,
}
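Apart from the inputs, the differences from Example #1 are the smaller hidden layers (128 vs. 512 units) and sigmoid activations in place of tanh; with the hypothetical train() sketch above, either config would be launched the same way, as train(config).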