Example #1
            "global_batch_size": det.Constant(value=32),
            "dense1": det.Constant(value=128),
        },
        "searcher": {"name": "single", "metric": "val_accuracy", "max_steps": 40},
    }
    config.update(json.loads(args.config))

    context = init(config, local=args.local, test=args.test, context_dir=str(pathlib.Path.cwd()))

    train_images, train_labels = data.load_training_data()
    train_images = train_images / 255.0
    train_data = _ArrayLikeAdapter(
        x=train_images, y=train_labels, batch_size=context.get_per_slot_batch_size()
    )

    test_images, test_labels = data.load_validation_data()
    test_images = test_images / 255.0
    test_data = _ArrayLikeAdapter(
        x=test_images, y=test_labels, batch_size=context.get_per_slot_batch_size()
    )

    model = build_model(context)

    if args.use_fit:
        model.fit(
            x=train_images,
            y=train_labels,
            batch_size=context.get_per_slot_batch_size(),
            validation_data=test_data,
            use_multiprocessing=False,
            workers=1,
        )
    else:
        # Assumption: the truncated else-branch feeds the Sequence adapter
        # built above (train_data), which is otherwise unused.
        model.fit(
            train_data,
            validation_data=test_data,
            use_multiprocessing=False,
            workers=1,
        )
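
Example #1 calls build_model(context), which the snippet does not show. A minimal sketch, assuming the Determined Native API for tf.keras: get_hparam and wrap_model are real context methods, but the layer layout, the (28, 28) input shape, and the compile settings are assumptions, not the original code.

import tensorflow as tf


def build_model(context):
    # Width of the hidden layer comes from the "dense1" hyperparameter
    # defined in the config above.
    model = tf.keras.Sequential([
        tf.keras.layers.Flatten(input_shape=(28, 28)),  # assumed input shape
        tf.keras.layers.Dense(context.get_hparam("dense1"), activation="relu"),
        tf.keras.layers.Dense(10, activation="softmax"),
    ])
    # Determined wraps the model so it can manage distributed training.
    model = context.wrap_model(model)
    model.compile(
        optimizer="adam",
        loss="sparse_categorical_crossentropy",
        metrics=["accuracy"],
    )
    return model
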
Example #2
    def build_validation_data_loader(self) -> InputData:
        test_images, test_labels = data.load_validation_data()
        test_images = test_images / 255.0

        return test_images, test_labels
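
The validation loader above usually has a training counterpart in the same trial class. A minimal sketch, assuming the data module from Example #1 (load_training_data and the /255.0 normalization are taken from there; the method name follows the usual Determined convention):

    def build_training_data_loader(self) -> InputData:
        train_images, train_labels = data.load_training_data()
        train_images = train_images / 255.0

        return train_images, train_labels
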
Example #3
import numpy as np
import pickle

from evaluation import load_model, process_data, run
from data import load_validation_data, pair_superset


# Load the trained model and the validation data
model = load_model()

(x_val, y_val) = load_validation_data()

# Accumulate predictions for each evaluated pair
predictions = []

# create test positive and negative pairs
te_pairs, te_y = pair_superset(x_val, y_val)


# Evaluate the model on the first 1000 pairs
for i, (left, right) in enumerate(te_pairs[:1000]):
    print(f'Running Test {i}')

    # Renamed from "data" to avoid clashing with the data module imported above
    pair_input = process_data(left, right)
    # print(f'pair_input shape : {pair_input.shape}')

    result = run(pair_input, model)
    predictions.append(result)
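
pickle and numpy are imported above but never used in the snippet, so the collected predictions are presumably post-processed afterwards. A minimal sketch of that step: the output filename, the 0.5 threshold, and the assumption that run() returns one similarity score per pair are guesses, not the original code.

# Persist the raw predictions for later analysis (filename is an assumption).
with open('predictions.pkl', 'wb') as f:
    pickle.dump(predictions, f)

# Rough pairwise accuracy against the pair labels, assuming run() returns a
# scalar score and that scores above 0.5 mean "same class".
scores = np.asarray(predictions).ravel()
accuracy = np.mean((scores > 0.5) == te_y[:len(scores)])
print(f'Pairwise accuracy: {accuracy:.4f}')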