Example #1
def __iter__(self):
    worker_info = torch.utils.data.get_worker_info()
    if worker_info is None:  # single-process data loading, return the full iterator
        seed = self.base_seed
    else:  # in a worker process
        # split workload
        worker_id = worker_info.id
        seed = self.base_seed + worker_id
    for delta, prev, stop in generate_inf_cases(
            True, seed, return_one_but_last=True):
        yield (process_board(prev), process_board(stop))
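For context, a minimal sketch (not part of the original) of how an __iter__ like the one above typically sits inside a torch.utils.data.IterableDataset subclass and gets consumed by a multi-worker DataLoader. The class name is hypothetical, and generate_inf_cases / process_board are assumed to be the project's own helpers:

import torch
from torch.utils.data import IterableDataset, DataLoader

class InfiniteBoardDataset(IterableDataset):  # hypothetical name, for illustration
    """Streams (previous_board, stop_board) pairs forever, one seed per worker."""

    def __init__(self, base_seed):
        self.base_seed = base_seed

    def __iter__(self):
        worker_info = torch.utils.data.get_worker_info()
        # Offsetting the seed by worker_info.id keeps each worker's stream disjoint.
        seed = self.base_seed if worker_info is None else self.base_seed + worker_info.id
        for delta, prev, stop in generate_inf_cases(True, seed, return_one_but_last=True):
            yield (process_board(prev), process_board(stop))

# loader = DataLoader(InfiniteBoardDataset(base_seed=9342184), batch_size=64, num_workers=4)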
Example #2
def train_loop(model_name, learner, early_stop_window=100, rseed=9342184):
    errors = []
    latencies = []
    best_mean_err = 1.0
    best_i = -1
    for i, (delta, start, stop) in enumerate(generate_inf_cases(True, rseed)):
        tic = time.perf_counter()
        A = learner.predict(delta, stop)
        toc = time.perf_counter()

        err = 1 - score(delta, A, stop)
        errors.append(err)

        latency = toc - tic
        latencies.append(latency)

        mean_err = np.mean(errors)
        mean_latency = np.mean(latencies)

        print(
            f'Error: mean {mean_err}, cur {err}; latency: mean {mean_latency:0.4f}s, cur {latency:0.4f}s; delta {delta}, density: {np.mean(stop)}'
        )

        if mean_err < best_mean_err:
            best_mean_err = mean_err
            best_i = i
            file_path = f'{model_name}_{i:05}'
            print(f'    Best model - saving {file_path}...')
            learner.save_model(file_path)
        elif i - best_i > early_stop_window:
            print(
                f"Haven't found a better model for more than {early_stop_window} iterations - terminating early."
            )
            print(f"Best iteration: {best_i}, mean error: {best_mean_err}")
            break

        learner.train(delta, start, stop)
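As a usage note (an assumption, not taken from the original): train_loop only requires a learner object exposing predict(delta, stop), train(delta, start, stop) and save_model(file_path). A trivial random baseline illustrating that interface:

import numpy as np

class RandomLearner:  # hypothetical baseline, for illustration only
    def predict(self, delta, stop):
        # Guess a random start board with the same shape as the stop board.
        return np.random.rand(*np.shape(stop)) > 0.5

    def train(self, delta, start, stop):
        pass  # a real learner would update its parameters from this case

    def save_model(self, file_path):
        pass  # a real learner would serialize its weights to file_path

# train_loop('random_baseline', RandomLearner())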
Example #3
def cnnify_batch(batches):
    return (np.expand_dims(batch, 1) for batch in batches)

val_set = bitmap.generate_test_set(set_size=100, seed=9568382)
deltas_val, stops_val = cnnify_batch(zip(*val_set))
ones_val = np.ones_like(deltas_val)

multi_step_errors = []
one_step_errors = []
best_multi_step_error = 1.0
best_multi_step_idx = -1
best_one_step_error = 1.0
best_one_step_idx = -1

for i, batch in tqdm(enumerate(
        grouper(bitmap.generate_inf_cases(True, 432341, return_one_but_last=True),
                2048))):
    deltas, one_but_lasts, stops = zip(*batch)

    deltas_batch = np.expand_dims(deltas, 1)
    one_but_lasts_batch = torch.Tensor(np.expand_dims(one_but_lasts, 1))
    stops_batch = torch.Tensor(np.expand_dims(stops, 1))

    if i % 10 == 0:
        multi_step_pred_batch = predict(deltas_val, stops_val)
        multi_step_mean_err = 1 - np.mean(scoring.score_batch(deltas_val, np.array(multi_step_pred_batch, dtype=bool), stops_val))

        one_step_pred_batch = net(torch.Tensor(stops_val)) > 0.5
        one_step_mean_err = 1 - np.mean(scoring.score_batch(ones_val, np.array(one_step_pred_batch, dtype=bool), stops_val))
        print(f'Mean error: multi-step {multi_step_mean_err}, one step {one_step_mean_err}')

        #if multi_step_mean_err < best_multi_step_error:
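The loop above assumes grouper is a chunking helper in the spirit of the itertools recipes; one possible sketch (an assumption, since the actual implementation is not shown here) that slices the infinite case generator into batches of 2048:

from itertools import islice

def grouper(iterable, n):
    # Yield successive lists of up to n items from a (possibly infinite) iterable.
    it = iter(iterable)
    while True:
        chunk = list(islice(it, n))
        if not chunk:
            return
        yield chunk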
Example #4

val_set = bitmap.generate_test_set(set_size=10000, seed=9568382)
deltas_val, stops_val = cnnify_batch(zip(*val_set))
ones_val = np.ones_like(deltas_val)

multi_step_errors = []
one_step_errors = []
best_multi_step_error = 1.0
best_multi_step_idx = -1
best_one_step_error = 1.0
best_one_step_idx = -1

for i, batch in enumerate(
        grouper(
            bitmap.generate_inf_cases(True, 432341, return_one_but_last=True),
            2048)):
    deltas, one_but_lasts, stops = zip(*batch)

    deltas_batch = np.expand_dims(deltas, -1)
    one_but_lasts_batch = np.expand_dims(one_but_lasts, -1)
    stops_batch = np.expand_dims(stops, -1)

    if i % 10 == 0:
        multi_step_pred_batch = predict(deltas_val, stops_val)
        multi_step_mean_err = 1 - np.mean(
            scoring.score_batch(deltas_val, multi_step_pred_batch, stops_val))

        one_step_pred_batch = model.predict(stops_val) > 0.5
        one_step_mean_err = 1 - np.mean(
            scoring.score_batch(ones_val, one_step_pred_batch, stops_val))