def batch_and_learn(i, lock=threading.Lock()):
    """Thread target for the learning process.

    Repeatedly pulls a batch from the actor queues and runs a learner step
    until the global frame budget (`flags.total_frames`) is exhausted.

    NOTE: the mutable default `lock=threading.Lock()` is intentional here —
    the single default Lock instance is shared by every learner thread, so
    the logging/counter update below is serialized across threads.

    `frames` and `stats` are `nonlocal`, i.e. shared state owned by the
    enclosing training function (not visible in this chunk); `prof`,
    `plogger`, `log`, `T`, `B`, `stat_keys`, the models, optimizer and
    scheduler also come from that enclosing scope.
    """
    nonlocal frames, stats
    timings = prof.Timings()
    while frames < flags.total_frames:
        timings.reset()
        batch, agent_state = get_batch(free_queue, full_queue, buffers, initial_agent_state_buffers, flags, timings)
        stats = learn(model, learner_model, batch, agent_state, optimizer, scheduler, flags, position_count=position_count, action_hist=action_hist)
        timings.time('learn')
        with lock:
            # Serialize the shared-counter update and the periodic log record.
            to_log = dict(frames=frames)
            to_log.update({k: stats[k] for k in stat_keys})
            plogger.log(to_log)
            # T * B frames are consumed per learner step (unroll length x batch).
            frames += T * B
    # Only the first learner thread reports timing statistics.
    if i == 0:
        log.info('Batch and learn: %s', timings.summary())
def predict_by_batch(model, test_S1, test_S2, test_MS, test_Pan, test_y, batch_size, sensor):
    """Run the model over the test set batch by batch and collect predictions.

    The active sensor combination selects which input arrays are sliced and
    how the model output tuple is unpacked. Returns the concatenated argmax
    class predictions and the wall-clock time spent predicting.
    """
    iteration = get_iteration(test_y, batch_size)
    print(f'Test batchs: {iteration}')
    start = time.time()
    pred = []

    def _collect(sources, forward):
        # Slice each source array into the current batch, run the model's
        # forward pass, and accumulate per-batch argmax predictions.
        for b in range(iteration):
            inputs = [get_batch(src, b, batch_size) for src in sources]
            scores = forward(inputs)
            del inputs
            pred.append(tf.argmax(scores, axis=1))

    # Each branch: which arrays feed the model, and where the logits sit in
    # the model's returned tuple (multi-sensor models return auxiliary heads).
    if len(sensor) == 3:
        _collect([test_S1, test_S2, test_MS, test_Pan],
                 lambda x: model(*x, is_training=False)[3])
    elif len(sensor) == 2 and 's1' in sensor and 's2' in sensor:
        _collect([test_S1, test_S2],
                 lambda x: model(*x, is_training=False)[2])
    elif len(sensor) == 2 and 's2' in sensor and 'spot' in sensor:
        _collect([test_S2, test_MS, test_Pan],
                 lambda x: model(*x, is_training=False)[2])
    elif len(sensor) == 1 and 's1' in sensor:
        _collect([test_S1],
                 lambda x: model(*x, is_training=False))
    elif len(sensor) == 1 and 's2' in sensor:
        _collect([test_S2],
                 lambda x: model(*x, is_training=False))
    elif len(sensor) == 1 and 'spot' in sensor:
        _collect([test_MS, test_Pan],
                 lambda x: model(*x, is_training=False))

    stop = time.time()
    elapsed = stop - start
    return np.hstack(pred), elapsed
def getEmbedding(model, test_S1, test_S2, test_MS, test_Pan, test_y, batch_size, checkpoint_path, embedding_path, sensor):
    """Restore the best checkpoint and dump test-set embeddings to disk.

    Loads the weights at `checkpoint_path`, computes the embedding of every
    test batch for the active sensor combination, stacks them, and saves the
    result to `embedding_path` via `np.save`.
    """
    model.load_weights(checkpoint_path)
    print('Weights loaded')
    iteration = get_iteration(test_y, batch_size)
    print(f'Test batchs: {iteration}')
    embedding = []

    def _collect(sources):
        # Slice each source into the current batch and accumulate embeddings.
        for b in range(iteration):
            inputs = [get_batch(src, b, batch_size) for src in sources]
            embedding.append(model.getEmbedding(*inputs))
            del inputs

    # The sensor combination determines which input arrays the model expects.
    if len(sensor) == 3:
        _collect([test_S1, test_S2, test_MS, test_Pan])
    elif len(sensor) == 2 and 's1' in sensor and 's2' in sensor:
        _collect([test_S1, test_S2])
    elif len(sensor) == 2 and 's2' in sensor and 'spot' in sensor:
        _collect([test_S2, test_MS, test_Pan])
    elif len(sensor) == 1 and 's1' in sensor:
        _collect([test_S1])
    elif len(sensor) == 1 and 's2' in sensor:
        _collect([test_S2])
    elif len(sensor) == 1 and 'spot' in sensor:
        _collect([test_MS, test_Pan])

    np.save(embedding_path, np.vstack(embedding))
def run(model, train_S1, train_S2, train_MS, train_Pan, train_y, valid_S1, valid_S2, valid_MS, valid_Pan, valid_y, checkpoint_path, batch_size, lr, n_epochs, sensor, weight):
    """Main function for training models.

    Trains `model` for `n_epochs`, validating after each epoch and letting
    `train_info` checkpoint/track the best validation accuracy.

    The original implementation duplicated the whole epoch loop six times,
    once per sensor combination; here the combination only selects which of
    the four input slots (s1, s2, ms, pan) are active — inactive slots are
    passed to `train_step` as None, exactly as before. Behavior, call order
    and printed output are unchanged; unsupported sensor combinations still
    do nothing, as in the original.
    """
    loss_function = tf.keras.losses.SparseCategoricalCrossentropy()
    optimizer = tf.keras.optimizers.Adam(learning_rate=lr)
    train_loss = tf.keras.metrics.Mean(name='train_loss')
    train_acc = tf.keras.metrics.Accuracy(name='train_acc')
    valid_loss = tf.keras.metrics.Mean(name='valid_loss')
    valid_acc = tf.keras.metrics.Accuracy(name='valid_acc')
    best_acc = float("-inf")
    train_iter = get_iteration(train_y, batch_size)
    print(f'Training batchs: {train_iter}')
    valid_iter = get_iteration(valid_y, batch_size)
    print(f'Validation batchs: {valid_iter}')

    # Map the sensor combination to the active input slots.
    # Slot order matches train_step's positional inputs: (s1, s2, ms, pan).
    if len(sensor) == 3:
        active = (0, 1, 2, 3)
    elif len(sensor) == 2 and 's1' in sensor and 's2' in sensor:
        active = (0, 1)
    elif len(sensor) == 2 and 's2' in sensor and 'spot' in sensor:
        active = (1, 2, 3)
    elif len(sensor) == 1 and 's1' in sensor:
        active = (0,)
    elif len(sensor) == 1 and 's2' in sensor:
        active = (1,)
    elif len(sensor) == 1 and 'spot' in sensor:
        active = (2, 3)
    else:
        # Original code fell through all branches silently for unsupported
        # combinations (e.g. s1+spot); preserve that no-op behavior.
        return

    train_inputs = [train_S1, train_S2, train_MS, train_Pan]
    valid_inputs = [valid_S1, valid_S2, valid_MS, valid_Pan]

    def _batch_slots(inputs, b):
        # One entry per slot: the current batch slice for active slots,
        # None for inactive ones (as the original branches passed explicitly).
        return [get_batch(inputs[i], b, batch_size) if i in active else None
                for i in range(4)]

    for epoch in range(n_epochs):
        start = time.time()
        # NOTE(review): random_state=0 reproduces the *same* permutation every
        # epoch, which defeats the purpose of reshuffling; kept as-is because
        # changing it alters training reproducibility for existing runs.
        shuffled = shuffle(*[train_inputs[i] for i in active], train_y,
                           random_state=0)
        for i, arr in zip(active, shuffled[:-1]):
            train_inputs[i] = arr
        train_y = shuffled[-1]

        for b in range(train_iter):
            slots = _batch_slots(train_inputs, b)
            batch_y = get_batch(train_y, b, batch_size)
            train_step(model, slots[0], slots[1], slots[2], slots[3], batch_y,
                       loss_function, optimizer, train_loss, train_acc,
                       sensor, weight, is_training=True)
            del slots, batch_y

        pred = []
        for b in range(valid_iter):
            slots = _batch_slots(valid_inputs, b)
            batch_y = get_batch(valid_y, b, batch_size)
            batch_pred = train_step(model, slots[0], slots[1], slots[2], slots[3],
                                    batch_y, loss_function, optimizer,
                                    valid_loss, valid_acc, sensor, weight,
                                    is_training=False)
            del slots, batch_y
            pred.append(batch_pred)
        pred = np.hstack(pred)

        elapsed = time.time() - start
        best_acc = train_info(model, checkpoint_path, epoch, train_loss,
                              train_acc, valid_loss, valid_acc, elapsed,
                              best_acc, valid_y, pred)