def test_entered_non_empty_batcher():
    """Appending arrivals twice merges them into one entering range."""
    batcher = DataBatcher()
    batcher.entered(datetime_range(1, 3))
    batcher.entered(datetime_range(4, 6))
    assert batcher.batch().entering == datetime_range(1, 6)
def test_left_non_empty_batcher():
    """Appending departures twice merges them into one leaving range."""
    batcher = DataBatcher()
    batcher.left(datetime_range(1, 3))
    batcher.left(datetime_range(4, 6))
    assert batcher.batch().leaving == datetime_range(1, 6)
def test_entered_batch_entered():
    """Data appended after a batch() call starts a fresh batch."""
    batcher = DataBatcher()
    batcher.entered(datetime_range(1, 3))
    batcher.batch()  # flush the first batch
    batcher.entered(datetime_range(4, 6))
    assert batcher.batch().entering == datetime_range(4, 6)
def test_entered_list_with_non_float():
    """A list containing invalid elements raises and leaves state empty."""
    batcher = DataBatcher()
    with pytest.raises(TypeError):
        batcher.entered([1, 2, 'a'])
    assert not batcher.batch().entering
def test_entered_non_float():
    """A non-list argument raises and leaves state empty."""
    batcher = DataBatcher()
    with pytest.raises(TypeError):
        batcher.entered(100)
    assert not batcher.batch().entering
def test_entered_empty_batcher(sample_timestamps):
    """Appending to an empty DataBatcher keeps the data intact.

    Args:
        sample_timestamps: samples for the correct argument
    """
    batcher = DataBatcher()
    batcher.entered(sample_timestamps)
    assert batcher.batch().entering == sample_timestamps
def __init__(self):
    """Initialize signal handlers, API client and run."""
    # Shutdown flag — presumably flipped by _handle_signals so the main
    # loop can exit cleanly; confirm against _handle_signals.
    self._should_close = False
    self._frame_ingestor = FrameIngestor()
    self._batcher = DataBatcher()

    # Add signal handlers
    # Installed before _start() so SIGINT/SIGTERM during startup are
    # already routed to our handler.
    signal.signal(signal.SIGINT, self._handle_signals)
    signal.signal(signal.SIGTERM, self._handle_signals)

    # Extra configuration
    self._uses_file = False
    # DISPLAY_FRAME env var: any value > 0 enables frame display.
    self._display_frame = read_int_from_env('DISPLAY_FRAME', 0) > 0
    self._max_frame_width = read_int_from_env(
        'MAX_FRAME_WIDTH', DEFAULT_FRAME_WIDTH,
    )

    # Start client
    # NOTE(review): _start() is called from the constructor, so object
    # construction blocks until the client run finishes — confirm this
    # is intentional.
    self._start()
# Training script comparing three classifiers (a configurable Model, a
# logistic-regression and a softmax-regression model) on the same
# pre-normalized batches.
from model import Model
from log_reg_model import LogisticRegressionModel
from soft_reg_model import SoftmaxRegressionModel
from data_batcher import DataBatcher
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
import tensorflow as tf

epochs = 600
batch_size = 20

# Batcher is fed pre-split, pre-normalized train/test .npy arrays.
batcher = DataBatcher("../normalizedData/trainingSetX.npy",
                      "../normalizedData/trainingSetY.npy",
                      "../normalizedData/testSetX.npy",
                      "../normalizedData/testSetY.npy")
# [40] presumably describes one hidden layer of 40 units — confirm
# against Model's constructor.
model = Model([40])
log_model = LogisticRegressionModel()
soft_model = SoftmaxRegressionModel()

# Accumulators for learning curves; their use is outside this chunk.
plot_x = list()
plot_y = list()
plot_y_log = list()
plot_y_soft = list()

# TF1-style graph execution.
with tf.Session() as session:
    session.run(tf.global_variables_initializer())
    epoch_index = 0
    while epoch_index < epochs:
        # All models are trained on the identical batch for a fair
        # comparison.
        samples, labels = batcher.get_batch(batch_size)
        model.train_model(session, samples, labels)
        log_model.train_model(session, samples, labels)
        # NOTE(review): chunk appears truncated here — soft_model
        # training, the epoch_index increment and the plotting code
        # presumably follow outside this view.
# Hyperparameter-sweep fragment. The enclosing loops providing opt_name,
# learning_rates, data_type and version are outside this chunk.
print(
    f'========================= {opt_name} ========================='
)
for learning_rate in learning_rates:
    print(
        f'------------------------- {learning_rate} -------------------------'
    )
    batch_size = 100
    iterations = 2000
    # Collects lists of metric values keyed by metric name — confirm
    # against the (unseen) training code below.
    metrics = defaultdict(list)
    data_path = f'data/processed_data/{data_type}_movement/sliding_window_{version}_sec.p'
    batcher = DataBatcher(data_path)
    length, dimensions, n_labels = batcher.get_info()
    # Sweep over RNN cell count and cell size.
    for nc in (4, 8):
        print(
            f'_________________________ {nc} Cells _________________________'
        )
        for cs in (16, 32):
            print(
                f'......................... {cs} size .........................'
            )
            # Run name encodes the full hyperparameter combination as a
            # slash-separated path.
            name = f'rnn/{opt_name}/{data_type}_movement/{str(learning_rate).replace(".", "-")}/{nc}_cells/{cs}_size/{version}_sec'
            # Metric directory = run name minus its last path segment.
            metric_path = os.path.join(*name.split('/')[:-1])
            # NOTE(review): fragment is cut off here — the body using
            # iterations/metrics continues outside this view.
# VAE training script: trains VAEModel on data found under
# "generated_data", reporting test loss periodically.
import os

# Silence TensorFlow C++ logging; must be set before TF is imported.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

from data_batcher import DataBatcher
from vae_model import VAEModel
from time import time  # NOTE(review): unused in this chunk
import numpy as np
import tensorflow as tf

print("Finding training data...")
batcher = DataBatcher("generated_data")

print("Building model...")
# 50 presumably the latent dimension, [40, 35, 30] the hidden layer
# sizes — confirm against VAEModel's constructor.
model = VAEModel(50, [40, 35, 30])

batch_size = 5000
training_steps = 200000

print("Starting training...")
# TF1-style graph execution.
with tf.Session() as session:
    session.run(tf.global_variables_initializer())
    for i in range(training_steps):
        batch, epoch_complete = batcher.get_batch(batch_size)
        model.train_model(session, inputs=batch)
        # Report test loss at every epoch boundary...
        if epoch_complete:
            test_batch = batcher.get_test_batch()
            loss = model.get_loss(session, inputs=test_batch)
            print("Epoch complete - loss: {}".format(loss))
        # ...and additionally every 500 steps (step 0 included).
        if i % 500 == 0:
            test_batch = batcher.get_test_batch()
            loss = model.get_loss(session, inputs=test_batch)
            print("Step {} - loss: {}".format(i, loss))
# ResNet/CIFAR training script (fragment).
import os

# Silence TensorFlow C++ logging; must be set before TF is imported.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

from resnet import ResNet
from data_batcher import DataBatcher
from time import time
import tensorflow as tf

model = ResNet(1)
batcher = DataBatcher("cifar")
saver = tf.train.Saver()
epochs = 1000
batch_size = 512

with tf.Session() as session:
    print("Beginning training...")
    session.run(tf.global_variables_initializer())
    epoch_index = 0
    # Per-epoch test/train accuracy histories; their use is outside
    # this view.
    accuracy_data = []
    train_accuracy_data = []
    # step_index = 0
    epoch_start_time = time()
    while True:
        if batcher.epoch_finished():
            # Mean test accuracy, evaluated in chunks (50 presumably
            # the per-chunk batch size — confirm against
            # get_test_batches).
            accuracy = 0
            image_batches, label_batches = batcher.get_test_batches(50)
            for i in range(len(image_batches)):
                accuracy += model.get_accuracy(session, image_batches[i], label_batches[i])
            accuracy /= len(image_batches)
            # NOTE(review): chunk is cut off here — the rest of the
            # epoch handling (logging, saving, training step) is
            # outside this view.
def test_empty_batch():
    """A freshly constructed DataBatcher yields an empty batch."""
    batch = DataBatcher().batch()
    assert not batch.entering
    assert not batch.leaving
# Interactive command classifier: restores a trained NLP model from a
# checkpoint and predicts a class label for each typed command.
from data_batcher import DataBatcher
from nlp_model import NLPModel
import tensorflow as tf

batcher = DataBatcher("data/command_data_noisy.json")
# (max_sentence_len, total_words) describes the input shape;
# [3,4,5] are presumably convolution filter sizes — confirm against
# NLPModel's constructor.
model = NLPModel((batcher.max_sentence_len, batcher.total_words), batcher.total_classes, [3,4,5])
saver = tf.train.Saver()
with tf.Session() as session:
    session.run(tf.global_variables_initializer())
    ckpt = tf.train.get_checkpoint_state("checkpoints/")
    if ckpt and ckpt.model_checkpoint_path:
        saver.restore(session, ckpt.model_checkpoint_path)
    else:
        # Continues with freshly initialized (untrained) weights.
        print("Failed loading NLP model!")
    print(batcher.key_map)
    # REPL: read a command, preprocess it, predict, print the label.
    # Exits only via EOF/KeyboardInterrupt.
    while True:
        command = input("› ")
        command_processed = batcher.preprocess_string(command)
        # Model expects a batch, hence the single-element list.
        tensor = [batcher.sentence_to_tensor(command_processed)]
        prediction = model.predict(session, tensor)
        print("\tPredicted %s" % batcher.index_key_map[prediction])