def test_predict():
    """PredictDataStream must save each requested variable's predictions,
    keyed by variable name, to an ``.npz`` file at ``path``.
    """
    tempfile_path = os.path.join(gettempdir(), 'test_predict.npz')

    # Set up a mock data stream: four one-element examples, batches of two.
    source = [[1], [2], [3], [4]]
    dataset = IndexableDataset(OrderedDict([('input', source)]))
    scheme = SequentialScheme(dataset.num_examples, batch_size=2)
    data_stream = DataStream(dataset, iteration_scheme=scheme)

    # Simulate a small "network" that increments the input by 1.
    input_tensor = tensor.matrix('input')
    output_tensor = input_tensor + 1
    output_tensor.name = 'output_tensor'

    main_loop = MockMainLoop(extensions=[
        PredictDataStream(data_stream=data_stream,
                          variables=[output_tensor],
                          path=tempfile_path,
                          after_training=True),
        FinishAfter(after_n_epochs=1)
    ])
    try:
        main_loop.run()

        # Assert the resulting prediction was saved and matches input + 1.
        prediction = numpy.load(tempfile_path)
        assert numpy.all(
            prediction[output_tensor.name] == numpy.array(source) + 1)
    finally:
        # Cleanup now runs even when the assertion above fails.  Only
        # file-system errors are suppressed — the original bare
        # ``except: pass`` silently swallowed *every* exception.
        try:
            os.remove(tempfile_path)
        except OSError:
            pass
def test_save_the_best():
    """A Checkpoint conditioned on TrackTheBest's notification must write
    the extra "best" checkpoint (and its separately pickled log) exactly
    when the tracked cost improves — here, at iteration 5.
    """
    with NamedTemporaryFile(dir=config.temp_dir) as checkpoint_file, \
            NamedTemporaryFile(dir=config.temp_dir) as best_file:
        cost_tracker = TrackTheBest("cost", after_epoch=False,
                                    after_batch=True)
        saver = Checkpoint(
            checkpoint_file.name, after_batch=True,
            save_separately=['log']
        ).add_condition(["after_batch"],
                        OnLogRecord(cost_tracker.notification_name),
                        (best_file.name,))
        main_loop = MockMainLoop(extensions=[
            FinishAfter(after_n_epochs=1),
            WriteCostExtension(),
            cost_tracker,
            saver,
        ])
        main_loop.run()

        # Iterations 4 and 5 also wrote the "best" file; iteration 6 did not.
        assert main_loop.log[4]['saved_to'] == (checkpoint_file.name,
                                                best_file.name)
        assert main_loop.log[5]['saved_to'] == (checkpoint_file.name,
                                                best_file.name)
        assert main_loop.log[6]['saved_to'] == (checkpoint_file.name,)

        # The "best" checkpoint holds the state after iteration 5 ...
        with open(best_file.name, 'rb') as src:
            assert load(src).log.status['iterations_done'] == 5
        # ... and so does the log pickled separately alongside it.
        root, ext = os.path.splitext(best_file.name)
        log_path = root + "_log" + ext
        with open(log_path, 'rb') as src:
            assert cPickle.load(src).status['iterations_done'] == 5
def test_save_the_best():
    """Best-so-far checkpointing: the conditioned Checkpoint must write the
    "best" file (plus its separately saved log) only on improving batches.
    """
    skip_if_configuration_set("log_backend", "sqlite",
                              "Known to be flaky with SQLite log backend.")
    with NamedTemporaryFile(dir=config.temp_dir) as dst, \
            NamedTemporaryFile(dir=config.temp_dir) as dst_best:
        tracker = TrackTheBest("cost", after_epoch=False, after_batch=True)
        checkpoint = Checkpoint(dst.name, after_batch=True,
                                save_separately=["log"])
        checkpoint = checkpoint.add_condition(
            ["after_batch"],
            OnLogRecord(tracker.notification_name),
            (dst_best.name,))

        extensions = [FinishAfter(after_n_epochs=1),
                      WriteCostExtension(),
                      tracker,
                      checkpoint]
        main_loop = MockMainLoop(extensions=extensions)
        main_loop.run()

        # Improving batches save to both paths; a non-improving one only
        # to the main checkpoint.
        for iteration in (4, 5):
            assert main_loop.log[iteration]["saved_to"] == (dst.name,
                                                            dst_best.name)
        assert main_loop.log[6]["saved_to"] == (dst.name,)

        # Both the best checkpoint and its side-saved log reflect
        # the state at iteration 5.
        with open(dst_best.name, "rb") as src:
            assert load(src).log.status["iterations_done"] == 5
        root, ext = os.path.splitext(dst_best.name)
        log_path = root + "_log" + ext
        with open(log_path, "rb") as src:
            assert cPickle.load(src).status["iterations_done"] == 5
def test_save_the_best():
    """Check that a TrackTheBest-conditioned Checkpoint saves the extra
    "best" checkpoint and its separate log only when the cost improves.
    """
    skip_if_configuration_set('log_backend', 'sqlite',
                              "Known to be flaky with SQLite log backend.")
    with NamedTemporaryFile(dir=config.temp_dir) as primary, \
            NamedTemporaryFile(dir=config.temp_dir) as best:
        best_tracker = TrackTheBest("cost", after_epoch=False,
                                    after_batch=True)
        conditioned_saver = Checkpoint(
            primary.name,
            after_batch=True,
            save_separately=['log'],
        ).add_condition(["after_batch"],
                        OnLogRecord(best_tracker.notification_name),
                        (best.name,))

        loop = MockMainLoop(extensions=[FinishAfter(after_n_epochs=1),
                                        WriteCostExtension(),
                                        best_tracker,
                                        conditioned_saver])
        loop.run()

        both_paths = (primary.name, best.name)
        assert loop.log[4]['saved_to'] == both_paths
        assert loop.log[5]['saved_to'] == both_paths
        # Iteration 6 did not trigger the best-checkpoint condition.
        assert loop.log[6]['saved_to'] == (primary.name,)

        # The last improvement happened at iteration 5: both the pickled
        # main loop and the separately pickled log agree.
        with open(best.name, 'rb') as src:
            assert load(src).log.status['iterations_done'] == 5
        root, ext = os.path.splitext(best.name)
        with open(root + "_log" + ext, 'rb') as src:
            assert cPickle.load(src).status['iterations_done'] == 5
def test_plot():
    """Smoke test: Plot consumes a log channel written after every batch."""
    class Writer(SimpleExtension):
        # Record iterations_done squared under 'channel' each batch.
        def do(self, *args, **kwargs):
            done = self.main_loop.status['iterations_done']
            self.main_loop.log.current_row['channel'] = done ** 2

    extensions = [
        Writer(after_batch=True),
        Plot('test', [['channel']]).set_conditions(after_batch=True),
        FinishAfter(after_n_batches=11),
    ]
    main_loop = MockMainLoop(extensions=extensions)
    main_loop.run()
def test_plot():
    """Run Plot for 11 batches against a synthetic quadratic channel."""
    class ChannelWriter(SimpleExtension):
        """Write ``iterations_done ** 2`` to the log row as 'channel'."""
        def do(self, *args, **kwargs):
            iterations_done = self.main_loop.status['iterations_done']
            self.main_loop.log.current_row['channel'] = iterations_done ** 2

    plotter = Plot('test', [['channel']]).set_conditions(after_batch=True)
    MockMainLoop(extensions=[ChannelWriter(after_batch=True),
                             plotter,
                             FinishAfter(after_n_batches=11)]).run()
def test_plot():
    """Exercise the Plot extension on a single generated channel."""
    class Writer(SimpleExtension):
        # Each batch, publish the square of the iteration counter.
        def do(self, *args, **kwargs):
            status = self.main_loop.status
            row = self.main_loop.log.current_row
            row["channel"] = status["iterations_done"] ** 2

    writer = Writer(after_batch=True)
    plot = Plot("test", [["channel"]])
    plot.set_conditions(after_batch=True)
    stop = FinishAfter(after_n_batches=11)

    main_loop = MockMainLoop(extensions=[writer, plot, stop])
    main_loop.run()
def test_timing():
    """The every-two-epochs Timing's per-epoch training time should be close
    to the mean of the two per-epoch readings from the every-epoch Timing.
    """
    epochs = 2
    main_loop = MockMainLoop(delay_time=0.1,
                             extensions=[Timing(prefix='each'),
                                         Timing(prefix='each_second',
                                                every_n_epochs=2),
                                         FinishAfter(after_n_epochs=epochs)])
    main_loop.run()
    # Number of iterations in one epoch; epoch k ends at iterations * k.
    iterations = int(main_loop.log.status['iterations_done'] / epochs)
    # BUG FIX: the original summed the epoch-1 entry with itself
    # ((a + a) / 2 == a), so the "average over both epochs" was a no-op.
    # Average the epoch-1 (at `iterations`) and epoch-2 (at
    # `iterations * 2`) entries as intended.
    assert_allclose(
        (main_loop.log[iterations]['each_time_train_this_epoch'] +
         main_loop.log[iterations * 2]['each_time_train_this_epoch']) / 2,
        main_loop.log.current_row['each_second_time_train_this_epoch'],
        atol=1e-2)
def test_timing():
    """Compare a per-epoch Timing against one firing every two epochs: the
    latter's per-epoch training time should match the mean of the former's
    two epoch readings.
    """
    epochs = 2
    main_loop = MockMainLoop(delay_time=0.1, extensions=[
        Timing(prefix='each'),
        Timing(prefix='each_second', every_n_epochs=2),
        FinishAfter(after_n_epochs=epochs)
    ])
    main_loop.run()
    # Iterations per epoch; the k-th epoch's record lives at iterations * k.
    iterations = int(main_loop.log.status['iterations_done'] / epochs)
    # BUG FIX: the original averaged the same log entry with itself
    # ((a + a) / 2 == a), which defeats the averaging. Use the epoch-2
    # entry at `iterations * 2` for the second term.
    assert_allclose(
        (main_loop.log[iterations]['each_time_train_this_epoch'] +
         main_loop.log[iterations * 2]['each_time_train_this_epoch']) / 2,
        main_loop.log.current_row['each_second_time_train_this_epoch'],
        atol=1e-2)
def test_save_the_best():
    """A Checkpoint with an OnLogRecord condition tied to TrackTheBest must
    additionally save to the "best" path exactly on improving batches.
    """
    skip_if_configuration_set('log_backend', 'sqlite',
                              "Known to be flaky with SQLite log backend.")
    with NamedTemporaryFile(dir=config.temp_dir) as dst, \
            NamedTemporaryFile(dir=config.temp_dir) as dst_best:
        track_cost = TrackTheBest("cost", after_epoch=False,
                                  after_batch=True)
        checkpointer = Checkpoint(
            dst.name, after_batch=True, save_separately=['log']
        ).add_condition(["after_batch"],
                        OnLogRecord(track_cost.notification_name),
                        (dst_best.name,))

        main_loop = MockMainLoop(extensions=[FinishAfter(after_n_epochs=1),
                                             WriteCostExtension(),
                                             track_cost,
                                             checkpointer])
        main_loop.run()

        # Improvements at iterations 4 and 5 triggered the extra save;
        # iteration 6 saved only the regular checkpoint.
        assert main_loop.log[4]['saved_to'] == (dst.name, dst_best.name)
        assert main_loop.log[5]['saved_to'] == (dst.name, dst_best.name)
        assert main_loop.log[6]['saved_to'] == (dst.name,)

        # The best checkpoint captures the state after iteration 5.
        with open(dst_best.name, 'rb') as src:
            assert load(src).log.status['iterations_done'] == 5
def test_timing():
    """Smoke test: a default Timing extension survives two full epochs."""
    extensions = [Timing(), FinishAfter(after_n_epochs=2)]
    loop = MockMainLoop(extensions=extensions)
    loop.run()