def testGetAvailableTags(self):
    """Scalar and histogram tag names are reported under separate keys."""
    # Three histogram events for tag 'c', then scalar events for 'b' and 'a'.
    histogram_events = [{'tag': 'c', 'histo': 2, 'step': s} for s in (10, 11, 9)]
    scalar_events = [
        {'tag': 'b', 'simple_value': 2, 'step': 20},
        {'tag': 'b', 'simple_value': 2, 'step': 15},
        {'tag': 'a', 'simple_value': 2, 'step': 3},
    ]
    self._WriteScalarSummaries(histogram_events + scalar_events)
    units = efi.get_inspection_units(self.logdir)
    tags = efi.get_unique_tags(units[0].field_to_obs)
    self.assertEqual(['a', 'b'], tags['scalars'])
    self.assertEqual(['c'], tags['histograms'])
def testSessionLogSummaries(self):
    """Session-log events are bucketed by status (start/stop/checkpoint)."""
    # One SessionLog event per step; step number is the list index.
    statuses = [
        event_pb2.SessionLog.START,
        event_pb2.SessionLog.CHECKPOINT,
        event_pb2.SessionLog.CHECKPOINT,
        event_pb2.SessionLog.CHECKPOINT,
        event_pb2.SessionLog.STOP,
        event_pb2.SessionLog.START,
        event_pb2.SessionLog.STOP,
    ]
    data = [
        {"session_log": event_pb2.SessionLog(status=status), "step": step}
        for step, status in enumerate(statuses)
    ]
    self._WriteScalarSummaries(data)
    units = efi.get_inspection_units(self.logdir)
    self.assertEqual(1, len(units))
    printable = efi.get_dict_to_print(units[0].field_to_obs)
    self.assertEqual(printable["sessionlog:start"]["steps"], [0, 5])
    self.assertEqual(printable["sessionlog:stop"]["steps"], [4, 6])
    self.assertEqual(printable["sessionlog:checkpoint"]["num_steps"], 3)
def testGetAvailableTags(self):
    """get_unique_tags splits tag names by value kind (scalar vs histogram)."""
    data = []
    # Histogram observations for tag 'c'.
    for step in (10, 11, 9):
        data.append({'tag': 'c', 'histo': 2, 'step': step})
    # Scalar observations for tags 'b' and 'a'.
    for tag, step in (('b', 20), ('b', 15), ('a', 3)):
        data.append({'tag': tag, 'simple_value': 2, 'step': step})
    self._WriteScalarSummaries(data)
    units = efi.get_inspection_units(self.logdir)
    tags = efi.get_unique_tags(units[0].field_to_obs)
    self.assertEqual(['a', 'b'], tags['scalars'])
    self.assertEqual(['c'], tags['histograms'])
def testSessionLogSummaries(self):
    """Start/stop steps and checkpoint counts are summarized per status."""
    status_by_step = {
        0: event_pb2.SessionLog.START,
        1: event_pb2.SessionLog.CHECKPOINT,
        2: event_pb2.SessionLog.CHECKPOINT,
        3: event_pb2.SessionLog.CHECKPOINT,
        4: event_pb2.SessionLog.STOP,
        5: event_pb2.SessionLog.START,
        6: event_pb2.SessionLog.STOP,
    }
    data = [
        {'session_log': event_pb2.SessionLog(status=status), 'step': step}
        for step, status in sorted(status_by_step.items())
    ]
    self._WriteScalarSummaries(data)
    units = efi.get_inspection_units(self.logdir)
    self.assertEqual(1, len(units))
    printable = efi.get_dict_to_print(units[0].field_to_obs)
    self.assertEqual(printable['sessionlog:start']['steps'], [0, 5])
    self.assertEqual(printable['sessionlog:stop']['steps'], [4, 6])
    self.assertEqual(printable['sessionlog:checkpoint']['num_steps'], 3)
def testInspectAllWithNestedLogdirs(self):
    """The same summaries written under two subdirs yield two inspection units."""
    tag_steps = [("c", 10), ("c", 11), ("c", 9), ("b", 20), ("b", 15), ("a", 3)]
    data = [{"tag": tag, "simple_value": 2, "step": step} for tag, step in tag_steps]
    subdirs = ["eval", "train"]
    self._WriteScalarSummaries(data, subdirs=subdirs)
    units = efi.get_inspection_units(self.logdir)
    self.assertEqual(2, len(units))
    # Unit names are the full subdirectory paths, sorted lexicographically.
    expected_names = [os.path.join(self.logdir, name) for name in subdirs]
    self.assertEqual(expected_names, sorted([unit.name for unit in units]))
    # Both subdirs received identical data, so their scalar stats match.
    for unit in units:
        printable = efi.get_dict_to_print(unit.field_to_obs)["scalars"]
        self.assertEqual(printable["max_step"], 20)
        self.assertEqual(printable["min_step"], 3)
        self.assertEqual(printable["num_steps"], 6)
        self.assertEqual(printable["last_step"], 3)
        self.assertEqual(printable["first_step"], 10)
        self.assertEqual(printable["outoforder_steps"], [(11, 9), (20, 15), (15, 3)])
def testInspectAll(self):
    """Histogram and scalar observations are summarized independently."""
    data = [{"tag": "c", "histo": 2, "step": step} for step in (10, 11, 9)]
    data += [
        {"tag": tag, "simple_value": 2, "step": step}
        for tag, step in (("b", 20), ("b", 15), ("a", 3))
    ]
    self._WriteScalarSummaries(data)
    units = efi.get_inspection_units(self.logdir)
    printable = efi.get_dict_to_print(units[0].field_to_obs)
    # Histogram stats come only from the 'c' events (steps 10, 11, 9).
    histograms = printable["histograms"]
    self.assertEqual(histograms["max_step"], 11)
    self.assertEqual(histograms["min_step"], 9)
    self.assertEqual(histograms["num_steps"], 3)
    self.assertEqual(histograms["last_step"], 9)
    self.assertEqual(histograms["first_step"], 10)
    self.assertEqual(histograms["outoforder_steps"], [(11, 9)])
    # Scalar stats come from the 'b' and 'a' events (steps 20, 15, 3).
    scalars = printable["scalars"]
    self.assertEqual(scalars["max_step"], 20)
    self.assertEqual(scalars["min_step"], 3)
    self.assertEqual(scalars["num_steps"], 3)
    self.assertEqual(scalars["last_step"], 3)
    self.assertEqual(scalars["first_step"], 20)
    self.assertEqual(scalars["outoforder_steps"], [(20, 15), (15, 3)])
def testInspectTag(self):
    """Filtering by tag='c' keeps only observations for that tag."""
    data = [
        {'tag': 'c', 'histo': 2, 'step': 10},
        {'tag': 'c', 'histo': 2, 'step': 11},
        {'tag': 'c', 'histo': 2, 'step': 9},
        {'tag': 'b', 'histo': 2, 'step': 20},
        {'tag': 'b', 'simple_value': 2, 'step': 15},
        {'tag': 'a', 'simple_value': 2, 'step': 3},
    ]
    self._WriteScalarSummaries(data)
    units = efi.get_inspection_units(self.logdir, tag='c')
    printable = efi.get_dict_to_print(units[0].field_to_obs)
    # Tag 'c' only has histogram values at steps 10, 11, 9.
    histo = printable['histograms']
    self.assertEqual(histo['max_step'], 11)
    self.assertEqual(histo['min_step'], 9)
    self.assertEqual(histo['num_steps'], 3)
    self.assertEqual(histo['last_step'], 9)
    self.assertEqual(histo['first_step'], 10)
    self.assertEqual(histo['outoforder_steps'], [(11, 9)])
    # No scalar values carry tag 'c', so the scalars entry is None.
    self.assertEqual(printable['scalars'], None)
def testInspectTag(self):
    """Only tag 'c' observations survive when inspecting with tag='c'."""
    data = []
    # Histogram events: three for 'c', one for 'b'.
    for tag, step in (('c', 10), ('c', 11), ('c', 9), ('b', 20)):
        data.append({'tag': tag, 'histo': 2, 'step': step})
    # Scalar events never use tag 'c'.
    data.append({'tag': 'b', 'simple_value': 2, 'step': 15})
    data.append({'tag': 'a', 'simple_value': 2, 'step': 3})
    self._WriteScalarSummaries(data)
    units = efi.get_inspection_units(self.logdir, tag='c')
    printable = efi.get_dict_to_print(units[0].field_to_obs)
    expected_histograms = {
        'max_step': 11,
        'min_step': 9,
        'num_steps': 3,
        'last_step': 9,
        'first_step': 10,
        'outoforder_steps': [(11, 9)],
    }
    for field, expected in expected_histograms.items():
        self.assertEqual(printable['histograms'][field], expected)
    self.assertEqual(printable['scalars'], None)
def testInspectAllWithNestedLogdirs(self):
    """Each subdirectory becomes its own inspection unit with identical stats."""
    scalar_tags = ('c', 'c', 'c', 'b', 'b', 'a')
    scalar_steps = (10, 11, 9, 20, 15, 3)
    data = [
        {'tag': tag, 'simple_value': 2, 'step': step}
        for tag, step in zip(scalar_tags, scalar_steps)
    ]
    subdirs = ['eval', 'train']
    self._WriteScalarSummaries(data, subdirs=subdirs)
    units = efi.get_inspection_units(self.logdir)
    self.assertEqual(2, len(units))
    directory_names = [os.path.join(self.logdir, name) for name in subdirs]
    self.assertEqual(directory_names, sorted([unit.name for unit in units]))
    # Identical data went to every subdir, so every unit reports the same stats.
    expected = {
        'max_step': 20,
        'min_step': 3,
        'num_steps': 6,
        'last_step': 3,
        'first_step': 10,
        'outoforder_steps': [(11, 9), (20, 15), (15, 3)],
    }
    for unit in units:
        printable = efi.get_dict_to_print(unit.field_to_obs)['scalars']
        for field, value in expected.items():
            self.assertEqual(printable[field], value)
def testSessionLogSummaries(self):
    """START/STOP steps and CHECKPOINT counts are extracted from session logs."""
    def event_at(step, status):
        # One event dict per (step, session-log status) pair.
        return {'session_log': tf.SessionLog(status=status), 'step': step}

    data = [
        event_at(0, tf.SessionLog.START),
        event_at(1, tf.SessionLog.CHECKPOINT),
        event_at(2, tf.SessionLog.CHECKPOINT),
        event_at(3, tf.SessionLog.CHECKPOINT),
        event_at(4, tf.SessionLog.STOP),
        event_at(5, tf.SessionLog.START),
        event_at(6, tf.SessionLog.STOP),
    ]
    self._WriteScalarSummaries(data)
    units = efi.get_inspection_units(self.logdir)
    self.assertEqual(1, len(units))
    printable = efi.get_dict_to_print(units[0].field_to_obs)
    self.assertEqual(printable['sessionlog:start']['steps'], [0, 5])
    self.assertEqual(printable['sessionlog:stop']['steps'], [4, 6])
    self.assertEqual(printable['sessionlog:checkpoint']['num_steps'], 3)
def testGetAvailableTags(self):
    """Tags are listed per value kind: scalars vs histograms."""
    data = []
    # Histogram observations for tag 'c' at out-of-order steps.
    for step in (10, 11, 9):
        data.append({"tag": "c", "histo": 2, "step": step})
    # Scalar observations for tags 'b' and 'a'.
    data.append({"tag": "b", "simple_value": 2, "step": 20})
    data.append({"tag": "b", "simple_value": 2, "step": 15})
    data.append({"tag": "a", "simple_value": 2, "step": 3})
    self._WriteScalarSummaries(data)
    units = efi.get_inspection_units(self.logdir)
    tags = efi.get_unique_tags(units[0].field_to_obs)
    self.assertEqual(["a", "b"], tags["scalars"])
    self.assertEqual(["c"], tags["histograms"])
def testInspectAllWithNestedLogdirs(self):
    """Summaries mirrored into 'eval' and 'train' produce two units."""
    data = [
        {'tag': 'c', 'simple_value': 2, 'step': 10},
        {'tag': 'c', 'simple_value': 2, 'step': 11},
        {'tag': 'c', 'simple_value': 2, 'step': 9},
        {'tag': 'b', 'simple_value': 2, 'step': 20},
        {'tag': 'b', 'simple_value': 2, 'step': 15},
        {'tag': 'a', 'simple_value': 2, 'step': 3},
    ]
    subdirs = ['eval', 'train']
    self._WriteScalarSummaries(data, subdirs=subdirs)
    units = efi.get_inspection_units(self.logdir)
    self.assertEqual(2, len(units))
    directory_names = [os.path.join(self.logdir, name) for name in subdirs]
    self.assertEqual(directory_names, sorted([unit.name for unit in units]))
    # Each subdir saw the same events, so per-unit scalar stats are identical.
    for unit in units:
        scalars = efi.get_dict_to_print(unit.field_to_obs)['scalars']
        self.assertEqual(scalars['max_step'], 20)
        self.assertEqual(scalars['min_step'], 3)
        self.assertEqual(scalars['num_steps'], 6)
        self.assertEqual(scalars['last_step'], 3)
        self.assertEqual(scalars['first_step'], 10)
        self.assertEqual(scalars['outoforder_steps'], [(11, 9), (20, 15), (15, 3)])
def test_TensorboardLogger(self, clean_up, tmpdir):
    """Training with a TensorboardLogger writes one event file containing
    one loss scalar per optimization step."""
    # Wire up a minimal regression graph: data layer -> model -> MSE loss.
    layer = RealFunctionDataLayer(n=100, batch_size=1)
    model = TaylorNet(dim=4)
    loss = MSELoss()
    x, y = layer()
    y_pred = model(x=x)
    loss_tensor = loss(predictions=y_pred, target=y)

    # Log every step (step_freq=1) into a fresh temp directory.
    logging_dir = tmpdir.mkdir("temp")
    writer = SummaryWriter(logging_dir)
    callbacks = [TensorboardLogger(writer, step_freq=1)]
    self.nf.train(
        tensors_to_optimize=[loss_tensor],
        callbacks=callbacks,
        optimization_params={"max_steps": 4, "lr": 0.01},
        optimizer="sgd",
    )

    inspection_units = efi.get_inspection_units(str(logging_dir), "", "loss")
    # Exactly one tensorboard event file was produced.
    assert len(inspection_units) == 1
    # The loss scalar was logged once for each of the 4 training steps.
    assert len(inspection_units[0].field_to_obs['scalars']) == 4
def testEmptyLogdir(self):
    """A logdir with no event files yields an empty list of units."""
    self.assertEqual([], efi.get_inspection_units(self.logdir))
def testEmptyLogdir(self):
    """get_inspection_units returns [] when nothing has been written."""
    # No summaries are written before inspecting.
    result = efi.get_inspection_units(self.logdir)
    self.assertEqual([], result)
# In[ ]: test_y_hat = output_model(test_peptide.cuda(), test_allele.cuda()) test_loss = float(criterion(test_y_hat.squeeze(), test_y.cuda())) test_acc = float((abs(test_y.cuda() - test_y_hat.squeeze()) < .5).sum()) / test_len print(test_loss) print(test_acc) # In[ ]: # Kills all runs inspect_units = get_inspection_units(logdir='logs/tensorboard') run_len = {} for run in inspect_units: path = run[0] max_length = 0 for key, value in get_dict_to_print(run.field_to_obs).items(): if value is not None: length = value['max_step'] if max_length < length: max_length = length run_len[path] = max_length for run, length in run_len.items(): try: print(f'{run} is {length} and was deleted')
from argparse import ArgumentParser import shutil from tensorboard.backend.event_processing.event_file_inspector import get_inspection_units, print_dict, get_dict_to_print parser = ArgumentParser('delete small runs') parser.add_argument('--logdir', type=str, default='/home/liuxh/Documents/slbo/result') parser.add_argument('--min_run_len', type=int, default=500) parser.add_argument('--list', action='store_true') args = parser.parse_args() run_len = {} inspect_units = get_inspection_units(logdir=args.logdir) for run in inspect_units: path = run[0] max_length = 0 for key, value in get_dict_to_print(run.field_to_obs).items(): if value is not None: length = value['max_step'] if max_length < length: max_length = length run_len[path] = max_length for run, length in run_len.items(): if length < args.min_run_len: if args.list: print(f'{run} is {length} steps long and so will be deleted') else: try: