def __init__(self, dir):
    """Open a raw TensorBoard events writer under `dir`, starting at step 1."""
    os.makedirs(dir, exist_ok=True)
    self.dir = dir
    self.step = 1
    # Absolute, normalized prefix for the generated events file.
    events_path = osp.join(osp.abspath(dir), 'events')
    # TensorFlow is imported lazily so the module loads without it installed.
    import tensorflow as tf
    from tensorflow.core.util import event_pb2
    from tensorflow.python import pywrap_tensorflow
    from tensorflow.python.util import compat
    self.tf = tf
    self.event_pb2 = event_pb2
    self.pywrap_tensorflow = pywrap_tensorflow
    self.writer = pywrap_tensorflow.EventsWriter(compat.as_bytes(events_path))
def __init__(self, logdir, graph_def=None, max_queue=10, flush_secs=120):
    """Creates a `SummaryWriter` and an event file.

    On construction the writer opens a new event file in `logdir`; `Event`
    protocol buffers are appended to it by `add_summary()`, `add_event()`
    and `add_graph()`.  A `graph_def` passed here is added immediately,
    exactly as a later `add_graph()` call would, so TensorBoard can pick it
    up and display the graph you launched, e.g.:

    ```python
    sess = tf.Session()
    writer = tf.train.SummaryWriter(<some-directory>, sess.graph_def)
    ```

    Args:
      logdir: A string. Directory where event file will be written.
      graph_def: A `GraphDef` protocol buffer.
      max_queue: Integer. Size of the queue for pending events and
        summaries; one of the 'add' calls blocks once it is full.
      flush_secs: Number. How often, in seconds, to flush the pending
        events and summaries to disk.
    """
    self._logdir = logdir
    if not gfile.IsDirectory(self._logdir):
        gfile.MakeDirs(self._logdir)
    self._event_queue = Queue.Queue(max_queue)
    events_path = os.path.join(self._logdir, "events")
    self._ev_writer = pywrap_tensorflow.EventsWriter(events_path)
    # Background thread drains the queue and flushes every `flush_secs`.
    self._worker = _EventLoggerThread(self._event_queue, self._ev_writer,
                                      flush_secs)
    self._worker.start()
    if graph_def is not None:
        self.add_graph(graph_def)
def maybe_create_event_file(logdir):
    """Create an empty event file in `logdir` unless one already exists.

    The presence of such a file indicates that we have a plugins/profile/
    directory in the current logdir.

    Args:
      logdir: log directory.
    """
    # Short-circuits on the first matching name, like the original early return.
    if any(name.endswith(_EVENT_FILE_SUFFIX)
           for name in gfile.ListDirectory(logdir)):
        return
    # TODO(b/127330388): Use summary_ops_v2.create_file_writer instead.
    writer = pywrap_tensorflow.EventsWriter(
        compat.as_bytes(os.path.join(logdir, 'events')))
    writer.InitWithSuffix(compat.as_bytes(_EVENT_FILE_SUFFIX))
def main():
    """Entry point: open a shared events writer, run as worker or ps, close it.

    Reads the log directory from the LOGDIR environment variable and the
    task type from the loaded config.  Raises ValueError on an unknown
    task type.
    """
    global writer
    config = load_config()
    # todo: factor out common logic
    logdir = os.environ["LOGDIR"]
    # os.path.join instead of '+' so a trailing slash in LOGDIR is handled.
    writer = pywrap_tensorflow.EventsWriter(
        compat.as_bytes(os.path.join(logdir, 'events')))
    try:
        if config.task_type == 'worker':
            run_worker()
        elif config.task_type == 'ps':
            run_ps()
        else:
            # raise instead of assert: asserts are stripped under `python -O`.
            raise ValueError("Unknown task type " + str(config.task_type))
    finally:
        # Always close so buffered events reach disk even when a task raises;
        # the original leaked the writer on any exception.
        writer.Close()
def __init__(self, dir):
    """Open a TensorBoard events writer rooted at `dir`.

    :param dir: (str) directory for the event file; created if absent.
    """
    # exist_ok=True (Python 3.2+) replaces the old try/except-errno.EEXIST
    # dance; same intent, and consistent with the other writer classes here.
    os.makedirs(dir, exist_ok=True)
    self.dir = dir
    self.step = 1
    prefix = 'events'
    path = osp.join(osp.abspath(dir), prefix)
    # TensorFlow is imported lazily so this module can load without it.
    import tensorflow as tf
    from tensorflow.python import pywrap_tensorflow
    from tensorflow.core.util import event_pb2
    from tensorflow.python.util import compat
    self.tf = tf
    self.event_pb2 = event_pb2
    self.pywrap_tensorflow = pywrap_tensorflow
    self.writer = pywrap_tensorflow.EventsWriter(compat.as_bytes(path))
def __init__(self, dir):
    """Set up a TensorBoard events writer rooted at `dir`."""
    # Raise no error if the directory already exists.
    os.makedirs(dir, exist_ok=True)
    self.dir = dir
    self.step = 1
    # Build the absolute, normalized path of the 'events' file prefix
    # (osp is os.path).
    path = osp.join(osp.abspath(dir), 'events')
    # Import TensorFlow and the internal helpers we need, lazily.
    import tensorflow as tf
    from tensorflow.core.util import event_pb2
    from tensorflow.python import pywrap_tensorflow
    from tensorflow.python.util import compat
    self.tf = tf
    self.event_pb2 = event_pb2
    self.pywrap_tensorflow = pywrap_tensorflow
    self.writer = pywrap_tensorflow.EventsWriter(compat.as_bytes(path))
def __init__(self, dir):
    """
    Dumps key/value pairs into TensorBoard's numeric format.

    :param dir: (str) the folder to write the log to
    """
    # Create the directory if needed; no error when it already exists.
    os.makedirs(dir, exist_ok=True)
    self.dir = dir
    # Step counter for subsequent writes; starts at 1.
    self.step = 1
    prefix = 'events'
    path = osp.join(osp.abspath(dir), prefix)
    # TensorFlow is imported lazily so the module loads without it installed.
    import tensorflow as tf
    from tensorflow.python import pywrap_tensorflow
    from tensorflow.core.util import event_pb2
    from tensorflow.python.util import compat
    self.tf = tf
    self.event_pb2 = event_pb2
    self.pywrap_tensorflow = pywrap_tensorflow
    self.writer = pywrap_tensorflow.EventsWriter(compat.as_bytes(path))
def __init__(self, dir):
    """Open a TensorBoard events writer, resuming the step from RES_FILE.

    :param dir: (str) directory for the event file; created if absent.
    """
    os.makedirs(dir, exist_ok=True)
    self.dir = dir
    # Resume from the iteration recorded in RES_FILE; fall back to 1 when the
    # file is missing, unreadable, or lacks 'resitr'.  (The original assigned
    # res_itr = 1 but then read the file unconditionally, so that default was
    # dead code and a missing file crashed construction.)
    res_itr = 1
    try:
        with open(RES_FILE) as f:
            res_itr = json.load(f)['resitr']
    except (OSError, ValueError, KeyError):
        pass
    self.step = res_itr
    prefix = 'events'
    path = osp.join(osp.abspath(dir), prefix)
    # TensorFlow is imported lazily so the module loads without it installed.
    import tensorflow as tf
    from tensorflow.python import pywrap_tensorflow
    from tensorflow.core.util import event_pb2
    from tensorflow.python.util import compat
    self.tf = tf
    self.event_pb2 = event_pb2
    self.pywrap_tensorflow = pywrap_tensorflow
    self.writer = pywrap_tensorflow.EventsWriter(compat.as_bytes(path))
def __init__(self, logdir, max_queue=10, flush_secs=120,
             filename_suffix=None):
    """Creates a `EventFileWriter` and an event file to write to.

    On construction the writer opens a new event file in `logdir`; `Event`
    protocol buffers are appended to it via the add_event method, written
    asynchronously by a background thread.

    Args:
      logdir: A string. Directory where event file will be written.
      max_queue: Integer. Size of the queue for pending events and
        summaries; one of the 'add' calls blocks once it is full.
      flush_secs: Number. How often, in seconds, to flush the pending
        events and summaries to disk.
      filename_suffix: A string. Every event file's name is suffixed with
        `filename_suffix`.
    """
    self._logdir = logdir
    if not gfile.IsDirectory(self._logdir):
        gfile.MakeDirs(self._logdir)
    self._event_queue = six.moves.queue.Queue(max_queue)
    events_path = os.path.join(self._logdir, "events")
    self._ev_writer = pywrap_tensorflow.EventsWriter(
        compat.as_bytes(events_path))
    self._flush_secs = flush_secs
    self._sentinel_event = self._get_sentinel_event()
    if filename_suffix:
        self._ev_writer.InitWithSuffix(compat.as_bytes(filename_suffix))
    self._closed = False
    # Background thread drains the queue; the sentinel event shuts it down.
    self._worker = _EventLoggerThread(self._event_queue, self._ev_writer,
                                      self._flush_secs, self._sentinel_event)
    self._worker.start()
def __init__(self, dir):
    """Create an events writer under `dir` and launch TensorBoard on it."""
    os.makedirs(dir, exist_ok=True)
    self.dir = dir
    # Three levels above the log directory.  NOTE(review): assumes
    # '/'-separated paths — not portable to Windows; confirm acceptable.
    self.data_read_dir = "/".join(self.dir.split("/")[:-3])
    self.step = 1
    self.path = osp.join(osp.abspath(dir), 'events')
    # TensorFlow is imported lazily so the module loads without it installed.
    import tensorflow as tf
    from tensorflow.core.util import event_pb2
    from tensorflow.python import pywrap_tensorflow
    from tensorflow.python.util import compat
    self.tf = tf
    self.event_pb2 = event_pb2
    self.pywrap_tensorflow = pywrap_tensorflow
    self.writer = pywrap_tensorflow.EventsWriter(compat.as_bytes(self.path))
    self.tb_port = None
    # Start tensorboard.
    self.launchTensorBoard()
def _create_events_writer(self, directory):
    """Creates a new events writer.

    Args:
      directory: The directory in which to write files containing events.

    Returns:
      A new events writer, which corresponds to a new events file.
    """
    events_files = self._fetch_events_files_on_disk()
    disk_usage = 0
    for name in events_files:
        disk_usage += tf.io.gfile.stat(
            os.path.join(self._events_directory, name)).length

    if disk_usage >= self.total_file_size_cap_bytes:
        # Over the cap: evict events files until we are back under it.
        for name in events_files:
            if disk_usage < self.total_file_size_cap_bytes:
                break
            victim = os.path.join(self._events_directory, name)
            victim_size = tf.io.gfile.stat(victim).length
            try:
                tf.io.gfile.remove(victim)
                disk_usage -= victim_size
                logger.info(
                    "Deleted %s because events files take up over %d bytes",
                    victim, self.total_file_size_cap_bytes)
            except IOError as err:
                logger.error("Deleting %s failed: %s", victim, err)

    # Each events writer must differ in prefix, hence the counter.
    self._events_file_count += 1
    file_path = "%s.%d.%d" % (
        os.path.join(directory, DEBUGGER_EVENTS_FILE_STARTING_TEXT),
        time.time(), self._events_file_count)
    logger.info("Creating events file %s", file_path)
    return pywrap_tensorflow.EventsWriter(tf.compat.as_bytes(file_path))
def setUp(self):
    """Build a log dir with debugger events and a plugin wired to mocks.

    Writes DebugNumericSummary events for runs '.' and 'run_foo', then
    constructs a DebuggerPlugin served through a werkzeug test client, with
    the gRPC debugger data server mocked out and threads made synchronous.
    """
    super(DebuggerPluginTestBase, self).setUp()
    # Importing the debugger_plugin can sometimes unfortunately produce errors.
    try:
        # pylint: disable=g-import-not-at-top
        from tensorboard.plugins.debugger import debugger_plugin
        from tensorboard.plugins.debugger import debugger_server_lib
        # pylint: enable=g-import-not-at-top
    except Exception as e:  # pylint: disable=broad-except
        # NOTE(review): skipTest already raises; the leading `raise` on its
        # return value is redundant but harmless.
        raise self.skipTest(
            'Skipping test because importing some modules failed: %r' % e)
    self.debugger_plugin_module = debugger_plugin
    # Populate the log directory with debugger event for run '.'.
    self.log_dir = self.get_temp_dir()
    file_prefix = tf.compat.as_bytes(
        os.path.join(self.log_dir, 'events.debugger'))
    writer = pywrap_tensorflow.EventsWriter(file_prefix)
    device_name = '/job:localhost/replica:0/task:0/cpu:0'
    # Each value list is 12 placeholder stats followed by a dtype enum and
    # shape-like trailing values — presumably the DebugNumericSummary layout;
    # verify against _CreateEventWithDebugNumericSummary.
    writer.WriteEvent(
        self._CreateEventWithDebugNumericSummary(
            device_name=device_name,
            op_name='layers/Matmul',
            output_slot=0,
            wall_time=42,
            step=2,
            list_of_values=(
                list(range(12)) +
                [float(tf.float32.as_datatype_enum), 1.0, 3.0])))
    writer.WriteEvent(
        self._CreateEventWithDebugNumericSummary(
            device_name=device_name,
            op_name='layers/Matmul',
            output_slot=1,
            wall_time=43,
            step=7,
            list_of_values=(
                list(range(12)) +
                [float(tf.float64.as_datatype_enum), 2.0, 3.0, 3.0])))
    writer.WriteEvent(
        self._CreateEventWithDebugNumericSummary(
            device_name=device_name,
            op_name='logits/Add',
            output_slot=0,
            wall_time=1337,
            step=7,
            list_of_values=(
                list(range(12)) +
                [float(tf.int32.as_datatype_enum), 2.0, 3.0, 3.0])))
    writer.WriteEvent(
        self._CreateEventWithDebugNumericSummary(
            device_name=device_name,
            op_name='logits/Add',
            output_slot=0,
            wall_time=1338,
            step=8,
            list_of_values=(list(range(12)) +
                            [float(tf.int16.as_datatype_enum), 0.0])))
    writer.Close()
    # Populate the log directory with debugger event for run 'run_foo'.
    run_foo_directory = os.path.join(self.log_dir, 'run_foo')
    os.mkdir(run_foo_directory)
    file_prefix = tf.compat.as_bytes(
        os.path.join(run_foo_directory, 'events.debugger'))
    writer = pywrap_tensorflow.EventsWriter(file_prefix)
    writer.WriteEvent(
        self._CreateEventWithDebugNumericSummary(
            device_name=device_name,
            op_name='layers/Variable',
            output_slot=0,
            wall_time=4242,
            step=42,
            list_of_values=(list(range(12)) +
                            [float(tf.int16.as_datatype_enum), 1.0, 8.0])))
    writer.Close()
    # Start a server that will receive requests and respond with health pills.
    multiplexer = event_multiplexer.EventMultiplexer({
        '.': self.log_dir,
        'run_foo': run_foo_directory,
    })
    multiplexer.Reload()
    self.debugger_data_server_grpc_port = portpicker.pick_unused_port()
    # Fake threading behavior so that threads are synchronous.
    tf.compat.v1.test.mock.patch('threading.Thread.start',
                                 threading.Thread.run).start()
    self.mock_debugger_data_server = tf.compat.v1.test.mock.Mock(
        debugger_server_lib.DebuggerDataServer)
    self.mock_debugger_data_server_class = tf.compat.v1.test.mock.Mock(
        debugger_server_lib.DebuggerDataServer,
        return_value=self.mock_debugger_data_server)
    # Replace the real data server class so listen() constructs the mock.
    tf.compat.v1.test.mock.patch.object(
        debugger_server_lib, 'DebuggerDataServer',
        self.mock_debugger_data_server_class).start()
    self.context = base_plugin.TBContext(logdir=self.log_dir,
                                         multiplexer=multiplexer)
    self.plugin = debugger_plugin.DebuggerPlugin(self.context)
    self.plugin.listen(self.debugger_data_server_grpc_port)
    wsgi_app = application.TensorBoardWSGI([self.plugin])
    self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
    # The debugger data server should be started at the correct port.
    self.mock_debugger_data_server_class.assert_called_once_with(
        self.debugger_data_server_grpc_port, self.log_dir)
    mock_debugger_data_server = self.mock_debugger_data_server
    start = mock_debugger_data_server.start_the_debugger_data_receiving_server
    self.assertEqual(1, start.call_count)
def setUp(self):
    """Write debugger events for runs '.' and 'run_foo', then build the app."""
    # Populate the log directory with debugger event for run '.'.
    self.log_dir = self.get_temp_dir()
    file_prefix = compat.as_bytes(os.path.join(self.log_dir, 'events.debugger'))
    writer = pywrap_tensorflow.EventsWriter(file_prefix)
    # (op_name, output_slot, wall_time, step, values), written in order.
    for op_name, output_slot, wall_time, step, values in (
            ('layers/Matmul', 0, 42, 2, [1, 2, 3]),
            ('layers/Matmul', 1, 43, 7, [4, 5, 6]),
            ('logits/Add', 0, 1337, 7, [7, 8, 9]),
            ('logits/Add', 0, 1338, 8, [10, 11, 12])):
        writer.WriteEvent(
            self._CreateEventWithDebugNumericSummary(
                op_name=op_name,
                output_slot=output_slot,
                wall_time=wall_time,
                step=step,
                list_of_values=values))
    writer.Close()
    # Populate the log directory with debugger event for run 'run_foo'.
    run_foo_directory = os.path.join(self.log_dir, 'run_foo')
    os.mkdir(run_foo_directory)
    file_prefix = compat.as_bytes(
        os.path.join(run_foo_directory, 'events.debugger'))
    writer = pywrap_tensorflow.EventsWriter(file_prefix)
    writer.WriteEvent(
        self._CreateEventWithDebugNumericSummary(
            op_name='layers/Variable',
            output_slot=0,
            wall_time=4242,
            step=42,
            list_of_values=[13, 14, 15]))
    writer.Close()
    # Start a server that will receive requests and respond with health pills.
    self.multiplexer = event_multiplexer.EventMultiplexer({
        '.': self.log_dir,
        'run_foo': run_foo_directory,
    })
    self.plugin = debugger_plugin.DebuggerPlugin()
    wsgi_app = application.TensorBoardWSGIApp(
        self.log_dir, [self.plugin], self.multiplexer, reload_interval=0)
    self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
def __init__(self, dir, prefix):
    """Open an EventsWriter for `prefix`-named files under `dir`.

    Step counting starts at 1 because the EventsWriter automatically
    generates an object with step=0.
    """
    self.dir = dir
    self.step = 1
    events_path = os.path.join(dir, prefix)
    self.evwriter = pywrap_tensorflow.EventsWriter(compat.as_bytes(events_path))
def run_benchmark(master, direction=None):
    """Connect to master and run simple TF->Python transfer benchmark.

    With no `direction`, acts as the recording driver: repeatedly calls
    itself for 'p->t' and 't->p' and writes the measured rates as events.
    With a direction, runs the actual transfer loop and returns MB/second.
    """
    from tensorflow.python.summary import summary as summary_lib
    from tensorflow.python import pywrap_tensorflow
    from tensorflow.python.util import compat
    from tensorflow.core.util import event_pb2
    from tensorflow.core.framework import summary_pb2

    def make_event(tag, value, step):
        # Wrap a single scalar into an Event proto for the events writer.
        event = event_pb2.Event(
            wall_time=time.time(),
            step=step,
            summary=summary_pb2.Summary(
                value=[summary_pb2.Summary.Value(
                    tag=tag, simple_value=value)]))
        return event

    if not direction:
        # Driver mode: record both directions forever.
        os.system('mkdir -p '+FLAGS.logdir)
        # todo: unique filenames like with contrib.summary writer
        writer = pywrap_tensorflow.EventsWriter(compat.as_bytes(FLAGS.logdir+'/events'))
        filename = compat.as_text(writer.FileName())
        training_util.get_or_create_global_step()
        sess = tf.InteractiveSession()
        step = 0
        while True:
            # Recursive calls run the measured transfer in each direction.
            p_to_t = run_benchmark(master, 'p->t')
            print("recording", p_to_t, "to", FLAGS.logdir)
            t_to_p = run_benchmark(master, 't->p')
            event = make_event('p->t', p_to_t, step)
            writer.WriteEvent(event)
            event = make_event('t->p', t_to_p, step)
            writer.WriteEvent(event)
            writer.Flush()
            step+=1
        # NOTE(review): unreachable — the while True above never breaks, so
        # the writer is never closed and this return is dead code.
        writer.Close()
        return

    assert FLAGS.warmup_iters > 0
    # Disable GC so collection pauses don't pollute the timing.
    gc.disable()
    dtype = tf.int32
    params_size = 250*1000*FLAGS.data_mb # 1MB is 250k integers
    # params = tf.get_variable("params", [params_size], dtype,
    #                          initializer=tf.ones_initializer())
    params = tf.Variable(tf.ones([params_size], dtype=dtype), name='params')
    params_read = params.read_value()  # prevent caching
    params_holder = tf.placeholder(dtype)
    params_write = params.assign(params_holder)
    done_queue = create_done_queue(0)
    init_op = tf.global_variables_initializer()
    sess = tf.Session(master, config=session_config())
    sess.run(init_op)
    result = sess.run(params_read)
    total = 0
    for i in range(FLAGS.iters+FLAGS.warmup_iters):
        # Timer starts only after the warmup iterations are done.
        if i == FLAGS.warmup_iters:
            start_time = time.time()
        # fetch value into Python runtime
        if direction == "t->p":
            result = sess.run(params_read)
            if FLAGS.sanity_check:
                total += result.sum()
                print(float(total)/params_size)
        elif direction == "p->t":
            # Push the Python-side array back into the TF variable.
            sess.run(params_write.op, feed_dict={params_holder: result})
    elapsed_time = time.time() - start_time
    rate = float(FLAGS.iters)*FLAGS.data_mb/elapsed_time
    print("%5s %.2f MB/second" % (direction, rate))
    # Signal completion to the coordinating process.
    sess.run(done_queue.enqueue(1))
    return rate
def testWriteEventInvalidType(self):
    """WriteEvent must raise TypeError for objects that are not Event protos."""

    class _Invalid(object):

        def __str__(self):
            return "Invalid"

    bad_event = _Invalid()
    writer = pywrap_tensorflow.EventsWriter(b"foo")
    with self.assertRaisesRegexp(TypeError, "Invalid"):
        writer.WriteEvent(bad_event)