def _write_source_file_content(self, file_path):
  """Send the content of a source file via debug-events writer.

  Args:
    file_path: Path to the source file.

  Returns:
    An int index for the file.
  """
  # Unlocked fast path: `_source_file_paths` is append-only, so an index
  # found here remains valid.
  if file_path in self._source_file_paths:
    return self._source_file_paths.index(file_path)
  with self._source_file_paths_lock:
    # Re-check under the lock: another thread may have registered the file
    # between the fast-path check above and acquiring the lock.
    if file_path not in self._source_file_paths:
      lines = None
      if source_utils.is_extension_uncompiled_python_source(file_path):
        try:
          lines, _ = source_utils.load_source(file_path)
        except IOError as e:
          # Accept the fact that some source files are not readable. Here we
          # use best effort to send the source-file contents, but log the
          # failure instead of silently swallowing it.
          import logging  # Local import: avoids touching file-level imports.
          logging.warning(
              "Failed to read source code from path: %s. Reason: %s",
              file_path, e)
      writer = self.get_writer()
      writer.WriteSourceFile(
          debug_event_pb2.SourceFile(
              file_path=file_path, host_name=self._hostname, lines=lines))
      self._source_file_paths.append(file_path)
    return self._source_file_paths.index(file_path)
def _write_source_file_content(self, file_path):
  """Send the content of a source file via debug-events writer.

  Args:
    file_path: Path to the source file.

  Returns:
    An int index for the file.
  """
  # Unlocked fast path: `_source_file_paths` is append-only, so an index
  # found here remains valid.
  if file_path in self._source_file_paths:
    return self._source_file_paths.index(file_path)
  with self._source_file_paths_lock:
    # Re-check under the lock: another thread may have registered the file
    # between the fast-path check above and acquiring the lock.
    if file_path not in self._source_file_paths:
      lines = None
      if source_utils.is_extension_uncompiled_python_source(file_path):
        try:
          lines, _ = source_utils.load_source(file_path)
        except IOError as e:
          # Best effort: some source files are not readable; log and proceed
          # with `lines=None`. `warning` is used because `warn` is a
          # deprecated alias in the logging API.
          logging.warning(
              "Failed to read source code from path: %s. Reason: %s",
              file_path, e)
      writer = self.get_writer()
      writer.WriteSourceFile(
          debug_event_pb2.SourceFile(
              file_path=file_path, host_name=self._hostname, lines=lines))
      self._source_file_paths.append(file_path)
    return self._source_file_paths.index(file_path)
def testWriteSourceFilesAndStackFrames(self):
  """Round-trips SourceFile and StackFrameWithId protos through the writer."""
  writer = debug_events_writer.DebugEventsWriter(self.dump_root,
                                                 self.tfdbg_run_id)
  num_protos = 10
  for i in range(num_protos):
    source_file = debug_event_pb2.SourceFile(
        file_path="/home/tf2user/main.py", host_name="machine.cluster")
    source_file.lines.append("print(%d)" % i)
    writer.WriteSourceFile(source_file)

    stack_frame = debug_event_pb2.StackFrameWithId(id="stack_%d" % i)
    stack_frame.file_line_col.file_index = i * 10
    writer.WriteStackFrameWithId(stack_frame)
  writer.FlushNonExecutionFiles()

  with debug_events_reader.DebugEventsReader(self.dump_root) as reader:
    source_files = [
        item.debug_event.source_file
        for item in reader.source_files_iterator()
    ]
    self.assertLen(source_files, num_protos)
    for i, proto in enumerate(source_files):
      self.assertEqual(proto.file_path, "/home/tf2user/main.py")
      self.assertEqual(proto.host_name, "machine.cluster")
      self.assertEqual(proto.lines, ["print(%d)" % i])

    stack_frames = [
        item.debug_event.stack_frame_with_id
        for item in reader.stack_frames_iterator()
    ]
    self.assertLen(stack_frames, num_protos)
    for i, proto in enumerate(stack_frames):
      self.assertEqual(proto.id, "stack_%d" % i)
      self.assertEqual(proto.file_line_col.file_index, i * 10)
def testConcurrentSourceFileRandomReads(self):
  """Two threads reading disjoint halves of the source files see correct data."""
  writer = debug_events_writer.DebugEventsWriter(self.dump_root,
                                                 self.tfdbg_run_id)
  num_files = 100
  for i in range(num_files):
    source_file = debug_event_pb2.SourceFile(
        host_name="localhost", file_path="/tmp/file_%d.py" % i)
    source_file.lines.append("# File %d" % i)
    writer.WriteSourceFile(source_file)
  writer.FlushNonExecutionFiles()

  reader = debug_events_reader.DebugDataReader(self.dump_root)
  reader.update()
  lines = [None] * num_files

  def _read_range(start, stop):
    # Read in the reverse order to enhance randomness of the read access.
    for i in range(stop - 1, start - 1, -1):
      lines[i] = reader.source_lines("localhost", "/tmp/file_%d.py" % i)

  threads = [
      threading.Thread(target=_read_range, args=(0, 50)),
      threading.Thread(target=_read_range, args=(50, 100)),
  ]
  for thread in threads:
    thread.start()
  for thread in threads:
    thread.join()

  for i in range(num_files):
    self.assertEqual(lines[i], ["# File %d" % i])
def WriteSourceFile():
  """Writes one SourceFile proto with a unique, lock-protected path index."""
  source_file = debug_event_pb2.SourceFile()
  with source_file_state["lock"]:
    # Read and bump the shared counter atomically so every concurrent call
    # produces a distinct file path.
    index = source_file_state["counter"]
    source_file_state["counter"] = index + 1
    source_file.file_path = "/home/tf2user/file_%d.py" % index
  writer.WriteSourceFile(source_file)
  # More-frequent-than-necessary concurrent flushing is not recommended,
  # but tolerated.
  writer.FlushNonExecutionFiles()
def testWriteSourceFilesAndStackFrames(self):
  """Writes SourceFile/StackFrameWithId protos and verifies the dump files.

  Checks that exactly one .source_files file and one .stack_frames file are
  produced in the dump root, and that the written protos round-trip intact.
  """
  writer = debug_events_writer.DebugEventsWriter(self.dump_root)
  num_protos = 10
  for i in range(num_protos):
    source_file = debug_event_pb2.SourceFile()
    source_file.file_path = "/home/tf2user/main.py"
    source_file.host_name = "machine.cluster"
    source_file.lines.append("print(%d)" % i)
    writer.WriteSourceFile(source_file)

    stack_frame = debug_event_pb2.StackFrameWithId()
    stack_frame.id = "stack_%d" % i
    stack_frame.file_line_col.file_index = i * 10
    writer.WriteStackFrameWithId(stack_frame)
  writer.FlushNonExecutionFiles()

  # Use assertLen for length checks, consistent with the other tests in
  # this file; it also produces a clearer failure message than
  # assertEqual(len(...), ...).
  source_files_paths = glob.glob(
      os.path.join(self.dump_root, "*.source_files"))
  self.assertLen(source_files_paths, 1)
  actuals = ReadDebugEvents(source_files_paths[0])
  self.assertLen(actuals, num_protos)
  for i in range(num_protos):
    self.assertEqual(actuals[i].source_file.file_path,
                     "/home/tf2user/main.py")
    self.assertEqual(actuals[i].source_file.host_name, "machine.cluster")
    self.assertEqual(actuals[i].source_file.lines, ["print(%d)" % i])

  stack_frames_paths = glob.glob(
      os.path.join(self.dump_root, "*.stack_frames"))
  self.assertLen(stack_frames_paths, 1)
  actuals = ReadDebugEvents(stack_frames_paths[0])
  self.assertLen(actuals, num_protos)
  for i in range(num_protos):
    self.assertEqual(actuals[i].stack_frame_with_id.id, "stack_%d" % i)
    self.assertEqual(
        actuals[i].stack_frame_with_id.file_line_col.file_index, i * 10)