# Example 1
 def __init__(self, logdir, cache_max_items=128):
     """Set up event and geometry readers for a TensorBoard log directory.

     Args:
         logdir (str): TensorBoard logs directory.
         cache_max_items (int): Max geometry elements to be cached in memory.
     """
     self.logdir = logdir
     # Chained call: AddRunsFromDirectory returns the multiplexer itself.
     self.event_mux = EventMultiplexer(tensor_size_guidance={
         metadata.PLUGIN_NAME: 0  # Store all metadata in RAM
     }).AddRunsFromDirectory(logdir)
     self._run_to_tags = {}
     self._event_lock = threading.Lock()  # Protect TB event file data
     # Geometry data reading
     self._tensor_events = dict()
     # Bounded LRU cache for decoded geometry data.
     self.geometry_cache = LRUCache(max_items=cache_max_items)
     self._file_handles = {}  # {filename, (open_handle, read_lock)}
     self._file_handles_lock = threading.Lock()
     # Populate run/tag data now (method defined on the enclosing class).
     self.reload_events()
# Example 2
def plot_tf_scalar_summaries_splitted(summaries_dir,
                                      xmin=None,
                                      xmax=None,
                                      smoothing_function=None,
                                      max_reward=None,
                                      x_label="episode",
                                      legend=True,
                                      splitted_length=25,
                                      save_directory=None,
                                      show_plots=True):
    """Process sets of runs (of length splitted_length) separately before
    plotting TensorFlow scalar summaries.

    Loading runs in groups keeps only ``splitted_length`` runs' event data in
    one EventMultiplexer at a time, bounding memory use for many runs.

    Args:
        summaries_dir: Directory containing "run<N>" subdirectories
            (numbered from 1) — assumed layout; confirm against the writer.
        xmin, xmax: Optional x-axis limits forwarded to plot_tasks.
        smoothing_function: Optional callable applied to plotted values.
        max_reward: Optional reward cap forwarded to plot_tasks.
        x_label: X-axis label.
        legend: Whether to draw a legend.
        splitted_length: Number of runs to load per group.
        save_directory: Optional directory to save plots into.
        show_plots: Whether to display plots interactively.
    """
    _, rundirs, _ = next(os.walk(summaries_dir))
    data = {}
    n_groups = int(np.ceil(len(rundirs) / splitted_length))
    for runs_group in range(n_groups):
        logging.info("Processing run group {}. Runs processed: {}/{}.".format(
            runs_group, runs_group * splitted_length, len(rundirs)))
        em = EventMultiplexer()
        group_start = runs_group * splitted_length
        # Clamp the end of the last (possibly partial) group so we never
        # build paths to "run<N>" directories beyond the available runs.
        group_end = min(group_start + splitted_length, len(rundirs))
        for run_idx in range(group_start, group_end):
            run_path = os.path.join(summaries_dir, "run" + str(run_idx + 1))
            for subdir in GetLogdirSubdirectories(run_path):
                rpath = os.path.relpath(subdir, summaries_dir)
                em.AddRun(subdir, name=rpath)
        em.Reload()
        data_runs = tf_scalar_data(em)
        # Merge this group's scalars into the accumulated data.
        for scalar, scalar_data in data_runs.items():
            if scalar not in data:
                data[scalar] = scalar_data
            else:
                for task, epochs_values in scalar_data.items():
                    data[scalar][task]["values"].extend(
                        epochs_values["values"])
    logging.info("Data processed, plotting...")
    plot_tasks(data,
               x_label,
               smoothing_function=smoothing_function,
               max_reward=max_reward,
               xmin=xmin,
               xmax=xmax,
               legend=legend,
               save_directory=save_directory,
               show_plots=show_plots)
# Example 3
def plot_tf_scalar_summaries(summaries_dir,
                             xmin=None,
                             xmax=None,
                             smoothing_function=None,
                             max_reward=None,
                             x_label="episode",
                             legend=True,
                             save_directory=None,
                             show_plots=True):
    """Plot TensorFlow scalar summaries."""
    # Load every run found under summaries_dir, then read it all in.
    multiplexer = EventMultiplexer()
    multiplexer.AddRunsFromDirectory(summaries_dir)
    multiplexer.Reload()
    scalar_data = tf_scalar_data(multiplexer)
    plot_tasks(scalar_data,
               x_label,
               smoothing_function=smoothing_function,
               max_reward=max_reward,
               xmin=xmin,
               xmax=xmax,
               legend=legend,
               save_directory=save_directory,
               show_plots=show_plots)
# Example 4
def plot_tf_scalar_summaries_subdirs(summaries_dir,
                                     xmin=None,
                                     xmax=None,
                                     smoothing_function=None,
                                     max_reward=None,
                                     x_label="episode",
                                     legend=True,
                                     save_directory=None,
                                     show_plots=True):
    """Process each subdirectory of summaries_dir separately before plotting TensorFlow scalar summaries."""
    _, exp_dirs, _ = next(os.walk(summaries_dir))

    merged = {}
    for exp_dir in exp_dirs:
        # Only subdirectories named "exp*" hold experiment summaries.
        if not exp_dir.startswith("exp"):
            continue
        em = EventMultiplexer().AddRunsFromDirectory(
            os.path.join(summaries_dir, exp_dir)).Reload()
        # Fold this subdirectory's scalars into the accumulated result.
        for scalar, scalar_data in tf_scalar_data(em).items():
            if scalar in merged:
                for task, epochs_values in scalar_data.items():
                    merged[scalar][task]["values"].extend(
                        epochs_values["values"])
            else:
                merged[scalar] = scalar_data

    plot_tasks(merged,
               x_label,
               smoothing_function=smoothing_function,
               max_reward=max_reward,
               xmin=xmin,
               xmax=xmax,
               legend=legend,
               save_directory=save_directory,
               show_plots=show_plots)
# Example 5
class Open3DPluginDataReader:
    """Manage TB event data and geometry data for common use by all
    Open3DPluginWindow instances. This is thread safe for simultaneous use by
    multiple browser clients with a multi-threaded web server. Read geometry
    data is cached in memory.

    Args:
        logdir (str): TensorBoard logs directory.
        cache_max_items (int): Max geometry elements to be cached in memory.
    """

    def __init__(self, logdir, cache_max_items=128):
        self.logdir = logdir
        self.event_mux = EventMultiplexer(tensor_size_guidance={
            metadata.PLUGIN_NAME: 0  # Store all metadata in RAM
        })
        self._run_to_tags = {}
        self._event_lock = threading.Lock()  # Protect TB event file data
        # Geometry data reading
        self._tensor_events = dict()
        self.geometry_cache = LRUCache(max_items=cache_max_items)
        self.runtag_prop_shape = dict()
        self._file_handles = {}  # {filename, (open_handle, read_lock)}
        self._file_handles_lock = threading.Lock()
        self.reload_events()

    def reload_events(self):
        """Reload event file"""
        self.event_mux.AddRunsFromDirectory(self.logdir)
        self.event_mux.Reload()
        run_tags = self.event_mux.PluginRunToTagToContent(metadata.PLUGIN_NAME)
        with self._event_lock:
            self._run_to_tags = {
                run: sorted(tagdict.keys())
                for run, tagdict in sorted(run_tags.items())
            }
            self._tensor_events = dict()  # Invalidate index
        # Close all open files
        with self._file_handles_lock:
            while len(self._file_handles) > 0:
                unused_filename, file_handle = self._file_handles.popitem()
                with file_handle[1]:
                    file_handle[0].close()

        _log.debug(f"Event data reloaded: {self._run_to_tags}")

    def is_active(self):
        """Do we have any Open3D data to display?"""
        with self._event_lock:
            return any(len(tags) > 0 for tags in self._run_to_tags.values())

    @property
    def run_to_tags(self):
        """Locked access to the run_to_tags map."""
        with self._event_lock:
            return self._run_to_tags

    def tensor_events(self, run):
        """Locked access to tensor events for a run (built lazily per run)."""
        with self._event_lock:
            if run not in self._tensor_events:
                self._tensor_events[run] = {
                    tag: self.event_mux.Tensors(run, tag)
                    for tag in self._run_to_tags[run]
                }
            return self._tensor_events[run]

    def get_label_to_names(self, run, tag):
        """Get label (id) to name (category) mapping for a tag."""
        md_proto = self.event_mux.SummaryMetadata(run, tag)
        lab2name = metadata.parse_plugin_metadata(md_proto.plugin_data.content)
        return dict(sorted(lab2name.items()))

    def read_from_file(self, filename, read_location, read_size,
                       read_masked_crc32c):
        """Read data from the file ``filename`` from a given offset
        ``read_location`` and size ``read_size``. Data is validated with the provided
        ``masked_crc32c``. This is thread safe and manages a list of open files.

        Returns:
            The read bytes on success, else None on CRC mismatch.
        """
        with self._file_handles_lock:
            if filename not in self._file_handles:
                self._file_handles[filename] = (_fileopen(filename, "rb"),
                                                threading.Lock())
                if not self._file_handles[filename][0].seekable():
                    raise RuntimeError(filename + " does not support seeking."
                                       " This storage is not supported.")
            # lock to seek + read
            file_handle = self._file_handles[filename]
            file_handle[1].acquire()

        try:
            file_handle[0].seek(read_location)
            buf = file_handle[0].read(read_size)
        finally:
            # Release even if seek/read raise; otherwise a single I/O error
            # would leave the per-file lock held and deadlock later readers.
            file_handle[1].release()
        if masked_crc32c(buf) == read_masked_crc32c:
            return buf
        else:
            return None

    def update_runtag_prop_shape(self, run, tag, geometry,
                                 inference_data_proto):
        """Update list of custom properties and their shapes for different runs
        and tags.
        """
        tag_prop_shape = self.runtag_prop_shape.setdefault(run, dict())
        prop_shape = tag_prop_shape.setdefault(tag, dict())
        # Only compute once per (run, tag), and only for non-empty geometry.
        if len(prop_shape) == 0 and not geometry.is_empty():
            for prop_type in ('point', 'vertex'):  # exclude 'line'
                if hasattr(geometry, prop_type):
                    label_props, custom_props = _classify_properties(
                        getattr(geometry, prop_type))
                    prop_shape.update(custom_props)
                    prop_shape.update(label_props)
            if len(inference_data_proto.inference_result) > 0:
                # Only bbox labels can be visualized. Scalars such as
                # 'confidence' from BoundingBox3D requires
                # unlitGradient.GRADIENT shader support for LineSet.
                prop_shape.update({'labels': 1})

    def read_geometry(self, run, tag, step, batch_idx, step_to_idx):
        """Geometry reader from msgpack files.

        Returns:
            Tuple of (geometry, InferenceData proto). The geometry is cached;
            the aux inference data is re-read on every call.
        """
        idx = step_to_idx[step]
        metadata_proto = plugin_data_pb2.Open3DPluginData()
        run_tensor_events = self.tensor_events(run)
        metadata_proto.ParseFromString(
            run_tensor_events[tag][idx].tensor_proto.string_val[0])
        data_dir = PluginDirectory(os.path.join(self.logdir, run),
                                   metadata.PLUGIN_NAME)
        filename = os.path.join(data_dir, metadata_proto.batch_index.filename)
        read_location = metadata_proto.batch_index.start_size[batch_idx].start
        read_size = metadata_proto.batch_index.start_size[batch_idx].size
        read_masked_crc32c = metadata_proto.batch_index.start_size[
            batch_idx].masked_crc32c
        cache_key = (filename, read_location, read_size, run, tag, step,
                     batch_idx)
        geometry = self.geometry_cache.get(cache_key)
        if geometry is None:  # Read from storage
            buf = self.read_from_file(filename, read_location, read_size,
                                      read_masked_crc32c)
            if buf is None:
                raise IOError(f"Geometry {cache_key} reading failed! CRC "
                              "mismatch in msgpack data.")
            msg_tag, msg_step, geometry = o3d.io.rpc.data_buffer_to_meta_geometry(
                buf)
            if geometry is None:
                raise IOError(f"Geometry {cache_key} reading failed! Possible "
                              "msgpack or TensorFlow event file corruption.")
            if tag != msg_tag or step != msg_step:
                _log.warning(
                    f"Mismatch between TensorFlow event (tag={tag}, step={step})"
                    f" and msgpack (tag={msg_tag}, step={msg_step}) data. "
                    "Possible data corruption.")
            _log.debug(f"Geometry {cache_key} reading successful!")
            self.geometry_cache.put(cache_key, geometry)

        # Fill in properties by reference
        for prop_ref in metadata_proto.property_references:
            prop = plugin_data_pb2.Open3DPluginData.GeometryProperty.Name(
                prop_ref.geometry_property)
            if prop_ref.step_ref >= step:
                _log.warning(
                    f"Incorrect future step reference {prop_ref.step_ref} for"
                    f" property {prop} of geometry at step {step}. Ignoring.")
                continue
            geometry_ref = self.read_geometry(run, tag, prop_ref.step_ref,
                                              batch_idx, step_to_idx)[0]
            # "vertex_normals" -> ["vertex", "normals"]
            prop_map, prop_attribute = prop.split("_")
            if prop_map == "vertex" and not isinstance(
                    geometry, o3d.t.geometry.TriangleMesh):
                prop_map = "point"
            # geometry.vertex["normals"] = geometry_ref.vertex["normals"]
            getattr(geometry, prop_map)[prop_attribute] = getattr(
                geometry_ref, prop_map)[prop_attribute]

        # Aux data (e.g. labels, confidences) -> not cached
        aux_read_location = metadata_proto.batch_index.start_size[
            batch_idx].aux_start
        aux_read_size = metadata_proto.batch_index.start_size[
            batch_idx].aux_size
        aux_read_masked_crc32c = metadata_proto.batch_index.start_size[
            batch_idx].aux_masked_crc32c
        data_bbox_proto = plugin_data_pb2.InferenceData()
        if aux_read_size > 0:
            data_bbox_serial = self.read_from_file(filename, aux_read_location,
                                                   aux_read_size,
                                                   aux_read_masked_crc32c)
            if data_bbox_serial is None:
                raise IOError(f"Aux data for {cache_key} reading failed! CRC "
                              "mismatch in protobuf data.")
            data_bbox_proto.ParseFromString(data_bbox_serial)

        self.update_runtag_prop_shape(run, tag, geometry, data_bbox_proto)
        return geometry, data_bbox_proto
# Example 6
class Open3DPluginDataReader:
    """Manage TB event data and geometry data for common use by all
    Open3DPluginWindow instances. This is thread safe for simultaneous use by
    multiple browser clients with a multi-threaded web server. Read geometry
    data is cached in memory.

    Args:
        logdir (str): TensorBoard logs directory.
        cache_max_items (int): Max geometry elements to be cached in memory.
    """

    def __init__(self, logdir, cache_max_items=128):
        self.logdir = logdir
        self.event_mux = EventMultiplexer(tensor_size_guidance={
            metadata.PLUGIN_NAME: 0  # Store all metadata in RAM
        }).AddRunsFromDirectory(logdir)
        self._run_to_tags = {}
        self._event_lock = threading.Lock()  # Protect TB event file data
        # Geometry data reading
        self._tensor_events = dict()
        self.geometry_cache = LRUCache(max_items=cache_max_items)
        self._file_handles = {}  # {filename, (open_handle, read_lock)}
        self._file_handles_lock = threading.Lock()
        self.reload_events()

    def reload_events(self):
        """Reload event file"""
        self.event_mux.Reload()
        run_tags = self.event_mux.PluginRunToTagToContent(metadata.PLUGIN_NAME)
        with self._event_lock:
            self._run_to_tags = {
                run: list(tagdict.keys()) for run, tagdict in run_tags.items()
            }
            self._tensor_events = dict()  # Invalidate index
        # Close all open files
        with self._file_handles_lock:
            while len(self._file_handles) > 0:
                unused_filename, file_handle = self._file_handles.popitem()
                with file_handle[1]:
                    file_handle[0].close()

        _log.debug(f"Event data reloaded: {self._run_to_tags}")

    def is_active(self):
        """Do we have any Open3D data to display?"""
        with self._event_lock:
            return any(len(tags) > 0 for tags in self._run_to_tags.values())

    @property
    def run_to_tags(self):
        """Locked access to the run_to_tags map."""
        with self._event_lock:
            return self._run_to_tags

    def tensor_events(self, run):
        """Locked access to tensor events for a run (built lazily per run)."""
        with self._event_lock:
            if run not in self._tensor_events:
                self._tensor_events[run] = {
                    tag: self.event_mux.Tensors(run, tag)
                    for tag in self._run_to_tags[run]
                }
            return self._tensor_events[run]

    def read_geometry(self, run, tag, step, batch_idx, step_to_idx):
        """Geometry reader from msgpack files.

        Returns:
            The geometry for (run, tag, step, batch_idx), cached after the
            first read, with referenced properties filled in from earlier
            steps.
        """
        idx = step_to_idx[step]
        metadata_proto = plugin_data_pb2.Open3DPluginData()
        run_tensor_events = self.tensor_events(run)
        metadata_proto.ParseFromString(
            run_tensor_events[tag][idx].tensor_proto.string_val[0])
        data_dir = PluginDirectory(os.path.join(self.logdir, run),
                                   metadata.PLUGIN_NAME)
        filename = os.path.join(data_dir, metadata_proto.batch_index.filename)
        read_location = metadata_proto.batch_index.start_size[batch_idx].start
        read_size = metadata_proto.batch_index.start_size[batch_idx].size
        read_masked_crc32c = metadata_proto.batch_index.start_size[
            batch_idx].masked_crc32c
        cache_key = (filename, read_location, read_size, run, tag, step,
                     batch_idx)
        geometry = self.geometry_cache.get(cache_key)
        if geometry is None:  # Read from storage
            with self._file_handles_lock:
                if filename not in self._file_handles:
                    self._file_handles[filename] = (_fileopen(filename, "rb"),
                                                    threading.Lock())
                    if not self._file_handles[filename][0].seekable():
                        raise RuntimeError(filename +
                                           " does not support seeking."
                                           " This storage is not supported.")
                # lock to seek + read
                file_handle = self._file_handles[filename]
                file_handle[1].acquire()

            try:
                file_handle[0].seek(read_location)
                buf = file_handle[0].read(read_size)
            finally:
                # Release even if seek/read raise; otherwise a single I/O
                # error would leave the per-file lock held and deadlock all
                # later readers of this file.
                file_handle[1].release()
            if read_masked_crc32c != masked_crc32c(buf):
                raise IOError(f"Geometry {cache_key} reading failed! CRC "
                              "mismatch in msgpack data.")
            msg_tag, msg_step, geometry = o3d.io.rpc.data_buffer_to_meta_geometry(
                buf)
            if geometry is None:
                raise IOError(f"Geometry {cache_key} reading failed! Possible "
                              "msgpack or TensorFlow event file corruption.")
            if tag != msg_tag or step != msg_step:
                _log.warning(
                    f"Mismatch between TensorFlow event (tag={tag}, step={step})"
                    f" and msgpack (tag={msg_tag}, step={msg_step}) data. "
                    "Possible data corruption.")
            _log.debug(f"Geometry {cache_key} reading successful!")
            self.geometry_cache.put(cache_key, geometry)

        # Fill in properties by reference
        for prop_ref in metadata_proto.property_references:
            prop = plugin_data_pb2.Open3DPluginData.GeometryProperty.Name(
                prop_ref.geometry_property)
            if prop_ref.step_ref >= step:
                _log.warning(
                    f"Incorrect future step reference {prop_ref.step_ref} for"
                    f" property {prop} of geometry at step {step}. Ignoring.")
                continue
            geometry_ref = self.read_geometry(run, tag, prop_ref.step_ref,
                                              batch_idx, step_to_idx)
            # "vertex_normals" -> ["vertex", "normals"]
            prop_map, prop_attribute = prop.split("_")
            if prop_map == "vertex" and not isinstance(
                    geometry, o3d.t.geometry.TriangleMesh):
                prop_map = "point"
            # geometry.vertex["normals"] = geometry_ref.vertex["normals"]
            getattr(geometry, prop_map)[prop_attribute] = getattr(
                geometry_ref, prop_map)[prop_attribute]

        return geometry