Example #1
0
    def __init__(self, db_uri, db_connection_provider, purge_orphaned_data,
                 max_reload_threads):
        """Constructor for `DbImportMultiplexer`.

        Args:
          db_uri: A URI to the database file in use.
          db_connection_provider: Provider function for creating a DB
            connection.
          purge_orphaned_data: Whether to discard any events that were
            "orphaned" by a TensorFlow restart.
          max_reload_threads: The max number of threads that TensorBoard can
            use to reload runs. Each thread reloads one run at a time. If not
            provided, reloads runs serially (one after another).
        """
        logger.info("DbImportMultiplexer initializing for %s", db_uri)
        super(DbImportMultiplexer, self).__init__()
        self.db_uri = db_uri
        self.db_connection_provider = db_connection_provider
        self._purge_orphaned_data = purge_orphaned_data
        self._max_reload_threads = max_reload_threads
        # No sink attached yet; presumably set up by a later call — confirm
        # against the rest of the class.
        self._event_sink = None
        # Maps run identifiers to their loader objects.
        self._run_loaders = {}

        if self._purge_orphaned_data:
            # Purging is accepted but unimplemented for --db_import; warn
            # instead of failing. (warning() replaces the deprecated warn().)
            logger.warning(
                "--db_import does not yet support purging orphaned data")

        conn = self.db_connection_provider()
        # Set the DB in WAL mode so reads don't block writes.
        conn.execute("PRAGMA journal_mode=wal")
        conn.execute("PRAGMA synchronous=normal")  # Recommended for WAL mode
        sqlite_writer.initialize_schema(conn)
        logger.info("DbImportMultiplexer done initializing")
Example #2
0
  def __init__(self,
               db_connection_provider,
               purge_orphaned_data,
               max_reload_threads,
               use_import_op):
    """Constructor for `DbImportMultiplexer`.

    Args:
      db_connection_provider: Provider function for creating a DB connection.
      purge_orphaned_data: Whether to discard any events that were "orphaned" by
        a TensorFlow restart.
      max_reload_threads: The max number of threads that TensorBoard can use
        to reload runs. Each thread reloads one run at a time. If not provided,
        reloads runs serially (one after another).
      use_import_op: If True, use TensorFlow's import_event() op for imports,
        otherwise use TensorBoard's own sqlite ingestion logic.
    """
    # (Stray trailing semicolon removed — not needed in Python.)
    tf.logging.info('DbImportMultiplexer initializing')
    self._db_connection_provider = db_connection_provider
    self._purge_orphaned_data = purge_orphaned_data
    self._max_reload_threads = max_reload_threads
    self._use_import_op = use_import_op
    # No sink attached yet; presumably set up by a later call — confirm
    # against the rest of the class.
    self._event_sink = None
    # Maps run identifiers to their loader objects.
    self._run_loaders = {}

    if self._purge_orphaned_data:
      # Purging is accepted but unimplemented for --db_import; warn instead
      # of failing.
      tf.logging.warning(
          '--db_import does not yet support purging orphaned data')

    conn = self._db_connection_provider()
    # Extract the file path of the DB from the DB connection.
    # PRAGMA database_list rows are (seq, name, file); the 'main' schema's
    # file column is the DB path.
    rows = conn.execute('PRAGMA database_list').fetchall()
    db_name_to_path = {row[1]: row[2] for row in rows}
    self._db_path = db_name_to_path['main']
    tf.logging.info('DbImportMultiplexer using db_path %s', self._db_path)
    # Set the DB in WAL mode so reads don't block writes.
    conn.execute('PRAGMA journal_mode=wal')
    conn.execute('PRAGMA synchronous=normal')  # Recommended for WAL mode
    sqlite_writer.initialize_schema(conn)
    tf.logging.info('DbImportMultiplexer done initializing')
Example #3
0
    def __init__(self, db_connection_provider, purge_orphaned_data,
                 max_reload_threads, use_import_op):
        """Constructor for `DbImportMultiplexer`.

        Args:
          db_connection_provider: Provider function for creating a DB
            connection.
          purge_orphaned_data: Whether to discard any events that were
            "orphaned" by a TensorFlow restart.
          max_reload_threads: The max number of threads that TensorBoard can
            use to reload runs. Each thread reloads one run at a time. If not
            provided, reloads runs serially (one after another).
          use_import_op: If True, use TensorFlow's import_event() op for
            imports, otherwise use TensorBoard's own sqlite ingestion logic.
        """
        tf.logging.info('DbImportMultiplexer initializing')
        self._db_connection_provider = db_connection_provider
        self._purge_orphaned_data = purge_orphaned_data
        self._max_reload_threads = max_reload_threads
        self._use_import_op = use_import_op
        # No sink attached yet; presumably set up by a later call — confirm
        # against the rest of the class.
        self._event_sink = None
        # Maps run identifiers to their loader objects.
        self._run_loaders = {}

        if self._purge_orphaned_data:
            # Purging is accepted but unimplemented for --db_import; warn
            # instead of failing.
            tf.logging.warning(
                '--db_import does not yet support purging orphaned data')

        conn = self._db_connection_provider()
        # Extract the file path of the DB from the DB connection.
        # PRAGMA database_list rows are (seq, name, file); the 'main'
        # schema's file column is the DB path.
        rows = conn.execute('PRAGMA database_list').fetchall()
        db_name_to_path = {row[1]: row[2] for row in rows}
        self._db_path = db_name_to_path['main']
        tf.logging.info('DbImportMultiplexer using db_path %s', self._db_path)
        # Set the DB in WAL mode so reads don't block writes.
        conn.execute('PRAGMA journal_mode=wal')
        conn.execute('PRAGMA synchronous=normal')  # Recommended for WAL mode
        sqlite_writer.initialize_schema(conn)
        tf.logging.info('DbImportMultiplexer done initializing')