Code example #1 (score: 0)
File: main.py — Project: soychanq/alf
def main():
    """Run the unit-test suite with absl logging enabled at INFO level.

    Routes stdlib logging records through absl's handler, then — when a
    CUDA device is present — makes GPU float tensors the torch default
    before handing control to unittest.
    """
    # Send Python logging output through absl's handler at INFO verbosity.
    logging.use_absl_handler()
    logging.set_verbosity(logging.INFO)

    # Default newly created tensors to CUDA floats when a GPU is available.
    if torch.cuda.is_available():
        torch.set_default_tensor_type(torch.cuda.FloatTensor)

    unittest.main()
Code example #2 (score: 0)
def set_logging_format():
    """Attach a hostname-aware formatter and filter to absl's log handler."""
    log_format = "%(levelname)s [%(hostname)s %(hostip)s] %(asctime)-8s: %(message)s"
    time_format = "%b %d %H:%M:%S"
    absl_handler = logging.get_absl_handler()
    # HostnameFilter is expected to populate %(hostname)s / %(hostip)s.
    absl_handler.setFormatter(normal_logging.Formatter(log_format, time_format))
    absl_handler.addFilter(HostnameFilter())
    logging.use_absl_handler()
Code example #3 (score: 0)
File: cache.py — Project: pytorch/kineto
 def __setstate__(self, state):
     """Restore pickled state inside a freshly spawned process.

     The default logging level in a new process is warning, so only warning
     and error logs reach the streams; calling use_absl_handler here restores
     absl's handler in the child process.
     """
     from absl import logging
     logging.use_absl_handler()
     logger.debug('Cache.__setstate__ %s ' % (state, ))
     # state carries both the instance attributes and the module-level
     # filesystem registry captured at pickle time.
     attrs, file._REGISTERED_FILESYSTEMS = state
     self.__dict__.update(attrs)
Code example #4 (score: 0)
def main():
    """Interactively play a CARLA environment when --manual is set.

    Returns True after a play session; False immediately when the
    --manual flag is absent.
    """
    FLAGS(sys.argv)
    if not FLAGS.manual:
        return False

    logging.use_absl_handler()
    logging.set_verbosity(logging.INFO)

    env = suite_carla.CarlaEnvironment(
        batch_size=1,
        map_name='Town01',
        num_other_vehicles=20,
        num_walkers=20)
    # Always release the simulator connection, even if play() raises.
    try:
        play(env)
    finally:
        env.close()
    return True
Code example #5 (score: 0)
File: app.py — Project: bazelbuild/bazel
def _run_init(argv):
  """Does one-time initialization; on reruns only re-parses flags."""
  if not _run_init.done:
    command_name.make_process_name_useful()
    # Route stdlib logging through absl's handler.
    logging.use_absl_handler()
    argv = register_and_parse_flags_with_usage(argv=argv)
    if faulthandler:
      try:
        faulthandler.enable()
      except Exception:  # pylint: disable=broad-except
        # Some tests verify stderr output very closely, so don't print
        # anything; a disabled faulthandler is a low-impact error.
        pass
    _run_init.done = True
    return argv
  return parse_flags_with_usage(argv)
Code example #6 (score: 0)
def _run_init(argv):
    """One-time process initialization; later calls only re-parse flags."""
    # Fast path: setup already happened, just re-parse.
    if _run_init.done:
        return parse_flags_with_usage(argv)

    command_name.make_process_name_useful()
    logging.use_absl_handler()  # install absl's logging handler
    argv = _register_and_parse_flags_with_usage(argv=argv)
    if faulthandler:
        try:
            faulthandler.enable()
        except Exception:  # pylint: disable=broad-except
            # Stay silent: some tests compare stderr exactly, and a
            # disabled faulthandler is a low-impact degradation.
            pass
    _run_init.done = True
    return argv
Code example #7 (score: 0)
def initialize_logging(log_dir: Optional[str] = None) -> None:
    """Initialize the logging system (absl log file + stderr mirroring).

    Args:
        log_dir: Destination directory for log files. Falls back to the
            absl --log_dir flag, then to CWD_LOG_DIR.
    """
    # Resolve target directory: explicit argument > --log_dir flag > default.
    chosen_dir = log_dir if log_dir is not None else FLAGS["log_dir"].value
    if not chosen_dir:
        chosen_dir = CWD_LOG_DIR
    FLAGS["log_dir"].value = chosen_dir

    # Equivalent of `mkdir -p ${log_dir}`.
    Path(chosen_dir).mkdir(parents=True, exist_ok=True)

    absl_handler = log.get_absl_handler()
    absl_handler.use_absl_log_file(log_dir=chosen_dir)
    log.use_absl_handler()

    # Mirror logs to stderr by default unless the user said otherwise.
    if not FLAGS["alsologtostderr"].present:
        FLAGS["alsologtostderr"].value = True

    # Default verbosity to INFO (absl verbosity 0) when not set explicitly.
    if not FLAGS["verbosity"].present:
        FLAGS["verbosity"].value = 0
        logging.root.setLevel(logging.INFO)
Code example #8 (score: 0)
    # Build the test dataset from the configured builder and shard it by
    # rank — presumably one disjoint slice per worker in a multi-rank
    # inference run (enclosing function starts above this view).
    assert p.testset_config is not None
    dataset_builder = SUPPORTED_DATASET_BUILDER[p.dataset_builder](
        p.testset_config)
    dataset_builder.shard(rank_size, rank)
    logging.info("shard result: %d" % len(dataset_builder))

    # Instantiate the solver type selected in the config and run inference
    # over the sharded dataset with a batch size of 1.
    inference_solver = SOLVERS[p.solver_type]
    solver = inference_solver(model,
                              dataset_builder,
                              config=p.inference_config)
    solver.inference(dataset_builder.as_dataset(batch_size=1),
                     rank_size=rank_size)


if __name__ == "__main__":
    # Install absl's logging handler and mark flags parsed so absl logging
    # works without going through app.run()'s flag parsing.
    logging.use_absl_handler()
    flags.FLAGS.mark_as_parsed()
    # Redirect absl's python handler to a local log file.
    # NOTE(review): this file object is never closed explicitly; it lives
    # for the lifetime of the process.
    logging.get_absl_handler().python_handler.stream = open(
        "inference.log", "w")
    logging.set_verbosity(logging.INFO)
    if len(sys.argv) < 2:
        logging.warning('Usage: python {} config_json_file'.format(
            sys.argv[0]))
        sys.exit()
    tf.random.set_seed(1)  # fixed seed for reproducible runs

    # Load the JSON config named on the command line and parse it into the
    # hyper-parameter object `p` consumed below (script continues past this
    # view). NOTE(review): `file` shadows the builtin here.
    jsonfile = sys.argv[1]
    with open(jsonfile) as file:
        config = json.load(file)
    p = parse_config(config)