Example No. 1
def initialize():
    tf.compat.v1.disable_eager_execution()

    options.initialize_with_logfiles(get_parser())
    logging.info(f'-- Starting --')
    logging.info(f'Host: {socket.gethostname()}')
    logging.info(f'Process id (pid): {os.getpid()}')

    if FLAGS.comment:
        logging.info(f'Comment: {FLAGS.comment}')
    logging.info(f'Raw command: {" ".join(map(shlex.quote, sys.argv))}')
    logging.info(f'Parsed flags: {FLAGS}')
    tfu.set_data_format(FLAGS.data_format)
    tfu.set_dtype(tf.float32 if FLAGS.dtype == 'float32' else tf.float16)

    if FLAGS.batch_size_test is None:
        FLAGS.batch_size_test = FLAGS.batch_size

    if FLAGS.checkpoint_dir is None:
        FLAGS.checkpoint_dir = FLAGS.logdir

    FLAGS.checkpoint_dir = util.ensure_absolute_path(
        FLAGS.checkpoint_dir, root=f'{paths.DATA_ROOT}/experiments')
    os.makedirs(FLAGS.checkpoint_dir, exist_ok=True)

    if not FLAGS.pred_path:
        FLAGS.pred_path = f'predictions_{FLAGS.dataset}.npz'
    base = os.path.dirname(
        FLAGS.load_path) if FLAGS.load_path else FLAGS.checkpoint_dir
    FLAGS.pred_path = util.ensure_absolute_path(FLAGS.pred_path, base)

    if FLAGS.bone_length_dataset is None:
        FLAGS.bone_length_dataset = FLAGS.dataset

    if FLAGS.load_path:
        if FLAGS.load_path.endswith('.index') or FLAGS.load_path.endswith(
                '.meta'):
            FLAGS.load_path = os.path.splitext(FLAGS.load_path)[0]
        FLAGS.load_path = util.ensure_absolute_path(FLAGS.load_path,
                                                    FLAGS.checkpoint_dir)

    # Override the default data format in slim layers
    enter_context(slim.arg_scope(
        [slim.conv2d, slim.conv3d, slim.conv3d_transpose, slim.conv2d_transpose,
         slim.avg_pool2d, slim.separable_conv2d, slim.max_pool2d, slim.batch_norm,
         slim.spatial_softmax],
        data_format=tfu.data_format()))

    # Override default paddings to SAME
    enter_context(
        slim.arg_scope([slim.avg_pool2d, slim.max_pool2d], padding='SAME'))
    tf.compat.v2.random.set_seed(FLAGS.seed)
    if FLAGS.gui:
        plt.switch_backend('TkAgg')
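
Note that enter_context() is not a TensorFlow or standard-library builtin; the snippet assumes a small helper that keeps the entered slim.arg_scope active for the rest of the run. A minimal sketch of such a helper, assuming it is backed by a module-level contextlib.ExitStack (the name _exit_stack is hypothetical):

import contextlib

# Process-wide stack; anything entered here stays active until the process exits.
_exit_stack = contextlib.ExitStack()


def enter_context(context_manager):
    """Enter a context manager whose scope should cover the whole run."""
    return _exit_stack.enter_context(context_manager)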
Example No. 2
def initialize(args=None):
    options.initialize_with_logfiles(get_parser(), args)
    logger.info(f'-- Starting --')
    logger.info(f'Host: {socket.gethostname()}')
    logger.info(f'Process id (pid): {os.getpid()}')

    if FLAGS.comment:
        logger.info(f'Comment: {FLAGS.comment}')
    logger.info(f'Raw command: {" ".join(map(shlex.quote, sys.argv))}')
    logger.info(f'Parsed flags: {FLAGS}')
    tfu.set_data_format(FLAGS.data_format)
    tfu.set_dtype(tf.float32 if FLAGS.dtype == 'float32' else tf.float16)

    if FLAGS.batch_size_test is None:
        FLAGS.batch_size_test = FLAGS.batch_size

    if FLAGS.checkpoint_dir is None:
        FLAGS.checkpoint_dir = FLAGS.logdir

    FLAGS.checkpoint_dir = util.ensure_absolute_path(
        FLAGS.checkpoint_dir, root=f'{paths.DATA_ROOT}/experiments')
    os.makedirs(FLAGS.checkpoint_dir, exist_ok=True)

    if not FLAGS.pred_path:
        FLAGS.pred_path = f'predictions_{FLAGS.dataset}.npz'
    base = os.path.dirname(
        FLAGS.load_path) if FLAGS.load_path else FLAGS.checkpoint_dir
    FLAGS.pred_path = util.ensure_absolute_path(FLAGS.pred_path, base)

    if FLAGS.bone_length_dataset is None:
        FLAGS.bone_length_dataset = FLAGS.dataset

    if FLAGS.model_joints is None:
        FLAGS.model_joints = FLAGS.dataset

    if FLAGS.output_joints is None:
        FLAGS.output_joints = FLAGS.dataset

    if FLAGS.load_path:
        if FLAGS.load_path.endswith('.index') or FLAGS.load_path.endswith(
                '.meta'):
            FLAGS.load_path = os.path.splitext(FLAGS.load_path)[0]
        FLAGS.load_path = util.ensure_absolute_path(FLAGS.load_path,
                                                    FLAGS.checkpoint_dir)

    tf.random.set_seed(FLAGS.seed)
    if FLAGS.viz:
        plt.switch_backend('TkAgg')

    FLAGS.backbone = FLAGS.backbone.replace('_', '-')

    for gpu in tf.config.experimental.list_physical_devices('GPU'):
        tf.config.experimental.set_memory_growth(gpu, True)

    if FLAGS.dtype == 'float16':
        tf.keras.mixed_precision.set_global_policy('mixed_float16')
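
The last two steps are the TF2-specific part of this variant: enabling GPU memory growth and switching Keras to the mixed_float16 policy. A small standalone check of what the policy does, using only standard tf.keras APIs (the Dense layer and shapes here are arbitrary):

import tensorflow as tf

tf.keras.mixed_precision.set_global_policy('mixed_float16')

layer = tf.keras.layers.Dense(4)
outputs = layer(tf.zeros([1, 8]))

print(layer.dtype)          # 'float32': variables are kept in float32
print(layer.compute_dtype)  # 'float16': computations run in float16
print(outputs.dtype)        # <dtype: 'float16'>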
Example No. 3
def init_worker_process_flags(flags):
    import os
    # Parallelism happens at the process level, so keep each worker single-threaded
    # and avoid OpenMP re-initialization issues after fork.
    os.environ['OMP_NUM_THREADS'] = '1'
    os.environ['KMP_INIT_AT_FORK'] = 'FALSE'
    from options import FLAGS
    # Copy the parent's parsed flag values into this worker's own FLAGS object.
    for key in flags.__dict__:
        setattr(FLAGS, key, getattr(flags, key))
    import tfu
    tfu.set_data_format(FLAGS.data_format)
    init_worker_process()
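
init_worker_process_flags has the shape of a multiprocessing initializer: the parent's flags have to be copied explicitly because module globals are not shared across processes. A hedged usage sketch; process_item and the pool size are hypothetical placeholders:

import multiprocessing as mp

from options import FLAGS


def process_item(i):
    # Placeholder task; the real per-item work lives elsewhere in the codebase.
    return i * i


if __name__ == '__main__':
    with mp.Pool(processes=4,
                 initializer=init_worker_process_flags,
                 initargs=(FLAGS,)) as pool:
        results = pool.map(process_item, range(8))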
Example No. 4
def initialize():
    global FLAGS
    parse_and_set_global_flags()
    setup_logging()
    logging.info(f'-- Starting --')
    logging.info(f'Host: {socket.gethostname()}')
    logging.info(f'Process id (pid): {os.getpid()}')

    if FLAGS.comment:
        logging.info(f'Comment: {FLAGS.comment}')
    logging.info(f'Raw command: {" ".join(map(shlex.quote, sys.argv))}')
    logging.info(f'Parsed flags: {FLAGS}')
    tfu.set_data_format(FLAGS.data_format)

    if FLAGS.dtype == 'float32':
        tfu.set_dtype(tf.float32)
    elif FLAGS.dtype == 'float16':
        tfu.set_dtype(tf.float16)
    else:
        raise ValueError(f'Training dtype {FLAGS.dtype} not supported, only float16/32.')

    # We parallelize on a coarser level already, openmp just makes things slower
    os.environ['OMP_NUM_THREADS'] = '1'

    # Override the default data format in slim layers
    enter_context(slim.arg_scope(
        [slim.conv2d, slim.conv3d, slim.conv3d_transpose, slim.conv2d_transpose, slim.avg_pool2d,
         slim.separable_conv2d, slim.max_pool2d, slim.batch_norm, slim.spatial_softmax],
        data_format=tfu.data_format()))

    # Override default paddings to SAME
    enter_context(slim.arg_scope([slim.avg_pool2d, slim.max_pool2d], padding='SAME'))

    if FLAGS.gui:
        plt.switch_backend('TkAgg')

    tf.set_random_seed(FLAGS.seed)
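
All four variants read their configuration from a global FLAGS object that is filled in elsewhere (get_parser() and parse_and_set_global_flags() are not shown). As a rough illustration only, a minimal argparse-based sketch of that step; it covers just a few of the flags referenced above, and the defaults are assumptions:

import argparse

FLAGS = None


def parse_and_set_global_flags():
    parser = argparse.ArgumentParser()
    parser.add_argument('--comment', default='')
    parser.add_argument('--data-format', dest='data_format', default='NHWC')
    parser.add_argument('--dtype', default='float32', choices=['float16', 'float32'])
    parser.add_argument('--seed', type=int, default=0)
    parser.add_argument('--gui', action='store_true')
    global FLAGS
    FLAGS = parser.parse_args()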