Example #1
from absl import flags

flags.DEFINE_integer('num_eval_episodes', 10,
                     'The number of episodes to run eval on.')
flags.DEFINE_integer('eval_interval', 10000,
                     'Run eval every eval_interval train steps')
flags.DEFINE_boolean('eval_only', False,
                     'Whether to run evaluation only on trained checkpoints')
flags.DEFINE_boolean('eval_deterministic', False,
                     'Whether to run evaluation using a deterministic policy')
flags.DEFINE_integer('gpu_c', 0, 'GPU id for compute, e.g. Tensorflow.')

# Added for Gibson
flags.DEFINE_string('config_file', '../test/test.yaml',
                    'Config file for the experiment.')
flags.DEFINE_list(
    'model_ids', None,
    'A comma-separated list of model ids to overwrite config_file. '
    'len(model_ids) == num_parallel_environments')
flags.DEFINE_list(
    'model_ids_eval', None,
    'A comma-separated list of model ids to overwrite config_file for eval. '
    'len(model_ids_eval) == num_parallel_environments_eval')
flags.DEFINE_float('collision_reward_weight', 0.0, 'collision reward weight')
flags.DEFINE_string('env_mode', 'headless',
                    'Mode for the simulator (gui or headless)')
flags.DEFINE_string('env_type', 'gibson',
                    'Type for the Gibson environment (gibson or ig)')
flags.DEFINE_float('action_timestep', 1.0 / 10.0,
                   'Action timestep for the simulator')
flags.DEFINE_float('physics_timestep', 1.0 / 40.0,
                   'Physics timestep for the simulator')
flags.DEFINE_integer('gpu_g', 0, 'GPU id for graphics, e.g. Gibson.')
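
Note that absl list flags always parse to lists of strings, so numeric values need explicit conversion. A minimal sketch using the model_ids flag defined above (the argv values are illustrative):

from absl import app, flags

FLAGS = flags.FLAGS

def main(_):
    # e.g. --model_ids=area1,area2 parses to ['area1', 'area2'] (strings)
    if FLAGS.model_ids:
        print(FLAGS.model_ids)

if __name__ == '__main__':
    app.run(main)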
Example #2
from environment.GoEnv import Go
import time, os
import numpy as np
from agent.agent import RandomAgent
import tensorflow as tf

from absl import flags

FLAGS = flags.FLAGS

flags.DEFINE_integer("num_train_episodes", 10,
                     "Number of training episodes for each base policy.")
flags.DEFINE_integer("num_eval", 10, "Number of evaluation episodes")
flags.DEFINE_integer("eval_every", 2000,
                     "Episode frequency at which the agents are evaluated.")
flags.DEFINE_integer("learn_every", 128,
                     "Episode frequency at which the agents are evaluated.")
flags.DEFINE_list("hidden_layers_sizes", [128],
                  "Number of hidden units in the avg-net and Q-net.")
flags.DEFINE_integer("replay_buffer_capacity", int(2e5),
                     "Size of the replay buffer.")
flags.DEFINE_integer("reservoir_buffer_capacity", int(2e6),
                     "Size of the reservoir buffer.")


def main(unused_argv):
    begin = time.time()
    env = Go()
    agents = [RandomAgent(idx) for idx in range(2)]

    for ep in range(FLAGS.num_eval):
        time_step = env.reset()
        while not time_step.last():
            player_id = time_step.observations["current_player"]
Example #3
from absl import flags

import datetime
import os
import traceback
import string
import torch
import random

import gym_psketch.bots.model_bot
import imitation as IL
import generate_demo as demo
from taco import train_taco
from compile import train_compile

FLAGS = flags.FLAGS
flags.DEFINE_list('envs',
                  default=['makebedfull-v0'],
                  help='List of envs to train, comma-separated.')
flags.DEFINE_list('test_envs', default=[], help='Extra test envs')
flags.DEFINE_integer('max_steps', default=64, help='maximum environment steps')
flags.DEFINE_integer('width', default=10, help='width of env')
flags.DEFINE_integer('height', default=10, help='height of env')
flags.DEFINE_enum('arch',
                  enum_values=['mlp', 'lstm', 'omstack'],
                  default='lstm',
                  help='Architecture')
flags.DEFINE_integer('eval_episodes',
                     default=30,
                     help='Evaluation episode number')

# Model
flags.DEFINE_integer('hidden_size', default=64, help='model hidden size')
Example #4

    'not be included in the vocabulary.')
flags.DEFINE_float('sample', 1e-3, 'Subsampling rate.')
flags.DEFINE_integer('window_size', 10, 'Num of words on the left or right side'
    ' of target word within a window.')

flags.DEFINE_integer('hidden_size', 300, 'Length of word vector.')
flags.DEFINE_integer('negatives', 5, 'Num of negative words to sample.')
flags.DEFINE_float('power', 0.75, 'Distortion for negative sampling.')
flags.DEFINE_float('alpha', 0.025, 'Initial learning rate.')
flags.DEFINE_float('min_alpha', 0.0001, 'Final learning rate.')
flags.DEFINE_boolean('add_bias', True, 'Whether to add a bias term to the dot product '
    'between syn0 and syn1 vectors.')

flags.DEFINE_integer('log_per_steps', 10000, 'Every `log_per_steps` steps to '
    'log the value of the loss to be minimized.')
flags.DEFINE_list(
    'filenames', None, 'Names of comma-separated input text files.')
flags.DEFINE_string('in_dir', None, 'Input directory.')
flags.DEFINE_string('out_dir', '/tmp/word2vec', 'Output directory.')

FLAGS = flags.FLAGS

def get_files_from_dir(in_dir):
  if in_dir is None:
    return None
  else:
    in_files = []
    for f in os.listdir(in_dir):
      in_files.append(os.path.join(in_dir, f))
    return in_files

def main(_):  
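
Since --filenames and --in_dir are alternative input sources here, one way to require exactly one of them (a sketch, not part of the original snippet) is absl's built-in mutual-exclusion validator:

flags.mark_flags_as_mutual_exclusive(['filenames', 'in_dir'], required=True)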
Example #5
def define_base(data_dir=True,
                model_dir=True,
                clean=True,
                train_epochs=True,
                epochs_between_evals=True,
                stop_threshold=True,
                batch_size=True,
                num_gpu=True,
                hooks=True,
                export_dir=True):
    """Register base flags.

  Args:
    data_dir: Create a flag for specifying the input data directory.
    model_dir: Create a flag for specifying the model file directory.
    clean: Create a flag for removing the model_dir if it exists.
    train_epochs: Create a flag to specify the number of training epochs.
    epochs_between_evals: Create a flag to specify the frequency of testing.
    stop_threshold: Create a flag to specify a threshold accuracy or other
      eval metric which should trigger the end of training.
    batch_size: Create a flag to specify the batch size.
    num_gpu: Create a flag to specify the number of GPUs used.
    hooks: Create a flag to specify hooks for logging.
    export_dir: Create a flag to specify where a SavedModel should be exported.

  Returns:
    A list of flags for core.py to mark as key flags.
  """
    key_flags = []

    if data_dir:
        flags.DEFINE_string(name="data_dir",
                            short_name="dd",
                            default="/tmp",
                            help=help_wrap("The location of the input input."))
        key_flags.append("data_dir")

    if model_dir:
        flags.DEFINE_string(
            name="model_dir",
            short_name="md",
            default="/tmp",
            help=help_wrap("The location of the model checkpoint files."))
        key_flags.append("model_dir")

    if clean:
        flags.DEFINE_boolean(
            name="clean",
            default=False,
            help=help_wrap("If set, model_dir will be removed if it exists."))
        key_flags.append("clean")

    if train_epochs:
        flags.DEFINE_integer(
            name="train_epochs",
            short_name="te",
            default=1,
            help=help_wrap("The number of epochs used to train."))
        key_flags.append("train_epochs")

    if epochs_between_evals:
        flags.DEFINE_integer(
            name="epochs_between_evals",
            short_name="ebe",
            default=1,
            help=help_wrap("The number of training epochs to run between "
                           "evaluations."))
        key_flags.append("epochs_between_evals")

    if stop_threshold:
        flags.DEFINE_float(
            name="stop_threshold",
            short_name="st",
            default=None,
            help=help_wrap("If passed, training will stop at the earlier of "
                           "train_epochs and when the evaluation metric is  "
                           "greater than or equal to stop_threshold."))

    if batch_size:
        flags.DEFINE_integer(
            name="batch_size",
            short_name="bs",
            default=32,
            help=help_wrap(
                "Batch size for training and evaluation. When using "
                "multiple gpus, this is the global batch size for "
                "all devices. For example, if the batch size is 32 "
                "and there are 4 GPUs, each GPU will get 8 examples on "
                "each step."))
        key_flags.append("batch_size")

    if num_gpu:
        flags.DEFINE_integer(
            name="num_gpus",
            short_name="ng",
            default=1 if tf.test.is_gpu_available() else 0,
            help=help_wrap(
                "How many GPUs to use with the DistributionStrategies API. The "
                "default is 1 if TensorFlow can detect a GPU, and 0 otherwise."
            ))

    if hooks:
        # Construct a pretty summary of hooks.
        hook_list_str = (u"\ufeff  Hook:\n" + u"\n".join(
            [u"\ufeff    {}".format(key) for key in hooks_helper.HOOKS]))
        flags.DEFINE_list(
            name="hooks",
            short_name="hk",
            default="LoggingTensorHook",
            help=help_wrap(
                u"A list of (case insensitive) strings to specify the names of "
                u"training hooks.\n{}\n\ufeff  Example: `--hooks ProfilerHook,"
                u"ExamplesPerSecondHook`\n See official.utils.logs.hooks_helper "
                u"for details.".format(hook_list_str)))
        key_flags.append("hooks")

    if export_dir:
        flags.DEFINE_string(
            name="export_dir",
            short_name="ed",
            default=None,
            help=help_wrap(
                "If set, a SavedModel serialization of the model will "
                "be exported to this directory at the end of training. "
                "See the README for more details and relevant links."))
        key_flags.append("export_dir")

    return key_flags
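
A sketch of how a caller might consume the returned names (this caller is hypothetical, not from the snippet): absl's declare_key_flag marks a flag defined in another module as a key flag of the current one, which is what the returned list is for.

from absl import flags

for name in define_base(num_gpu=False, hooks=False):
    flags.declare_key_flag(name)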
Example #6
                   'ensemble members.')
flags.DEFINE_integer('width_multiplier', 10, 'Integer to multiply the number '
                     'of typical filters by. "k" in ResNet-n-k.')
flags.DEFINE_integer('per_core_batch_size', 64, 'Batch size per TPU core/GPU.')
flags.DEFINE_integer('batch_repetitions', 4, 'Number of times an example is '
                     'repeated in a training batch. More repetitions lead to '
                     'lower variance gradients and increased training time.')
flags.DEFINE_integer('seed', 0, 'Random seed.')
flags.DEFINE_float('base_learning_rate', 0.1,
                   'Base learning rate when total training batch size is 128.')
flags.DEFINE_integer(
    'lr_warmup_epochs', 1,
    'Number of epochs for a linear warmup to the initial '
    'learning rate. Use 0 to do no warmup.')
flags.DEFINE_float('lr_decay_ratio', 0.2, 'Amount to decay learning rate.')
flags.DEFINE_list('lr_decay_epochs', ['80', '160', '180'],
                  'Epochs to decay learning rate by.')
flags.DEFINE_float('l2', 3e-4, 'L2 coefficient.')
flags.DEFINE_enum(
    'dataset', 'cifar10', enum_values=['cifar10', 'cifar100'], help='Dataset.')
flags.DEFINE_string(
    'cifar100_c_path', None,
    'Path to the TFRecords files for CIFAR-100-C. Only valid '
    '(and required) if dataset is cifar100 and corruptions are evaluated.')
flags.DEFINE_integer(
    'corruptions_interval', 250,
    'Number of epochs between evaluating on the corrupted '
    'test data. Use -1 to never evaluate.')
flags.DEFINE_integer(
    'checkpoint_interval', -1,
    'Number of epochs between saving checkpoints. Use -1 to '
    'never save checkpoints.')
"""Pose embedding model training base code."""

import math

from absl import flags
import tensorflow.compat.v1 as tf
import tf_slim

from poem.core import data_utils
from poem.core import keypoint_utils
from poem.core import loss_utils
from poem.core import pipeline_utils

FLAGS = flags.FLAGS

flags.DEFINE_list('input_table', None,
                  'CSV of input tf.Example training table patterns.')
flags.mark_flag_as_required('input_table')

flags.DEFINE_string('train_log_dir', None,
                    'Directory to save checkpoints and summary logs.')
flags.mark_flag_as_required('train_log_dir')

flags.DEFINE_string('input_keypoint_profile_name_3d', 'LEGACY_3DH36M17',
                    'Profile name for input 3D keypoints.')

flags.DEFINE_string(
    'input_keypoint_profile_name_2d', 'LEGACY_2DCOCO13',
    'Profile name for 2D keypoints from input sources. Use None to ignore input'
    ' 2D keypoints.')

# See `common_module.SUPPORTED_TRAINING_MODEL_INPUT_KEYPOINT_TYPES`.
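
With flags.mark_flag_as_required, a missing flag aborts at parse time, before main ever runs. A minimal, self-contained sketch of the pattern (the main body is illustrative):

from absl import app, flags

flags.DEFINE_list('input_table', None, 'CSV of input table patterns.')
flags.mark_flag_as_required('input_table')

def main(_):
    print(flags.FLAGS.input_table)

if __name__ == '__main__':
    app.run(main)  # omitting --input_table exits with a missing-flag error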
Example #8
"""Rewrite script for TF->JAX."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import collections

# Dependency imports
from absl import app
from absl import flags

flags.DEFINE_boolean('numpy_to_jax', False,
                     'Whether or not to rewrite numpy imports to jax.numpy')
flags.DEFINE_list('omit_deps', [], 'List of build deps being omitted.')

FLAGS = flags.FLAGS

TF_REPLACEMENTS = {
    'import tensorflow ':
        'from tensorflow_probability.python.internal.backend import numpy ',
    'import tensorflow.compat.v1':
        'from tensorflow_probability.python.internal.backend.numpy.compat '
        'import v1',
    'import tensorflow.compat.v2':
        'from tensorflow_probability.python.internal.backend.numpy.compat '
        'import v2',
    'import tensorflow_probability as tfp':
        'import tensorflow_probability as tfp; '
        'tfp = tfp.substrates.numpy',
Example #9
from tensor2robot.models import tpu_model_wrapper
from tensor2robot.utils import tensorspec_utils
import tensorflow as tf  # tf

from typing import Any, Callable, Dict, List, Optional, Text

EXPORTER_FN = Callable[[
    model_interface.ModelInterface, abstract_export_generator.
    AbstractExportGenerator
], List[tf.estimator.Exporter]]

FLAGS = flags.FLAGS

try:
    flags.DEFINE_list(
        'gin_configs', None, 'A comma-separated list of paths to Gin '
        'configuration files.')
    flags.DEFINE_multi_string(
        'gin_bindings', [],
        'A newline separated list of Gin parameter bindings.')
except flags.DuplicateFlagError:
    pass
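
Defining the same flag name twice in one registry raises flags.DuplicateFlagError, so the try/except above makes these definitions safe when another imported module registers the same gin flags. A minimal sketch of the failure it absorbs:

from absl import flags

flags.DEFINE_list('gin_configs', None, 'Paths to Gin configuration files.')
flags.DEFINE_list('gin_configs', None, 'Paths to Gin configuration files.')  # raises flags.DuplicateFlagError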

gin_configurable_eval_spec = gin.external_configurable(
    tf.estimator.EvalSpec, name='tf.estimator.EvalSpec')


def print_spec(tensor_spec):
    """Iterate over a spec and print its values in sorted order.

  Args:
Example #10
from absl import flags

import tensorflow_probability as tfp
from tensorflow_probability.python.layers import util as tfp_layers_util

from meta_learning_without_memorization.pose_code.maml_bbb_2 import MAML

FLAGS = flags.FLAGS

## Dataset/method options
flags.DEFINE_string('datasource', 'pose',
                    'sinusoid or omniglot or miniimagenet')
flags.DEFINE_integer('dim_w', 196, 'dimension of w')
flags.DEFINE_integer('dim_im', 128, 'dimension of image')
flags.DEFINE_integer('dim_y', 1, 'dimension of y')
flags.DEFINE_string('data_dir', None, 'Directory of data files.')
get_data_dir = lambda: FLAGS.data_dir
flags.DEFINE_list('data', ['train_data_ins.pkl', 'val_data_ins.pkl'],
                  'data name')

## Training options
flags.DEFINE_float('beta', 1e-3, 'beta for IB')
flags.DEFINE_integer(
    'num_classes', 1,
    'number of classes used in classification (e.g. 5-way classification).')
flags.DEFINE_integer(
    'update_batch_size', 15,
    'number of examples used for inner gradient update (K for K-shot learning).'
)
flags.DEFINE_integer('num_updates', 5,
                     'number of inner gradient updates during training.')
flags.DEFINE_integer('meta_batch_size', 10,
                     'number of tasks sampled per meta-update')
flags.DEFINE_integer('test_num_updates', 20,
                     'number of inner gradient updates during testing.')
Example #11

    def test_write_help_in_xmlformat(self):
        fv = flags.FlagValues()
        # Since these flags are defined by the top module, they are all key.
        flags.DEFINE_integer('index', 17, 'An integer flag', flag_values=fv)
        flags.DEFINE_integer('nb_iters',
                             17,
                             'An integer flag',
                             lower_bound=5,
                             upper_bound=27,
                             flag_values=fv)
        flags.DEFINE_string('file_path',
                            '/path/to/my/dir',
                            'A test string flag.',
                            flag_values=fv)
        flags.DEFINE_boolean('use_gpu',
                             False,
                             'Use gpu for performance.',
                             flag_values=fv)
        flags.DEFINE_enum('cc_version',
                          'stable', ['stable', 'experimental'],
                          'Compiler version to use.',
                          flag_values=fv)
        flags.DEFINE_list('files',
                          'a.cc,a.h,archive/old.zip',
                          'Files to process.',
                          flag_values=fv)
        flags.DEFINE_list('allow_users', ['alice', 'bob'],
                          'Users with access.',
                          flag_values=fv)
        flags.DEFINE_spaceseplist('dirs',
                                  'src libs bins',
                                  'Directories to create.',
                                  flag_values=fv)
        flags.DEFINE_multi_string('to_delete', ['a.cc', 'b.h'],
                                  'Files to delete',
                                  flag_values=fv)
        flags.DEFINE_multi_integer('cols', [5, 7, 23],
                                   'Columns to select',
                                   flag_values=fv)
        flags.DEFINE_multi_enum('flavours', ['APPLE', 'BANANA'],
                                ['APPLE', 'BANANA', 'CHERRY'],
                                'Compilation flavour.',
                                flag_values=fv)
        # Define a few flags in a different module.
        module_bar.define_flags(flag_values=fv)
        # And declare only a few of them to be key.  This way, we have
        # different kinds of flags, defined in different modules, and not
        # all of them are key flags.
        flags.declare_key_flag('tmod_bar_z', flag_values=fv)
        flags.declare_key_flag('tmod_bar_u', flag_values=fv)

        # Generate flag help in XML format in the StringIO sio.
        sio = io.StringIO() if six.PY3 else io.BytesIO()
        fv.write_help_in_xml_format(sio)

        # Check that we got the expected result.
        expected_output_template = EXPECTED_HELP_XML_START
        main_module_name = sys.argv[0]
        module_bar_name = module_bar.__name__

        if main_module_name < module_bar_name:
            expected_output_template += EXPECTED_HELP_XML_FOR_FLAGS_FROM_MAIN_MODULE
            expected_output_template += EXPECTED_HELP_XML_FOR_FLAGS_FROM_MODULE_BAR
        else:
            expected_output_template += EXPECTED_HELP_XML_FOR_FLAGS_FROM_MODULE_BAR
            expected_output_template += EXPECTED_HELP_XML_FOR_FLAGS_FROM_MAIN_MODULE

        expected_output_template += EXPECTED_HELP_XML_END

        # XML representation of the whitespace list separators.
        whitespace_separators = _list_separators_in_xmlformat(
            string.whitespace, indent='    ')
        expected_output = (expected_output_template % {
            'basename_of_argv0': os.path.basename(sys.argv[0]),
            'usage_doc': sys.modules['__main__'].__doc__,
            'main_module_name': main_module_name,
            'module_bar_name': module_bar_name,
            'whitespace_separators': whitespace_separators
        })

        actual_output = sio.getvalue()
        self.assertMultiLineEqual(expected_output, actual_output)

        # Also check that our result is valid XML.  minidom.parseString
        # throws an xml.parsers.expat.ExpatError in case of an error.
        xml.dom.minidom.parseString(actual_output)
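
The test isolates everything in its own flags.FlagValues() registry instead of the global FLAGS; such a registry is parsed by calling it with an argv list. A minimal sketch:

from absl import flags

fv = flags.FlagValues()
flags.DEFINE_integer('index', 17, 'An integer flag', flag_values=fv)
fv(['prog', '--index=23'])  # parse against this registry only
assert fv.index == 23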
Example #12

from __future__ import print_function

import importlib
import inspect
import re

# Dependency imports

from absl import app
from absl import flags

FLAGS = flags.FLAGS

flags.DEFINE_string('module_name', '', 'TF linalg module to transform')
flags.DEFINE_list(
    'whitelist', '',
    'TF linalg module whitelist (other imports will be commented-out)')

MODULE_MAPPINGS = {
    'framework import dtypes': 'dtype as dtypes',
    'framework import errors': 'errors',
    'framework import ops': 'ops',
    'framework import tensor_shape': 'ops as tensor_shape',
    'module import module': 'ops as module',
    'ops import array_ops': 'numpy_array as array_ops',
    'ops import check_ops': 'debugging as check_ops',
    'ops.signal import fft_ops': 'numpy_signal as fft_ops',
    'ops import control_flow_ops': 'control_flow as control_flow_ops',
    'ops import linalg_ops': 'linalg_impl as linalg_ops',
    'ops import math_ops': 'numpy_math as math_ops',
    'ops import variables as variables_module': 'ops as variables_module',
Example #13

import itertools
from multiprocessing import cpu_count
from threading import Thread
from time import time
from typing import List

import humanize
from absl import app, flags

import compiler_gym.util.flags.output_dir  # noqa Flag definition.
from compiler_gym.envs import CompilerEnv
from compiler_gym.util.flags.benchmark_from_flags import benchmark_from_flags
from compiler_gym.util.flags.env_from_flags import env_from_flags
from compiler_gym.util.logs import create_logging_dir

flags.DEFINE_list(
    "actions",
    [],
    "A list of action names to enumerate. If not provided, all actions are used "
    "(warning: this might make a long time!)",
)
flags.DEFINE_integer("episode_length", 5, "The number of steps in each episode.")
flags.DEFINE_integer(
    "nproc", cpu_count(), "The number of parallel worker threads to run."
)

FLAGS = flags.FLAGS


def grouper(iterable, n):
    """Split an iterable into chunks of length `n`, padded if required."""
    args = [iter(iterable)] * n
    return itertools.zip_longest(*args, fillvalue=None)
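
For reference, a quick check of grouper's padding behavior (illustrative values):

>>> list(grouper('abcde', 2))
[('a', 'b'), ('c', 'd'), ('e', None)]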
Example #14

import os

from absl import app
from absl import flags
from absl import logging

from dm_c19_modelling.england_data import constants
from dm_c19_modelling.england_data import dataset_merge_util
from dm_c19_modelling.england_data import error_reporting_util
import pandas as pd

FLAGS = flags.FLAGS

flags.DEFINE_string(
    "scrape_date", None, "Enter in the form YYYYMMDD, eg. "
    "November 5, 2020 would be '20201105'. If you want the "
    "latest date, enter 'latest'.")
flags.DEFINE_string("input_directory", None,
                    "The directory to read the standardized data .csvs from.")
flags.DEFINE_string("output_directory", None,
                    "The directory to write the merged data .csv to.")
flags.DEFINE_list(
    "names", None, "List of names: "
    f"{', '.join([data_type.value for data_type in constants.DataTypes])}")
flags.mark_flag_as_required("scrape_date")
flags.mark_flag_as_required("input_directory")
flags.mark_flag_as_required("output_directory")


@error_reporting_util.report_exception
def main(argv):
    if len(argv) > 1:
        raise app.UsageError("Too many command-line arguments.")

    scrape_date = FLAGS.scrape_date
    if scrape_date == "latest":
        logging.info("Merging data for 'latest' scrape date")
        scrape_date_dirname = max(os.listdir(FLAGS.input_directory))
Example #15
from absl import flags
from absl import logging

import config
import datagen

FLAGS = flags.FLAGS


"""
This script trains a model on triplets.
Example usage: 
    python train.py --save_to progress/test --num_epochs 10 --batch_size 8 --model mobilenet_v2 --input_size 224,224 --jitter --overwrite
"""

flags.DEFINE_string('save_to', config.SAVE_TO_DEFAULT, 'directory to save checkpoints and logs')
flags.DEFINE_boolean('overwrite', False, 'Overwrite given save path')
flags.DEFINE_string('from_ckpt', None, 'path to continue training on checkpoint')
flags.DEFINE_boolean('jitter', config.JITTER_DEFAULT, 'Apply image augmentation')
flags.DEFINE_integer('batch_size', config.BATCH_SIZE_DEFAULT, 'batch size')
flags.DEFINE_list('input_size', config.INPUT_SIZE_DEFAULT, 'input size in (width, height) format')
flags.DEFINE_boolean('keep_aspect_ratio', config.KEEP_ASPECT_RATIO_DEFAULT, 'keep aspect ratio when resizing patches')
flags.DEFINE_list('loss_weights', config.LOSS_WEIGHTS_DEFAULT, 'loss weights in (w_dimension, w_orientation, w_confidence) format')
flags.DEFINE_integer('num_bins', config.NUM_BINS_DEFAULT, 'number of bins used in orientation regression')
flags.DEFINE_integer('num_epochs', config.NUM_EPOCHS_DEFAULT, 'number of epochs')
flags.DEFINE_string('model', config.MODEL_DEFAULT, 'model type - %s' % str(config.MODELS.keys()))

def main(_argv):
    assert not ((FLAGS.overwrite) and (FLAGS.from_ckpt is not None))
    input_size = list(map(int, FLAGS.input_size)) # (width, height)
    input_shape = (input_size[1], input_size[0], 3)
    loss_weights = list(map(float, FLAGS.loss_weights))

    # Load data
    logging.info("Loading data")
    kitti_reader = reader.KittiReader()
Example #16

from absl import app
from absl import flags

import tensorflow.compat.v2 as tf

from non_semantic_speech_benchmark.eval_embedding.finetune import get_data
from non_semantic_speech_benchmark.eval_embedding.finetune import models

FLAGS = flags.FLAGS

flags.DEFINE_string('file_pattern', None, 'Dataset location.')
flags.DEFINE_string('samples_key', None, 'Samples name.')
flags.DEFINE_integer('ml', 16000, 'Minimum length.')
flags.DEFINE_alias('min_length', 'ml')
flags.DEFINE_string('label_key', None, 'Name of label to use.')
flags.DEFINE_list('label_list', None, 'List of possible label values.')

flags.DEFINE_integer('tbs', 1, 'Hyperparameter: batch size.')
flags.DEFINE_alias('train_batch_size', 'tbs')
flags.DEFINE_integer('shuffle_buffer_size', None, 'shuffle_buffer_size')

flags.DEFINE_integer('nc', None, 'num_clusters')
flags.DEFINE_alias('num_clusters', 'nc')
flags.DEFINE_float('alpha_init', None, 'Initial autopool alpha.')
flags.DEFINE_alias('ai', 'alpha_init')
flags.DEFINE_boolean('ubn', None, 'Whether to use batch normalization.')
flags.DEFINE_alias('use_batch_normalization', 'ubn')
flags.DEFINE_float('lr', 0.001, 'Hyperparameter: learning rate.')

flags.DEFINE_string('logdir', None,
                    'Path to directory where to store summaries.')
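
flags.DEFINE_alias, used throughout this snippet, makes both spellings set the same underlying flag. A minimal sketch in an isolated registry (values are illustrative):

from absl import flags

fv = flags.FlagValues()
flags.DEFINE_integer('ml', 16000, 'Minimum length.', flag_values=fv)
flags.DEFINE_alias('min_length', 'ml', flag_values=fv)
fv(['prog', '--min_length=8000'])
assert fv.ml == 8000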
Example #17
import re
from absl import app
from absl import flags
import pandas as pd
import tensorflow as tf
from tensorflow import feature_column as fc
from tensorflow_lattice import configs
from tensorflow_lattice import estimators

FLAGS = flags.FLAGS
flags.DEFINE_float('learning_rate', 0.1, 'Learning rate.')
flags.DEFINE_integer('batch_size', 100, 'Batch size.')
flags.DEFINE_integer('num_epochs', 50, 'Number of training epochs.')
flags.DEFINE_integer('prefitting_num_epochs', 10, 'Prefitting epochs.')
flags.DEFINE_list(
    'config_updates', '',
    'Comma-separated list of updates to model configs in name=value format. '
    'See tfl.configs.apply_updates().')


def main(_):
    # Parse configs updates from command line flags.
    config_updates = []
    for update in FLAGS.config_updates:
        config_updates.extend(re.findall(r'(\S*)\s*=\s*(\S*)', update))

    # UCI Statlog (Heart) dataset.
    csv_file = tf.keras.utils.get_file(
        'heart.csv', 'http://storage.googleapis.com/applied-dl/heart.csv')
    df = pd.read_csv(csv_file)
    target = df.pop('target')
    train_size = int(len(df) * 0.8)
Example #18
flags.DEFINE_string("train_file", None, "Path to processed training file")

flags.DEFINE_integer("num_train_examples", None,
                     "Number of examples used for training the model.")

flags.DEFINE_string(
    "bert_config_file", None,
    "The config json file corresponding to the pre-trained BERT model. "
    "This specifies the model architecture.")

flags.DEFINE_string("vocab_file", None,
                    "The vocabulary file that the BERT model was trained on.")

flags.DEFINE_list("label_list", [
    '[PAD]', 'B-product_characteristics', 'U-product_color',
    'U-product_category', 'L-product_characteristics', 'U-product_brand', '-',
    'U-product_characteristics', 'B-product_brand', 'L-product_brand', 'O',
    'X', '[CLS]', '[SEP]'
], "labels used in preprocessing.")

flags.DEFINE_string(
    "output_dir", None,
    "The output directory where the model checkpoints will be written.")

## Other parameters

flags.DEFINE_string(
    "init_checkpoint", None,
    "Initial checkpoint (usually from a pre-trained BERT model).")

# If you downloaded a cased checkpoint you should use "False"; if uncased you
# should use "True".
Example #19
from absl import flags

# Detection flags.
flags.DEFINE_list(
    'obstacle_detection_model_paths',
    'dependencies/models/obstacle_detection/faster-rcnn/frozen_inference_graph.pb',  # noqa: E501
    'Comma-separated list of model paths')
flags.DEFINE_list('obstacle_detection_model_names', 'faster-rcnn',
                  'Comma-separated list of model names')
flags.DEFINE_float(
    'obstacle_detection_gpu_memory_fraction', 0.3,
    'GPU memory fraction allocated to each obstacle detector operator')
flags.DEFINE_integer('obstacle_detection_gpu_index', 0,
                     'The index of the GPU to deploy the model on')
flags.DEFINE_float('obstacle_detection_min_score_threshold', 0.5,
                   'Min score threshold for bounding box')
flags.DEFINE_string('path_coco_labels', 'dependencies/models/pylot.names',
                    'Path to the COCO labels')
flags.DEFINE_float('dynamic_obstacle_distance_threshold', 30.0,
                   'Max distance to consider dynamic obstacles [m]')
flags.DEFINE_float(
    'static_obstacle_distance_threshold', 70.0,
    'Max distance to consider static obstacles (e.g., traffic lights) [m]')

# Traffic light detector flags.
flags.DEFINE_string(
    'traffic_light_det_model_path',
    'dependencies/models/traffic_light_detection/faster-rcnn/frozen_inference_graph.pb',  # noqa: E501
    'Path to the traffic light model protobuf')
flags.DEFINE_float('traffic_light_det_min_score_threshold', 0.3,
                   'Min score threshold for bounding box')
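
Note that DEFINE_list accepts either a Python list or a comma-separated string as its default, as the model-name flags above do; both end up as a list of strings. A minimal sketch in an isolated registry:

from absl import flags

fv = flags.FlagValues()
flags.DEFINE_list('obstacle_detection_model_names', 'faster-rcnn',
                  'Comma-separated list of model names', flag_values=fv)
fv(['prog'])
assert fv.obstacle_detection_model_names == ['faster-rcnn']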
Example #20
from collections import defaultdict

from absl import app
from absl import flags
from absl import logging

import h5py
import numpy as np
import tensorflow as tf

FLAGS = flags.FLAGS

flags.DEFINE_list(
    'partition_volumes', None, 'Partition volumes as '
    '<volume_name>:<volume_path>:<dataset>, where <volume_path> '
    'points to an HDF5 volume, and <volume_name> is an arbitrary '
    'label that will have to also be used during training.')
flags.DEFINE_string(
    'coordinate_output', None, 'Path to a TF Record file in which to save the '
    'coordinates.')
flags.DEFINE_list(
    'margin', None, '(z, y, x) tuple specifying the '
    'number of voxels adjacent to the border of the volume to '
    'exclude from sampling. This should normally be set to the '
    'radius of the FFN training FoV (i.e., network FoV radius '
    '+ deltas).')

IGNORE_PARTITION = 255

Example #21
import platform

from absl import flags

FLAGS = flags.FLAGS
flags.DEFINE_string('avd', None,
                    'The avd to test (or pick first available from emulator)')
flags.DEFINE_string(
    'avd_root', None,
    'The ANDROID_SDK_HOME to use, or existing environment var if not set.')
flags.DEFINE_string(
    'sdk_root', None,
    'The ANDROID_SDK_ROOT to use, or existing environment var if not set.')
flags.DEFINE_string('emulator', None, 'Emulator executable')
flags.DEFINE_integer('maxboot', 120, 'Maximum time to start emulator')
flags.DEFINE_string('stackwalker', None,
                    'Breakpad minidump_stackwalker executable')
flags.DEFINE_string('symbol_src', None, 'Location of the .sym files')
flags.DEFINE_list('args', [],
                  'Additional parameters to pass to the emulator launcher')
ext = ''
if platform.system().lower() == 'windows':
    ext = '.exe'


def crash_emulator(connection):
    """Send the crash command to the emulator.

       This should cause an immediate crash, which will cause
       a disconnect, so the while loop should exit immediately.

       Note: This will NOT WORK with emulators that are not built
       with crash support!!
    """
    while connection.is_connected():
Example #22
from absl import flags

# Optimization and evaluation flags
flags.DEFINE_integer('seed', 8, 'Random seed.')
flags.DEFINE_integer('per_core_batch_size', 32, 'Batch size per TPU core/GPU.')
flags.DEFINE_float(
    'base_learning_rate', 1e-5,
    'Base learning rate when total batch size is 128. It is '
    'scaled by the ratio of the total batch size to 128.')
flags.DEFINE_integer(
    'checkpoint_interval', 5,
    'Number of epochs between saving checkpoints. Use -1 to '
    'never save checkpoints.')
flags.DEFINE_integer('evaluation_interval', 1,
                     'Number of epochs between evaluation.')
flags.DEFINE_integer('num_bins', 15, 'Number of bins for ECE.')
flags.DEFINE_list(
    'fractions', ['0.0', '0.01', '0.05', '0.1', '0.15', '0.2'],
    'A list of fractions of total examples to send to '
    'the moderators (up to 1).')
flags.DEFINE_string('output_dir', '/tmp/toxic_comments', 'Output directory.')
flags.DEFINE_integer('train_epochs', 5, 'Number of training epochs.')
flags.DEFINE_float(
    'warmup_proportion', 0.1,
    'Proportion of training to perform linear learning rate warmup for. '
    'E.g., 0.1 = 10% of training.')
flags.DEFINE_float(
    'ece_label_threshold', 0.7,
    'Threshold used to convert toxicity score into binary labels for computing '
    'Expected Calibration Error (ECE). Default is 0.7, which is the threshold '
    'value recommended by the Jigsaw team.')

# Loss type
flags.DEFINE_string('loss_type', 'cross_entropy',
Example #23
from absl import app
from absl import flags
import t5.data
import tensorflow as tf
import tensorflow_datasets as tfds

FLAGS = flags.FLAGS

flags.DEFINE_string("predictions_file", None, "Path to model predictions.")
flags.DEFINE_string("task", None, "T5 task name for this benchmark.")
flags.DEFINE_string("tfds_name", None, "Short name of tfds (e.g. 'cb').")
flags.DEFINE_string("out_dir", None, "Path to write output file.")
flags.DEFINE_string("split", "test", "Split, should typically be test.")
flags.DEFINE_boolean("super", False, "Whether to make SuperGLUE-style file.")
flags.DEFINE_boolean("cached", True, "Whether to used cached dataset.")
flags.DEFINE_list("additional_task_cache_dirs", [], "Dirs with cached tasks.")

FILE_NAME_MAP = {
    "boolq": "BoolQ",
    "cb": "CB",
    "copa": "COPA",
    "multirc": "MultiRC",
    "record": "ReCoRD",
    "rte": "RTE",
    "wic": "WiC",
    "cola": "CoLA",
    "sst2": "SST-2",
    "mrpc": "MRPC",
    "stsb": "STS-B",
    "qqp": "QQP",
    "mnli_matched": "MNLI-m",
Example #24

Example usage:
mkdir stoichs
enumerate_stoichiometries.py --output_prefix=stoichs/ \
    --num_heavy=3 --heavy_elements=C,N,O,S
"""

from absl import app
from absl import flags

from graph_sampler import stoichiometry

FLAGS = flags.FLAGS

flags.DEFINE_integer('num_heavy', None, 'Number of non-hydrogen atoms.')
flags.DEFINE_list('heavy_elements', ['C', 'N', 'N+', 'O', 'O-', 'F'],
                  'Which heavy elements to use.')
flags.DEFINE_string('output_prefix', '', 'Prefix for output files.')
flags.DEFINE_list(
    'valences', [],
    'Valences of atom types (only required for atom types whose valence cannot '
    'be inferred by rdkit, e.g. "X=7,R=3" if you\'re using "synthetic atoms" '
    'with valences 7 and 3).')
flags.DEFINE_list(
    'charges', [],
    'Charges of atom types (only required for atom types whose charge cannot '
    'be inferred by rdkit, e.g. "X=0,R=-1" if you\'re using "synthetic atoms" '
    'with charges 0 and -1).')
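
The --valences and --charges values arrive as lists like ['X=7', 'R=3']. A hypothetical helper (not part of the original script) showing one way to turn them into a mapping:

def parse_typed_list(items):
    """Turn a DEFINE_list value like ['X=7', 'R=3'] into {'X': 7, 'R': 3}."""
    return {key: int(value) for key, value in (item.split('=') for item in items)}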


def main(argv):
    if len(argv) > 1:
Example #25
from absl import flags
import numpy as np
import tensorflow.compat.v1 as tf
from tensorflow.compat.v1.keras.layers import MaxPooling2D
import tensorflow_probability as tfp
from tensorflow_probability.python.layers import util as tfp_layers_util

tf.compat.v1.enable_v2_tensorshape()
FLAGS = flags.FLAGS

## Dataset/method options
flags.DEFINE_string('logdir', '/tmp/data',
                    'directory for summaries and checkpoints.')
flags.DEFINE_string('data_dir', None, 'Directory of data files.')
get_data_dir = lambda: FLAGS.data_dir
flags.DEFINE_list('data', ['train_data_ins.pkl', 'val_data_ins.pkl'],
                  'data name')
flags.DEFINE_integer('update_batch_size', 15, 'number of context/target')
flags.DEFINE_integer('meta_batch_size', 10, 'number of tasks')
flags.DEFINE_integer('dim_im', 128, 'image size')
flags.DEFINE_integer('dim_y', 1, 'dimension of y')

## Training options
flags.DEFINE_list('n_hidden_units_g', [100, 100],
                  'number of hidden units per layer of the g network')
flags.DEFINE_list('n_hidden_units_r', [100, 100],
                  'number of hidden units per layer of the r network')
flags.DEFINE_integer('dim_z', 200, 'dimension of z')
flags.DEFINE_integer('dim_r', 200, 'dimension of r for aggregating')
flags.DEFINE_float('update_lr', 5e-4, 'lr')
flags.DEFINE_integer('num_updates', 100000, 'num_updates')
flags.DEFINE_integer('trial', 1, 'trial number')
Example #26
from tools.data_handling import get_image_paths, sample_data, load_data
import tensorflow as tf
import os

import argparse

from absl import flags

FLAGS = flags.FLAGS
flags.DEFINE_integer('epochs', 50, 'number of training epochs', lower_bound=0)
flags.DEFINE_integer('batch_size', 64, 'batch size for training')
flags.DEFINE_float('learning_rate', 1e-3, 'initial learning rate')
flags.DEFINE_integer('pixels', 12, 'input size of images', lower_bound=0)
flags.DEFINE_integer('training_size', 100000, 'size of the training set')
flags.DEFINE_integer('validation_size', 40000, 'size of the validation set')
flags.DEFINE_list('training_set_split', [1, 0, 1],
                  'split of training set: positives, partials, negatives')
flags.DEFINE_list(
    'validation_set_split', [1, 0, 1],
    'split of the validation set: positives, partials, negatives')
flags.DEFINE_bool('save_inputs', False,
                  'whether the input data is saved or not')


def main(args):

    # get image paths
    training_image_paths = get_image_paths(FLAGS.pixels, 'raw')
    validation_image_paths = get_image_paths(FLAGS.pixels, 'val')
Example #27

                     'Number of pipelines to use.')
flags.DEFINE_integer('enterprise_redis_loadgen_clients', 24,
                     'Number of clients per loadgen vm.')
flags.DEFINE_integer('enterprise_redis_max_threads', 40,
                     'Maximum number of memtier threads to use.')
flags.DEFINE_integer('enterprise_redis_min_threads', 18,
                     'Minimum number of memtier threads to use.')
flags.DEFINE_integer('enterprise_redis_thread_increment', 1,
                     'Number of memtier threads to increment by.')
flags.DEFINE_integer(
    'enterprise_redis_latency_threshold', 1100,
    'The latency threshold in microseconds '
    'until the test stops.')
flags.DEFINE_boolean('enterprise_redis_pin_workers', False,
                     'Whether to pin the proxy threads after startup.')
flags.DEFINE_list('enterprise_redis_disable_cpu_ids', None,
                  'List of cpus to disable by id.')

_PACKAGE_NAME = 'redis_enterprise'
_LICENSE = 'enterprise_redis_license'
_WORKING_DIR = '~/redislabs'
_RHEL_TAR = 'redislabs-5.4.2-24-rhel7-x86_64.tar'
_XENIAL_TAR = 'redislabs-5.4.2-24-xenial-amd64.tar'
_BIONIC_TAR = 'redislabs-5.4.2-24-bionic-amd64.tar'
_USERNAME = '******'
PREPROVISIONED_DATA = {
    _RHEL_TAR:
    '8db83074b3e4e6de9c249ce34b6bb899ed158a6a4801f36c530e79bdb97a4c20',
    _XENIAL_TAR:
    'ef2da8b5eaa02b53488570392df258c0d5d3890a9085c2495aeb5c96f336e639',
    _BIONIC_TAR:
    'ef0c58d6d11683aac07d3f2cae6b9544cb53064c9f7a7419d63b6d14cd858d53',
Example #28
from absl import flags
from midas import Midas
import numpy as np
import pandas as pd

flags.DEFINE_string("input_dir", "",
                    "Input directory for preprocessed SIGTYP files.")

flags.DEFINE_string(
    "training_set_name", const.TRAIN_FILENAME,
    "Name of the training set. Can be \"train\" or \"train_dev\".")

flags.DEFINE_integer("num_epochs", 10, "Number of epochs for training.")

flags.DEFINE_integer("batch_size", 8, "Number of examples per batch.")

flags.DEFINE_list("layer_structure", ["16", "16"],
                  "Number of nodes per layer.")

flags.DEFINE_string("model_dir", "/tmp/MIDAS",
                    "Directory where to store or load the model.")

flags.DEFINE_string("output_datasets_dir", "",
                    "Directory for storing datasets with imputed values.")

flags.DEFINE_integer("num_datasets", 5,
                     "Number of imputed datasets to generate.")

flags.DEFINE_boolean(
    "save_softmax_columns", False,
    "Saves the candidate datasets with all the softmax columns for categorical "
    "data. If disabled, the softmax columns are converted back to the normal "
    "format.")
Example #29
from absl import flags

flags.DEFINE_enum(
    "data_format", "example_list_with_context",
    ["example_list_with_context", "example_in_example", "sequence_example"],
    "Data format defined in data.py.")
flags.DEFINE_string("train_path", None, "Input file path used for training.")
flags.DEFINE_string("eval_path", None, "Input file path used for eval.")
flags.DEFINE_string("vocab_path", None,
                    "Vocabulary path for query and document tokens.")
flags.DEFINE_string("model_dir", None, "Output directory for models.")
flags.DEFINE_integer("batch_size", 32, "The batch size for train.")
flags.DEFINE_integer("num_train_steps", 15000, "Number of steps for train.")
flags.DEFINE_float("learning_rate", 0.05, "Learning rate for optimizer.")
flags.DEFINE_float("dropout_rate", 0.8, "The dropout rate before output layer.")
flags.DEFINE_list("hidden_layer_dims", ["64", "32", "16"],
                  "Sizes for hidden layers.")
flags.DEFINE_integer(
    "list_size", None,
    "List size used for training. Use None for dynamic list size.")
flags.DEFINE_integer("group_size", 1, "Group size used in score function.")
flags.DEFINE_string("loss", "approx_ndcg_loss",
                    "The RankingLossKey for the loss function.")
flags.DEFINE_string("weights_feature_name", "",
                    "The name of the feature where unbiased learning-to-rank "
                    "weights are stored.")
flags.DEFINE_bool("listwise_inference", False,
                  "If true, exports accept `data_format` while serving.")
flags.DEFINE_bool(
    "use_document_interactions", False,
    "If true, uses cross-document interactions to generate scores.")
Example #30
from absl import app
from absl import flags

import erdos

import pylot.flags
import pylot.component_creator
import pylot.operator_creator
from pylot.simulation.scenario.person_avoidance_agent_operator \
    import PersonAvoidanceAgentOperator
from pylot.simulation.utils import get_world, set_asynchronous_mode

FLAGS = flags.FLAGS
flags.DEFINE_list('goal_location', '17.73, 327.07, 0.5',
                  'Ego-vehicle goal location')
flags.DEFINE_bool(
    'avoidance_agent', True,
    'True to enable scenario avoidance agent planner and controller')

# The location of the center camera relative to the ego-vehicle.
CENTER_CAMERA_LOCATION = pylot.utils.Location(1.5, 0.0, 1.4)


def add_avoidance_agent(can_bus_stream, obstacles_stream,
                        ground_obstacles_stream, goal_location):
    op_config = erdos.OperatorConfig(
        name=FLAGS.obstacle_detection_model_names[0] + '_agent',
        flow_watermarks=False,
        log_file_name=FLAGS.log_file_name,
        csv_log_file_name=FLAGS.csv_log_file_name,
        profile_file_name=FLAGS.profile_file_name)