Example #1
 def __init__(self, name, fv):
   super(JobsList, self).__init__(name, fv)
   flags.DEFINE_boolean(
       'allUsers',
       None,
       u'Whether to display jobs owned by all users in the project. Default '
       u'false',
       flag_values=fv)
   flags.DEFINE_integer(
       'maxResults',
       None,
       u'Maximum number of results to return',
       flag_values=fv)
   flags.DEFINE_string(
       'pageToken',
       None,
       u'Page token, returned by a previous call, to request the next page '
       u'of results',
       flag_values=fv)
   flags.DEFINE_enum(
       'projection',
       u'full',
       [u'full', u'minimal'],
       u'Restrict information returned to a set of selected fields',
       flag_values=fv)
   flags.DEFINE_enum(
       'stateFilter',
       u'done',
       [u'done', u'pending', u'running'],
       u'Filter for job state',
       flag_values=fv)
Example #2
def _DeclareIamFlags():
    """Declare global flags in an idempotent way."""
    if 'api_endpoint' in flags.FLAGS:
        return
    flags.DEFINE_string('api_endpoint',
                        u'https://iam.googleapis.com/',
                        'URL of the API endpoint to use.',
                        short_name='iam_url')
    flags.DEFINE_string('history_file', u'~/.iam.v1.history',
                        'File with interactive shell history.')
    flags.DEFINE_multistring(
        'add_header', [], 'Additional http headers (as key=value strings). '
        'Can be specified multiple times.')
    flags.DEFINE_string(
        'service_account_json_keyfile', '',
        'Filename for a JSON service account key downloaded'
        ' from the Developer Console.')
    flags.DEFINE_enum('f__xgafv', u'_1', [u'_1', u'_2'], u'V1 error format.')
    flags.DEFINE_string('access_token', None, u'OAuth access token.')
    flags.DEFINE_enum('alt', u'json', [u'json', u'media', u'proto'],
                      u'Data format for response.')
    flags.DEFINE_string('bearer_token', None, u'OAuth bearer token.')
    flags.DEFINE_string('callback', None, u'JSONP')
    flags.DEFINE_string(
        'fields', None,
        u'Selector specifying which fields to include in a partial response.')
    flags.DEFINE_string(
        'key', None,
        u'API key. Your API key identifies your project and provides you with '
        u'API access, quota, and reports. Required unless you provide an OAuth '
        u'2.0 token.')
    flags.DEFINE_string('oauth_token', None,
                        u'OAuth 2.0 token for the current user.')
    flags.DEFINE_boolean('pp', 'True', u'Pretty-print response.')
    flags.DEFINE_boolean(
        'prettyPrint', 'True',
        u'Returns response with indentations and line breaks.')
    flags.DEFINE_string(
        'quotaUser', None,
        u'Available to use for quota purposes for server-side applications. Can'
        u' be any arbitrary string assigned to a user, but should not exceed 40'
        u' characters.')
    flags.DEFINE_string(
        'trace', None,
        'A tracing token of the form "token:<tokenid>" to include in api '
        'requests.')
    flags.DEFINE_string(
        'uploadType', None,
        u'Legacy upload protocol for media (e.g. "media", "multipart").')
    flags.DEFINE_string(
        'upload_protocol', None,
        u'Upload protocol for media (e.g. "raw", "multipart").')
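
The docstring above ("Declare global flags in an idempotent way") names the point of the guard at the top of _DeclareIamFlags: redefining an already-registered flag raises gflags.DuplicateFlagError, so the function checks membership in flags.FLAGS and returns early if the flags exist. Example #6 below uses the same guard. A minimal, self-contained sketch of the idea, with hypothetical flag names:

import gflags as flags


def _declare_demo_flags():
    """Declare global flags in an idempotent way."""
    if 'api_endpoint' in flags.FLAGS:
        # Flags were already declared on an earlier call; defining them again
        # would raise gflags.DuplicateFlagError.
        return
    flags.DEFINE_string('api_endpoint', 'https://example.googleapis.com/',
                        'URL of the API endpoint to use.')
    flags.DEFINE_enum('alt', 'json', ['json', 'media', 'proto'],
                      'Data format for the response.')


_declare_demo_flags()
_declare_demo_flags()  # second call is a harmless no-op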
Example #3
 def __init__(self, name, fv):
   super(ChangesList, self).__init__(name, fv)
   flags.DEFINE_integer(
       'maxResults',
       None,
       u'Optional. Maximum number of results to be returned. If unspecified,'
       u' the server will decide how many results to return.',
       flag_values=fv)
   flags.DEFINE_string(
       'pageToken',
       None,
       u'Optional. A tag returned by a previous list request that was '
       u'truncated. Use this parameter to continue a previous list request.',
       flag_values=fv)
   flags.DEFINE_enum(
       'sortBy',
       u'changeSequence',
       [u'changeSequence'],
       u'Sorting criterion. The only supported value is change sequence.',
       flag_values=fv)
   flags.DEFINE_string(
       'sortOrder',
       None,
       u"Sorting order direction: 'ascending' or 'descending'.",
       flag_values=fv)
Example #4
def args():

    default_experiment_name = "exp-{}".format(int(time.time()))

    gflags.DEFINE_enum("preset", None, ["demo"], "Easily set configuration.")
    gflags.DEFINE_string("data_path", os.path.expanduser("~/data/multinli_0.9/multinli_0.9_dev_matched.jsonl"), "Path to NLI data.")
    gflags.DEFINE_string("eval_data_path", os.path.expanduser("~/data/multinli_0.9/multinli_0.9_dev_matched.jsonl"), "Path to NLI data.")
    gflags.DEFINE_string("embedding_path", os.path.expanduser("~/data/glove.840B.300d.txt"), "Path to GloVe vectors.")
    gflags.DEFINE_integer("batch_size", 100, "Batch size.")
    gflags.DEFINE_integer("input_dim", 300, "Word embedding dimension.")
    gflags.DEFINE_integer("hidden_dim", 300, "Hidden representation dimension.")
    gflags.DEFINE_string("save_path", ".", "Path to logs and checkpoints.")
    gflags.DEFINE_string("load_path", None, "Path to load checkpoint.")
    gflags.DEFINE_string("log_path", None, "Path to log.")
    gflags.DEFINE_string("experiment_name", default_experiment_name, "Experiment name.")
    gflags.DEFINE_float("l2", None, "Use l2 regularization.")
    gflags.DEFINE_boolean("extract", False, "Use pretrained model to calculate query and target vectors for input data.")
    gflags.DEFINE_integer("seed", 11, "Random seed.")

    FLAGS(sys.argv)

    presets()

    if not FLAGS.load_path:
        FLAGS.load_path = FLAGS.save_path  # this way we use logs/ckpt for an experiment_name if it exists.

    if not FLAGS.log_path:
        FLAGS.log_path = os.path.join('.', FLAGS.experiment_name + '.log')

    logger = MainLogger().init(path=FLAGS.log_path)
    logger.Log(json.dumps(FLAGS.FlagValuesDict(), indent=4, sort_keys=True))
Example #5
def flags():
    # Debug settings
    gflags.DEFINE_string("branch", None, "")
    gflags.DEFINE_string("sha", None, "")
    gflags.DEFINE_boolean("debug", False, "")

    # Performance settings
    gflags.DEFINE_boolean("cuda", False, "")

    # Display settings
    gflags.DEFINE_string("env", "main", "")
    gflags.DEFINE_string("experiment_name", None, "")

    # Data settings
    gflags.DEFINE_string("descr_train", "./utils/descriptions.csv", "")
    gflags.DEFINE_string("descr_dev", "./utils/descriptions.csv", "")
    gflags.DEFINE_string("train_data", "./utils/imgs/train", "")
    gflags.DEFINE_string("dev_data", "./utils/imgs/dev", "")
    gflags.DEFINE_integer("word_embedding_dim", 100, "")
    gflags.DEFINE_string("word_embedding_path",
                         "~/data/glove/glove.6B.100d.txt", "")

    # Optimization settings
    gflags.DEFINE_enum("optim_type", "RMSprop", ["Adam", "SGD", "RMSprop"], "")
    gflags.DEFINE_integer("batch_size", 32, "Minibatch size for train set.")
    gflags.DEFINE_integer("batch_size_dev", 50, "Minibatch size for dev set.")
    gflags.DEFINE_float("learning_rate", 1e-4, "Used in optimizer.")
    gflags.DEFINE_integer("max_epoch", 500, "")
Example #6
def _DeclareDnsFlags():
  """Declare global flags in an idempotent way."""
  if 'api_endpoint' in flags.FLAGS:
    return
  flags.DEFINE_string(
      'api_endpoint',
      u'https://www.googleapis.com/dns/v1/',
      'URL of the API endpoint to use.',
      short_name='dns_url')
  flags.DEFINE_string(
      'history_file',
      u'~/.dns.v1.history',
      'File with interactive shell history.')
  flags.DEFINE_multistring(
      'add_header', [],
      'Additional http headers (as key=value strings). '
      'Can be specified multiple times.')
  flags.DEFINE_string(
      'service_account_json_keyfile', '',
      'Filename for a JSON service account key downloaded'
      ' from the Developer Console.')
  flags.DEFINE_enum(
      'alt',
      u'json',
      [u'json'],
      u'Data format for the response.')
  flags.DEFINE_string(
      'fields',
      None,
      u'Selector specifying which fields to include in a partial response.')
  flags.DEFINE_string(
      'key',
      None,
      u'API key. Your API key identifies your project and provides you with '
      u'API access, quota, and reports. Required unless you provide an OAuth '
      u'2.0 token.')
  flags.DEFINE_string(
      'oauth_token',
      None,
      u'OAuth 2.0 token for the current user.')
  flags.DEFINE_boolean(
      'prettyPrint',
      'True',
      u'Returns response with indentations and line breaks.')
  flags.DEFINE_string(
      'quotaUser',
      None,
      u'Available to use for quota purposes for server-side applications. Can'
      u' be any arbitrary string assigned to a user, but should not exceed 40'
      u' characters. Overrides userIp if both are provided.')
  flags.DEFINE_string(
      'trace',
      None,
      'A tracing token of the form "token:<tokenid>" to include in api '
      'requests.')
  flags.DEFINE_string(
      'userIp',
      None,
      u'IP address of the site where the request originates. Use this if you '
      u'want to enforce per-user limits.')
Example #7
def get_flags():
    # Debug settings.
    gflags.DEFINE_string("data_dir", "cmu",
                         "dir containing train.txt, test.txt, valid.txt")
    gflags.DEFINE_string("log_path", "logs", "")
    gflags.DEFINE_string("data_type", "discriminator",
                         "figure out how to use this")
    gflags.DEFINE_enum("model_type", "LSTM", ["LSTM", "BiLSTM", "DEEP"],
                       "options: LSTM, BiLSTM, DEEP, ...")
    gflags.DEFINE_string("ckpt_path", "checkpoints", "")
    gflags.DEFINE_boolean("gpu", False, "set to false on local")
    gflags.DEFINE_string("experiment_name", "", "")
    gflags.DEFINE_boolean("evaluate_only", False, "")

    #sizes
    gflags.DEFINE_integer("embedding_size", 29, "hardcoded for simplicity")
    gflags.DEFINE_integer("reduction_size", 40, "hardcoded for simplicity")
    gflags.DEFINE_integer("crop_pad_length", 30, "")

    #chunks
    gflags.DEFINE_integer("stages_per_epoch", 40,
                          "how many eval/stats steps per epoch?")
    gflags.DEFINE_integer("prints_per_stage", 1,
                          "how often to print stats to stdout during epoch")
    gflags.DEFINE_integer("convergence_threshold", 50,
                          "how many eval steps before early stop")
    gflags.DEFINE_integer(
        "max_epochs", 100,
        "number of epochs before stop, essentially unreachable")
    gflags.DEFINE_integer("batch_size", 64, "")

    #tunable parameters
    gflags.DEFINE_integer("hidden_size", 1024, "")
    gflags.DEFINE_integer("num_layers", 1, "")
    gflags.DEFINE_float("learning_rate", .002, "")
Example #8
 def __init__(self, name, fv):
     super(ProjectsServiceAccountsKeysGet, self).__init__(name, fv)
     flags.DEFINE_enum(
         'publicKeyType',
         u'TYPE_NONE',
         [u'TYPE_NONE', u'TYPE_X509_PEM_FILE', u'TYPE_RAW_PUBLIC_KEY'],
         u'The output format of the public key requested. X509_PEM is the '
         u'default output format.',
         flag_values=fv)
Example #9
 def __init__(self, name, fv):
     super(ProjectsServiceAccountsKeysList, self).__init__(name, fv)
     flags.DEFINE_enum(
         'keyTypes',
         u'KEY_TYPE_UNSPECIFIED',
         [u'KEY_TYPE_UNSPECIFIED', u'USER_MANAGED', u'SYSTEM_MANAGED'],
         u'Filters the types of keys the user wants to include in the list '
         u'response. Duplicate key types are not allowed. If no key type is '
         u'provided, all keys are returned.',
         flag_values=fv)
Example #10
def DeclareBaseFlags():
    """Declare base flags for all CLIs."""
    # TODO(craigcitro): FlagValidators?
    global _BASE_FLAGS_DECLARED
    if _BASE_FLAGS_DECLARED:
        return
    flags.DEFINE_boolean('log_request', False, 'Log requests.')
    flags.DEFINE_boolean('log_response', False, 'Log responses.')
    flags.DEFINE_boolean('log_request_response', False,
                         'Log requests and responses.')
    flags.DEFINE_enum('output_format', 'protorpc',
                      _OUTPUT_FORMATTER_MAP.keys(),
                      'Display format for results.')

    _BASE_FLAGS_DECLARED = True
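
In the example above, the allowed values of the output_format enum come from _OUTPUT_FORMATTER_MAP.keys(), so the parsed flag value can be used directly as a dictionary key when picking a formatter. A small sketch of that pattern, with a hypothetical formatter map:

import json
import pprint
import sys

import gflags

_OUTPUT_FORMATTER_MAP = {
    'json': json.dumps,
    'pprint': pprint.pformat,
}

gflags.DEFINE_enum('output_format', 'json', sorted(_OUTPUT_FORMATTER_MAP),
                   'Display format for results.')

if __name__ == '__main__':
    gflags.FLAGS(sys.argv)  # parses --output_format and validates it against the enum
    formatter = _OUTPUT_FORMATTER_MAP[gflags.FLAGS.output_format]
    print(formatter({'status': 'ok'}))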
Example #11
 def testFlagHelpInXML_Enum(self):
     gflags.DEFINE_enum('cc_version',
                        'stable', ['stable', 'experimental'],
                        'Compiler version to use.',
                        flag_values=self.fv)
     expected_output = (' <flag>\n'
                        '   <file>tool</file>\n'
                        '   <name>cc_version</name>\n'
                        '   <meaning>&lt;stable|experimental&gt;: '
                        'Compiler version to use.</meaning>\n'
                        '   <default>stable</default>\n'
                        '   <current>stable</current>\n'
                        '   <type>string enum</type>\n'
                        '   <enum_value>stable</enum_value>\n'
                        '   <enum_value>experimental</enum_value>\n'
                        ' </flag>\n')
     self._CheckFlagHelpInXML('cc_version', 'tool', expected_output)
Example #12
def go():
    # The gflags module makes defining command-line options easy for
    # applications. Run this program with the '--help' argument to see
    # all the flags that it understands.
    gflags.DEFINE_enum('logging_level', 'ERROR',
                       ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
                       'Set the level of logging detail.')
    gflags.DEFINE_string('clientjson',
                         None,
                         'client json file',
                         short_name='j')
    gflags.DEFINE_boolean('sydproxy',
                          True,
                          'use the syduni proxy',
                          short_name='p')

    # Let the gflags module process the command-line arguments
    try:
        argv = gflags.FLAGS(sys.argv)
    except gflags.FlagsError, e:
        print '%s\nUsage: %s ARGS\n%s' % (e, sys.argv[0], gflags.FLAGS)
        sys.exit(1)
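
The comment in this example summarizes the usual gflags flow: define flags at module level, then hand sys.argv to gflags.FLAGS to parse them (passing --help prints the generated usage text). The snippet above is Python 2; a Python 3-compatible sketch of the same flow, reusing the logging_level flag:

import sys

import gflags

gflags.DEFINE_enum('logging_level', 'ERROR',
                   ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
                   'Set the level of logging detail.')


def main(argv):
    try:
        # FLAGS(...) parses the flag arguments and returns what is left over.
        argv = gflags.FLAGS(argv)
    except gflags.FlagsError as e:
        print('%s\nUsage: %s ARGS\n%s' % (e, sys.argv[0], gflags.FLAGS))
        sys.exit(1)
    print('logging_level =', gflags.FLAGS.logging_level)


if __name__ == '__main__':
    main(sys.argv)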
Example #13
 def __init__(self, name, flag_values):
   super(Deprecate, self).__init__(name, flag_values)
   flags.DEFINE_enum('state',
                     None,
                     ['DEPRECATED', 'OBSOLETE', 'DELETED'],
                     'The new deprecation state for this image. '
                     'Valid values are DEPRECATED, OBSOLETE, and '
                     'DELETED.  DEPRECATED resources will generate '
                     'a warning when new uses occur, OBSOLETE '
                     'and DELETED resources generate an error on '
                     'new uses.',
                     flag_values=flag_values)
   flags.DEFINE_string('replacement',
                       None,
                       'A valid full URL to a compute engine image. '
                       'Users of the deprecated image will be advised '
                       'to switch to this replacement.',
                       flag_values=flag_values)
   flags.DEFINE_string('deprecated_on',
                       None,
                       'A valid RFC 3339 full-date or date-time on which '
                       'the state of this resource became or will become '
                       'DEPRECATED.  For example:  2020-01-02T00:00:00Z for '
                       'midnight on January 2nd, 2020.',
                       flag_values=flag_values)
   flags.DEFINE_string('obsolete_on',
                       None,
                       'A valid RFC 3339 full-date or date-time on which '
                       'the state of this resource became or will become '
                       'OBSOLETE.  For example:  2020-01-02T00:00:00Z for '
                       'midnight on January 2nd, 2020.',
                       flag_values=flag_values)
   flags.DEFINE_string('deleted_on',
                       None,
                       'A valid RFC 3339 full-date or date-time on which '
                       'the state of this resource became or will become '
                       'DELETED.  For example:  2020-01-02T00:00:00Z for '
                       'midnight on January 2nd, 2020.',
                       flag_values=flag_values)
Example #14
    def __init__(self):
        self.FLAGS = gflags.FLAGS
        self.CLIENT_SECRETS = '/srv/http/univie2gcal_root/lib/client_secrets.json'  # todo: better solution
        self.MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0

To make this sample run you will need to download the client_secrets.json file
and save it at:

   %s

""" % os.path.join(os.path.dirname(__file__), self.CLIENT_SECRETS)
        self.msg = self.CLIENT_SECRETS
        self.FLOW = flow_from_clientsecrets(
            self.CLIENT_SECRETS,
            scope=[
                'https://www.googleapis.com/auth/calendar.readonly',
                'https://www.googleapis.com/auth/calendar',
            ],
            message=self.MISSING_CLIENT_SECRETS_MESSAGE)
        gflags.DEFINE_enum('logging_level', 'ERROR',
                           ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
                           'Set the level of logging detail.')
Example #15
 def __init__(self, name, flag_values):
     super(_Userinfo, self).__init__(name, flag_values)
     flags.DEFINE_enum(
         'format', 'json', sorted(('json', 'json_compact')),
         'Output format for userinfo.',
         short_name='f', flag_values=flag_values)
Example #16
 def __init__(self, name, flag_values):
     super(_Fetch, self).__init__(name, flag_values)
     flags.DEFINE_enum(
         'credentials_format', 'pretty', sorted(_FORMATS),
         'Output format for token.',
         short_name='f', flag_values=flag_values)
Example #17
DEFAULT_TOKEN_FILE = os.path.join(os.path.dirname(__file__), 'analytics.dat')
MISSING_CLIENT_MSG = """
WARNING: Please configure OAuth 2.0

You need to populate the client_secrets.json file found at:

   %s

with information from the APIs Console <https://code.google.com/apis/console>.

"""
DOC_URL = ('https://developers.google.com/api-client-library/python/guide/'
           'aaa_client_secrets')

gflags.DEFINE_enum('logging_level', 'ERROR',
                   ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
                   'Set the level of logging detail.')

# Name of file that will store the access and refresh tokens to access
# the API without having to login each time. Make sure this file is in
# a secure place.


def process_flags(flags=[]):
    """Uses the command-line flags to set the logging level.

    Args:
    argv: List of command line arguments passed to the python script.
    """

    # Let the gflags module process the command-line arguments.
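
The body of process_flags is cut off above. A hedged sketch of what such a helper typically does with the logging_level enum defined earlier in this example (it follows the docstring's argv parameter; the original body may differ):

import logging
import sys

import gflags


def process_flags(argv):
    """Uses the command-line flags to set the logging level."""
    try:
        argv = gflags.FLAGS(argv)  # let the gflags module parse argv
    except gflags.FlagsError as e:
        print('%s\nUsage: %s ARGS\n%s' % (e, argv[0], gflags.FLAGS))
        sys.exit(1)
    # Map the enum value ('DEBUG', 'INFO', ...) onto the logging module's level
    # constant of the same name and apply it to the root logger.
    logging.getLogger().setLevel(getattr(logging, gflags.FLAGS.logging_level))
    return argv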
Example #18
from lib.config import create_app

import gflags

FLAGS = gflags.FLAGS

gflags.DEFINE_enum('env',
                   None, ['local', 'prod', 'qa', 'dev'],
                   'Zhibogame environment to use',
                   short_name='e')

create_app()


def main(argv, run_func):
    try:
        argv = FLAGS(argv)
    except gflags.FlagsError as e:
        print '\n%s\nUsage: %s ARGS\n%s' % (e, argv[0], FLAGS)
        return 1
    if len(FLAGS(argv)) > 1:
        run_func(argv[1:])
    else:
        run_func()
Example #19
def get_flags():
    # Debug settings.
    gflags.DEFINE_bool(
        "debug",
        False,
        "Set to True to disable debug_mode and type_checking.")
    gflags.DEFINE_bool(
        "show_progress_bar",
        True,
        "Turn this off when running experiments on HPC.")
    gflags.DEFINE_string("git_branch_name", "", "Set automatically.")
    gflags.DEFINE_string("slurm_job_id", "", "Set automatically.")
    gflags.DEFINE_integer(
        "deque_length",
        100,
        "Max trailing examples to use when computing average training statistics.")
    gflags.DEFINE_string("git_sha", "", "Set automatically.")
    gflags.DEFINE_string("experiment_name", "", "")
    gflags.DEFINE_string("load_experiment_name", None, "")

    # Data types.
    gflags.DEFINE_enum("data_type",
                       "bl",
                       ["bl",
                        "sst",
                        "sst-binary",
                        "nli",
                        "arithmetic",
                        "listops",
                        "sign",
                        "eq",
                        "relational"],
                       "Which data handler and classifier to use.")

    # Choose Genre.
    # 'fiction', 'government', 'slate', 'telephone', 'travel'
    # 'facetoface', 'letters', 'nineeleven', 'oup', 'verbatim'
    gflags.DEFINE_string("train_genre", None, "Filter MultiNLI data by genre.")
    gflags.DEFINE_string("eval_genre", None, "Filter MultiNLI data by genre.")

    # Where to store checkpoints
    gflags.DEFINE_string(
        "log_path",
        "./logs",
        "A directory in which to write logs.")
    gflags.DEFINE_string(
        "load_log_path",
        None,
        "A directory from which to read logs.")
    gflags.DEFINE_boolean(
        "write_proto_to_log",
        False,
        "Write logs in a protocol buffer format.")
    gflags.DEFINE_string(
        "ckpt_path", None, "Where to save/load checkpoints. Can be either "
        "a filename or a directory. In the latter case, the experiment name serves as the "
        "base for the filename.")
    gflags.DEFINE_integer(
        "ckpt_step",
        1000,
        "Steps to run before considering saving checkpoint.")
    gflags.DEFINE_boolean(
        "load_best",
        False,
        "If True, attempt to load 'best' checkpoint.")

    # Data settings.
    gflags.DEFINE_string("training_data_path", None, "")
    gflags.DEFINE_string(
        "eval_data_path", None, "Can contain multiple file paths, separated "
        "using ':' tokens. The first file should be the dev set, and is used for determining "
        "when to save the early stopping 'best' checkpoints.")
    gflags.DEFINE_integer("seq_length", 200, "")
    gflags.DEFINE_boolean(
        "allow_cropping",
        False,
        "Trim overly long training examples to fit. If not set, skip them.")
    gflags.DEFINE_integer("eval_seq_length", None, "")
    gflags.DEFINE_boolean(
        "allow_eval_cropping",
        False,
        "Trim overly long evaluation examples to fit. If not set, crash on overly long examples.")
    gflags.DEFINE_boolean(
        "smart_batching",
        True,
        "Organize batches using sequence length.")
    gflags.DEFINE_boolean("use_peano", True, "A mind-blowing sorting key.")
    gflags.DEFINE_integer(
        "eval_data_limit",
        None,
        "Truncate evaluation set to this many batches. -1 indicates no truncation.")
    gflags.DEFINE_boolean(
        "bucket_eval",
        True,
        "Bucket evaluation data for speed improvement.")
    gflags.DEFINE_boolean("shuffle_eval", False, "Shuffle evaluation data.")
    gflags.DEFINE_integer(
        "shuffle_eval_seed",
        123,
        "Seed shuffling of eval data.")
    gflags.DEFINE_string("embedding_data_path", None,
                         "If set, load GloVe-formatted embeddings from here.")
    gflags.DEFINE_boolean("fine_tune_loaded_embeddings", False,
                          "If set, backpropagate into embeddings even when initializing from pretrained.")

    # Model architecture settings.
    gflags.DEFINE_enum(
        "model_type", "RNN",
        ["CBOW", "RNN", "SPINN", "RLSPINN", "ChoiPyramid", "Maillard", "LMS"], "")
    gflags.DEFINE_integer("gpu", -1, "")
    gflags.DEFINE_integer("model_dim", 8, "")
    gflags.DEFINE_integer("word_embedding_dim", 8, "")
    gflags.DEFINE_boolean("lowercase", False, "When True, ignore case.")
    gflags.DEFINE_boolean("use_internal_parser", False, "Use predicted parse.")
    gflags.DEFINE_boolean(
        "validate_transitions",
        True,
        "Constrain predicted transitions to ones that give a valid parse tree.")
    gflags.DEFINE_float(
        "embedding_keep_rate",
        1.0,
        "Used for dropout on transformed embeddings and in the encoder RNN.")
    gflags.DEFINE_boolean("use_difference_feature", True, "")
    gflags.DEFINE_boolean("use_product_feature", True, "")

    # SPINN tracking LSTM settings.
    gflags.DEFINE_integer(
        "tracking_lstm_hidden_dim",
        None,
        "Set to none to avoid using tracker.")
    gflags.DEFINE_boolean(
        "tracking_ln",
        False,
        "When True, layer normalization is used in tracking.")
    gflags.DEFINE_float(
        "transition_weight",
        None,
        "Set to none to avoid predicting transitions.")
    gflags.DEFINE_boolean("lateral_tracking", True,
                          "Use previous tracker state as input for new state.")
    gflags.DEFINE_boolean(
        "use_tracking_in_composition",
        True,
        "Use tracking lstm output as input for the reduce function.")
    gflags.DEFINE_boolean(
        "composition_ln",
        True,
        "When True, layer normalization is used in TreeLSTM composition.")
    gflags.DEFINE_boolean("predict_use_cell", True,
                          "Use cell output as feature for transition net.")

    # SPINN composition function settings.
    gflags.DEFINE_enum(
        "reduce", "treelstm", [
            "treelstm", "treegru", "tanh", "lms"], "Specify composition function.")

    # ChoiPyramid/ST-Gumbel model settings
    gflags.DEFINE_boolean(
        "pyramid_trainable_temperature",
        None,
        "If set, add a scalar trained temperature parameter.")
    gflags.DEFINE_float("pyramid_temperature_decay_per_10k_steps",
                        0.5, "What it says on the box. Does not impact SparseAdam (for word embedding fine-tuning).")
    gflags.DEFINE_float(
        "pyramid_temperature_cycle_length",
        0.0,
        "For wake-sleep-style experiments. 0.0 disables this feature.")

    # Embedding preprocessing settings.
    gflags.DEFINE_enum("encode",
                       "projection",
                       ["pass",
                        "projection",
                        "gru",
                        "attn"],
                       "Encode embeddings with sequential context.")
    gflags.DEFINE_boolean("encode_reverse", False, "Encode in reverse order.")
    gflags.DEFINE_boolean(
        "encode_bidirectional",
        False,
        "Encode in both directions.")
    gflags.DEFINE_integer(
        "encode_num_layers",
        1,
        "RNN layers in encoding net.")

    # RL settings.
    gflags.DEFINE_float(
        "rl_mu",
        0.1,
        "Use in exponential moving average baseline.")
    gflags.DEFINE_enum("rl_baseline",
                       "ema",
                       ["ema",
                        "pass",
                        "greedy",
                        "value"],
                       "Different configurations to approximate reward function.")
    gflags.DEFINE_enum("rl_reward", "standard", ["standard", "xent"],
                       "Different reward functions to use.")
    gflags.DEFINE_float("rl_weight", 1.0, "Hyperparam for REINFORCE loss.")
    gflags.DEFINE_boolean("rl_whiten", False, "Reduce variance in advantage.")
    gflags.DEFINE_boolean(
        "rl_valid",
        True,
        "Only consider non-validated actions.")
    gflags.DEFINE_float(
        "rl_epsilon",
        1.0,
        "Percent of sampled actions during train time.")
    gflags.DEFINE_float(
        "rl_epsilon_decay",
        50000,
        "Step constant in epsilon delay equation.")
    gflags.DEFINE_float(
        "rl_confidence_interval",
        1000,
        "Penalize probabilities of transitions.")
    gflags.DEFINE_float(
        "rl_confidence_penalty",
        None,
        "Penalize probabilities of transitions.")
    gflags.DEFINE_boolean(
        "rl_catalan",
        False,
        "Sample over a uniform distribution of binary trees.")
    gflags.DEFINE_boolean(
        "rl_catalan_backprop",
        False,
        "Sample over a uniform distribution of binary trees.")
    gflags.DEFINE_boolean(
        "rl_wake_sleep",
        False,
        "Inverse relationship between temperature and rl_weight.")
    gflags.DEFINE_boolean(
        "rl_transition_acc_as_reward",
        False,
        "Use the transition accuracy as the reward. For debugging only.")

    # MLP settings.
    gflags.DEFINE_integer(
        "mlp_dim",
        256,
        "Dimension of intermediate MLP layers.")
    gflags.DEFINE_integer("num_mlp_layers", 1, "Number of MLP layers.")
    gflags.DEFINE_boolean(
        "mlp_ln",
        True,
        "When True, layer normalization is used between MLP layers.")
    gflags.DEFINE_float("semantic_classifier_keep_rate", 0.9,
                        "Used for dropout in the semantic task classifier.")

    # Optimization settings.
    gflags.DEFINE_enum("optimizer_type", "SGD", ["Adam", "SGD"], "")
    gflags.DEFINE_integer(
        "training_steps",
        1000000,
        "Stop training after this point.")
    gflags.DEFINE_integer("batch_size", 32, "Minibatch size.")
    gflags.DEFINE_float("learning_rate", 0.5, "Used in optimizer.")  # https://twitter.com/karpathy/status/801621764144971776
    gflags.DEFINE_float("learning_rate_decay_when_no_progress", 0.5,
        "Used in optimizer. Decay the LR by this much every epoch steps if a new best has not been set in the last epoch.")
    gflags.DEFINE_float("clipping_max_value", 5.0, "")
    gflags.DEFINE_float("l2_lambda", 1e-5, "")

    # Display settings.
    gflags.DEFINE_integer(
        "statistics_interval_steps",
        100,
        "Log training set performance statistics at this interval.")
    gflags.DEFINE_integer(
        "eval_interval_steps",
        100,
        "Evaluate at this interval.")
    gflags.DEFINE_integer(
        "sample_interval_steps",
        None,
        "Sample transitions at this interval.")
    gflags.DEFINE_integer("ckpt_interval_steps", 5000,
                          "Update the checkpoint on disk at this interval.")
    gflags.DEFINE_boolean(
        "ckpt_on_best_dev_error",
        True,
        "If error on the first eval set (the dev set) is "
        "at most 0.99 of error at the previous checkpoint, save a special 'best' checkpoint.")
    gflags.DEFINE_integer(
        "early_stopping_steps_to_wait",
        50000,
        "If development set error doesn't improve significantly in this many steps, cease training.")
    gflags.DEFINE_boolean("evalb", False, "Print transition statistics.")
    gflags.DEFINE_integer("num_samples", 0, "Print sampled transitions.")

    # Evaluation settings
    gflags.DEFINE_boolean(
        "expanded_eval_only_mode",
        False,
        "If set, a checkpoint is loaded and a forward pass is done to get the predicted "
        "transitions. The inferred parses are written to the supplied file(s) along with example-"
        "by-example accuracy information. Requirements: Must specify checkpoint path.")  # TODO: Rename.
    gflags.DEFINE_boolean(
        "expanded_eval_only_mode_use_best_checkpoint",
        True,
        "When in expanded_eval_only_mode, load the ckpt_best checkpoint.")
    gflags.DEFINE_boolean("write_eval_report", False, "")
    gflags.DEFINE_boolean(
        "eval_report_use_preds", True, "If False, use the given transitions in the report, "
        "otherwise use predicted transitions. Note that when predicting transitions but not using them, the "
        "reported predictions will look very odd / not valid.")  # TODO: Remove.

    # Maillard Pyramid
    gflags.DEFINE_boolean(
        "cosine",
        False,
        "If true, use cosine similarity instead of dot product in measuring score for compositions.")
    gflags.DEFINE_enum(
        "parent_selection",
        "gumbel",
        ["gumbel",
        "st-gumbel",
        "softmax"],
        "Which function to use to select or calculate the parent.")
    gflags.DEFINE_boolean(
        "right_branching",
        False,
        "Force right-branching composition.")
    gflags.DEFINE_boolean(
        "debug_branching",
        False,
        "use alternative style of right-branching composition.")
    gflags.DEFINE_boolean(
        "uniform_branching",
        False,
        "Uniform distribution instead of Gumbel softmax weighting during chart parsing.")
    gflags.DEFINE_boolean(
        "random_branching",
        False,
        "Random weighting instead of Gumbel softmax weighting during chart parsing.")
    gflags.DEFINE_boolean(
        "st_gumbel",
        False,
        "ST-Gumbel softmax weighting during chart parsing.")
Example #20
""" % os.path.join(os.path.dirname(__file__), CLIENT_SECRETS)

# Set up a Flow object to be used if we need to authenticate.
FLOW = flow_from_clientsecrets(CLIENT_SECRETS,
                               scope='https://www.googleapis.com/auth/drive',
                               message=MISSING_CLIENT_SECRETS_MESSAGE)

# FOLDERS_FILE, holds a custom list of folder names and their drive id.
FOLDERS_FILE = "folders.json"

# The gflags module makes defining command-line options easy for
# applications. Run this program with the '--help' argument to see
# all the flags that it understands.
gflags.DEFINE_enum('logging_level', 'ERROR',
                   ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
                   'Set the level of logging detail.')
gflags.DEFINE_string('destination',
                     'downloaded/',
                     'Destination folder location',
                     short_name='d')
gflags.DEFINE_boolean('debug', False, 'Log folder contents as being fetched')
gflags.DEFINE_string('logfile', 'drive.log',
                     'Location of file to write the log')
gflags.DEFINE_string('drive_id', 'root',
                     'ID of the folder whose contents are to be fetched')
gflags.DEFINE_enum(
    'export', 'OO', ['PDF', 'OO', 'MSO'],
    'Export format. Export to PDF, OpenOffice, or MS Office format')
gflags.DEFINE_boolean('from_folders_list', False,
                      'Download only the folders in folders.json')
Example #21
from google.cloud.security.inventory import api_map
from google.cloud.security.inventory import errors as inventory_errors
from google.cloud.security.inventory import pipeline_builder as builder
from google.cloud.security.inventory import util as inventory_util
from google.cloud.security.notifier.pipelines import email_inventory_snapshot_summary_pipeline
# pylint: enable=line-too-long

FLAGS = flags.FLAGS

LOGLEVELS = {
    'debug': logging.DEBUG,
    'info' : logging.INFO,
    'warning' : logging.WARN,
    'error' : logging.ERROR,
}
flags.DEFINE_enum('loglevel', 'info', LOGLEVELS.keys(), 'Loglevel.')

flags.DEFINE_boolean('list_resources', False,
                     'List valid resources for --config_path.')

# These flags are for the admin.py module.
flags.DEFINE_string('config_path', None,
                    'Path to the inventory config file.')

flags.DEFINE_string('domain_super_admin_email', None,
                    'An email address of a super-admin in the GSuite domain. '
                    'REQUIRED: if inventory_groups is enabled.')
flags.DEFINE_string('groups_service_account_key_file', None,
                    'The key file with credentials for the service account. '
                    'REQUIRED: If inventory_groups is enabled and '
                    'runnning locally.')
Example #22
import cv2
import time
import gflags
import numpy as np
from datetime import datetime
from matplotlib import pyplot as plt

import tensorflow as tf
from convbox_model import inference
from dataset.interface import dataset
from utils.wrapper import nms


FLAGS = gflags.FLAGS
# Basic model parameters
gflags.DEFINE_enum('dataset', 'pascal_voc', ['pascal_voc', 'coco', 'kitti'], 
                            """Name of dataset.""")
gflags.DEFINE_string('eval_file', 'data/pascal_voc_test_2007_lmdb',
                            """Path to the lmdb file.""")
gflags.DEFINE_integer('input_row', 448,
                            """Provide row of input image.""")
gflags.DEFINE_integer('input_col', 448,
                            """Provide col of input image.""")
gflags.DEFINE_integer('output_row', 14,
                            """Provide row of output shape.""")
gflags.DEFINE_integer('output_col', 14,
                            """Provide col of output shape.""")
gflags.DEFINE_integer('num_class', 20,
                            """Number of class of dataset.""")
gflags.DEFINE_string('eval_dir', 'result',
                            """Directory where to write result files.""")
gflags.DEFINE_string('checkpoint_path', 'backup/model.ckpt-50000',
Example #23
def get_flags():
    gflags.DEFINE_enum("model_type", "transup", ["transup", "bprmf", "fm",
                                                "transe", "transh", "transr", "transd",
                                                "cfkg", "cke", "cofm", "jtransup"], "")
    gflags.DEFINE_enum("dataset", "ml1m", ['kktix', "ml1m", "dbbook2014", "amazon-book", "last-fm", "yelp2018"], "including ratings.csv, r2kg.tsv and a kg dictionary containing kg_hop[0-9].dat")
    gflags.DEFINE_bool(
        "filter_wrong_corrupted",
        True,
        "If set to True, filter test samples from train and validations.")
    gflags.DEFINE_bool("share_embeddings", False, "")
    gflags.DEFINE_bool("use_st_gumbel", False, "")
    gflags.DEFINE_integer("max_queue", 10, ".")
    gflags.DEFINE_integer("num_processes", 40, ".")

    gflags.DEFINE_float("learning_rate", 0.001, "Used in optimizer.")
    gflags.DEFINE_float("norm_lambda", 1.0, "decay of joint model.")
    gflags.DEFINE_float("kg_lambda", 1.0, "decay of kg model.")
    gflags.DEFINE_integer(
        "early_stopping_steps_to_wait",
        70000,
        "How many times will lr decrease? If set to 0, it remains constant.")
    gflags.DEFINE_bool(
        "L1_flag",
        False,
        "If set to True, use L1 distance as dissimilarity; else, use L2.")
    gflags.DEFINE_bool(
        "is_report",
        False,
        "If set to True, use L1 distance as dissimilarity; else, use L2.")
    gflags.DEFINE_float("l2_lambda", 1e-5, "")
    gflags.DEFINE_integer("embedding_size", 64, ".")
    gflags.DEFINE_integer("negtive_samples", 1, ".")
    gflags.DEFINE_integer("batch_size", 512, "Minibatch size.")
    gflags.DEFINE_enum("optimizer_type", "Adagrad", ["Adam", "SGD", "Adagrad", "Rmsprop"], "")
    gflags.DEFINE_float("learning_rate_decay_when_no_progress", 0.5,
                        "Used in optimizer. Decay the LR by this much every epoch steps if a new best has not been set in the last epoch.")

    gflags.DEFINE_integer(
        "eval_interval_steps",
        14000,
        "Evaluate at this interval in each epoch.")
    gflags.DEFINE_integer(
        "training_steps",
        1400000,
        "Stop training after this point.")
    gflags.DEFINE_float("clipping_max_value", 5.0, "")
    gflags.DEFINE_float("margin", 1.0, "Used in margin loss.")
    gflags.DEFINE_float("momentum", 0.9, "The momentum of the optimizer.")
    gflags.DEFINE_integer("seed", 0, "Fix the random seed. Except for 0, which means no setting of random seed.")
    gflags.DEFINE_integer("topn", 10, "")
    gflags.DEFINE_integer("num_preferences", 4, "")
    gflags.DEFINE_float("joint_ratio", 0.5, "(0 - 1). The train ratio of recommendation, kg is 1 - joint_ratio.")

    gflags.DEFINE_string("experiment_name", None, "")
    gflags.DEFINE_string("data_path", None, "")
    gflags.DEFINE_string("rec_test_files", None, "multiple filenames separated by ':'.")
    gflags.DEFINE_string("kg_test_files", None, "multiple filenames separated by ':'.")
    gflags.DEFINE_string("log_path", None, "")
    gflags.DEFINE_enum("log_level", "debug", ["debug", "info"], "")
    gflags.DEFINE_string(
        "ckpt_path", None, "Where to save/load checkpoints. If not set, the same as log_path")
    
    gflags.DEFINE_string(
        "load_ckpt_file", None, "Where to load pretrained checkpoints under log path. multiple filenames separated by ':'.")

    gflags.DEFINE_boolean(
        "has_visualization",
        True,
        "if set True, use visdom for visualization.")
    gflags.DEFINE_integer("visualization_port", 8097, "")
    # todo: only eval when no train.dat when load data
    gflags.DEFINE_boolean(
        "eval_only_mode",
        False,
        "If set, a checkpoint is loaded and a forward pass is done to get the predicted candidates."
        "Requirements: Must specify load_experiment_name.")
    gflags.DEFINE_string("load_experiment_name", None, "")
Example #24
from time import sleep
from time import time

FLAGS = flags.FLAGS
flags.DEFINE_bool("render", True, "Whether to render with pygame.")
flags.DEFINE_integer("screen_resolution", 84,
                     "Resolution for screen feature layers.")
flags.DEFINE_integer("minimap_resolution", 64,
                     "Resolution for minimap feature layers.")

flags.DEFINE_integer("max_agent_steps", 2500, "Total agent steps.")
flags.DEFINE_integer("game_steps_per_episode", 0, "Game steps per episode.")
flags.DEFINE_integer("step_mul", 8, "Game steps per agent step.")
flags.DEFINE_string("agent", "pysc2.agents.a3cAgent.py", "Which agent to run")

flags.DEFINE_enum("agent_race", None, sc2_env.races.keys(), "Agent's race.")
flags.DEFINE_enum("bot_race", None, sc2_env.races.keys(), "Bot's race.")
flags.DEFINE_enum("difficulty", None, sc2_env.difficulties.keys(),
                  "Bot's strength.")

flags.DEFINE_bool("profile", False, "Whether to turn on code profiling.")
flags.DEFINE_bool("trace", False, "Whether to trace the code execution.")
flags.DEFINE_integer("parallel", 1, "How many instances to run in parallel.")

flags.DEFINE_bool("save_replay", True, "Whether to save a replay at the end.")

flags.DEFINE_string("map", None, "Name of a map to use.")
flags.mark_flag_as_required("map")

# Define hyperparameters and constants
s_size = 7056  # Observations are greyscale frames of 84 * 84 * 1
Example #25
    'results.')
gflags.DEFINE_string(
    RUN_URI, None, 'Run identifier, if provided, only run phase '
    'will be completed.')
gflags.DEFINE_string(
    RUN_STAGE, None, 'List of phases to be executed. For example:'
    '"--run_uri=provision,prepare". Available phases:'
    'prepare, provision, run, cleanup, teardown.')
gflags.DEFINE_string(GCE_BOOT_DISK_SIZE, '1000',
                     'The boot disk size in GB for GCP VMs.')
gflags.DEFINE_string(GCE_BOOT_DISK_TYPE, 'pd-ssd',
                     'The boot disk type for GCP VMs.')
gflags.DEFINE_string(MACHINE_TYPE, 'n1-standard-16',
                     'Machine type for GCE Virtual machines.')
gflags.DEFINE_enum(
    MYSQL_SVC_DB_INSTANCE_CORES, '4', ['1', '4', '8', '16'],
    'The number of cores to be provisioned for the DB instance.')
gflags.DEFINE_string(MYSQL_SVC_OLTP_TABLES_COUNT, '4',
                     'The number of tables used in sysbench oltp.lua tests')
gflags.DEFINE_string(
    MYSQL_SVC_OLTP_TABLE_SIZE, '100000',
    'The number of rows of each table used in the oltp tests')
gflags.DEFINE_string(MYSQL_INSTANCE_STORAGE_SIZE, '300',
                     'Storage size (in GB) for SQL instance.')
gflags.DEFINE_list(
    ADDITIONAL_FLAGS, None,
    'List of additional PKB mysql_service valid flags (strings).'
    'For example: "--cloud_storage_bucket=bucket_name".')

# TODO: Implement flag for STDOUT/STDERR file paths.
Example #26
from google.protobuf import text_format
from google.apputils import app
import gflags as flags
import gflags as flags_validators

from tools.android.emulator import resources

from tools.android.emulator import common
from tools.android.emulator import emulated_device
from tools.android.emulator import emulator_meta_data_pb2
from tools.android.emulator import reporting

FLAGS = flags.FLAGS
flags.DEFINE_enum('action', None,
                  ['boot', 'start', 'mini_boot', 'ping', 'kill', 'info'],
                  'The action to perform against the emulator images')
flags.DEFINE_string(
    'skin', None, '[BOOT ONLY] The skin parameter to pass '
    'to the emulator')
flags.DEFINE_string('density', None, '[bazel ONLY] Density of the lcd screen')
flags.DEFINE_string(
    'cache', None, '[bazel ONLY] Size of cache partition in mb '
    '- currently not functioning')
flags.DEFINE_string('vm_size', None, '[bazel ONLY] VM heap size in mb')
flags.DEFINE_integer('memory', None,
                     '[bazel ONLY] the memory for the emulator')
flags.DEFINE_spaceseplist(
    'system_images', None, '[bazel ONLY] the system '
    'images to boot the emulator with')
flags.DEFINE_spaceseplist('apks', None, '[START ONLY] the apks to install')
Example #27
flags.DEFINE_boolean('quiet',
                     False,
                     'If True, ignore status updates while jobs are running.',
                     short_name='q')
flags.DEFINE_boolean(
    'headless', False,
    'Whether this bq session is running without user interaction. This '
    'affects behavior that expects user interaction, like whether '
    'debug_mode will break into the debugger and lowers the frequency '
    'of informational printing.')
flags.DEFINE_enum(
    'format', None, ['none', 'json', 'prettyjson', 'csv', 'sparse', 'pretty'],
    'Format for command output. Options include:'
    '\n pretty: formatted table output'
    '\n sparse: simpler table output'
    '\n prettyjson: easy-to-read JSON format'
    '\n json: maximally compact JSON'
    '\n csv: csv format with header'
    '\nThe first three are intended to be human-readable, and the latter '
    'three are for passing to another program. If no format is selected, '
    'one will be chosen based on the command run.')
flags.DEFINE_multistring(
    'job_property', None,
    'Additional key-value pairs to include in the properties field of '
    'the job configuration')  # No period: Multistring adds flagspec suffix.
flags.DEFINE_boolean(
    'use_gce_service_account', False,
    'Use this when running on a Google Compute Engine instance to use service '
    'account credentials instead of stored credentials. For more information, '
    'see: https://developers.google.com/compute/docs/authentication')
flags.DEFINE_string(
Example #28
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import itertools

import six
from pysc2 import maps
from pysc2 import run_configs

from pysc2.lib import app
import gflags as flags
from s2clientprotocol import data_pb2 as sc_data
from s2clientprotocol import sc2api_pb2 as sc_pb

flags.DEFINE_enum("command", None, ["csv", "python"], "What to generate.")
flags.mark_flag_as_required("command")
FLAGS = flags.FLAGS


def get_data():
    run_config = run_configs.get()

    with run_config.start() as controller:
        m = maps.get("Sequencer")  # Arbitrary ladder map.
        create = sc_pb.RequestCreateGame(local_map=sc_pb.LocalMap(
            map_path=m.path, map_data=run_config.map_data(m.path)))
        create.player_setup.add(type=sc_pb.Participant)
        create.player_setup.add(type=sc_pb.Computer,
                                race=sc_pb.Random,
                                difficulty=sc_pb.VeryEasy)
Example #29
import gflags
import numpy as np

import tensorflow as tf
from nets.interface import basenet
from nets.interface import basenet_arg_scope
from nets.interface import output2X
from convbox_loss import convbox_loss
from convbox_loss import convbox_loss2

slim = tf.contrib.slim

FLAGS = gflags.FLAGS
# Basic model parameters.
gflags.DEFINE_enum('basenet', 'Msdnet',
                   ['InceptionV2', 'InceptionV3', 'InceptionV4', 'Msdnet'],
                   """Name of base network.""")


def inference(images, channels, is_training=False, scope=None):
    with tf.name_scope(scope, 'tower', [images]):
        with slim.arg_scope(basenet_arg_scope('Msdnet')):
            with slim.arg_scope([slim.batch_norm, slim.dropout],
                                is_training=is_training):
                net, end_points = basenet(images, 'Msdnet')
                print(net)
                #net = slim.conv2d(net, 1536, 1, stride=1,padding='SAME', scope='final_conv1')
                #net = slim.conv2d(net, 1536, 3, stride=1,padding='SAME', scope='final_conv2')
                #net = slim.conv2d(net, 1536, 3, stride=1,padding='SAME', scope='final_conv3')
                #net = slim.conv2d(net, 50, 3, stride=1,padding='SAME', scope='final_conv4')
Example #30
        print(str(corpus_stats(report, ptb)) + '\t' + str(corpus_average_depth(report)))
        set_correct, set_total = corpus_stats_labeled(report, ptb_labeled)
        correct.update(set_correct)
        total.update(set_total)

    for key in sorted(total):
        print(key + '\t' + str(correct[key] * 1. / total[key]))


if __name__ == '__main__':
    gflags.DEFINE_string("main_report_path_template", "./checkpoints/*.report",
                         "A template (with wildcards input as \*) for the paths to the main reports.")
    gflags.DEFINE_string("main_data_path", "./snli_1.0/snli_1.0_dev.jsonl",
                         "A template (with wildcards input as \*) for the paths to the main reports.")
    gflags.DEFINE_string("ptb_report_path_template", "_",
                         "A template (with wildcards input as \*) for the paths to the PTB reports, or '_' if not available.")
    gflags.DEFINE_string("ptb_data_path", "_", "The path to the PTB data in SNLI format, or '_' if not available.")
    gflags.DEFINE_boolean("compute_self_f1", True,
                          "Compute self F1 over all reports matching main_report_path_template.")
    gflags.DEFINE_boolean("use_random_parses", False,
                          "Replace all report trees with randomly generated trees. Report path template flags are not used when this is set.")
    gflags.DEFINE_boolean("use_balanced_parses", False,
                          "Replace all report trees with roughly-balanced binary trees. Report path template flags are not used when this is set.")
    gflags.DEFINE_boolean("first_two", False, "Show 'first two' and 'last two' metrics.")
    gflags.DEFINE_boolean("neg_pair", False, "Show 'neg_pair' metric.")
    gflags.DEFINE_enum("data_type", "nli", ["nli", "sst", "listops"], "Data Type")
    gflags.DEFINE_integer("print_latex", 0, "Print this many trees in LaTeX format for each report.")

    FLAGS(sys.argv)

    run()