Example 1
from absl import flags
from absl import logging

try:
    import faulthandler
except ImportError:
    faulthandler = None

FLAGS = flags.FLAGS

flags.DEFINE_boolean('run_with_pdb', False, 'Set to true for PDB debug mode')
flags.DEFINE_boolean(
    'pdb_post_mortem', False,
    'Set to true to handle uncaught exceptions with PDB '
    'post mortem.')
flags.DEFINE_alias('pdb', 'pdb_post_mortem')
flags.DEFINE_boolean(
    'run_with_profiling', False, 'Set to true for profiling the script. '
    'Execution will be slower, and the output format might '
    'change over time.')
flags.DEFINE_string(
    'profile_file', None, 'Dump profile information to a file (for python -m '
    'pstats). Implies --run_with_profiling.')
flags.DEFINE_boolean(
    'use_cprofile_for_profiling', True,
    'Use cProfile instead of the profile module for '
    'profiling. This has no effect unless '
    '--run_with_profiling is set.')
flags.DEFINE_boolean('only_check_args',
                     False,
                     'Set to true to validate args and exit.')
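
Example 1 is the flag block that absl.app itself defines at import time; the line `flags.DEFINE_alias('pdb', 'pdb_post_mortem')` makes `--pdb` a second spelling of `--pdb_post_mortem`. A minimal sketch of the effect (the script name and main body are assumptions for illustration, not part of the original):

# Hypothetical demo script: `python demo.py --pdb` behaves exactly like
# `python demo.py --pdb_post_mortem`, because an alias shares its value
# with the flag it points to.
from absl import app
from absl import flags

FLAGS = flags.FLAGS


def main(argv):
    del argv  # Unused.
    # `pdb_post_mortem` is defined by absl.app itself (see Example 1).
    print('pdb_post_mortem:', FLAGS.pdb_post_mortem)


if __name__ == '__main__':
    app.run(main)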
Example 2
from absl import app
from absl import flags
from absl import logging

import tensorflow.compat.v2 as tf

from non_semantic_speech_benchmark.eval_embedding import metrics
from non_semantic_speech_benchmark.eval_embedding.finetune import get_data
from non_semantic_speech_benchmark.eval_embedding.finetune import models

FLAGS = flags.FLAGS

flags.DEFINE_string('file_pattern', None, 'Dataset location.')
flags.DEFINE_string('sk', None, 'Samples name.')
flags.DEFINE_alias('samples_key', 'sk')
flags.DEFINE_integer('ml', 16000, 'Minimum length.')
flags.DEFINE_alias('min_length', 'ml')
flags.DEFINE_string('label_key', None, 'Name of label to use.')
flags.DEFINE_list('label_list', None, 'List of possible label values.')

flags.DEFINE_integer('batch_size', None, 'The number of images in each batch.')
flags.DEFINE_integer('tbs', None, 'not used')

flags.DEFINE_integer('nc', None, 'num_clusters')
flags.DEFINE_float('alpha_init', None, 'Initial autopool alpha.')
flags.DEFINE_alias('ai', 'alpha_init')
flags.DEFINE_boolean('ubn', None, 'Whether to normalize')
flags.DEFINE_float('lr', None, 'not used')

flags.DEFINE_string('logdir', None,
                    'Directory where the model was written to.')
Example 3
import time

from absl import app
from absl import flags
from absl import logging

import tensorflow.compat.v2 as tf

from non_semantic_speech_benchmark.eval_embedding.finetune import get_data
from non_semantic_speech_benchmark.eval_embedding.finetune import models

FLAGS = flags.FLAGS

flags.DEFINE_string('file_pattern', None, 'Dataset location.')
flags.DEFINE_string('sk', None, 'Samples name.')
flags.DEFINE_alias('samples_key', 'sk')
flags.DEFINE_integer('min_length', 16000, 'Minimum length.')
flags.DEFINE_string('label_key', None, 'Name of label to use.')
flags.DEFINE_list('label_list', None, 'List of possible label values.')

flags.DEFINE_integer('batch_size', None, 'The number of images in each batch.')
flags.DEFINE_integer('tbs', None, 'not used')

flags.DEFINE_integer('nc', None, 'num_clusters')
flags.DEFINE_boolean('ubn', None, 'Whether to normalize')
flags.DEFINE_float('lr', None, 'not used')

flags.DEFINE_string('logdir', None,
                    'Directory where the model was written to.')

flags.DEFINE_string('eval_dir', None,
                    'Directory to write eval summaries to.')
Example 4
flags.DEFINE_string(
    'label_key', None,
    'Labels in the dataset on disk. Will be dumped to disk for '
    '`downstream_sklearn_eval` in a different format.')
flags.DEFINE_string('target_key', None,
                    'Teacher embedding key in precomputed tf.Examples.')

# Flags for dumping embeddings to disk for more analysis.
flags.DEFINE_string('embeddings_output_dir', None,
                    'Optional directory to write embeddings to disk.')
flags.DEFINE_string('speaker_id_key', None, 'Optional')

# Teacher / student network flags.
flags.DEFINE_string('model_type', None, 'Specification for student model.')
flags.DEFINE_alias('mt', 'model_type')

flags.DEFINE_float('lr', None, 'not used')

flags.DEFINE_integer('take_fixed_data', None,
                     'If not `None`, take a fixed number of data elements.')
flags.DEFINE_integer('timeout', 7200, 'Wait-for-checkpoint timeout.')

# Not used.
flags.DEFINE_integer('max_sample_length', -1, 'Max samples length.')
flags.DEFINE_alias('msl', 'max_sample_length')
flags.DEFINE_integer('tbs', None, 'The number of images in each batch.')

# Constants for writing embedding data dump.
AUDIO_KEY_ = 'audio'
LABEL_KEY_ = 'label'
Example 5
"""Flags and configuration."""

from absl import flags
from absl import logging

from seed_rl.dmlab import agents
from seed_rl.dmlab import env
import tensorflow as tf

FLAGS = flags.FLAGS

# COMMON FLAGS

flags.DEFINE_string('logdir', '/tmp/agent', 'TensorFlow log directory.')
flags.DEFINE_alias('job-dir', 'logdir')
flags.DEFINE_string('server_address', 'localhost:8686', 'Server address.')
flags.DEFINE_string('level_cache_dir', None, 'Global level cache directory.')

# LEARNER

# Training.
flags.DEFINE_integer('save_checkpoint_secs', 1800,
                     'Checkpoint save period in seconds.')
flags.DEFINE_integer('total_environment_frames', int(1e9),
                     'Total environment frames to train for.')
flags.DEFINE_integer('batch_size', 2, 'Batch size for training.')
flags.DEFINE_integer('inference_batch_size', 2, 'Batch size for inference.')
flags.DEFINE_integer('unroll_length', 100, 'Unroll length in agent steps.')
flags.DEFINE_integer('num_training_tpus', 1, 'Number of TPUs for training.')
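
The `flags.DEFINE_alias('job-dir', 'logdir')` line above is a common pattern for launchers (for example Cloud ML-style jobs) that insist on passing `--job-dir`: the hyphenated name is not a valid Python identifier, so the value is read back through the original flag or by indexing `FLAGS`. A small standalone sketch, assuming only these two definitions:

from absl import flags

FLAGS = flags.FLAGS

flags.DEFINE_string('logdir', '/tmp/agent', 'TensorFlow log directory.')
flags.DEFINE_alias('job-dir', 'logdir')

# Parse a synthetic argv; passing --job-dir sets logdir.
FLAGS(['prog', '--job-dir=/tmp/run42'])
print(FLAGS.logdir)            # /tmp/run42
print(FLAGS['job-dir'].value)  # /tmp/run42 (attribute access is impossible
                               # because of the hyphen)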
Example 6
flags.DEFINE_boolean(
    'precomputed_frontend_and_targets', False,
    'Flag to enable training with precomputed frontend and targets. '
    'If True, `file_pattern` must point to tf_records of tf.Examples. '
    'See get_data.get_precomputed_data for details about tf.Example formatting. '
    'If True, `teacher_model_hub`, `output_key`, `samples_key` '
    'and `min_length` flags are ignored.')
flags.DEFINE_string(
    'frontend_key', None, 'Frontend feature key in precomputed tf.Examples. '
    'This flag is ignored if `precomputed_frontend_and_targets` is False.')
flags.DEFINE_string(
    'target_key', None, 'Teacher embedding key in precomputed tf.Examples. '
    'This flag is ignored if `precomputed_frontend_and_targets` is False.')

flags.DEFINE_string('teacher_model_hub', None, 'Hub teacher model.')
flags.DEFINE_string('output_key', None, 'Teacher model output_key.')
flags.DEFINE_string('samples_key', None, 'Samples name.')
flags.DEFINE_integer('min_length', 16000, 'Minimum audio sample length.')
flags.DEFINE_alias('ml', 'min_length')

# Student network config flags.
flags.DEFINE_integer(
    'bottleneck_dimension', None, 'Dimension of bottleneck. '
    'If 0, bottleneck layer is excluded.')
flags.DEFINE_float('alpha', 1.0, 'Alpha controlling MobileNet width.')
flags.DEFINE_boolean('average_pool', False, 'Average pool MobileNet output.')
flags.DEFINE_string(
    'mobilenet_size', 'small',
    'Size specification for MobileNet in student model. '
    'Valid entries are `tiny`, `small`, and `large`.')
flags.DEFINE_alias('bd', 'bottleneck_dimension')
flags.DEFINE_alias('al', 'alpha')
flags.DEFINE_alias('ap', 'average_pool')
flags.DEFINE_alias('ms', 'mobilenet_size')
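
The `mobilenet_size` help text promises that only `tiny`, `small`, and `large` are accepted, but nothing in the snippet enforces it. One way to add enforcement (an illustrative addition, not in the original source) is a flag validator, which absl checks at parse time; `flags.DEFINE_enum`, as in Example 21, is the declarative alternative.

from absl import flags

flags.DEFINE_string(
    'mobilenet_size', 'small',
    'Size specification for MobileNet in student model. '
    'Valid entries are `tiny`, `small`, and `large`.')

# Reject anything outside the documented set when flags are parsed.
flags.register_validator(
    'mobilenet_size',
    lambda value: value in ('tiny', 'small', 'large'),
    message='--mobilenet_size must be one of: tiny, small, large.')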
Example 7
flags.DEFINE_boolean(
    'precomputed_frontend_and_targets', False,
    'Flag to enable training with precomputed frontend and targets. '
    'If True, `file_pattern` must point to tf_records of tf.Examples. '
    'See get_data.get_precomputed_data for details about tf.Example formatting. '
    'If True, `teacher_model_hub`, `output_key`, `samples_key` '
    'and `min_length` flags are ignored.')
flags.DEFINE_string(
    'frontend_key', None, 'Frontend feature key in precomputed tf.Examples. '
    'This flag is ignored if `precomputed_frontend_and_targets` is False.')
flags.DEFINE_string(
    'target_key', None, 'Teacher embedding key in precomputed tf.Examples. '
    'This flag is ignored if `precomputed_frontend_and_targets` is False.')

flags.DEFINE_string('teacher_model_hub', None, 'Hub teacher model.')
flags.DEFINE_string('output_key', None, 'Teacher model output_key.')
flags.DEFINE_string('samples_key', None, 'Samples name.')
flags.DEFINE_integer('min_length', 16000, 'Minimum audio sample length.')
flags.DEFINE_alias('ml', 'min_length')

# Student network config flags.
flags.DEFINE_integer(
    'bottleneck_dimension', None, 'Dimension of bottleneck. '
    'If 0, bottleneck layer is excluded.')
flags.DEFINE_float('alpha', 1.0, 'Alpha controlling MobileNet width.')
flags.DEFINE_boolean('average_pool', False, 'Average pool MobileNet output.')
flags.DEFINE_string(
    'mobilenet_size', 'small',
    'Size specification for MobileNet in student model. '
    'Valid entries are `tiny`, `small`, and `large`.')
flags.DEFINE_alias('bd', 'bottleneck_dimension')
flags.DEFINE_alias('al', 'alpha')
flags.DEFINE_alias('ap', 'average_pool')
flags.DEFINE_alias('mnet', 'mobilenet_size')
Example 8
from absl import app
from absl import flags
from absl import logging

import tensorflow.compat.v2 as tf
import tensorflow_hub as hub  # pylint:disable=g-bad-import-order

from non_semantic_speech_benchmark.distillation import get_data
from non_semantic_speech_benchmark.distillation import models

FLAGS = flags.FLAGS

flags.DEFINE_string('file_pattern', None, 'Dataset location.')
flags.DEFINE_string('sk', None, 'Samples name.')
flags.DEFINE_alias('samples_key', 'sk')
flags.DEFINE_integer('ml', 16000, 'Minimum length.')
flags.DEFINE_alias('min_length', 'ml')

flags.DEFINE_boolean(
    'precomputed_frontend_and_targets', False,
    'Flag to enable training with precomputed frontend and targets. '
    'If True, `file_pattern` must point to tf_records of tf.Examples. '
    'See get_data.get_precomputed_data for details about tf.Example formatting. '
    'If True, `teacher_model_hub`, `output_key`, `samples_key` '
    'and `min_length` flags are ignored.')

# Teacher / student network flags.
flags.DEFINE_string('teacher_model_hub', None, 'Hub teacher model.')
flags.DEFINE_string('output_key', None, 'Teacher model output_key.')
flags.DEFINE_integer('output_dimension', None, 'Dimension of targets.')
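
`precomputed_frontend_and_targets` in Example 8 is a boolean flag, so absl accepts the bare and `no`-prefixed spellings in addition to explicit `--flag=true/false`. A standalone sketch reusing just that definition (the synthetic argv is for illustration):

from absl import flags

FLAGS = flags.FLAGS

flags.DEFINE_boolean('precomputed_frontend_and_targets', False,
                     'Train from precomputed frontend and targets.')

FLAGS(['prog', '--precomputed_frontend_and_targets'])
print(FLAGS.precomputed_frontend_and_targets)  # True

FLAGS.unparse_flags()  # Reset so the flags can be parsed again.
FLAGS(['prog', '--noprecomputed_frontend_and_targets'])
print(FLAGS.precomputed_frontend_and_targets)  # False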
Example 9
import time

from absl import app
from absl import flags
from absl import logging

import tensorflow.compat.v2 as tf

from non_semantic_speech_benchmark.eval_embedding.finetune import get_data
from non_semantic_speech_benchmark.eval_embedding.finetune import models

FLAGS = flags.FLAGS

flags.DEFINE_string('file_pattern', None, 'Dataset location.')
flags.DEFINE_string('sk', None, 'Samples name.')
flags.DEFINE_alias('samples_key', 'sk')
flags.DEFINE_integer('ml', 16000, 'Minimum length.')
flags.DEFINE_alias('min_length', 'ml')
flags.DEFINE_string('label_key', None, 'Name of label to use.')
flags.DEFINE_list('label_list', None, 'List of possible label values.')

flags.DEFINE_integer('batch_size', None, 'The number of images in each batch.')
flags.DEFINE_integer('tbs', None, 'not used')

flags.DEFINE_integer('nc', None, 'num_clusters')
flags.DEFINE_boolean('ubn', None, 'Whether to normalize')
flags.DEFINE_float('lr', None, 'not used')

flags.DEFINE_string('logdir', None,
                    'Directory where the model was written to.')
Example 10
from absl import flags

import tensorflow as tf

from non_semantic_speech_benchmark.trillsson import get_data
from non_semantic_speech_benchmark.trillsson import models

FLAGS = flags.FLAGS

# Data config flags.
flags.DEFINE_list('file_patterns', None, 'Dataset location.')
flags.DEFINE_string('samples_key', None, 'Samples name.')
flags.DEFINE_string('target_key', None,
                    'Teacher embedding key in precomputed tf.Examples.')

# Student network config flags.
flags.DEFINE_string('model_type', None, 'Specification for student model.')
flags.DEFINE_alias('mt', 'model_type')

# Training config flags.
flags.DEFINE_integer('train_batch_size', 1, 'Hyperparameter: batch size.')
flags.DEFINE_alias('tbs', 'train_batch_size')
flags.DEFINE_integer('max_sample_length', 32000, 'Max samples length.')
flags.DEFINE_alias('msl', 'max_sample_length')
flags.DEFINE_integer('shuffle_buffer_size', None, 'shuffle_buffer_size')
flags.DEFINE_float('lr', 0.001, 'Hyperparameter: learning rate.')
flags.DEFINE_string('logdir', None,
                    'Path to directory where to store summaries.')
flags.DEFINE_integer('training_steps', 1000,
                     'The number of steps to run training for.')
flags.DEFINE_integer(
    'measurement_store_interval', 10,
    'The number of steps between storing objective value in '
    'measurements.')
Example 11
flags.DEFINE_integer('output_dimension', None, 'Dimension of targets.')

flags.DEFINE_boolean(
    'precomputed_targets', False,
    'Flag to enable training with precomputed targets. '
    'If True, `file_pattern` must point to precomputed targets, and '
    '`target_key` must be supplied.')
flags.DEFINE_string(
    'target_key', None, 'Teacher embedding key in precomputed tf.Examples. '
    'This flag is ignored if `precomputed_targets` is False.')

flags.DEFINE_string('teacher_model_hub', None, 'Hub teacher model.')
flags.DEFINE_string('output_key', None, 'Teacher model output_key.')
flags.DEFINE_string('samples_key', None, 'Samples name.')
flags.DEFINE_integer('min_length', 16000, 'Minimum audio sample length.')
flags.DEFINE_alias('ml', 'min_length')

# Student network config flags.
flags.DEFINE_integer(
    'bottleneck_dimension', None, 'Dimension of bottleneck. '
    'If 0, bottleneck layer is excluded.')
flags.DEFINE_alias('bd', 'bottleneck_dimension')
flags.DEFINE_string('model_type', 'mobilenet_debug_1.0_False',
                    'Specification for student model.')
flags.DEFINE_alias('mt', 'model_type')

# Training config flags.
flags.DEFINE_integer('train_batch_size', 1, 'Hyperparameter: batch size.')
flags.DEFINE_alias('tbs', 'train_batch_size')
flags.DEFINE_integer('shuffle_buffer_size', None, 'shuffle_buffer_size')
flags.DEFINE_float('lr', 0.001, 'Hyperparameter: learning rate.')
Example 12
flags.DEFINE_integer('use_task_autoencoder', 1, '')

flags.DEFINE_float('target_task_metric_val', 0.001, '')
flags.DEFINE_float('target_pred_state_metric_val', .01, '')
flags.DEFINE_float('early_task_metric_val', 1.0, '')
flags.DEFINE_float('early_pred_state_metric_val', 1.0, '')
flags.DEFINE_integer('early_stop_step', 100000, '')

flags.DEFINE_integer('batch_size', 128, '')
flags.DEFINE_float('learning_rate', .001, '')
flags.DEFINE_float('lr_decay_rate_per1M_steps', .9, '')
flags.DEFINE_float('reg_amount', 0.0, '')
flags.DEFINE_float('dec_enc_loss_amount', 0.0, '')
flags.DEFINE_float('adver_weight', 1.0, '')

flags.DEFINE_alias('job-dir', 'job_dir')


def get_train_model(task_infos,
                    model,
                    encoder,
                    datas,
                    discriminator,
                    should_train_model,
                    adversarial_task_name,
                    metric_stop_task_name,
                    metric_prefix,
                    max_train_steps=None):
    """This training function was designed to be flexible to work with a variety of tasks.

  @param task_infos: A list of dicts. Each dict specifies the parameters of a task. Keys:
Example 13
from absl import app
from absl import flags

import tensorflow.compat.v2 as tf
import tensorflow_hub as hub  # pylint:disable=g-bad-import-order

from non_semantic_speech_benchmark.distillation import get_data
from non_semantic_speech_benchmark.distillation import models

FLAGS = flags.FLAGS

flags.DEFINE_string('file_pattern', None, 'Dataset location.')
flags.DEFINE_string('samples_key', None, 'Samples name.')
flags.DEFINE_integer('ml', 16000, 'Minimum length.')
flags.DEFINE_alias('min_length', 'ml')

# Teacher / student network flags.
flags.DEFINE_string('teacher_model_hub', None, 'Hub teacher model.')
flags.DEFINE_string('output_key', None, 'Teacher model output_key.')
flags.DEFINE_integer('output_dimension', None, 'Dimension of targets.')
flags.DEFINE_integer('bd', None, 'Dimension of bottleneck.')
flags.DEFINE_alias('bottleneck_dimension', 'bd')
flags.DEFINE_float('al', 1.0, 'Alpha controlling model size.')
flags.DEFINE_alias('alpha', 'al')

flags.DEFINE_integer('tbs', 1, 'Hyperparameter: batch size.')
flags.DEFINE_alias('train_batch_size', 'tbs')
flags.DEFINE_integer('shuffle_buffer_size', None, 'shuffle_buffer_size')

flags.DEFINE_float('lr', 0.001, 'Hyperparameter: learning rate.')
Example 14
from absl import app
from absl import flags
from absl import logging

import tensorflow as tf

from non_semantic_speech_benchmark.eval_embedding import metrics
from non_semantic_speech_benchmark.eval_embedding.keras import get_data
from non_semantic_speech_benchmark.eval_embedding.keras import models

FLAGS = flags.FLAGS

flags.DEFINE_string('file_pattern', None, 'Dataset location.')
flags.DEFINE_string('embedding_name', None, 'Embedding name.')
flags.DEFINE_alias('en', 'embedding_name')
flags.DEFINE_string('embedding_dimension', None, 'Embedding dimension.')
flags.DEFINE_alias('ed', 'embedding_dimension')
flags.DEFINE_string('label_name', None, 'Name of label to use.')
flags.DEFINE_list('label_list', None, 'List of possible label values.')
flags.DEFINE_list('bucket_boundaries', ['99999'],
                  'bucket_boundaries for data. Default is all one bucket.')

flags.DEFINE_integer('batch_size', None, 'The number of images in each batch.')
flags.DEFINE_integer('tbs', None, 'not used')

flags.DEFINE_integer('num_clusters', None, 'num_clusters')
flags.DEFINE_alias('nc', 'num_clusters')
flags.DEFINE_float('alpha_init', None, 'Initial autopool alpha.')
flags.DEFINE_alias('ai', 'alpha_init')
flags.DEFINE_boolean('use_batch_normalization', None,
                     'Whether to use batch normalization.')
Example 15
from absl import app
from absl import flags
from absl import logging

import tensorflow.compat.v2 as tf
import tensorflow_hub as hub  # pylint:disable=g-bad-import-order

from non_semantic_speech_benchmark.distillation import get_data
from non_semantic_speech_benchmark.distillation import models

FLAGS = flags.FLAGS

flags.DEFINE_list('file_patterns', None, 'Dataset location.')
flags.DEFINE_string('sk', None, 'Samples name.')
flags.DEFINE_alias('samples_key', 'sk')
flags.DEFINE_integer('ml', 16000, 'Minimum length.')
flags.DEFINE_alias('min_length', 'ml')

flags.DEFINE_boolean(
    'precomputed_targets', False,
    'Flag to enable training with precomputed targets. '
    'If True, `file_pattern` must point to precomputed targets, and '
    '`target_key` must be supplied.')
flags.DEFINE_string(
    'target_key', None, 'Teacher embedding key in precomputed tf.Examples. '
    'This flag is ignored if `precomputed_targets` is False.')

flags.DEFINE_boolean('normalize_to_pm_one', False, 'Normalize input.')

Example 16
flags.DEFINE_boolean(
    'precomputed_targets', False,
    'Flag to enable training with precomputed targets. '
    'If True, `file_patterns` must point to precomputed targets, and '
    '`target_key` must be supplied.')
flags.DEFINE_string(
    'target_key', None, 'Teacher embedding key in precomputed tf.Examples. '
    'This flag is ignored if `precomputed_targets` is False.')
flags.DEFINE_boolean('normalize_to_pm_one', False, 'Normalize input.')

flags.DEFINE_string('teacher_model_hub', None, 'Hub teacher model.')
flags.DEFINE_string('output_key', None, 'Teacher model output_key.')
flags.DEFINE_string('samples_key', None, 'Samples name.')
flags.DEFINE_integer('min_length', 16000, 'Minimum audio sample length.')
flags.DEFINE_alias('ml', 'min_length')

# Student network config flags.
flags.DEFINE_boolean('truncate_output', None, 'Whether to truncate output.')
flags.DEFINE_alias('tr', 'truncate_output')
flags.DEFINE_string('model_type', None, 'Specification for student model.')
flags.DEFINE_alias('mt', 'model_type')
flags.DEFINE_boolean('spec_augment', False, 'Student spec augment.')
flags.DEFINE_alias('sa', 'spec_augment')

# Training config flags.
flags.DEFINE_integer('train_batch_size', 1, 'Hyperparameter: batch size.')
flags.DEFINE_alias('tbs', 'train_batch_size')
flags.DEFINE_integer('shuffle_buffer_size', None, 'shuffle_buffer_size')
flags.DEFINE_float('lr', 0.001, 'Hyperparameter: learning rate.')
flags.DEFINE_string('logdir', None,
                    'Path to directory where to store summaries.')
Example 17
from absl import app
from absl import flags
from absl import logging

import tensorflow.compat.v2 as tf
import tensorflow_hub as hub  # pylint:disable=g-bad-import-order

from non_semantic_speech_benchmark.distillation import get_data
from non_semantic_speech_benchmark.distillation import models

FLAGS = flags.FLAGS

flags.DEFINE_string('file_pattern', None, 'Dataset location.')
flags.DEFINE_string('sk', None, 'Samples name.')
flags.DEFINE_alias('samples_key', 'sk')
flags.DEFINE_integer('ml', 16000, 'Minimum length.')
flags.DEFINE_alias('min_length', 'ml')

# Teacher / student network flags.
flags.DEFINE_string('teacher_model_hub', None, 'Hub teacher model.')
flags.DEFINE_string('output_key', None, 'Teacher model output_key.')
flags.DEFINE_integer('output_dimension', None, 'Dimension of targets.')
flags.DEFINE_integer('bd', None, 'Dimension of bottleneck.')
flags.DEFINE_alias('bottleneck_dimension', 'bd')
flags.DEFINE_float('al', 1.0, 'Alpha controlling model size.')
flags.DEFINE_alias('alpha', 'al')

flags.DEFINE_integer('batch_size', None, 'The number of images in each batch.')
flags.DEFINE_integer('tbs', None, 'not used')
Example 18
flags.DEFINE_bool('use_unk', False, '')

flags.DEFINE_bool('small', False, '')

flags.DEFINE_bool('train_mask_dids', True, '')
flags.DEFINE_bool('valid_mask_dids', True, '')
flags.DEFINE_bool('test_mask_dids', True, '')
flags.DEFINE_float('mask_dids_ratio', -1.,
                   'need to do exp # dev new did ratio 0.054, test is 0.872')
flags.DEFINE_float('mask_uids_ratio', 0., '')
flags.DEFINE_bool('test_all_mask', False,
                  'TODO test later submit with test_all_mask=True')

flags.DEFINE_float('label_smoothing_rate', 0., '')
flags.DEFINE_float('unk_aug_rate', 0., '')
flags.DEFINE_alias('unk_aug_ratio', 'unk_aug_rate')

flags.DEFINE_float('neg_mask_ratio', 0., '')
flags.DEFINE_float('neg_filter_ratio', 0., '')

flags.DEFINE_bool('train_uid_emb', True, '')

flags.DEFINE_bool('use_multi_dropout', False, '')

flags.DEFINE_bool('min_count_unk', True, '')

flags.DEFINE_string('input_dir', '../input', '')

flags.DEFINE_bool('slim_emb_height', False, '')

flags.DEFINE_bool('mask_history', True, '')
Example 19
"""Trains on embeddings using Keras."""

from absl import app
from absl import flags

import tensorflow.compat.v2 as tf
tf.compat.v2.enable_v2_behavior()

from non_semantic_speech_benchmark.eval_embedding.keras import get_data
from non_semantic_speech_benchmark.eval_embedding.keras import models

FLAGS = flags.FLAGS

flags.DEFINE_string('file_pattern', None, 'Dataset location.')
flags.DEFINE_string('embedding_name', None, 'Embedding name.')
flags.DEFINE_alias('en', 'embedding_name')
flags.DEFINE_string('embedding_dimension', None, 'Embedding dimension.')
flags.DEFINE_alias('ed', 'embedding_dimension')
flags.DEFINE_string('label_name', None, 'Name of label to use.')
flags.DEFINE_list('label_list', None, 'List of possible label values.')

flags.DEFINE_integer('train_batch_size', 1, 'Hyperparameter: batch size.')
flags.DEFINE_alias('tbs', 'train_batch_size')
flags.DEFINE_integer('shuffle_buffer_size', None, 'shuffle_buffer_size')

flags.DEFINE_integer('num_clusters', None, 'num_clusters')
flags.DEFINE_alias('nc', 'num_clusters')
flags.DEFINE_boolean('use_batch_normalization', None,
                     'Whether to use batch normalization.')
flags.DEFINE_alias('ubn', 'use_batch_normalization')
flags.DEFINE_float('lr', 0.001, 'Hyperparameter: learning rate.')
Example 20
"""Trains on embeddings using Keras."""

from absl import app
from absl import flags

import tensorflow.compat.v2 as tf

from non_semantic_speech_benchmark.eval_embedding.finetune import get_data
from non_semantic_speech_benchmark.eval_embedding.finetune import models

FLAGS = flags.FLAGS

flags.DEFINE_string('file_pattern', None, 'Dataset location.')
flags.DEFINE_string('samples_key', None, 'Samples name.')
flags.DEFINE_integer('ml', 16000, 'Minimum length.')
flags.DEFINE_alias('min_length', 'ml')
flags.DEFINE_string('label_key', None, 'Name of label to use.')
flags.DEFINE_list('label_list', None, 'List of possible label values.')

flags.DEFINE_integer('tbs', 1, 'Hyperparameter: batch size.')
flags.DEFINE_alias('train_batch_size', 'tbs')
flags.DEFINE_integer('shuffle_buffer_size', None, 'shuffle_buffer_size')

flags.DEFINE_integer('nc', None, 'num_clusters')
flags.DEFINE_alias('num_clusters', 'nc')
flags.DEFINE_float('alpha_init', None, 'Initial autopool alpha.')
flags.DEFINE_alias('ai', 'alpha_init')
flags.DEFINE_boolean('ubn', None, 'Whether to use batch normalization.')
flags.DEFINE_alias('use_batch_normalization', 'ubn')
flags.DEFINE_float('lr', 0.001, 'Hyperparameter: learning rate.')
Example 21
flags.DEFINE_list('list_key',
                  None,
                  'help message for this argument.',
                  short_name='l')

# enum
# error occurs if you specify something other than x, y, z
# exe command: `--enum_key z`
flags.DEFINE_enum('enum_key',
                  'x', ['x', 'y', 'z'],
                  'help message for this argument.',
                  short_name='e')

# alias
# alias_key is an alias for s_key
# exe command: `--alias_key string`
flags.DEFINE_alias('alias_key', 's_key')

FLAGS = flags.FLAGS


def main(argv=None):
    print('s_key: {}'.format(FLAGS.s_key))
    print('i_key: {}'.format(FLAGS.i_key))
    print('f_key: {}'.format(FLAGS.f_key))
    print('b_key: {}'.format(FLAGS.b_key))

    print('list_key: {}'.format(FLAGS.list_key))

    print('enum_key: {}'.format(FLAGS.enum_key))

    print('alias_key: {}'.format(FLAGS.alias_key))
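
Parsing a synthetic command line (values here are assumptions for illustration) shows the resolution rules of Example 21: an alias and its target share one value, and `short_name` spellings work the same way.

# Assumes the flag definitions from Example 21 above have been executed.
FLAGS(['prog', '--alias_key', 'hello', '--e', 'z'])
print(FLAGS.s_key)      # hello -- set through its alias.
print(FLAGS.alias_key)  # hello -- same underlying flag.
print(FLAGS.enum_key)   # z     -- set through short_name 'e'.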
Example 22
flags.DEFINE_bool(
    'test', False,
    'Run an agent in test mode: restore flag is set to true and number of envs set to 1. '
    'Loss is calculated, but gradients are not applied. '
    'Checkpoints, summaries, log files are not updated, but console logger is enabled.'
)
flags.DEFINE_integer(
    'replay', 0,
    "Save a replay after this many episodes. Default of 0 means don't save replays."
)
flags.DEFINE_string(
    'replay_dir', None, 'Directory to save replays. '
    'Linux distros will save on ~/StarCraftII/Replays/ + path(replay_dir). '
    'Windows distros will save on path(replay_dir)')

flags.DEFINE_alias('e', 'env')
flags.DEFINE_alias('a', 'agent')
flags.DEFINE_alias('p', 'n_envs')
flags.DEFINE_alias('u', 'n_updates')
flags.DEFINE_alias('lf', 'log_freq')
flags.DEFINE_alias('cf', 'ckpt_freq')
flags.DEFINE_alias('la', 'log_eps_avg')
flags.DEFINE_alias('n', 'experiment')
flags.DEFINE_alias('g', 'gin_bindings')
flags.DEFINE_alias('r', 'replay')
flags.DEFINE_alias('rd', 'replay_dir')


def main(argv):
    args = flags.FLAGS
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
Example 23
from absl import flags

import tensorflow.compat.v2 as tf

from non_semantic_speech_benchmark.eval_embedding.finetune import get_data
from non_semantic_speech_benchmark.eval_embedding.finetune import models

FLAGS = flags.FLAGS

flags.DEFINE_string('file_pattern', None, 'Dataset location.')
flags.DEFINE_string('samples_key', None, 'Samples name.')
flags.DEFINE_integer('min_length', 16000, 'Minimum length.')
flags.DEFINE_string('label_key', None, 'Name of label to use.')
flags.DEFINE_list('label_list', None, 'List of possible label values.')

flags.DEFINE_integer('tbs', 1, 'Hyperparameter: batch size.')
flags.DEFINE_alias('train_batch_size', 'tbs')
flags.DEFINE_integer('shuffle_buffer_size', None, 'shuffle_buffer_size')

flags.DEFINE_integer('nc', None, 'num_clusters')
flags.DEFINE_alias('num_clusters', 'nc')
flags.DEFINE_boolean('ubn', None, 'Whether to use batch normalization.')
flags.DEFINE_alias('use_batch_normalization', 'ubn')
flags.DEFINE_float('lr', 0.001, 'Hyperparameter: learning rate.')

flags.DEFINE_string('logdir', None,
                    'Path to directory where to store summaries.')

flags.DEFINE_integer('training_steps', 1000,
                     'The number of steps to run training for.')
flags.DEFINE_integer(
    'measurement_store_interval', 10,
    'The number of steps between storing objective value in '
    'measurements.')
Example 24
import os
import pickle

from absl import flags

import gin
import gin.tf
import tensorflow as tf
import gin.tf.external_configurables

import hypertune
import numpy as np

from tensorflow.python.lib.io import file_io
from tf_extractor import TfExtractor
from torch_extractor import TorchExtractor

flags.DEFINE_string('root_dir', os.getenv('TEST_UNDECLARED_OUTPUTS_DIR'),
                    'Root directory for writing logs/summaries/checkpoints.')
flags.DEFINE_alias('job-dir', 'root_dir')
flags.DEFINE_multi_string('gin_file', None, 'Paths to the study config file.')
flags.DEFINE_multi_string('gin_bindings', None, 'Gin binding to pass through.')
FLAGS = flags.FLAGS


@gin.configurable
def data_pipeline(data_path):
    with file_io.FileIO(data_path, mode='rb') as fIn:
        data = pickle.load(fIn)
    return data


@gin.configurable
def train_and_report_metrics(xs,
                             ys,
Example 25
flags.DEFINE_multi_string('gin_file', None,
                          'List of paths to the config files.')
flags.DEFINE_multi_string('gin_param', None,
                          'Newline separated list of Gin parameter bindings.')
flags.DEFINE_string('map', 'MoveToBeacon', '')
flags.DEFINE_boolean('gpu_memory_allow_growth', False, '')
flags.DEFINE_float('gpu_memory_fraction',
                   None,
                   '',
                   lower_bound=0,
                   upper_bound=1)
flags.DEFINE_boolean('profile', False, '')
flags.DEFINE_boolean('debug', False, '')
flags.DEFINE_boolean('trace', False, '')

flags.DEFINE_alias('s', 'save_checkpoint_secs')
flags.DEFINE_alias('l', 'step_limit')
flags.DEFINE_alias('n', 'run_name')
flags.DEFINE_alias('d', 'run_dir')
flags.DEFINE_alias('m', 'map')


def main(args):
    run_name = FLAGS.run_name or time.strftime('%Y%m%d-%H%M%S',
                                               time.localtime())
    output_dir = path.join(FLAGS.run_dir, run_name)

    gin.bind_parameter('SC2EnvironmentConfig.map_name', FLAGS.map)

    gin_files = []
    if path.exists(output_dir):
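
Example 25's `gpu_memory_fraction` uses `lower_bound` and `upper_bound`, which absl enforces during parsing rather than leaving range checks to the training code. A standalone sketch of that behavior (flag values are assumptions for illustration):

from absl import flags

FLAGS = flags.FLAGS

flags.DEFINE_float('gpu_memory_fraction', None,
                   'Fraction of GPU memory to use.',
                   lower_bound=0, upper_bound=1)

FLAGS(['prog', '--gpu_memory_fraction=0.5'])
print(FLAGS.gpu_memory_fraction)  # 0.5

FLAGS.unparse_flags()  # Reset parsed state before parsing again.
try:
    FLAGS(['prog', '--gpu_memory_fraction=1.5'])  # Outside [0, 1].
except flags.IllegalFlagValueError as err:
    print(err)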