Example #1
flags.DEFINE_string('network', 'network.py', 'network file name')
flags.DEFINE_string('data_dir', None, 'dataset location')
flags.DEFINE_integer('small_chunk', 1, 'accumulate gradients.')
flags.DEFINE_string('memory_saving_method', None,
                    'Set up the memory saving method: 1. recomputing 2. TBD')
flags.DEFINE_enum('lr_policy', 'multistep', ('multistep', 'exp'),
                  'learning_rate policy')
flags.DEFINE_boolean('aug_flip', True,
                     'Whether to randomly flip the dataset left or right.')
flags.DEFINE_integer(
    'stop_accu_epoch', 0, 'Early stop when accuracy does not increase by 1% '
    'for this number of epochs.')
flags.DEFINE_boolean('save_stop', True,
                     'Whether to save a checkpoint when the process is killed.')
flags.DEFINE_list(
    'aug_list', [], 'Specify a list of augmentation function names to apply '
    'during training.')

import benchmark_cnn
import memory_saving as ms
from myelindl.core import benchmark_handler
import logging

logging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S',
                    level=logging.INFO)

flags.define_flags()
for name in flags.param_specs.keys():
    absl_flags.declare_key_flag(name)
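
A minimal, self-contained sketch of the same pattern, written against absl.flags and absl.app directly rather than the project's local flags wrapper shown above; the flag names mirror Example #1, but the main() function is purely illustrative:

from absl import app
from absl import flags

FLAGS = flags.FLAGS

flags.DEFINE_string('network', 'network.py', 'Network file name.')
flags.DEFINE_integer('small_chunk', 1,
                     'Number of chunks over which to accumulate gradients.')
flags.DEFINE_boolean('aug_flip', True,
                     'Whether to randomly flip inputs left or right.')


def main(argv):
    del argv  # Unused.
    # Parsed flag values are available as attributes on FLAGS after app.run().
    print(FLAGS.network, FLAGS.small_chunk, FLAGS.aug_flip)


if __name__ == '__main__':
    app.run(main)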
Example #2
    int(1e6), 'Maximum number of examples to evaluate in each evaluation loop.')
flags.DEFINE_integer('mbsz', 128, 'Minibatch size during training.')
flags.DEFINE_enum('optimizer', 'momentum', ['adam', 'momentum'],
                  'Optimizer name')
flags.DEFINE_integer('num_conv_layers', 0,
                     'Number of convolutional hidden layers.')
flags.DEFINE_integer('max_strides', 1,
                     'When using convolutional layers and parameter tuning, '
                     'the maximum stride to test.')
flags.DEFINE_integer('max_rates', 1,
                     'When using convolutional layers and parameter tuning, '
                     'the maximum dilation rate to test.')
flags.DEFINE_integer('num_fc_layers', None,
                     'Number of fully connected hidden layers.')
flags.DEFINE_list(
    'target_names', [output_layers.TARGETS_ALL_OUTPUTS],
    'List of count targets to train against. By default, train against all '
    'counts.')
flags.DEFINE_enum('preprocess_mode', 'PREPROCESS_SKIP_ALL_ZERO_COUNTS', [
    data.PREPROCESS_SKIP_ALL_ZERO_COUNTS,
    data.PREPROCESS_INJECT_RANDOM_SEQUENCES,
    data.PREPROCESS_ALL_COUNTS
], 'How to preprocess input data for training purposes.')
flags.DEFINE_list('input_features', [
    'SEQUENCE_ONE_HOT'
], 'List of features to use as inputs to the model. Valid choices: %r' %
                  _VALID_INPUT_FEATURES)
flags.DEFINE_integer(
    'kmer_k_max', 4,
    'Maximum k-mer size for which to calculate counts if using '
    'SEQUENCE_KMER_COUNT as a feature.')
flags.DEFINE_float(
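
A minimal sketch of how the DEFINE_enum and DEFINE_list flags above behave at parse time; it is standalone and uses absl.flags only, and the main() body is an assumption for illustration:

from absl import app
from absl import flags

FLAGS = flags.FLAGS

flags.DEFINE_enum('optimizer', 'momentum', ['adam', 'momentum'],
                  'Optimizer name; any value outside the list raises a parse error.')
flags.DEFINE_list('input_features', ['SEQUENCE_ONE_HOT'],
                  'Comma-separated on the command line, e.g. '
                  '--input_features=SEQUENCE_ONE_HOT,SEQUENCE_KMER_COUNT.')


def main(argv):
    del argv  # Unused.
    # DEFINE_list yields a Python list of strings after parsing.
    print(FLAGS.optimizer, FLAGS.input_features)


if __name__ == '__main__':
    app.run(main)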
Example #3
                    'Directory of all subgraphs, each file is a subgraph')
flags.DEFINE_string('graph', 'graph.txt', 'Edge list of the complete graph')
flags.DEFINE_string('kernel', 'kernel.json', 'Kernels to be matched')
flags.DEFINE_string('query', 'query',
                    'Used to create query files used by SubMatch')
flags.DEFINE_string('meta', 'meta/',
                    'Directory of matched instances of kernels')
flags.DEFINE_string('data', 'data.txt', None)
flags.DEFINE_string('feature', 'feature.txt', None)
flags.DEFINE_string('label', 'label.txt', None)

flags.DEFINE_boolean('use_feature', True, 'Use feature or not')
flags.DEFINE_boolean('use_embedding', True, 'Use embedding or not')
flags.DEFINE_integer('feat_dim', -1, None)
flags.DEFINE_list(
    'node_dim', [256],
    'Dimension of hidden layers between feature and node embedding')
flags.DEFINE_list(
    'instance_h_dim', [256],
    'Dimension of hidden layers between node embedding and instance '
    'embedding; the last element is the dimension of the instance embedding.')
flags.DEFINE_list(
    'graph_h_dim', [128],
    'Dimension of hidden layers between instance embedding and subgraph '
    'embedding; the last element is the dimension of the subgraph embedding.')
flags.DEFINE_float('keep_prob', 0.6, 'Used for dropout')

flags.DEFINE_list('kernel_sizes', [1],
                  'List of the number of nodes in each kernel.')
flags.DEFINE_string('pooling', 'max', 'Pooling method: one of max, average, sum.')

flags.DEFINE_integer('epoch', 4, None)
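
A minimal sketch showing that list flags such as node_dim parse command-line values as strings, so numeric dimensions need an explicit conversion before use; the conversion in main() is an assumption for illustration, not part of the original example:

from absl import app
from absl import flags

FLAGS = flags.FLAGS

flags.DEFINE_list('node_dim', ['256'],
                  'Dimension of hidden layers between feature and node embedding.')
flags.DEFINE_float('keep_prob', 0.6, 'Keep probability used for dropout.')


def main(argv):
    del argv  # Unused.
    # Values passed as --node_dim=512,256 arrive as strings; convert to ints.
    node_dims = [int(d) for d in FLAGS.node_dim]
    print(node_dims, FLAGS.keep_prob)


if __name__ == '__main__':
    app.run(main)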