Example #1
from lasagne.layers import get_all_params
from lasagne.objectives import categorical_accuracy, categorical_crossentropy
from lasagne.updates import adam
from lasagne.utils import floatX
from theano import tensor as T

import flags
from at import adversarial_training
from data import batch_iterator, mnist_load, select_balanced_subset
from deepfool import deepfool
from models import create_network, with_end_points
from utils import (build_result_str, save_images, save_network,
                   setup_train_experiment)

# experiment parameters
flags.DEFINE_integer("seed", 1, "experiment seed")
flags.DEFINE_string("name", None, "name of the experiment")
flags.DEFINE_string("data_dir", "data", "path to data")
flags.DEFINE_string("train_dir", "runs", "path to working dir")

# gan model parameters
flags.DEFINE_string("model", "mlp", "model name (mlp or mlp_with_bn)")
flags.DEFINE_string("layer_dims", "1000-1000-1000-10",
                    "dimensions of fully-connected layers")
flags.DEFINE_bool("use_dropout", False, "whenever to use dropout or not")
flags.DEFINE_float("lmbd", 1.0, "regularization coefficient")
flags.DEFINE_float("epsilon", 0.2, "epsilon for generative fgsm perturbation")

# adversary parameters
flags.DEFINE_integer("deepfool_iter", 25,
                     "maximum number of deepfool iterations")
Example #2
"""Benchmark script for TensorFlow.

See the README for more information.
"""

from __future__ import print_function

#from absl import app
from absl import flags as absl_flags
import tensorflow as tf
import flags

flags.DEFINE_string('network_dir', None, 'network file path.')
flags.DEFINE_string('network', 'network.py', 'network file name')
flags.DEFINE_string('data_dir', None, 'dataset location')
flags.DEFINE_integer('small_chunk', 1, 'accumulate gradients.')
flags.DEFINE_string('memory_saving_method', None,
                    'set up the memory saving method: 1. recomputing 2. TBD')
flags.DEFINE_enum('lr_policy', 'multistep', ('multistep', 'exp'),
                  'learning_rate policy')
flags.DEFINE_boolean('aug_flip', True,
                     'whether to randomly flip dataset images left or right')
flags.DEFINE_integer(
    'stop_accu_epoch', 0, 'early stop when accuracy does not increase 1% for '
    'this number of epochs')
flags.DEFINE_boolean('save_stop', True,
                     'whether to save checkpoint when killing process')
flags.DEFINE_list(
    'aug_list', [], 'Specify a list of augmentation function names to apply '
    'during training.')
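Because this script wraps absl (`from absl import flags as absl_flags`), the enum and list flags above presumably behave as in absl.flags: enum flags reject values outside the allowed set, and list flags split comma-separated command-line strings. A minimal sketch against plain absl:

from absl import flags

flags.DEFINE_enum('lr_policy', 'multistep', ('multistep', 'exp'),
                  'learning_rate policy')
flags.DEFINE_list('aug_list', [], 'augmentation function names')

FLAGS = flags.FLAGS
FLAGS(['prog', '--lr_policy=exp', '--aug_list=flip,crop'])
print(FLAGS.lr_policy)  # 'exp'; '--lr_policy=step' would raise an error
print(FLAGS.aug_list)   # ['flip', 'crop']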
Example #3
import config
import data
import flags

_VALID_INPUT_FEATURES = frozenset({
    data.SEQUENCE_ONE_HOT,
    data.SEQUENCE_KMER_COUNT,
})

TUNER_LOSS_LOSS = 'loss'
TUNER_LOSS_AUC = 'auc/true_top_1p'
TUNER_GOAL_MAX = 'MAXIMIZE'
TUNER_GOAL_MIN = 'MINIMIZE'
TUNER_LOSS_TO_GOAL = {
    TUNER_LOSS_LOSS: TUNER_GOAL_MIN,
    TUNER_LOSS_AUC: TUNER_GOAL_MAX,
}

flags.DEFINE_integer('task', 0, 'Task id when running online')
flags.DEFINE_string('master', '', 'TensorFlow master to use')
flags.DEFINE_string('input_dir', None, 'Path to input data.')
flags.DEFINE_string(
    'affinity_target_map', '',
    'Name of the affinity map from count values to affinity values. '
    'Needed only if using input_dir and running inference or using '
    'microarray values.')
flags.DEFINE_enum(
    'dataset', None,
    sorted(config.INPUT_DATA_DIRS),
    'Name of dataset with known input_dir on which to train. Either input_dir '
    'or dataset is required.')
flags.DEFINE_integer('val_fold', 0, 'Fold to use for validation.')
flags.DEFINE_string('save_base', None,
                    'Base path to save any output or weights.')
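The help text states that either input_dir or dataset is required. If the flags module here is absl-compatible, that constraint can be enforced declaratively right after the definitions; a sketch with abbreviated definitions (the enum values are hypothetical placeholders):

from absl import flags

flags.DEFINE_string('input_dir', None, 'Path to input data.')
flags.DEFINE_enum('dataset', None, ['toy_a', 'toy_b'],
                  'Name of dataset with known input_dir.')
flags.mark_flags_as_mutual_exclusive(['input_dir', 'dataset'], required=True)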
Example #4
"""Runs inference and saves the top-scoring sequences.

Example flag usage:
  --output_name=xxx/base30_1B_inference_top2k
"""
# pylint: enable=line-too-long

# Google internal
import apache_beam as beam
import runner
import app
import flags

from ..learning import eval_feedforward
from ..utils import pool

FLAGS = flags.FLAGS
flags.DEFINE_integer('num_batches', 1000, 'Number of batches to run')
flags.DEFINE_integer('batch_size', 10000, 'Number of sequences per batch')
flags.DEFINE_integer('num_to_save', 2000, 'The number of top results to save.')
flags.DEFINE_string('target_name', None,
                    'The name of the target protein for the inference.')
flags.DEFINE_string('model_dir', None,
                    'The path to base trained model directory.')
flags.DEFINE_string('checkpoint_path', None,
                    'String path to the checkpoint of the model.')
flags.DEFINE_string('output_name', None, 'The name of the output file.')
flags.DEFINE_integer('sequence_length', 40, 'The length of sequences to test.')
flags.DEFINE_string('affinity_target_map', None, 'Name of affinity target map')

_METRICS_NAMESPACE = 'SearchInference'
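Several flags here default to None yet are plainly mandatory for inference (target_name, model_dir, output_name). Assuming an absl-compatible flags module, they can be marked required so parsing fails fast; this sketch would run after the DEFINE_* calls above and before FLAGS is parsed:

from absl import flags

for name in ('target_name', 'model_dir', 'output_name'):
    flags.mark_flag_as_required(name)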

Example #5
import flags

from ..util import measurement_pb2
from ..preprocess import utils


class Error(Exception):
  pass


FLAGS = flags.FLAGS

flags.DEFINE_string("fastq1", None, "Path to the first fastq file.")
flags.DEFINE_string("fastq2", None,
                    "Path to the second fastq file for paired-end "
                    "sequencing, or None for single end")
flags.DEFINE_integer("measurement_id", None,
                     "The measurement data set ID for this fastq pair, from "
                     "the experiment proto")
flags.DEFINE_integer("sequence_length", 40,
                     "Expected length of each sequence read")
flags.DEFINE_string("output_name",
                    "xxx"
                    "aptitude", "Path and name for the output sstable")
flags.DEFINE_integer("base_qual_threshold", 20, "integer indicating the "
                     "lowest quality (on scale from 0 to 40) for a single "
                     "base to be considered acceptable")
flags.DEFINE_integer("bad_base_threshold", 5, "integer indicating the maximum "
                     "number of bad bases before a read is bad quality")
flags.DEFINE_float("avg_qual_threshold", 30.0, "float indicating the mean "
                   "quality across the whole read to be considered good")
flags.DEFINE_integer("num_reads", 99999999999, "The number of reads to include "
                     "from each fastq file.")
Example #6
import logging

import numpy as np
import theano
from theano import tensor as T
from lasagne.objectives import categorical_accuracy

import flags
from at import fast_gradient_perturbation
from data import batch_iterator, mnist_load, select_balanced_subset
from deepfool import deepfool
from models import create_network, with_end_points
from utils import (load_network, load_training_params, build_result_str,
                   save_images)

flags.DEFINE_string("load_dir", None, "path to load checkpoint from")
flags.DEFINE_integer("load_epoch", None, "epoch for which restore model")
flags.DEFINE_string("working_dir", "test", "path to working dir")
flags.DEFINE_bool("sort_labels", True, "sort labels")
flags.DEFINE_integer("batch_size", 100, "batch_index size (default: 100)")
flags.DEFINE_float("fgsm_epsilon", 0.2, "fast gradient epsilon (default: 0.2)")
flags.DEFINE_integer("deepfool_iter", 50,
                     "maximum number of deepfool iterations (default: 25)")
flags.DEFINE_float("deepfool_clip", 0.5,
                   "perturbation clip during search (default: 0.1)")
flags.DEFINE_float("deepfool_overshoot", 0.02,
                   "multiplier for final perturbation")
flags.DEFINE_integer("summary_frequency", 10, "summarize frequency")

FLAGS = flags.FLAGS
logger = logging.getLogger()
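fgsm_epsilon is fed to fast_gradient_perturbation; assuming that helper implements the standard fast gradient sign method, the perturbed input is x_adv = x + epsilon * sign(dL/dx). A NumPy sketch of that step (the [0, 1] clipping assumes MNIST-style pixel ranges):

import numpy as np

def fgsm_step(x, grad, epsilon=0.2):
    # step each input by epsilon along the sign of the loss gradient
    x_adv = x + epsilon * np.sign(grad)
    return np.clip(x_adv, 0.0, 1.0)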
Example #7
import cloud
import flags
import multiprocessing
import re

from nova.cloudpipe.pipelib import CloudPipe
import urllib
import logging


_log = logging.getLogger("api")
_log.setLevel(logging.WARN)


FLAGS = flags.FLAGS
flags.DEFINE_integer('cc_port', 8773, 'cloud controller port')


_c2u = re.compile('(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))')


def _camelcase_to_underscore(str):
    return _c2u.sub(r'_\1', str).lower().strip('_')


def _underscore_to_camelcase(str):
    return ''.join([x[:1].upper() + x[1:] for x in str.split('_')])


def _underscore_to_xmlcase(str):
    # xmlCase is camelCase with a lowercase first letter
    res = _underscore_to_camelcase(str)
    return res[:1].lower() + res[1:]
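A quick usage sketch for the case converters above:

print(_camelcase_to_underscore('InstanceType'))   # 'instance_type'
print(_underscore_to_camelcase('instance_type'))  # 'InstanceType'
print(_underscore_to_xmlcase('instance_type'))    # 'instanceType'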
Example #8
import flags

flags.DEFINE_string('dataset', 'cora', '[cora, citeseer]')
flags.DEFINE_string('subgraph', 'subgraph/',
                    'Directory of all subgraphs, each file is a subgraph')
flags.DEFINE_string('graph', 'graph.txt', 'Edge list of the complete graph')
flags.DEFINE_string('kernel', 'kernel.json', 'Kernels to be matched')
flags.DEFINE_string('query', 'query',
                    'Used to create query files used by SubMatch')
flags.DEFINE_string('meta', 'meta/',
                    'Directory of matched instances of kernels')
flags.DEFINE_string('data', 'data.txt', None)
flags.DEFINE_string('feature', 'feature.txt', None)
flags.DEFINE_string('label', 'label.txt', None)

flags.DEFINE_boolean('use_feature', True, 'Use feature or not')
flags.DEFINE_boolean('use_embedding', True, 'Use embedding or not')
flags.DEFINE_integer('feat_dim', -1, None)
flags.DEFINE_list(
    'node_dim', [256],
    'Dimension of hidden layers between feature and node embedding')
flags.DEFINE_list(
    'instance_h_dim', [256],
    'Dimension of hidden layers between node embedding and instance embedding, last element is the dimension of instance embedding'
)
flags.DEFINE_list(
    'graph_h_dim', [128],
    'Dimension of hidden layers between instance embedding and subgraph embedding, last element is the dimension of subgraph embedding'
)
flags.DEFINE_float('keep_prob', 0.6, 'Used for dropout')

flags.DEFINE_list('kernel_sizes', [1], 'List of number of nodes in kernel')
flags.DEFINE_string('pooling', 'max', '[max, average, sum]')
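One caveat, assuming absl-style DEFINE_list semantics: values supplied on the command line arrive as lists of strings, so dimension lists like node_dim need an explicit int() conversion before use. A sketch against plain absl:

from absl import flags

flags.DEFINE_list('node_dim', '256', 'hidden layer dimensions')
FLAGS = flags.FLAGS
FLAGS(['prog', '--node_dim=256,128'])
dims = [int(d) for d in FLAGS.node_dim]  # ['256', '128'] -> [256, 128]
print(dims)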