    def parse_args_and_setup(self):
        defaults = self.defaults
        args = self.parser.parse_args()
        args_dict = vars(args)

        # Convert 0/1 to boolean, keep None
        for argname in self.boolean_args:
            if args_dict[argname] is not None:
                args_dict[argname] = bool(args_dict[argname])

        self.args = args

        # Validate argument combinations while defaults are not applied yet
        if args.shuffle_examples is None and args.shuffle_examples_capacity is not None:
            raise ValueError(
                'Shuffle capacity given without --shuffle_examples')
        if args.encode_lstm is None and args.encode_hidden_units is not None:
            raise ValueError('Encode hidden units given without --encode_lstm')

        # In order to make it possible to save the absence of a value
        # (args.cson), allow for 'none' and convert to None
        # Kinda silly since a lot of code works with 'none', but oh well
        # none_updates = {}
        # for k, v in args.__dict__.items():
        #     if isinstance(v, str) and v.lower() == 'none':
        #         none_updates[k] = None
        # args.__dict__.update(none_updates)

        # Supplied values override defaults; anything left unset on the
        # command line falls back to its default
        defaults.update(
            {k: v for (k, v) in args.__dict__.items() if v is not None})
        args.__dict__.update(defaults)
        args.debug = self.debug

        if args.gen_log_dir:
            args.log_dir = self._gen_log_dir(args.log_dir, args.prefix)
        else:
            os.makedirs(args.log_dir, exist_ok=True)

        self.setup_log(args.log_dir)
        log = Log.get_logger(__name__)

        if not args.gen_log_dir and args.restore_if_possible is None:
            args.restore_if_possible = True
            log.info('Restoring if possible since a log directory is provided')

        if 'LD_PRELOAD' in os.environ and 'tcmalloc' in os.environ['LD_PRELOAD']:
            log.info('Using tcmalloc, good on you!')
        else:
            log.warning('NOT using tcmalloc')
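# --- Standalone sketch, not part of the class above ---
# A minimal, self-contained illustration of the merge logic in
# parse_args_and_setup: 0/1 flags become booleans when given, and defaults
# only fill in values the user did not supply. The argument names here are
# hypothetical, not the project's real CLI.
import argparse

_parser = argparse.ArgumentParser()
_parser.add_argument('--use_dropout', type=int, default=None)     # 0/1 flag
_parser.add_argument('--learning_rate', type=float, default=None)

_defaults = {'use_dropout': False, 'learning_rate': 0.001}
_args = _parser.parse_args(['--use_dropout', '1'])

# Convert 0/1 to boolean, keep None (same idea as boolean_args above)
if _args.use_dropout is not None:
    _args.use_dropout = bool(_args.use_dropout)

# Supplied values override defaults; unset ones fall back to the defaults
_defaults.update({k: v for k, v in vars(_args).items() if v is not None})
_args.__dict__.update(_defaults)

print(_args.use_dropout, _args.learning_rate)  # True 0.001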
import tensorflow as tf
import numpy as np
from time import time
from collections import OrderedDict

from .util import *
from attend.log import Log; log = Log.get_logger(__name__)
from attend import util, tf_util
import attend


class AttendSolver():
    def __init__(self, model, update_rule, learning_rate, stats_every):
        self.model = model
        self.update_rule = update_rule
        self.learning_rate = learning_rate
        self.summary_producer = None

        if self.update_rule == 'adam':
            self.optimizer = tf.train.AdamOptimizer
        else:
            raise Exception('Unsupported update rule: {}'.format(update_rule))

        self.loss_names = ['mse', 'pearson_r', 'icc']
        from attend import SummaryProducer
        self.summary_producer = SummaryProducer(self.loss_names)
        self.stats_every = stats_every

        # TODO
        # This will contain the whole validation set, for losses
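# Sketch of how the optimizer class stored above might be turned into a train
# op later on (the real training loop lives elsewhere in the solver; the
# function name and `loss` tensor here are illustrative assumptions).
def _build_train_op_sketch(optimizer_cls, learning_rate, loss):
    optimizer = optimizer_cls(learning_rate=learning_rate)
    global_step = tf.train.get_or_create_global_step()
    return optimizer.minimize(loss, global_step=global_step)

# e.g. train_op = _build_train_op_sketch(tf.train.AdamOptimizer, 1e-3, loss)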
import tensorflow as tf
import numpy as np
import functools

from attend.log import Log
log = Log.get_logger(__name__)


class Encoder():
    # Supported convolutional feature extractors and activations
    ALLOWED_CONV_IMPLS = ['small', 'convnet', 'resnet', 'vggface', 'none']
    ACTIVATIONS = dict(relu=tf.nn.relu)

    def __init__(self, batch_size,
                 encode_hidden_units=0,
                 time_steps=None,
                 debug=True,
                 conv_impl=None,
                 dense_layer=0,
                 dropout=.75,
                 use_dropout=False,
                 use_maxnorm=False,
                 dense_spec=None,
                 encode_lstm=None,
                 lstm_impl=None,
                 use_batch_norm=True,
                 batch_norm_decay=None,
                 use_batch_renorm=False):
        # Batch normalization settings
        self.use_batch_norm = use_batch_norm
        self.batch_norm_decay = batch_norm_decay
        self.use_batch_renorm = use_batch_renorm
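# Sketch (an assumption, not the actual Encoder graph code) of how the
# batch-norm flags above would typically map onto the TF 1.x layers API:
# `momentum` comes from batch_norm_decay and `renorm` from use_batch_renorm.
def _batch_norm_sketch(x, is_training, decay=0.99, renorm=False):
    return tf.layers.batch_normalization(
        x, momentum=decay, renorm=renorm, training=is_training)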
    def save_interesting_things(self):
        # Persist run metadata alongside the logs for reproducibility
        Log.save_args(self.args)
        Log.save_pid()
        Log.save_env()
        Log.save_git_version()
        Log.save_hostname()
        Log.save_condor()
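# The Log.save_* helpers are implemented in attend.log; as a rough,
# hypothetical illustration only, save_git_version might do something like:
import os
import subprocess

def _save_git_version_sketch(log_dir):
    rev = subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode().strip()
    with open(os.path.join(log_dir, 'git_version.txt'), 'w') as f:
        f.write(rev + '\n')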
    def setup_log(self, log_dir=None):
        if log_dir is None:
            log_dir = self.args.log_dir
        from attend.log import Log
        Log.setup(log_dir, self.debug)
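# Rough sketch of the Log.setup / Log.get_logger interface assumed above,
# written with the stdlib logging module (the real implementation lives in
# attend.log; this is only an illustration).
import logging
import os

class _LogSketch:
    @staticmethod
    def setup(log_dir, debug=False):
        os.makedirs(log_dir, exist_ok=True)
        logging.basicConfig(
            filename=os.path.join(log_dir, 'log.txt'),
            level=logging.DEBUG if debug else logging.INFO)

    @staticmethod
    def get_logger(name):
        return logging.getLogger(name)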