def __init__(self, conf, output_dims, constraint, name=None):
    '''EDDecoder constructor

    Args:
        conf: the decoder configuration as a ConfigParser
        output_dims: a dictionary containing the output dimensions for each
            output
        constraint: the constraint for the variables
        name: optional name for the variable scope, defaults to the lowercase
            class name
    '''

    #save the parameters
    self.conf = dict(conf.items('decoder'))

    #apply default configuration
    default = os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        'defaults',
        type(self).__name__.lower() + '.cfg')
    apply_defaults(self.conf, default)

    #materialize the output names as a list: in Python 3 dict.keys() is a
    #live view that tracks later mutations of output_dims and does not
    #support indexing
    self.outputs = list(output_dims.keys())
    self.output_dims = output_dims

    #variable scope shared by all variables created by this decoder
    self.scope = tf.VariableScope(
        tf.AUTO_REUSE,
        name or type(self).__name__,
        constraint=constraint)
def __init__(self, conf):
    '''Processor constructor

    Args:
        conf: processor configuration as a configparser
    '''

    #keep only the processor section, as a plain dict
    self.conf = dict(conf.items('processor'))

    #fill in any missing options from the class-specific defaults file
    here = os.path.dirname(os.path.realpath(__file__))
    defaults_file = os.path.join(
        here, 'defaults', type(self).__name__.lower() + '.cfg')
    apply_defaults(self.conf, defaults_file)
def __init__(self, conf, dataconf, modelconf, evaluatorconf, expdir, server, task_index):
    ''' NnetTrainer constructor, creates the training graph

    Args:
        conf: the trainer config as a ConfigParser
        dataconf: the data configuration as a ConfigParser
        modelconf: the neural net model configuration
        evaluatorconf: the evaluator configuration for evaluating
            if None no evaluation will be done
        expdir: directory where the summaries will be written
        server: optional server to be used for distributed training
        task_index: optional index of the worker task in the cluster
    '''

    #save some inputs
    self.conf = dict(conf.items('trainer'))

    #apply default configuration
    default = os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        'defaults',
        type(self).__name__.lower() + '.cfg')
    apply_defaults(self.conf, default)

    self.dataconf = dataconf
    self.evaluatorconf = evaluatorconf
    self.expdir = expdir
    self.server = server
    self.task_index = task_index

    #build the optional max-norm constraint; parse as float so fractional
    #values like '3.5' are accepted (int() would raise ValueError on them)
    if ('norm_constraint' in self.conf
            and self.conf['norm_constraint'] != 'None'):
        constraint = constraints.MaxNorm(
            float(self.conf['norm_constraint']))
    else:
        constraint = None

    #create the model
    self.model = Model(
        conf=modelconf,
        trainlabels=int(self.conf['trainlabels']),
        constraint=constraint)
def __init__(self, conf, model):
    ''' Decoder constructor

    Args:
        conf: the decoder config
        model: the model that will be used for decoding
    '''

    #store the decoder section of the configuration as a dict
    self.conf = dict(conf.items('decoder'))

    #merge in the class-specific default options
    here = os.path.dirname(os.path.realpath(__file__))
    defaults_file = os.path.join(
        here, 'defaults', type(self).__name__.lower() + '.cfg')
    apply_defaults(self.conf, defaults_file)

    #the model used for decoding
    self.model = model
def __init__(self, conf, constraint, name=None):
    '''EDEncoder constructor

    Args:
        conf: the encoder configuration
        constraint: the constraint for the variables
        name: the encoder name, defaults to the class name
    '''

    #store the encoder section of the configuration as a dict
    self.conf = dict(conf.items('encoder'))

    #merge in the class-specific default options
    here = os.path.dirname(os.path.realpath(__file__))
    defaults_file = os.path.join(
        here, 'defaults', type(self).__name__.lower() + '.cfg')
    apply_defaults(self.conf, defaults_file)

    #variable scope for all variables created by this encoder
    scope_name = name or type(self).__name__
    self.scope = tf.VariableScope(
        tf.AUTO_REUSE, scope_name, constraint=constraint)
def __init__(self, conf, dataconf, model):
    '''Evaluator constructor

    Args:
        conf: the evaluator configuration as a ConfigParser
        dataconf: the database configuration
        model: the model to be evaluated
    '''

    self.conf = dict(conf.items('evaluator'))

    #apply default configuration
    default = os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        'defaults',
        type(self).__name__.lower() + '.cfg')
    apply_defaults(self.conf, default)

    self.model = model

    def _lookup_dataconfs(names):
        '''map each name to the list of database sections it refers to'''
        confs = []
        for dataname in names:
            sections = self.conf[dataname].split(' ')
            confs.append(
                [dict(dataconf.items(section)) for section in sections])
        return confs

    #get the database configurations for the model inputs
    self.input_dataconfs = _lookup_dataconfs(self.model.input_names)

    #get the database configurations for the targets; the 'targets'
    #option may be empty, in which case there are no targets
    targets = self.conf['targets'].split(' ')
    if targets == ['']:
        targets = []
    self.target_dataconfs = _lookup_dataconfs(targets)