def __init__(self, dataManager):
    """Initialize both parent classes and reset the iteration counter.

    :param dataManager: data manager instance forwarded to TFMapping
    """
    # Explicit parent-class initialization (multiple inheritance:
    # TFMapping and SettingsClient are both initialized directly).
    TFMapping.__init__(self, dataManager)
    SettingsClient.__init__(self)
    # Number of optimization iterations performed so far.
    self.iterations = 0
def __init__(self, dataManager, inputArguments, outputArguments, name='Function'):
    """Construct a 'Function' mapping.

    Thin constructor: all arguments are forwarded unchanged to
    ``TFMapping.__init__``.

    :param dataManager: data manager this mapping operates on
    :param inputArguments: input argument specification forwarded to TFMapping
    :param outputArguments: output argument specification forwarded to TFMapping
    :param name: name/scope prefix for the mapping (default ``'Function'``)
    """
    TFMapping.__init__(self, dataManager, inputArguments, outputArguments, name=name)
def __init__(self, dataManager, inputArguments, outputArguments, name='Classifier'):
    """Construct a 'Classifier' mapping.

    Thin constructor: all arguments are forwarded unchanged to
    ``TFMapping.__init__``.

    :param dataManager: data manager this mapping operates on
    :param inputArguments: input argument specification forwarded to TFMapping
    :param outputArguments: output argument specification forwarded to TFMapping
    :param name: name/scope prefix for the mapping (default ``'Classifier'``)
    """
    TFMapping.__init__(self, dataManager, inputArguments, outputArguments, name=name)
def _addTensorToDictionary(self, tensor):
    """Wrap *tensor* in a TFMapping, cache the wrapper, and return it.

    :param tensor: TensorFlow tensor node to wrap
    :return: the newly created TFMapping wrapping *tensor*
    """
    # Imported locally — presumably to avoid a circular import with
    # pypost.mappings (TODO confirm).
    from pypost.mappings import TFMapping

    wrapper = TFMapping(self.dataManager, tensorNode=tensor, name='data_tfmapping')
    self.tensorDictionary[tensor] = wrapper
    return wrapper
def addFeatureMapping(self, mapping):
    """Register *mapping* as the feature generator for an existing data entry.

    A raw ``tf.Tensor`` is first wrapped in a TFMapping. The mapping's
    first output variable selects the target data entry; if that entry is
    not known here, the call is delegated to the sub data manager.

    :param mapping: a TFMapping-like object or a raw ``tf.Tensor``
    :raises ValueError: if the output variable is not an existing data
        entry anywhere in the manager hierarchy
    """
    if isinstance(mapping, tf.Tensor):
        # Imported locally — presumably to avoid a circular import with
        # pypost.mappings (TODO confirm).
        from pypost.mappings import TFMapping
        mapping = TFMapping(self, tensorNode=mapping)

    # Only the first output variable determines the target entry.
    outputVariable = mapping.getOutputVariables()[0]

    if outputVariable in self.dataEntries:
        # Mark the entry as a computed feature and attach the mapping as
        # its getter; the extra '_validFlag' entry tracks staleness.
        entry = self.dataEntries[outputVariable]
        entry.isFeature = True
        entry.callBackGetter = mapping
        self.addDataEntry(outputVariable + '_validFlag', 1)
    elif self.subDataManager:
        self.subDataManager.addFeatureMapping(mapping)
    else:
        raise ValueError(
            'Can only add Feature Mapping for existing data entries (aliases are not supported). Current Entry %s does not exist' % outputVariable)
def __init__(self, dataManager, lossFunction, variables_list = None, name = None, printIterations=False):
    """Set up a TF optimizer (Adam or gradient descent) for *lossFunction*.

    Settings-linked properties (optimizer type, iteration count, batch
    size, learning-rate hyperparameters) are registered first, because
    their values are read immediately below to build the optimizer.

    :param dataManager: data manager used to build the minimize/loss mappings
    :param lossFunction: TF tensor node of the loss to minimize
    :param variables_list: optional variable list passed to ``minimize`` as
        ``var_list`` (``None`` lets TF optimize all trainable variables)
    :param name: optional prefix for the settings property names
    :param printIterations: flag stored for later use by the optimizer loop
    """
    super().__init__(dataManager)
    self.loss = lossFunction
    self.variables_list = variables_list
    # The name becomes a prefix ('<name>_') for the global settings keys,
    # so several optimizers can coexist with independent settings.
    if (name is None):
        self.name = ''
    else:
        self.name = name + '_'
    # Link generic optimizer settings; these must be linked before the
    # properties (e.g. self.tfOptimizerType) are read below.
    self.linkPropertyToSettings('tfOptimizerType', globalName = self.name + 'tfOptimizerType', defaultValue=TFOptimizerType.Adam)
    self.linkPropertyToSettings('tfOptimizerNumIterations', globalName = self.name + 'tfOptimizerNumIterations', defaultValue=1000)
    self.linkPropertyToSettings('tfOptimizerBatchSize', globalName=self.name + 'tfOptimizerBatchSize', defaultValue = -1)
    if self.tfOptimizerType == TFOptimizerType.Adam:
        # Adam hyperparameters (defaults match the Adam paper / TF defaults).
        self.linkPropertyToSettings('tfAdamLearningRate', globalName = self.name + 'tfAdamLearningRate', defaultValue = 0.001)
        self.linkPropertyToSettings('tfAdamBeta1', globalName=self.name + 'tfAdamBeta1', defaultValue=0.9)
        self.linkPropertyToSettings('tfAdamBeta2', globalName=self.name + 'tfAdamBeta2', defaultValue=0.999)
        self.linkPropertyToSettings('tfAdamEpsilon', globalName=self.name + 'tfAdamEpsilon', defaultValue=10**-8)
        self.optimizer = tf.train.AdamOptimizer(learning_rate = self.tfAdamLearningRate, beta1 = self.tfAdamBeta1, beta2 = self.tfAdamBeta2, epsilon = self.tfAdamEpsilon)
    elif self.tfOptimizerType == TFOptimizerType.GradientDescent:
        self.linkPropertyToSettings('tfGradientLearningRate', globalName=self.name + 'tfGradientLearningRate', defaultValue=0.001)
        self.optimizer = tf.train.GradientDescentOptimizer(learning_rate=self.tfGradientLearningRate)
    # NOTE(review): if tfOptimizerType is neither Adam nor GradientDescent,
    # self.optimizer is never assigned and the next line raises
    # AttributeError — consider an explicit 'else: raise ValueError(...)'.
    self.minimize = self.optimizer.minimize(self.loss, var_list=self.variables_list)
    # Wrap the minimize op and the loss tensor as data mappings so they
    # can be executed through the data manager.
    self.tm_minimize = TFMapping(dataManager, tensorNode = self.minimize)
    self.tm_loss = TFMapping(dataManager, tensorNode = self.loss)
    self._printIterations = printIterations
    # Collected loss values; populated elsewhere during optimization.
    self.lossLogger = []
def __init__(self, dataManager, inputArguments, outputArguments, meanFunction, name = 'FullGaussian'):
    """Construct a full-Gaussian mapping whose mean is given by *meanFunction*.

    :param dataManager: data manager this mapping operates on
    :param inputArguments: input argument specification forwarded to TFMapping
    :param outputArguments: output argument specification forwarded to TFMapping
    :param meanFunction: mapping providing the Gaussian's mean; its name is
        added to this mapping's additional scopes
    :param name: name/scope prefix for the mapping (default ``'FullGaussian'``)
    """
    # Parent init must run first: additionalScopes is used right below
    # (presumably created by TFMapping.__init__ — TODO confirm).
    TFMapping.__init__(self, dataManager, inputArguments, outputArguments, name = name)
    self.meanFunction = meanFunction
    # Register the mean function's scope so its variables are associated
    # with this mapping as well (assumption — verify against TFMapping).
    self.additionalScopes.append(meanFunction.name)
def __init__(self, dataManager, inputArguments, outputArguments, name = 'NaturalFullGaussian'):
    """Construct a 'NaturalFullGaussian' mapping.

    Thin constructor: all arguments are forwarded unchanged to
    ``TFMapping.__init__``.

    :param dataManager: data manager this mapping operates on
    :param inputArguments: input argument specification forwarded to TFMapping
    :param outputArguments: output argument specification forwarded to TFMapping
    :param name: name/scope prefix for the mapping (default ``'NaturalFullGaussian'``)
    """
    TFMapping.__init__(self, dataManager, inputArguments, outputArguments, name = name)