def __init__(self, fname, **params):
    """Load a multi-view mixture model (k components, d dims, v views) from *fname*.

    Model.__init__ presumably parses the parameter file — confirm against base class.
    """
    Model.__init__(self, fname, **params)
    # Bind short stored parameter keys to descriptive attribute names.
    for attr, key in (("k", "k"), ("d", "d"), ("n_views", "v"),
                      ("weights", "w"), ("means", "M"), ("sigmas", "S")):
        setattr(self, attr, self.get_parameter(key))
def __init__(self, **params):
    """Create a mixture model for components using given weights.

    Reads k (components), d (dimension), w (weights), M (means) and
    S (sigmas) from the base class's parameter store.
    """
    Model.__init__(self, **params)
    self.k, self.d = self["k"], self["d"]
    self.weights, self.means, self.sigmas = self["w"], self["M"], self["S"]
def __init__(self, fname, **params):
    """Create a mixture model for components using given weights.

    Parameters are loaded via the base class from *fname*.
    """
    Model.__init__(self, fname, **params)
    # Bind short stored parameter keys to descriptive attribute names.
    for attr, key in (("k", "k"), ("d", "d"), ("weights", "w"),
                      ("means", "M"), ("sigmas", "S")):
        setattr(self, attr, self.get_parameter(key))
def __init__(self):
    """Set up the U-Net training components: decayed LR, loss, optimizer, metric."""
    print("unet init")
    Model.__init__(self)
    # Exponential decay: start at 1e-4, multiply by 0.8 every 10 steps.
    # NOTE(review): the step variable is created here and never incremented by
    # this block — presumably the training loop advances it; confirm.
    step_counter = tf.Variable(0, trainable=False)
    self.learning_rate = tf.train.exponential_decay(
        0.0001, step_counter, 10, 0.8, staircase=True)
    self.loss = Pixelwise_weighted_loss().compute_loss
    self.optimizer = tf.train.GradientDescentOptimizer(self.learning_rate)
    self.metric = IOU()
def __init__(self, genres, label_probs, image_shape, filter_counts,
             unit_counts, resize_shape=None):
    """Record label priors and network architecture, then run base initialization."""
    # Store configuration before the base class consumes genres/resize_shape.
    self.label_probs, self.image_shape = label_probs, image_shape
    self.filter_counts, self.unit_counts = filter_counts, unit_counts
    Model.__init__(self, genres, resize_shape)
def __init__(self, **params):
    """Create a mixture model for components using given weights.

    Components are multinomial: both the mixing weights and each mean
    column are checked to sum to one.
    """
    Model.__init__(self, **params)
    self.k, self.d = self["k"], self["d"]
    self.weights, self.means = self["w"], self["M"]
    # Draw as a multinomial distribution: weights and per-component means
    # must each be valid probability vectors.
    assert allclose(self.weights.sum(), 1.)
    assert allclose(self.means.sum(0), 1.)
    # Symbolic means double as the observed variables (x1..xd).
    self.sym_means = sp.symbols('x1:' + str(self.d + 1))
    self.sym_obs = self.sym_means
def __init__(self, **params):
    """Create a mixture model for components using given weights.

    Mixture-of-regressions variant: stores per-component betas plus the
    covariate mean/covariance parameters.
    """
    Model.__init__(self, **params)
    self.k, self.d = self["k"], self["d"]
    self.weights, self.betas = self["w"], self["B"]
    # Draw as a multinomial distribution: mixing weights must sum to one.
    assert allclose(self.weights.sum(), 1.)
    self.mean, self.sigma = self["xM"], self["xS"]
    self.sigma_val = self["xSigma"]
    # Symbolic coefficients b1..bd; observed variables x1..xd plus y.
    self.sym_betas = sp.symbols('b1:' + str(self.d + 1))
    self.sym_obs = sp.symbols('x1:' + str(self.d + 1) + 'y')
def __init__(self, **params):
    """Create a mixture model for components using given weights.

    Regression-mixture setup: per-component coefficient vectors (betas)
    and covariate distribution parameters are pulled from the base store.
    NOTE(review): nearly identical to a sibling __init__ in this file —
    consider sharing via a common base if the classes are related.
    """
    Model.__init__(self, **params)
    self.k = self["k"]
    self.d = self["d"]
    self.weights = self["w"]
    self.betas = self["B"]
    # Draw as a multinomial distribution: mixing weights must sum to one.
    assert allclose(self.weights.sum(), 1.)
    self.mean = self["xM"]
    self.sigma = self["xS"]
    self.sigma_val = self["xSigma"]
    # Symbolic coefficients b1..bd and observed variables x1..xd, y.
    self.sym_betas = sp.symbols('b1:' + str(self.d + 1))
    self.sym_obs = sp.symbols('x1:' + str(self.d + 1) + 'y')
def __init__(self, name, depth=5, lr=0.001, max_length=822, kernel_size=5,
             filters=100, regularization_factor=0.001, keep_prob=0.5,
             batch_size=200, hidden_size=150):
    """Store training/architecture hyperparameters, then initialize the base Model."""
    # Optimization hyperparameters.
    self.lr = lr
    self.regularization_factor = regularization_factor
    self.keep_prob = keep_prob
    self.batch_size = batch_size
    # Architecture hyperparameters.
    self.hidden_size = hidden_size
    self.filters = filters
    self.kernel_size = kernel_size
    self.depth = depth
    Model.__init__(self, name, max_length)
def __init__(self, genres, label_probs, image_shape, hidden_layer_sizes,
             resize_shape=None):
    """Record label priors and hidden-layer sizes, then run base initialization."""
    # Store configuration before the base class consumes genres/resize_shape.
    self.label_probs, self.image_shape = label_probs, image_shape
    self.hidden_layer_sizes = hidden_layer_sizes
    Model.__init__(self, genres, resize_shape)
def __init__(self, genres, label_probs, image_shape, resize_shape=None):
    """Record label priors and image shape, then run base initialization."""
    self.label_probs, self.image_shape = label_probs, image_shape
    Model.__init__(self, genres, resize_shape)
def __init__(self):
    """Initialize the base Model for the 'book' category."""
    Model.__init__(self, 'book')
def __init__(self):
    """Announce construction and delegate all setup to the base Model."""
    print("unet init")
    Model.__init__(self)
def __init__(self):
    """Set up the U-Net training components with a fixed learning rate."""
    print("unet init")
    Model.__init__(self)
    # Constant LR (1e-6) — unlike the scheduled variant elsewhere in this file.
    self.learning_rate = 0.000001
    self.loss = Pixelwise_weighted_loss().compute_loss
    self.optimizer = tf.train.GradientDescentOptimizer(self.learning_rate)