def __init__(self, source, target, beta=None, keep_prob=None, dimension=256,
             lamda=None, skip_layer=None, n_class=31,
             weight_path="bvlc_alexnet.npy"):
    """Build a two-stream (source/target domain) network on top of AlexNet.

    Source and target batches are concatenated along the batch axis and fed
    through a shared AlexNet trunk; `self.n_source` / `self.n_target` record
    the dynamic batch sizes so the streams can be split apart again later.

    Args:
        source: source-domain input tensor (batch along axis 0).
        target: target-domain input tensor (batch along axis 0).
        beta: scalar float32 tensor/placeholder weighting a loss term;
            a fresh `tf.placeholder(tf.float32)` is created when None.
        keep_prob: dropout keep probability tensor/placeholder; a fresh
            placeholder is created when None.
        dimension: width of the adaptation representation (`self.rep_dim`).
        lamda: scalar float32 tensor/placeholder (regularization weight);
            a fresh placeholder is created when None.
        skip_layer: AlexNet layers whose pretrained weights are not loaded;
            defaults to ['fc8'].
        n_class: number of output classes.
        weight_path: path to the pretrained AlexNet weight file.
    """
    # BUGFIX: the original signature used `tf.placeholder(...)` and `['fc8']`
    # as default values. Defaults are evaluated once at definition time, so
    # all instances built with defaults shared the same placeholder objects
    # (and the same list). Use None sentinels and create per-instance values.
    if beta is None:
        beta = tf.placeholder(tf.float32)
    if keep_prob is None:
        keep_prob = tf.placeholder(tf.float32)
    if lamda is None:
        lamda = tf.placeholder(tf.float32)
    if skip_layer is None:
        skip_layer = ['fc8']

    self.source = source
    self.target = target
    # Dynamic batch sizes, used to split the concatenated activations.
    self.n_source = tf.shape(source)[0]
    self.n_target = tf.shape(target)[0]
    self.beta = beta

    # BUGFIX: the original ignored the `weight_path` argument and hardcoded
    # "bvlc_alexnet.npy" here; pass the caller's value through.
    AlexNet.__init__(self, tf.concat([self.source, self.target], axis=0),
                     keep_prob, skip_layer, n_class, weight_path=weight_path)

    self.rep_dim = dimension
    self.lamda = lamda
    self.KEEP_PROB_TRAINING = 0.5
    self.KEEP_PROB_VALIDATION = 1.0
    self.create_block(2)
def __init__(self, x, keep_prob, dimension, lamda, decay_l2=1e-4,
             model='./checkpoint/model', skip_layer=None,
             train_layer=None, n_class=1000,
             weight_path="bvlc_alexnet.npy"):
    """Build a single-stream fine-tuning network on top of AlexNet.

    Args:
        x: input tensor fed to the AlexNet trunk.
        keep_prob: dropout keep probability tensor/placeholder.
        dimension: width of the adaptation representation (`self.rep_dim`).
        lamda: scalar tensor/placeholder (regularization weight).
        decay_l2: L2 weight-decay coefficient.
        model: checkpoint path prefix used for saving/restoring.
        skip_layer: AlexNet layers whose pretrained weights are not loaded;
            defaults to ['fc8'].
        train_layer: layers whose variables are trained (the rest stay
            frozen); defaults to ['fc8_new', 'fc_adapt'].
        n_class: number of output classes.
        weight_path: path to the pretrained AlexNet weight file.
    """
    # BUGFIX: mutable list defaults (`['fc8']`, `['fc8_new', 'fc_adapt']`)
    # are shared across all calls; use None sentinels instead.
    if skip_layer is None:
        skip_layer = ['fc8']
    if train_layer is None:
        train_layer = ['fc8_new', 'fc_adapt']

    # BUGFIX: the original ignored the `weight_path` argument and hardcoded
    # "bvlc_alexnet.npy" here; pass the caller's value through.
    AlexNet.__init__(self, x, keep_prob, skip_layer, n_class,
                     weight_path=weight_path)

    self.rep_dim = dimension
    self.model = model
    self.lamda = lamda
    self.train_layers = train_layer
    self.decay_l2 = decay_l2
    self.KEEP_PROB_TRAINING = 0.5
    self.KEEP_PROB_VALIDATION = 1.0