Example #1
        1. Build the network and define some variables
        2. Build the loss function
        3. Build the training loop
        4. Select the exemplar (retained-set) samples
        5. Implement the residual network first, then incremental learning
        6. Implement a simple residual network
        """
        # Create neural network model
        print('Run {0} starting ...'.format(itera))
        print("Building model and compiling functions...")

        image_train, label_train, image_test, label_test = utils_data.load_data(
            Cifar_train_file, Cifar_test_file)
        # Build the input pipeline for the next training batch (including the exemplar/protoset files)
        image_batch, label_batch_0, file_protoset_batch = utils_data.Prepare_train_data_batch(
            image_train, label_train, files_protoset, itera, order, nb_cl,
            batch_size)
        label_batch = tf.one_hot(label_batch_0, 100)
        # First increment of training
        if itera == 0:
            # No distillation is needed on the first increment (see the distillation sketch after this example)
            variables_graph, variables_graph2, scores, scores_stored = utils_cifar.prepareNetwork(
                gpu, image_batch, itera)
            with tf.device('/gpu:0'):
                scores = tf.concat(scores, 0)
                l2_reg = wght_decay * tf.reduce_sum(
                    tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES,
                                      scope='ResNet34'))
                loss_class = tf.reduce_mean(
                    tf.nn.sigmoid_cross_entropy_with_logits(labels=label_batch,
                                                            logits=scores))
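
The snippet above covers only the first increment. A minimal sketch of the distillation branch for later increments (itera > 0), in the iCaRL style, is shown below; the slicing bounds, the use of tf.stop_gradient/tf.sigmoid on the stored logits, and the old_cl bookkeeping are assumptions rather than part of the original code.

        else:
            # Later increments: distill old-class outputs from the stored network copy.
            variables_graph, variables_graph2, scores, scores_stored = utils_cifar.prepareNetwork(
                gpu, image_batch, itera)
            with tf.device('/gpu:0'):
                scores = tf.concat(scores, 0)
                scores_stored = tf.concat(scores_stored, 0)
                old_cl = itera * nb_cl  # classes seen in previous increments (assumed)
                # Soft targets for the old classes come from the frozen network's
                # sigmoid outputs; new classes keep their one-hot ground truth.
                old_targets = tf.stop_gradient(tf.sigmoid(scores_stored[:, :old_cl]))
                targets = tf.concat([old_targets, label_batch[:, old_cl:]], 1)
                l2_reg = wght_decay * tf.reduce_sum(
                    tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES,
                                      scope='ResNet34'))
                loss_class = tf.reduce_mean(
                    tf.nn.sigmoid_cross_entropy_with_logits(labels=targets,
                                                            logits=scores))
                loss = loss_class + l2_reg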
Example #2
    5. Implement the residual network first, then incremental learning
    6. Implement a simple residual network
    """
    # Select the order for the class learning
    order = np.arange(100)
    np.random.shuffle(order)
    np.save('order', order)  # Save the class-order sequence to disk

    # Create neural network model
    print('Run {0} starting ...'.format(itera))
    print("Building model and compiling functions...")

    image_train, label_train, image_test, label_test = utils_data.load_data(
        Cifar_train_file, Cifar_test_file)
    # Build the input pipeline for the next training batch
    image_batch, label_batch_0 = utils_data.Prepare_train_data_batch(
        image_train, label_train, itera, order, nb_cl, batch_size)
    label_batch = tf.one_hot(label_batch_0, 100)
    variables_graph, variables_graph2, scores, scores_stored = utils_cifar.prepareNetwork(
        gpu, image_batch)
    # Ops that copy the current network's variables into the stored copy
    # (kept as the frozen reference for later increments).
    op_assign = [(variables_graph2[i]).assign(variables_graph[i])
                 for i in range(len(variables_graph))]

    with tf.device('/gpu:0'):
        scores = tf.concat(scores, 0)
        l2_reg = wght_decay * tf.reduce_sum(
            tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES,
                              scope='ResNet34'))
        loss_class = tf.reduce_mean(
            tf.nn.sigmoid_cross_entropy_with_logits(labels=label_batch,
                                                    logits=scores))
        loss = loss_class + l2_reg
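
A minimal sketch of how the graph in Example #2 might be driven is shown below; the MomentumOptimizer choice, the learning_rate placeholder, the step counts, and the assumption that variables_graph is the list of trainable variables of the current network are all illustrative and not part of the original snippet.

    # Assumed TF1-style driver for the graph built above.
    learning_rate = tf.placeholder(tf.float32, shape=[])
    opt = tf.train.MomentumOptimizer(learning_rate, 0.9)
    train_step = opt.minimize(loss, var_list=variables_graph)

    with tf.Session(config=tf.ConfigProto(allow_soft_placement=True)) as sess:
        sess.run(tf.global_variables_initializer())
        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(coord=coord, sess=sess)

        # Snapshot the current weights into the stored copy; later increments
        # read these frozen weights as their reference outputs.
        sess.run(op_assign)

        for step in range(1000):
            _, batch_loss = sess.run([train_step, loss],
                                     feed_dict={learning_rate: 0.1})
            if step % 100 == 0:
                print('step {0}: loss {1:.4f}'.format(step, batch_loss))

        coord.request_stop()
        coord.join(threads)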