Esempio n. 1
0
def wider_pre_dense(layer, n_add, weighted=True):
    """Widen a dense layer by appending ``n_add`` extra output units.

    Args:
        layer: the dense layer to widen.
        n_add: number of output units to append.
        weighted: when True, seed the new units with noisy copies of
            existing teacher columns; otherwise return a fresh wider layer.

    Returns:
        A new StubDense layer with ``layer.units + n_add`` output units.
    """
    if not weighted:
        return StubDense(layer.units + n_add, layer.activation)

    old_units = layer.units
    teacher_w, teacher_b = layer.get_weights()
    student_w = teacher_w.copy()
    student_b = teacher_b.copy()

    # Pick, with replacement, which existing units to duplicate.
    # Columns are appended one at a time because add_noise is handed the
    # current student matrix (presumably to scale the noise — confirm).
    for src in np.random.randint(old_units, size=n_add):
        column = teacher_w[:, src][:, np.newaxis]
        student_w = np.concatenate(
            (student_w, add_noise(column, student_w)), axis=1)
        student_b = np.append(student_b,
                              add_noise(teacher_b[src], student_b))

    widened = StubDense(old_units + n_add, layer.activation)
    widened.set_weights((student_w, student_b))

    return widened
Esempio n. 2
0
def wider_pre_dense(layer, n_add, weighted=True):
    """Widen a dense layer by ``n_add`` output units (row-major weights).

    Args:
        layer: the dense layer to widen; exposes ``input_units`` and ``units``.
        n_add: number of output units to append.
        weighted: when True, seed the new units with noisy copies of
            existing teacher rows; otherwise return a fresh wider layer.

    Returns:
        A new StubDense layer with the same input width and
        ``layer.units + n_add`` output units.
    """
    if not weighted:
        return StubDense(layer.input_units, layer.units + n_add)

    old_units = layer.units
    teacher_w, teacher_b = layer.get_weights()
    student_w = teacher_w.copy()
    student_b = teacher_b.copy()

    # Duplicate randomly chosen teacher rows (one per new unit); rows are
    # appended one at a time because add_noise is handed the current
    # student matrix — presumably to scale the noise; confirm.
    for src in np.random.randint(old_units, size=n_add):
        row = teacher_w[src, :][np.newaxis, :]
        student_w = np.concatenate(
            (student_w, add_noise(row, student_w)), axis=0)
        student_b = np.append(student_b,
                              add_noise(teacher_b[src], student_b))

    widened = StubDense(layer.input_units, old_units + n_add)
    widened.set_weights((student_w, student_b))

    return widened
Esempio n. 3
0
def wider_next_dense(layer, start_dim, total_dim, n_add, weighted=True):
    """Grow the input side of the dense layer that follows a widened layer.

    Args:
        weighted: when True, splice zero-initialized (noised) rows into the
            teacher weights; otherwise return a fresh layer.
        layer: the dense layer whose input must absorb the extra units.
        n_add: number of channels added by the widening.
        start_dim: channel index at which the new units were inserted.
        total_dim: total number of channels before widening.

    Returns:
        A new StubDense layer accepting the widened input.
    """
    if not weighted:
        return StubDense(layer.units, layer.activation)

    teacher_w, teacher_b = layer.get_weights()
    # Rows of the weight matrix contributed by each input channel
    # (assumes shape[0] divides evenly by total_dim — confirm).
    per_channel = int(teacher_w.shape[0] / total_dim)
    split = start_dim * per_channel
    end = total_dim * per_channel

    # Zero rows for the inserted units, lightly perturbed by add_noise.
    inserted = np.zeros((n_add * per_channel, teacher_w.shape[1]))
    student_w = teacher_w.copy()
    student_w = np.concatenate((student_w[:split],
                                add_noise(inserted, student_w),
                                student_w[split:end]))

    new_layer = StubDense(layer.units, layer.activation)
    new_layer.set_weights((student_w, teacher_b))
    return new_layer
Esempio n. 4
0
def dense_to_deeper_block(dense_layer, weighted=True):
    """Build a deeper block after ``dense_layer``: ReLU then an
    identity-initialized (noised) square dense layer, so the net's
    function is initially preserved."""
    units = dense_layer.units
    deeper = StubDense(units, units)
    if weighted:
        identity_w = np.eye(units, dtype=np.float32)
        zero_b = np.zeros(units, dtype=np.float32)
        deeper.set_weights((add_noise(identity_w, np.array([0, 1])),
                            add_noise(zero_b, np.array([0, 1]))))
    return [StubReLU(), deeper]
Esempio n. 5
0
def dense_to_deeper_block(dense_layer, weighted=True):
    """Insert a ReLU plus an identity-initialized square dense layer
    after ``dense_layer``, optionally with noised identity weights."""
    n = dense_layer.units
    extra_dense = StubDense(n, n)
    if weighted:
        eye_weight = np.eye(n)
        flat_bias = np.zeros(n)
        extra_dense.set_weights(
            (add_noise(eye_weight, np.array([0, 1])),
             add_noise(flat_bias, np.array([0, 1]))))
    return [StubReLU(), extra_dense]
def dense_to_deeper_block(dense_layer, weighted=True):
    """Deepen the network after ``dense_layer`` with an identity-initialized
    dense layer (same activation) followed by a dropout layer."""
    n = dense_layer.units
    deeper = StubDense(n, dense_layer.activation)
    if weighted:
        eye_w = np.eye(n)
        zero_b = np.zeros(n)
        deeper.set_weights(
            (add_noise(eye_w, np.array([0, 1])),
             add_noise(zero_b, np.array([0, 1]))))
    return [deeper, StubDropout(constant.DENSE_DROPOUT_RATE)]
Esempio n. 7
0
def wider_next_dense(layer, start_dim, total_dim, n_add, weighted=True):
    """Grow the input side of ``layer`` after its predecessor was widened.

    Splices ``n_add`` channels' worth of zero-initialized (noised) columns
    into the teacher weight matrix at ``start_dim``. With ``weighted=False``
    a fresh layer of the new input width is returned instead.
    """
    if not weighted:
        return StubDense(layer.input_units + n_add, layer.units)

    teacher_w, teacher_b = layer.get_weights()
    # Columns contributed by each input channel (assumes shape[1] divides
    # evenly by total_dim — confirm).
    per_channel = int(teacher_w.shape[1] / total_dim)
    split = start_dim * per_channel
    end = total_dim * per_channel

    inserted = np.zeros((teacher_w.shape[0], n_add * per_channel),
                        dtype=np.float32)
    student_w = teacher_w.copy()
    student_w = np.concatenate((student_w[:, :split],
                                add_noise(inserted, student_w),
                                student_w[:, split:end]),
                               axis=1)

    new_layer = StubDense(layer.input_units + n_add, layer.units)
    new_layer.set_weights((student_w, teacher_b))
    return new_layer
Esempio n. 8
0
def wider_next_dense(layer, start_dim, total_dim, n_add, weighted=True):
    """Adapt the input width of ``layer`` to a widened preceding layer.

    Inserts zero-initialized (noised) columns for the ``n_add`` new
    channels at channel index ``start_dim``; with ``weighted=False`` a
    fresh layer with the enlarged input width is returned.
    """
    if not weighted:
        return StubDense(layer.input_units + n_add, layer.units)

    teacher_w, teacher_b = layer.get_weights()
    # Columns per input channel (assumes even division — confirm).
    cols_per_channel = int(teacher_w.shape[1] / total_dim)
    left = start_dim * cols_per_channel
    right = total_dim * cols_per_channel

    fresh_cols = np.zeros((teacher_w.shape[0], n_add * cols_per_channel))
    student_w = teacher_w.copy()
    student_w = np.concatenate((student_w[:, :left],
                                add_noise(fresh_cols, student_w),
                                student_w[:, left:right]),
                               axis=1)

    adapted = StubDense(layer.input_units + n_add, layer.units)
    adapted.set_weights((student_w, teacher_b))
    return adapted
def wider_next_dense(layer, start_dim, total_dim, n_add, weighted=True):
    """Adapt ``layer`` to accept the extra units of a widened predecessor
    by splicing zero-initialized (noised) rows into its weight matrix at
    channel index ``start_dim``; ``weighted=False`` returns a fresh layer."""
    if not weighted:
        return StubDense(layer.units, layer.activation)

    teacher_w, teacher_b = layer.get_weights()
    # Rows per input channel (assumes even division — confirm).
    rows_per_channel = int(teacher_w.shape[0] / total_dim)
    top = start_dim * rows_per_channel
    bottom = total_dim * rows_per_channel

    fresh_rows = np.zeros((n_add * rows_per_channel, teacher_w.shape[1]))
    student_w = teacher_w.copy()
    student_w = np.concatenate((student_w[:top],
                                add_noise(fresh_rows, student_w),
                                student_w[top:bottom]))

    adapted = StubDense(layer.units, layer.activation)
    adapted.set_weights((student_w, teacher_b))
    return adapted
Esempio n. 10
0
def dense_to_deeper_block(dense_layer, weighted=True):
    """Get a deeper block for a dense layer.

    Args:
        weighted: when True, initialize the new layer with a noised
            identity so the network's function is initially preserved.
        dense_layer: the dense layer after which the block is inserted.

    Returns:
        A list: the new identity-initialized dense layer followed by a
        dropout layer.
    """
    n = dense_layer.units
    deeper = StubDense(n, dense_layer.activation)
    if weighted:
        eye_w = np.eye(n)
        zero_b = np.zeros(n)
        deeper.set_weights(
            (add_noise(eye_w, np.array([0, 1])),
             add_noise(zero_b, np.array([0, 1]))))
    return [deeper, StubDropout(constant.DENSE_DROPOUT_RATE)]
Esempio n. 11
0
def wider_pre_dense(layer, n_add, weighted=True):
    """Widen a dense layer by ``n_add`` output units, duplicating rows.

    Each new unit is a noisy copy of a randomly chosen teacher row/bias;
    with ``weighted=False`` a fresh wider layer is returned instead.
    """
    if not weighted:
        return StubDense(layer.input_units, layer.units + n_add)

    prev_units = layer.units
    teacher_w, teacher_b = layer.get_weights()
    student_w = teacher_w.copy()
    student_b = teacher_b.copy()

    # Grow one row at a time: add_noise receives the current student
    # matrix — presumably to scale the noise; confirm.
    for chosen in np.random.randint(prev_units, size=n_add):
        dup_row = teacher_w[chosen, :][np.newaxis, :]
        student_w = np.concatenate((student_w, add_noise(dup_row, student_w)), axis=0)
        student_b = np.append(student_b, add_noise(teacher_b[chosen], student_b))

    wider_layer = StubDense(layer.input_units, prev_units + n_add)
    wider_layer.set_weights((student_w, student_b))

    return wider_layer