Example #1
def _get_padding(node):
    padding = node['params']['padding']
    if padding == 'Same':
        return 'SAME'
    elif padding == 'Valid':
        return 'VALID'
    else:
        raise NnvisException('Unknown padding: {}'.format(padding))
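A minimal usage sketch for the helper above; the node layout ({'params': {'padding': ...}}) is inferred from the accessor, and NnvisException is assumed to be defined elsewhere in the project.

node = {'params': {'padding': 'Same'}}
print(_get_padding(node))                              # -> 'SAME'

try:
    _get_padding({'params': {'padding': 'Causal'}})    # unsupported value
except NnvisException as e:
    print(e)                                           # -> Unknown padding: Causal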
Example #2
def _get_activation(node):
    activation = node['params']['activation']
    if activation == 'None':
        return tf.identity
    elif activation == 'Sigmoid':
        return tf.sigmoid
    elif activation == 'Relu':
        return tf.nn.relu
    else:
        raise NnvisException(
            'Unknown activation function: {}'.format(activation))
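A sketch of how the returned callable might be applied; it assumes TensorFlow is importable as tf and the same node layout as above.

import tensorflow as tf

node = {'params': {'activation': 'Relu'}}
act = _get_activation(node)        # -> tf.nn.relu
x = tf.constant([-1.0, 0.0, 2.0])
y = act(x)                         # the activation is applied like any other TF op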
Example #3
def calculate_loss(loss, y, pred):
    losses = {
        'logloss': _calculate_logloss,
        'mse': _calculate_mean_squared_error,
        'cross_entropy': _calculate_cross_entropy,
        'abs_diff': _calculate_absolute_difference,
        'hinge': _calculate_hinge_loss
    }

    # Keep the original name so the error message reports what was requested.
    loss_fn = losses.get(loss)
    if loss_fn is None:
        raise NnvisException('Unknown loss: {}'.format(loss))

    return loss_fn(y, pred)
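A hypothetical call site, assuming y and pred are tensors of matching shape and that the private _calculate_* helpers take (y, pred) and return a scalar loss tensor, as the final dispatch implies.

loss_op = calculate_loss('mse', y, pred)   # dispatches to _calculate_mean_squared_error
# calculate_loss('l1', y, pred)            # would raise NnvisException: Unknown loss: l1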
Example #4
def build_op(node, map_op, inputs, is_training):
    input_ops = [map_op[v] for v in inputs]

    ops = {
        'input': _build_input_op,
        'fc': _build_fc_op,
        'conv': _build_conv_op,
        'pool': _build_pool_op,
        'dropout': _build_dropout_op,
        'batch_norm': _build_batch_norm_op,
        'add': _build_add_op,
        'concat': _build_concat_op,
        'softmax': _build_softmax_op
    }
    op = ops.get(node['layerType'])
    if op is None:
        raise NnvisException('Unknown type of layer: {}'.format(node['layerType']))

    return op(node, input_ops, is_training)
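A sketch of driving build_op over a graph: nodes are visited in topological order so every entry in map_op exists before it is consumed. The sorted_nodes / edges layout here is an assumption for illustration, not the project's exact graph format.

map_op = {}
for node in sorted_nodes:                                    # inputs before consumers
    inputs = [e['src'] for e in edges if e['dst'] == node['id']]
    map_op[node['id']] = build_op(node, map_op, inputs, is_training=True)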
Example #5
def optimize(optimizer, loss, params):
    optimizers = {
        'adam': _adam,
        'sgd': _sgd,
        'momentum': _momentum,
        'adagrad': _adagrad,
        'adadelta': _adadelta,
        'rmsprop': _rmsprop
    }

    opt = optimizers.get(optimizer)
    if opt is None:
        raise NnvisException('Unknown optimizer: {}'.format(optimizer))
    """
    Necessary for batch norm to work properly
    """
    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    with tf.control_dependencies(update_ops):
        return opt(params).minimize(loss)
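A hypothetical call, assuming the private wrappers (_adam, _sgd, ...) accept a params dict such as {'learning_rate': ...} and return a configured TensorFlow optimizer:

params = {'learning_rate': 1e-3}               # assumed shape; depends on the _adam wrapper
train_op = optimize('adam', loss_op, params)   # train_op also runs the batch-norm updates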
Example #6
def _build_pool_op(node, input_ops, is_training):
    x = input_ops[0]
    kernel_size = node['params']['kernelShape']
    strides = node['params']['strides']

    with tf.name_scope(node['id']):
        if node['params']['pool'] == 'Max':
            pool_op = layers.max_pool2d(x,
                                        kernel_size=kernel_size,
                                        stride=strides,
                                        padding=_get_padding(node))
        elif node['params']['pool'] == 'Average':
            pool_op = layers.avg_pool2d(x,
                                        kernel_size=kernel_size,
                                        stride=strides,
                                        padding=_get_padding(node))
        else:
            raise NnvisException('Unknown pool: {}'.format(
                node['params']['pool']))
        # add new name for logits tensor
        tf.identity(pool_op, name='logits')
        return pool_op
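A sketch of building a 2x2 max-pool node on a dummy input, assuming TensorFlow 1.x with layers imported from tf.contrib (the import source is an assumption, as is the node dict below):

import tensorflow as tf
from tensorflow.contrib import layers          # assumed source of max_pool2d / avg_pool2d

node = {
    'id': 'pool_1',
    'params': {'pool': 'Max', 'kernelShape': [2, 2],
               'strides': [2, 2], 'padding': 'Same'},
}
x = tf.placeholder(tf.float32, [None, 28, 28, 1])
pool = _build_pool_op(node, [x], is_training=False)   # -> shape (?, 14, 14, 1)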
Example #7
def _assert(b, msg):
    if not b:
        raise NnvisException(msg)
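Typical use is as a one-line precondition check; the message below is purely illustrative:

_assert(len(input_ops) == 1, 'Dropout layer expects exactly one input')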