Example #1

import tensorflow as tf
total_train = []
total_regions = []
total_asfmap = []
bbox = [(45, 90), (90, 45), (64, 64), (90, 180), (180, 90), (128, 128),
        (181, 362), (362, 181), (256, 256), (362, 724), (724, 362), (512, 512)]
conv_height = 14
conv_width = 14
height = 600
width = 800
k = 12
gt_num = 10

feat_input = tf.placeholder(tf.float32, [None, conv_height, conv_width, 512])
with tf.variable_scope('rcnn', reuse=None):
    W_conv6 = weight_variable([3, 3, 512, 256], name="W_conv6")
    b_conv6 = bias_variable([256], name="b_conv6")
    feat = conv2d(feat_input, W_conv6) + b_conv6

    W_offset = weight_variable([1, 1, 256, k * 4], name="W_offset")
    b_offset = bias_variable([k * 4], name="b_offset")
    offset = conv2d(feat, W_offset) + b_offset
    offset = tf.reshape(offset, [k * conv_height * conv_width, 4])

    W_score = weight_variable([1, 1, 256, k], name="W_score")
    b_score = bias_variable([k], name="b_score")
    score = conv2d(feat, W_score) + b_score
    score = tf.reshape(score, [k * conv_height * conv_width])

config = tf.ConfigProto()
config.gpu_options.allow_growth = True
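
Example #1 calls weight_variable, bias_variable, and conv2d without defining them. Below is a minimal sketch of what these helpers conventionally look like in TF1-style code, assuming truncated-normal weight init, constant bias init, and a stride-1 SAME convolution (the original project's initializers may differ):

def weight_variable(shape, name=None):
    # Truncated-normal init is the usual default for conv weights in TF1 examples.
    return tf.Variable(tf.truncated_normal(shape, stddev=0.1), name=name)

def bias_variable(shape, name=None):
    # A small positive constant keeps ReLU units from starting dead.
    return tf.Variable(tf.constant(0.1, shape=shape), name=name)

def conv2d(x, W):
    # Stride-1, SAME padding: output spatial size equals input spatial size.
    return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')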
Example #2

import tensorflow as tf
import func  # local helper module providing weight_variable / bias_variable (import assumed)
batch_size = 50
dropout = 0.7 # Probability to KEEP
display_step = 10

# Network Parameters
n_input = 28*28
n_classes = 10

# Graph input
x = tf.placeholder(tf.float32, [None, n_input])
y = tf.placeholder(tf.float32, [None, n_classes])
keep_prob = tf.placeholder(tf.float32)

# Weights and biases
weights = {
        'wc1': func.weight_variable([5,5,1,10], 0.2, 0.0),
        'wi1': {
                'direct1x1': func.weight_variable([1,1,10,20], 0.2, 0.0),
                '1x1pre3x3': func.weight_variable([1,1,10,10], 0.2, 0.0),
                '1x1pre5x5': func.weight_variable([1,1,10,5], 0.2, 0.0),
                '3x3':       func.weight_variable([3,3,10,30], 0.2, 0.0),
                '5x5':       func.weight_variable([5,5,5,10], 0.2, 0.0)
                },
        'wd1': func.weight_variable([7*7*(20+30+10), 100], 0.2, 0.1),
        'out': func.weight_variable([100,n_classes], 0.2, 0.1)
}

biases = {
        'bc1': func.bias_variable([10]),
        'bi1': {
                'direct1x1': func.bias_variable([20]),
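
The excerpt cuts off inside the biases dict, but the weights already determine the architecture: 'wi1' is an inception-style block whose three branches (a direct 1x1, a 1x1 bottleneck into a 3x3, and a 1x1 bottleneck into a 5x5) are concatenated to 20 + 30 + 10 = 60 channels, exactly the depth 'wd1' expects on a 7x7 map (28x28 after two 2x2 max-pools). A hedged sketch of how such a block is typically wired, assuming func.conv2d is a stride-1 SAME convolution and that the biases dict mirrors the weight keys; inception_block is an illustrative name, not the original code:

def inception_block(x, w, b):
    # Branch 1: direct 1x1 convolution, 10 -> 20 channels.
    branch1 = tf.nn.relu(func.conv2d(x, w['direct1x1']) + b['direct1x1'])
    # Branch 2: 1x1 bottleneck then 3x3, 10 -> 10 -> 30 channels.
    pre3 = tf.nn.relu(func.conv2d(x, w['1x1pre3x3']) + b['1x1pre3x3'])
    branch2 = tf.nn.relu(func.conv2d(pre3, w['3x3']) + b['3x3'])
    # Branch 3: 1x1 bottleneck then 5x5, 10 -> 5 -> 10 channels.
    pre5 = tf.nn.relu(func.conv2d(x, w['1x1pre5x5']) + b['1x1pre5x5'])
    branch3 = tf.nn.relu(func.conv2d(pre5, w['5x5']) + b['5x5'])
    # Concatenate on the channel axis: 20 + 30 + 10 = 60 output channels.
    return tf.concat([branch1, branch2, branch3], axis=3)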
Example #3

import tensorflow as tf
import func  # local helper module providing weight_variable / bias_variable (import assumed)
batch_size = 50
dropout = 0.7  # Probability to KEEP
display_step = 10

# Network Parameters
n_input = 64 * 64
dim_mask = 32 * 32

# Graph input
x = tf.placeholder(tf.float32, [None, n_input])
y = tf.placeholder(tf.float32, [None, dim_mask])
keep_prob = tf.placeholder(tf.float32)

# Weights and biases
weights = {
    "wc1": func.weight_variable([3, 3, 1, 10], 0.2, 0.0),
    "wc2": func.weight_variable([3, 3, 60, 60], 0.2, 0.0),
    "wi1": {
        "direct1x1": func.weight_variable([1, 1, 10, 20], 0.2, 0.0),
        "1x1pre3x3": func.weight_variable([1, 1, 10, 10], 0.2, 0.0),
        "1x1pre5x5": func.weight_variable([1, 1, 10, 5], 0.2, 0.0),
        "3x3": func.weight_variable([3, 3, 10, 30], 0.2, 0.0),
        "5x5": func.weight_variable([5, 5, 5, 10], 0.2, 0.0),
    },
    "out": func.weight_variable([5 * 5 * (20 + 30 + 10), dim_mask], 0.2, 0.1),
}

biases = {
    "bc1": func.bias_variable([10]),
    "bc2": func.bias_variable([60]),
    "bi1": {
Example #4
    for m in range(start_ + 1, width - 1):
        val = black[m] if arg else white[m]
        threshold = 0.99 * (black_max if arg else white_max)
        if val > threshold:  # tune this 0.99 factor as needed; it pairs with the 0.05 used below
            end_ = m
            break
    return end_


######################### Forward network ##########################################
# define placeholder for inputs to network
xs = tf.placeholder(tf.float32, [None, 28, 28, 1])  # 28x28
x_image = tf.reshape(xs, [-1, 28, 28, 1])

## conv1 layer ##
W_conv1 = func.weight_variable([5, 5, 1, 32])  # patch 5x5, in size 1, out size 32
b_conv1 = func.bias_variable([32])
h_conv1 = tf.nn.relu(func.conv2d(x_image, W_conv1) + b_conv1)  # output size 28x28x32
h_pool1 = func.max_pool_2x2(h_conv1)  # output size 14x14x32

## conv2 layer ##
W_conv2 = func.weight_variable([5, 5, 32, 64])  # patch 5x5, in size 32, out size 64
b_conv2 = func.bias_variable([64])
h_conv2 = tf.nn.relu(func.conv2d(h_pool1, W_conv2) + b_conv2)  # output size 14x14x64
h_pool2 = func.max_pool_2x2(h_conv2)  # output size 7x7x64

## fc1 layer ##
W_fc1 = func.weight_variable([7 * 7 * 64, 1024])
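
The excerpt stops at W_fc1. In this standard LeNet-style pipeline the head almost always continues with flatten, ReLU, dropout, and a softmax output; the lines below are a conventional completion sketch, not the original file's code (keep_prob is assumed to be a placeholder defined elsewhere):

b_fc1 = func.bias_variable([1024])
h_pool2_flat = tf.reshape(h_pool2, [-1, 7 * 7 * 64])  # flatten the 7x7x64 maps
h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1)
h_fc1_drop = tf.nn.dropout(h_fc1, keep_prob)  # keep_prob: assumed dropout placeholder

## fc2 layer ##
W_fc2 = func.weight_variable([1024, 10])
b_fc2 = func.bias_variable([10])
prediction = tf.nn.softmax(tf.matmul(h_fc1_drop, W_fc2) + b_fc2)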
Example #5

import tensorflow as tf
import func  # local helper module providing weight_variable / bias_variable (import assumed)
batch_size = 50
dropout = 0.7 # Probability to KEEP
display_step = 10

# Network Parameters
n_input = 50 * 50
dim_mask = 10 * 10

# Graph input
x = tf.placeholder(tf.float32, [None, n_input])
y = tf.placeholder(tf.float32, [None, dim_mask])
keep_prob = tf.placeholder(tf.float32)

# Weights and biases
weights = {
    'wc1': func.weight_variable([3, 3, 1, 10], 0.2, 0.0),
    'wc2': func.weight_variable([3, 3, 60, 60], 0.2, 0.0),
    'wi1': {
        'direct1x1': func.weight_variable([1, 1, 10, 20], 0.2, 0.0),
        '1x1pre3x3': func.weight_variable([1, 1, 10, 10], 0.2, 0.0),
        '1x1pre5x5': func.weight_variable([1, 1, 10, 5], 0.2, 0.0),
        '3x3':       func.weight_variable([3, 3, 10, 30], 0.2, 0.0),
        '5x5':       func.weight_variable([5, 5, 5, 10], 0.2, 0.0)
    },
    'out': func.weight_variable([5 * 5 * (20 + 30 + 10), dim_mask], 0.2, 0.1),
}

biases = {
    'bc1': func.bias_variable([10]),
    'bc2': func.bias_variable([60]),
    'bi1': {
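
Examples #2, #3, and #5 pass two extra positional arguments to func.weight_variable (e.g. 0.2 and 0.0, or 0.2 and 0.1 for the output layers). The helper module itself is not shown in any of these excerpts; the sketch below is a plausible reconstruction, assuming the extra arguments are the stddev and mean of a truncated-normal initializer. This is an assumption for illustration, not the project's actual func.py:

# Hypothetical reconstruction of the 'func' helper module used above.
import tensorflow as tf

def weight_variable(shape, stddev, mean):
    # e.g. weight_variable([3, 3, 1, 10], 0.2, 0.0):
    # truncated normal, stddev 0.2, mean 0.0 (assumed semantics of the two args).
    return tf.Variable(tf.truncated_normal(shape, mean=mean, stddev=stddev))

def bias_variable(shape):
    return tf.Variable(tf.constant(0.1, shape=shape))

def conv2d(x, W):
    return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')

def max_pool_2x2(x):
    return tf.nn.max_pool(x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1],
                          padding='SAME')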