Example #1
import unittest

import numpy as np
import tensorflow as tf
from tensorflow.python.eager import context
# Assumed import path: the legacy DeepChem TensorGraph layers module.
from deepchem.models.tensorgraph import layers


class TestLayers(unittest.TestCase):  # enclosing test class; name assumed

    def test_sigmoid_cross_entropy(self):
        """Test invoking SigmoidCrossEntropy in eager mode."""
        with context.eager_mode():
            batch_size = 10
            n_features = 5
            logits = np.random.rand(batch_size, n_features).astype(np.float32)
            labels = np.random.randint(0, 2, (batch_size, n_features)).astype(
                np.float32)
            result = layers.SigmoidCrossEntropy()(labels, logits)
            expected = tf.nn.sigmoid_cross_entropy_with_logits(labels=labels,
                                                               logits=logits)
            assert np.allclose(result, expected)
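For reference, the quantity this test checks is the numerically stable elementwise form of sigmoid cross entropy, max(x, 0) - x*z + log(1 + exp(-|x|)) for logits x and binary labels z. A minimal NumPy sketch of that formula (an illustration, not part of the test suite):

import numpy as np

def sigmoid_cross_entropy_reference(labels, logits):
    # Stable elementwise form: max(x, 0) - x*z + log(1 + exp(-|x|)).
    return (np.maximum(logits, 0) - logits * labels
            + np.log1p(np.exp(-np.abs(logits))))

x = np.random.rand(10, 5).astype(np.float32)
z = np.random.randint(0, 2, (10, 5)).astype(np.float32)
# Should match tf.nn.sigmoid_cross_entropy_with_logits(labels=z, logits=x).
ref = sigmoid_cross_entropy_reference(z, x)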
Example #2
# (Excerpt from a longer image-segmentation script: model, features, labels,
# conv1, concat2, train_dataset, the RETRAIN flag, and imports such as os and
# layers are defined earlier.)
deconv2 = layers.Conv2DTranspose(16,
                                 kernel_size=5,
                                 stride=2,
                                 in_layers=concat2)
concat3 = layers.Concat(in_layers=[conv1, deconv2], axis=3)
deconv3 = layers.Conv2DTranspose(1, kernel_size=5, stride=2, in_layers=concat3)
# Compute the final output.
concat4 = layers.Concat(in_layers=[features, deconv3], axis=3)
logits = layers.Conv2D(1,
                       kernel_size=5,
                       stride=1,
                       activation_fn=None,
                       in_layers=concat4)
output = layers.Sigmoid(logits)
model.add_output(output)
loss = layers.ReduceSum(layers.SigmoidCrossEntropy(in_layers=(labels, logits)))
model.set_loss(loss)
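Note that the loss is built from the raw logits rather than the sigmoid output, which is the numerically stable pattern. For comparison only, a sketch of the roughly equivalent loss configuration in the modern Keras API (assumes TensorFlow 2; not part of the original TensorGraph pipeline):

import tensorflow as tf

# from_logits=True applies the stable sigmoid cross entropy to raw logits,
# and Reduction.SUM plays the role of the ReduceSum wrapper above (equal to
# a full sum when the labels have a single channel).
bce = tf.keras.losses.BinaryCrossentropy(
    from_logits=True, reduction=tf.keras.losses.Reduction.SUM)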

# Create the checkpoint directory if it does not already exist.
os.makedirs('models/segmentation', exist_ok=True)

# Train the model (or restore a previously saved one), then evaluate
# performance on the test set.
if RETRAIN:
    print("About to fit model for 50 epochs")
    model.fit(train_dataset, nb_epoch=50, checkpoint_interval=100)
else:
    model.restore()
scores = []
Example #3
# Imports for this script (layers is assumed to be the legacy DeepChem
# TensorGraph layers module that provides Feature, Conv1D, etc.).
import deepchem as dc
import tensorflow as tf
from deepchem.models.tensorgraph import layers

# Build the model.

model = dc.models.TensorGraph(batch_size=1000, model_dir='chromatin')
features = layers.Feature(shape=(None, 101, 4))
accessibility = layers.Feature(shape=(None, 1))
labels = layers.Label(shape=(None, 1))
weights = layers.Weights(shape=(None, 1))
prev = features
for i in range(3):
    # Three 1D convolution blocks over the one-hot encoded sequence.
    prev = layers.Conv1D(filters=15,
                         kernel_size=10,
                         activation=tf.nn.relu,
                         padding='same',
                         in_layers=prev)
    prev = layers.Dropout(dropout_prob=0.5, in_layers=prev)
# Append the accessibility value to the flattened convolution output.
prev = layers.Concat([layers.Flatten(prev), accessibility])
logits = layers.Dense(out_channels=1, in_layers=prev)
output = layers.Sigmoid(logits)
model.add_output(output)
loss = layers.SigmoidCrossEntropy(in_layers=[labels, logits])
weighted_loss = layers.WeightedError(in_layers=[loss, weights])
model.set_loss(weighted_loss)
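WeightedError folds the per-sample Weights into the per-sample cross entropy. As I understand the layer, it reduces to a weighted sum of the losses, roughly the following NumPy sketch (an assumption about its behavior, with made-up array names):

import numpy as np

per_sample_loss = np.random.rand(1000, 1).astype(np.float32)  # SigmoidCrossEntropy output
sample_weights = np.ones((1000, 1), dtype=np.float32)         # values fed to Weights
weighted_loss_value = float((per_sample_loss * sample_weights).sum())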

# Load the data.

train = dc.data.DiskDataset('train_dataset')
valid = dc.data.DiskDataset('valid_dataset')
# Map each span identifier to its measured accessibility value.
span_accessibility = {}
with open('accessibility.txt') as f:
    for line in f:
        fields = line.split()
        span_accessibility[fields[0]] = float(fields[1])

# Define a generator function to produce batches.

def generate_batches(dataset, epochs):