Example #1
0
# Held-out evaluation dataset: one full-size batch of (index, label) pairs,
# with each index batch mapped through the five ragged sequence loaders to
# produce the model's input tuple.
ds_test = (
    tf.data.Dataset.from_tensor_slices((idx_test, y_label[idx_test]))
    .batch(len(idx_test), drop_remainder=False)
    .map(lambda x, y: ((five_p_loader(x, ragged_output=True),
                        three_p_loader(x, ragged_output=True),
                        ref_loader(x, ragged_output=True),
                        alt_loader(x, ragged_output=True),
                        strand_loader(x, ragged_output=True)),
                       y))
)

# Repeat training three times to gauge run-to-run variability; collect each
# run's learning curve, test-set evaluation, and final weights.
histories = []
evaluations = []
weights = []
for run in range(3):
    # Fresh encoder + MIL head each run so weights are re-initialized.
    tile_encoder = InstanceModels.VariantSequence(6, 4, 2, [16, 16, 8, 8])
    mil = RaggedModels.MIL(
        instance_encoders=[tile_encoder.model],
        output_dim=1,
        pooling='mean',
        output_type='regression',
    )
    mil.model.compile(
        loss=['mse'],
        metrics=['mse'],
        optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
    )
    # Stop once validation MSE stops improving; restore the best weights seen.
    early_stopping = tf.keras.callbacks.EarlyStopping(
        monitor='val_mse',
        min_delta=0.001,
        patience=20,
        mode='min',
        restore_best_weights=True,
    )
    history = mil.model.fit(
        ds_train,
        steps_per_epoch=10,
        validation_data=ds_valid,
        epochs=100000,  # effectively unbounded; early stopping ends training
        callbacks=[early_stopping],
    )
    evaluation = mil.model.evaluate(ds_test)
    histories.append(history.history)
    evaluations.append(evaluation)
    weights.append(mil.model.get_weights())


with open(cwd / 'figures' / 'controls' / 'samples' / 'sim_data' / 'regression' / 'experiment_3' / 'instance_model_mean.pkl', 'wb') as f:
Example #2
0
    # Test split as a single full-size batch; each index batch is converted
    # into the model's ragged input tuple by the five sequence loaders.
    sequence_loaders = (five_p_loader, three_p_loader, ref_loader,
                        alt_loader, strand_loader)
    ds_test = tf.data.Dataset.from_tensor_slices(
        (idx_test, y_label[idx_test]))
    ds_test = ds_test.batch(len(idx_test), drop_remainder=False)
    ds_test = ds_test.map(
        lambda x, y: (tuple(loader(x, ragged_output=True)
                            for loader in sequence_loaders), y))
    X = False
    while X == False:
        try:
            tile_encoder = InstanceModels.VariantSequence(
                6, 4, 2, [16, 16, 8, 8])
            mil = RaggedModels.MIL(instance_encoders=[tile_encoder.model],
                                   output_dim=1,
                                   pooling='sum',
                                   output_type='other',
                                   instance_layers=[128, 64])
            losses = [Losses.CoxPH()]
            mil.model.compile(loss=losses,
                              metrics=[Losses.CoxPH()],
                              optimizer=tf.keras.optimizers.Adam(
                                  learning_rate=0.001, ))
            callbacks = [
                tf.keras.callbacks.EarlyStopping(monitor='val_coxph',
                                                 min_delta=0.0001,
                                                 patience=20,
                                                 mode='min',
                                                 restore_best_weights=True)
            ]
            history = mil.model.fit(ds_train,
Example #3
0
# Single-batch test dataset: pair each test index with its label, then map
# the indices through the five ragged sequence loaders.
ds_test = tf.data.Dataset.from_tensor_slices((idx_test, y_label[idx_test]))
ds_test = ds_test.batch(len(idx_test), drop_remainder=False)

def _test_inputs(x, y):
    # Each loader turns a batch of indices into one ragged feature tensor.
    return ((five_p_loader(x, ragged_output=True),
             three_p_loader(x, ragged_output=True),
             ref_loader(x, ragged_output=True),
             alt_loader(x, ragged_output=True),
             strand_loader(x, ragged_output=True)), y)

ds_test = ds_test.map(_test_inputs)

# Accumulators for the per-run results collected by the training loop below.
histories, evaluations, weights = [], [], []
for i in range(3):
    tile_encoder = InstanceModels.VariantSequence(6, 4, 2, [16, 16, 8, 8])
    mil = RaggedModels.MIL(instance_encoders=[tile_encoder.model],
                           output_dim=2,
                           pooling='sum')
    losses = [tf.keras.losses.CategoricalCrossentropy(from_logits=True)]
    mil.model.compile(
        loss=losses,
        metrics=[
            'accuracy',
            tf.keras.metrics.CategoricalCrossentropy(from_logits=True)
        ],
        optimizer=tf.keras.optimizers.Adam(learning_rate=0.001, ))
    callbacks = [
        tf.keras.callbacks.EarlyStopping(
            monitor='val_categorical_crossentropy',
            min_delta=0.00001,
            patience=50,
            mode='min',