# Example #1 (score: 0)
def load_model(model_name, models_path='', output_layer=None):
    """Build a ``Model`` wrapper for the named pretrained network.

    Looks up the factory registered under ``model_name`` via
    ``pretrained_models`` and pairs the instantiated network with its
    matching preprocessing function.

    Args:
        model_name: registry key identifying the pretrained network.
        models_path: unused here; kept for interface compatibility.
        output_layer: optional layer identifier forwarded to the factory.

    Returns:
        A ``Model`` bundling the network and its preprocessing function.

    Raises:
        ValueError: if no factory is registered for ``model_name``.
    """
    builder = pretrained_models(model_name)
    if builder is None:
        raise ValueError('Model {} not found'.format(model_name))

    # Only forward output_layer when the caller actually supplied one.
    network = builder(output_layer) if output_layer is not None else builder()
    return Model(network, preprocessor(model_name))
# Example #2 (score: 0)
def main(args):
    """Entry point: assemble the training graph and launch the session.

    Builds the data pipeline, body/head network builders, monitoring and
    learning-rate scheduling from ``args``, wraps everything in either a
    ``Model`` or (when ``args.multi_image`` is set) a ``MultiImageModel``,
    and hands the result to ``start_session``.
    """
    tf.reset_default_graph()

    data_loader = get_loader(args)
    data_provider = get_provider(args, data_loader)
    is_training = tf.placeholder(tf.bool, name="is_training_2")
    body = get_body_builder(args, is_training)
    head = get_head_builder(args, is_training)
    run_monitor = Monitor(args.folder)
    scopes, trainable_scopes = get_scopes()
    placeholder_builder = get_placeholder_builder(args)
    run_monitor.save_args(args, args.config_name)
    scheduler = get_lr_scheduler(args)

    # Keyword arguments common to both model variants.
    common_kwargs = dict(
        DataProvider=data_provider,
        BodyBuilder=body,
        HeadBuilder=head,
        Monitor=run_monitor,
        scopes=scopes,
        trainable_scopes=trainable_scopes,
        is_training=is_training,
        learning_rate=args.learning_rate,
        lr_scheduler=scheduler,
    )

    if args.multi_image:
        shapes = get_shapes(args)
        model = MultiImageModel(
            MultiImagePlaceholderBuilder=placeholder_builder,
            shapes=shapes,
            train_mode=get_mode(args.multi_train_mode, shapes),
            **common_kwargs,
        )
    else:
        model = Model(PlaceholderBuilder=placeholder_builder, **common_kwargs)

    start_session(args, model)
# Example #3 (score: 0)
    def __init__(self, FLAGS, results_dir, train_dir):
        """Configure the experiment from parsed command-line flags.

        Args:
            FLAGS: parsed flag namespace supplying datasets, GPU count,
                label count, domain mode, learning rate and modality.
            results_dir: base results directory; the modality is appended
                as a ``_<modality>`` suffix.
            train_dir: directory for training checkpoints/logs.

        Raises:
            Exception: if ``FLAGS.modality`` is unset, or is not one of
                "rgb", "flow" or "joint".
        """
        self.FLAGS = FLAGS
        self.train_dir = train_dir
        self.datasets = FLAGS.datasets
        self.unseen_dataset = FLAGS.unseen_dataset
        self.num_gpus = FLAGS.num_gpus
        self.num_labels = FLAGS.num_labels

        # Target-domain data is used only when a real adaptation mode is
        # requested; "None"/"Pretrain" both mean no adaptation.
        # (bool(...) replaces the original double negation.)
        self.target_data = bool(FLAGS.domain_mode)
        if self.target_data and FLAGS.domain_mode in ("None", "Pretrain"):
            self.target_data = False
            print("No adaptation")

        # Normalize a falsy domain mode to the literal string "None".
        self.domain_mode = FLAGS.domain_mode if FLAGS.domain_mode else "None"

        self.lr = FLAGS.lr

        if not FLAGS.modality:
            raise Exception("Need to Specify modality")
        if FLAGS.modality not in ("rgb", "flow", "joint"):
            raise Exception("Invalid Modality")

        self.results_dir = results_dir + "_" + FLAGS.modality
        self.modality = FLAGS.modality

        self.model = Model(num_gpus=self.num_gpus, num_labels=self.num_labels, modality=self.modality,
                           temporal_window=self.FLAGS.temporal_window, batch_norm_update=self.FLAGS.batch_norm_update,
                           domain_mode=self.domain_mode, steps_per_update=FLAGS.steps_before_update,
                           aux_classifier=self.FLAGS.aux_classifier, synchronised=self.FLAGS.synchronised,
                           predict_synch=self.FLAGS.pred_synch, selfsupervised_lambda=self.FLAGS.self_lambda)
# Example #4 (score: 0)
from src.trainers.dm import Trainer
from src.datasets.dm import DAMNIST
from src.models.model import Model
import torch.utils.data as data

# Assemble the training pipeline: a fresh model, the DA-MNIST dataset
# wrapped in a shuffling DataLoader, and a Trainer driving 100 epochs.
model = Model()
dataset = DAMNIST()
data_loader = data.DataLoader(dataset, batch_size=12, shuffle=True)

# Run the full training loop.
trainer = Trainer(model=model, data_loader=data_loader, n_epoch=100)
trainer.train()