Example #1
import os

import mlflow
import yaml
from torch import nn

# check_and_log_hp, load_data, load_model, load_optimizer, load_loss and
# train are helpers provided by the surrounding project.


def run(args):
    """Load the config, create the output folder, and launch training."""
    if args.config is not None:
        with open(args.config, 'r') as stream:
            hyper_params = yaml.load(stream, Loader=yaml.FullLoader)
    else:
        hyper_params = {}

    if not os.path.exists(args.output):
        os.makedirs(args.output)

    # Set the experiment as early as possible, otherwise mlflow will not
    # log under the proper experiment name.
    if 'exp_name' in hyper_params:
        mlflow.set_experiment(hyper_params['exp_name'])

    # __TODO__ change the hyperparameters that are used by the training algorithm
    # (and NOT by the model - those will be specified in the model itself)
    check_and_log_hp(
        ['batch_size', 'optimizer', 'patience', 'architecture', 'max_epoch',
         'exp_name'],
        hyper_params)

    train_loader, dev_loader = load_data(args, hyper_params)
    model = load_model(hyper_params)
    optimizer = load_optimizer(hyper_params, model)
    loss_fun = load_loss(hyper_params)

    train(model, optimizer, loss_fun, train_loader, dev_loader, hyper_params['patience'],
          args.output, max_epoch=hyper_params['max_epoch'],
          use_progress_bar=not args.disable_progressbar, start_from_scratch=args.start_from_scratch)


class FakeModel(nn.Module):  # base class assumed from the nn.Linear layer below
    def __init__(self, hyper_params):
        super(FakeModel, self).__init__()

        check_and_log_hp(['size', 'dropout'], hyper_params)
        self.hyper_params = hyper_params

        self.linear = nn.Linear(5, 1)
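
A minimal sketch of a config file that would satisfy the check_and_log_hp
call above; the keys come from the snippet, while the values are
illustrative assumptions:

import yaml

CONFIG = """
exp_name: my_experiment
batch_size: 32
optimizer: adam
patience: 5
architecture: simple_mlp
max_epoch: 100
"""

hyper_params = yaml.load(CONFIG, Loader=yaml.FullLoader)
# Every key that check_and_log_hp verifies is present in this config.
assert all(key in hyper_params for key in
           ['batch_size', 'optimizer', 'patience', 'architecture',
            'max_epoch', 'exp_name'])
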
Example #3
import logging
import os

logger = logging.getLogger(__name__)

# log_exp_details, check_and_log_hp, load_data, load_model, load_optimizer,
# load_loss and train are helpers provided by the surrounding project.


def run(args, hyper_params):
    """Set up and run the dataloaders, training loops, etc.

    Args:
        args (argparse.Namespace): arguments passed from the cli
        hyper_params (dict): hyper parameters from the config file
    """
    log_exp_details(os.path.realpath(__file__), args)

    if not os.path.exists(args.output):
        os.makedirs(args.output)

    # __TODO__ change the hyperparameters that are used by the training algorithm
    # (and NOT by the model - those will be specified in the model itself)
    logger.info('List of hyper-parameters:')
    check_and_log_hp(
        ['batch_size', 'optimizer', 'patience', 'architecture', 'max_epoch',
         'exp_name'],
        hyper_params)

    train_loader, dev_loader = load_data(args, hyper_params)
    model = load_model(hyper_params)
    optimizer = load_optimizer(hyper_params, model)
    loss_fun = load_loss(hyper_params)

    train(model, optimizer, loss_fun, train_loader, dev_loader, hyper_params['patience'],
          args.output, max_epoch=hyper_params['max_epoch'],
          use_progress_bar=not args.disable_progressbar, start_from_scratch=args.start_from_scratch)


class FakeModel(tf.keras.Model):  # tensorflow base class assumed from the layers below
    def __init__(self, hyper_params):
        super(FakeModel, self).__init__()

        check_and_log_hp(['size', 'dropout'], hyper_params)
        self.hyper_params = hyper_params

        {%- if cookiecutter.dl_framework in ['tensorflow_cpu', 'tensorflow_gpu'] %}
        self.flat = tf.keras.layers.Flatten(input_shape=(5,))
        self.dense1 = tf.keras.layers.Dense(10, activation='relu')
        self.dense2 = tf.keras.layers.Dense(1)
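
The snippets never show check_and_log_hp itself; a plausible minimal sketch,
assuming it only verifies that the named keys are present and logs them to
mlflow (the project's real helper may do more):

import mlflow


def check_and_log_hp(names, hps):
    """Check that each expected hyper-parameter is present, then log it."""
    missing = [name for name in names if name not in hps]
    if missing:
        raise ValueError(f'missing hyper-parameters: {missing}')
    for name in names:
        mlflow.log_param(name, hps[name])
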
Example #5
class MyModel(tf.keras.Model):  # tensorflow base class assumed from the layers below
    def __init__(self, hyper_params):
        super(MyModel, self).__init__()

        check_and_log_hp(['size'], hyper_params)
        self.hyper_params = hyper_params

        {%- if cookiecutter.dl_framework in ['tensorflow_cpu', 'tensorflow_gpu'] %}
        self.dense1 = tf.keras.layers.Dense(hyper_params['size'], activation=None)
        self.dense2 = tf.keras.layers.Dense(1)
Example #6
class MyModel(tf.keras.Model):  # tensorflow base class assumed from the layers below
    def __init__(self, hyper_params):
        """Initialize the model.

        Args:
            hyper_params (dict): hyper parameters from the config file.
        """
        super(MyModel, self).__init__()

        check_and_log_hp(['size'], hyper_params)
        self.hyper_params = hyper_params

        {%- if cookiecutter.dl_framework in ['tensorflow_cpu', 'tensorflow_gpu'] %}
        self.dense1 = tf.keras.layers.Dense(hyper_params['size'])
        self.dense2 = tf.keras.layers.Dense(1)
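
Once the template is rendered for one of the tensorflow flavours, the model
above can be exercised with a dummy batch. A minimal usage sketch; the batch
and feature sizes are assumptions, and the layers are applied directly
because the snippet truncates before any call() method:

import tensorflow as tf

model = MyModel({'size': 10})
dummy_batch = tf.random.uniform((4, 5))  # batch of 4, feature width 5 (assumed)
hidden = model.dense1(dummy_batch)       # -> shape (4, 10)
output = model.dense2(hidden)            # -> shape (4, 1)
print(output.shape)
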
Example #7
import os

# check_and_log_hp, load_data, load_model, load_optimizer, load_loss and
# train are helpers provided by the surrounding project.


def run(args, hyper_params):
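    """Set up and run the dataloaders, training loops, etc.

    Args:
        args (argparse.Namespace): arguments passed from the cli
        hyper_params (dict): hyper parameters from the config file
    """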

    if not os.path.exists(args.output):
        os.makedirs(args.output)

    # __TODO__ change the hyperparameters that are used by the training algorithm
    # (and NOT by the model - those will be specified in the model itself)
    check_and_log_hp(
        ['batch_size', 'optimizer', 'patience', 'architecture', 'max_epoch',
         'exp_name'],
        hyper_params)

    train_loader, dev_loader = load_data(args, hyper_params)
    model = load_model(hyper_params)
    optimizer = load_optimizer(hyper_params, model)
    loss_fun = load_loss(hyper_params)

    train(model, optimizer, loss_fun, train_loader, dev_loader, hyper_params['patience'],
          args.output, max_epoch=hyper_params['max_epoch'],
          use_progress_bar=not args.disable_progressbar, start_from_scratch=args.start_from_scratch)
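
All of the run variants above expect the same fields on args; a minimal
sketch of a cli parser that would produce them (the flag names are inferred
from the attribute accesses, so treat them as assumptions):

import argparse


def parse_args():
    parser = argparse.ArgumentParser(description='training entry point')
    parser.add_argument('--config', default=None,
                        help='yaml config file with the hyper-parameters')
    parser.add_argument('--output', required=True,
                        help='folder where results and checkpoints are written')
    parser.add_argument('--disable-progressbar', action='store_true',
                        help='disable the training progress bar')
    parser.add_argument('--start-from-scratch', action='store_true',
                        help='ignore any existing checkpoint and restart training')
    return parser.parse_args()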