"""
    Run from the repo root.
    The first argument is the base config file.
    Override individual params with -o followed by a comma-separated (no spaces) list of args, e.g., -o exp_name=foo,data.seed=0,model.fair_coeff=2.0
    Set templates by name, e.g., --data adult
    
    e.g.,
    >>> python src/run_unf_clf.py conf/transfer/run_unf_clf.json -o train.n_epochs=5 --data adult --dirs local
    """
    from codebase.config import process_config
    from transfer_learn import get_repr_filename
    from transfer_learn import main as train_transfer_classifier
    from run_laftr import correct_repr_exists

    import os
    transfer_opt = process_config(verbose=False)
    training_opt = transfer_opt.copy()
    if transfer_opt['transfer']['repr_name'] == 'scratch':
        print('training classifier from data directly')
        train_transfer_classifier(transfer_opt)
    else:
        # this experiment gets its own reps and transfers
        repr_name, transfer_name = os.path.split(transfer_opt['exp_name'])
        assert 'transfer' in transfer_name, "expect a sweep over some transfer params"
        training_opt.update(exp_name=repr_name)
        transfer_opt['transfer'].update(repr_name=repr_name)
        training_opt.pop(
            'transfer'
        )  # can discard transfer params for purposes of learning the base rep
        assert correct_repr_exists(
            training_opt), "couldn't find pre-trained laftr representation :("
Example #2
    # all done
    with open(os.path.join(resdirname, 'done.txt'), 'w') as f:
        f.write('done')


if __name__ == '__main__':
    """
    This script trains a LAFTR model. For the full evaluation used in the paper (first train LAFTR, then evaluate with a naive classifier) see `src/run_laftr.py`.

    Instructions: 
    1) Run from repo root
    2) First arg is base config file
    3) Optionally override individual params with -o then comma-separated (no spaces) list of args, e.g., -o exp_name=foo,data.seed=0,model.fair_coeff=2.0 
       (the overrides must come before the named templates in steps 4 and 5)
    4) Set templates by name, e.g., --data adult 
    5) Required templates are --data and --dirs
    
    e.g.,
    >>> python src/laftr.py conf/laftr/config.json -o train.n_epochs=10,model.fair_coeff=2. --data adult --dirs local

    This command trains LAFTR on the Adult dataset for ten epochs with batch size 32.
    Model and optimization parameters are specified by the config file conf/laftr/config.json.
    Dataset specifications are read from conf/templates/data/adult.json.
    Directory specifications are read from conf/templates/dirs/local.json.
    Finally, two hyperparameters are overridden via the -o flag:
    we train for 10 epochs with fairness regularization coefficient 2.0 instead of the default values from config.json.
    """
    from codebase.config import process_config
    opt = process_config(verbose=False)
    main(opt)
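
The docstring above says dataset and directory settings come from named templates under conf/templates/, merged with the base config by codebase.config.process_config. The sketch below is an assumed illustration of that merge, not the repo's actual implementation; merging each template into the matching top-level config section is a guess.

# Illustrative sketch only -- load_with_templates is hypothetical, not part of the repo.
import json
import os

def load_with_templates(base_config_path, data_name, dirs_name):
    with open(base_config_path) as f:
        opt = json.load(f)
    # assumed: each template updates the matching top-level section of the config
    with open(os.path.join('conf', 'templates', 'data', data_name + '.json')) as f:
        opt['data'] = {**opt.get('data', {}), **json.load(f)}
    with open(os.path.join('conf', 'templates', 'dirs', dirs_name + '.json')) as f:
        opt['dirs'] = {**opt.get('dirs', {}), **json.load(f)}
    return opt

# e.g. opt = load_with_templates('conf/laftr/config.json', 'adult', 'local')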
Example #3
    print('opt_filename', opt_filename)
    print('done_filename_txt', done_filename_txt)
    print('done_filename_json', done_filename_json)
    if os.path.exists(done_filename_txt) or os.path.exists(done_filename_json):
        if load_config(opt_filename) == opt:
            return True
    return False


if __name__ == '__main__':
    """
    This script takes LAFTR hyperparameters and trains a LAFTR model if a model with those hyperparameter values hasn't already been trained.
    If it was previously trained (i.e., the representations/encoder already exist), we do nothing.
    It is meant to be used as part of a larger experiment sweep.

    See `src/laftr.py` for instructions on how to format arguments.
    """
    from codebase.config import process_config
    from laftr import main as learn_reps

    training_opt = process_config(verbose=False)
    training_opt.pop(
        'transfer'
    )  # can discard transfer params for purposes of learning the base rep

    if correct_repr_exists(training_opt):
        print('pre-trained reps exist, NOT TRAINING')
    else:
        print('learning reps from scratch')
        learn_reps(training_opt)
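
The docstring above notes that this script is meant to be used inside a larger experiment sweep: it trains only when no matching pre-trained representation exists. A minimal sweep driver could look like the sketch below; the base config path and the exp_name scheme are assumptions for illustration, not taken from the repo.

# Illustrative sweep driver (hypothetical, not part of the repo):
# launch run_laftr.py once per fairness coefficient and let the script's own
# correct_repr_exists check skip settings that were already trained.
import subprocess

for fair_coeff in [0.1, 0.5, 1.0, 2.0]:
    overrides = 'model.fair_coeff={0},exp_name=sweep_fc_{0}'.format(fair_coeff)
    subprocess.run(
        ['python', 'src/run_laftr.py', 'conf/laftr/config.json',  # config path assumed
         '-o', overrides, '--data', 'adult', '--dirs', 'local'],
        check=True,
    )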