# Example 1
def test_update_args(args, classifier_modified):
    """Check that ``update_args`` merges override values into ``exp.ARGS``.

    Args:
        args(@pytest.fixture): Namespace
        classifier_modified(@pytest.fixture): ClassifierModified

    Asserts: True if exp.ARGS is updated adequately.

    """
    before = {'dropout': 0.2, 'dim_h': 100}
    after = {'dropout': 0.1, 'dim_h': 100}
    overrides = {
        'data': {'batch_size': 128},
        'model': {'classifier_args': {'dropout': 0.1}},
    }
    config.set_config()
    # exp.ARGS is populated as a side effect of setup_experiment().
    classifier_modified = setup_experiment(
        args, model=classifier_modified, testmode=True)
    assert exp.ARGS['model']['classifier_args'] == before
    update_args(overrides, exp.ARGS)
    assert exp.ARGS['model']['classifier_args'] == after
    assert exp.ARGS['data']['batch_size'] == 128
# Example 2
def run(model=None):
    '''Main function.

    Parses the command line, performs setup/config, builds the model and
    optimizer, then enters the training loop.
    '''
    try:
        # Parse the command-line arguments.
        args = setup_cortex(model=model)
        if args.command == 'setup':
            # Performs setup only; exit() raises SystemExit so nothing
            # below runs in this case.
            config.setup()
            exit(0)
        config.set_config()
        print_section('EXPERIMENT')
        model, reload_nets = setup_experiment(args, model=model)
        print_section('DATA')
        data.setup(**exp.ARGS['data'])
        print_section('MODEL')
        model.reload_nets(reload_nets)
        model.build()
        print_section('OPTIMIZER')
        optimizer.setup(model, **exp.ARGS['optimizer'])
    except KeyboardInterrupt:
        print('Cancelled')
        exit(0)

    print_section('RUNNING')
    train.main_loop(model, **exp.ARGS['train'])
# Example 3
def test_update_nested_dicts(args, classifier_modified):
    """Check that ``update_nested_dicts`` merges rather than overrides.

    Args:
        args(@pytest.fixture): Namespace
        classifier_modified(@pytest.fixture): ClassifierModified

    Asserts: True if a dict. arg. is being updated to a
             nested dict. (not overridden).

    """
    before = {'dropout': 0.2}
    after = {'dropout': 0.2, 'dim_h': 100}
    updates = {
        'data': {'batch_size': 122},
        'model': {'classifier_args': {'dropout': 0.2, 'dim_h': 100}},
    }
    config.set_config()
    # setup_experiment() fills exp.ARGS with the model defaults.
    classifier_modified = setup_experiment(
        args, model=classifier_modified, testmode=True)
    assert exp.ARGS['model']['classifier_args'] == before
    update_nested_dicts(updates, exp.ARGS)
    assert exp.ARGS['model']['classifier_args'] == after
    assert exp.ARGS['data']['batch_size'] == 122
# Example 4
def test_update_args(args, classifier_modified):
    """Check that ``update_args`` applies new values to ``exp.ARGS``.

    Args:
        args(@pytest.fixture): Namespace
        classifier_modified(@pytest.fixture): ClassifierModified

    Asserts: True if exp.ARGS is updated adequately.

    """
    initial_classifier_args = {'dropout': 0.2}
    updated_classifier_args = {'dropout': 0.1}
    new_values = {
        'data': {
            'batch_size': 128,
        },
        'model': {
            'classifier_args': {
                'dropout': 0.1,
            },
        },
    }
    config.set_config()
    classifier_modified = setup_experiment(args,
                                           model=classifier_modified,
                                           testmode=True)
    # Defaults land in exp.ARGS during setup_experiment().
    assert exp.ARGS['model']['classifier_args'] == initial_classifier_args
    update_args(new_values, exp.ARGS)
    assert exp.ARGS['model']['classifier_args'] == updated_classifier_args
    assert exp.ARGS['data']['batch_size'] == 128
# Example 5
def test_update_nested_dicts(args, classifier_modified):
    """Check nested-dict merging behaviour of ``update_nested_dicts``.

    Args:
        args(@pytest.fixture): Namespace
        classifier_modified(@pytest.fixture): ClassifierModified

    Asserts: True if a dict. arg. is being updated to a
             nested dict. (not overridden).

    """
    initial_classifier_args = {'dropout': 0.2}
    merged_classifier_args = {'dropout': 0.2, 'dim_h': 100}
    incoming = {
        'data': {
            'batch_size': 122,
        },
        'model': {
            'classifier_args': {
                'dropout': 0.2,
                'dim_h': 100,
            },
        },
    }
    config.set_config()
    classifier_modified = setup_experiment(args,
                                           model=classifier_modified,
                                           testmode=True)
    assert exp.ARGS['model']['classifier_args'] == initial_classifier_args
    update_nested_dicts(incoming, exp.ARGS)
    # The nested dict gains the new key instead of being replaced.
    assert exp.ARGS['model']['classifier_args'] == merged_classifier_args
    assert exp.ARGS['data']['batch_size'] == 122
# Example 6
def run(model=None):
    '''Main function.

    Drives the full pipeline: CLI parsing, configuration, experiment,
    data, model, optimizer, and finally the training loop.
    '''
    try:
        args = setup_cortex(model=model)  # parse command-line arguments
        if args.command != 'setup':
            config.set_config()
            print_section('EXPERIMENT')
            model, reload_nets = setup_experiment(args, model=model)
            print_section('DATA')
            data.setup(**exp.ARGS['data'])
            print_section('MODEL')
            model.reload_nets(reload_nets)
            model.build()
            print_section('OPTIMIZER')
            optimizer.setup(model, **exp.ARGS['optimizer'])
        else:
            # Performs setup only.
            config.setup()
            exit(0)

    except KeyboardInterrupt:
        print('Cancelled')
        exit(0)

    print_section('RUNNING')
    train.main_loop(model, **exp.ARGS['train'])
# Example 7
def test_static_override_parameters(args, classifier_modified):
    """Check that the model's static defaults override parameter values.

    Args:
        args(@pytest.fixture): Namespace
        classifier_modified(@pytest.fixture): ClassifierModified

    Asserts: True if default attribute is overriding
             parameters values.

    """
    config.set_config()
    classifier_modified = setup_experiment(
        args, model=classifier_modified, testmode=True)
    # The static default on the modified classifier wins.
    assert exp.ARGS['model']['classifier_type'] == 'convnet'
# Example 8
def test_static_override_parameters(args, classifier_modified):
    """Static model defaults should take precedence over parameter values.

    Args:
        args(@pytest.fixture): Namespace
        classifier_modified(@pytest.fixture): ClassifierModified

    Asserts: True if default attribute is overriding
             parameters values.

    """
    wanted_type = 'convnet'
    config.set_config()
    classifier_modified = setup_experiment(args,
                                           model=classifier_modified,
                                           testmode=True)
    assert exp.ARGS['model']['classifier_type'] == wanted_type
# Example 9
def test_command_override_static(args):
    """A command-line argument must beat the model's static default.

    Args:
        args(@pytest.fixture): Namespace

    Asserts: True if passing a command line arg, the exp.ARGS is
             changing the value from default for the command line
             one.

    """
    wanted = 'resnet'
    # Simulate the value arriving from the command line.
    args.__dict__['classifier_type'] = wanted
    model = ImageClassification()
    config.set_config()
    # NOTE: exp.ARGS is being populated inside setup_experiment() call
    model = setup_experiment(args, model=model, testmode=True)
    assert exp.ARGS['model']['classifier_type'] == wanted
# Example 10
def test_command_override_static(args):
    """Command-line values override static defaults in ``exp.ARGS``.

    Args:
        args(@pytest.fixture): Namespace

    Asserts: True if passing a command line arg, the exp.ARGS is
             changing the value from default for the command line
             one.

    """
    cli_type = 'resnet'
    args.__dict__['classifier_type'] = cli_type
    classifier = ImageClassification()
    config.set_config()
    # NOTE: exp.ARGS is being populated inside setup_experiment() call
    classifier = setup_experiment(args,
                                  model=classifier,
                                  testmode=True)
    assert exp.ARGS['model']['classifier_type'] == cli_type