Example #1
def tune_queue(main_fn):
    config.redirect_output()
    options = config.options()

    if any('tune' not in s for s in options.data_source):
        warnings.warn(
            'expected all --data_source\'s to contain "tune", instead got "{}". '
            'Are you polluting your dev/test set?'.format(options.data_source))
    if 'gpu' in options.device or 'cuda' in options.device:
        warnings.warn(
            'device is "{}". Have you checked that all processes will fit '
            'on one GPU? (Random GPU assignment has not been implemented '
            'yet.)'.format(options.device))

    with open(options.tune_config, 'r') as infile:
        tune_options = config.HoconConfigFileParser().parse(infile)

    reg = ProcessRegistry(main_fn, tune_options, options.tune_maximize)

    remaining_random = options.tune_random
    remaining_local = options.tune_local
    if options.tune_local <= 0:
        remaining_local = None

    try:
        reg.start_default()
        while remaining_random > 0 and reg.running_processes < options.tune_max_processes:
            reg.start_random()
            remaining_random -= 1

        while remaining_local is not None and remaining_local > 0 and \
                reg.running_processes < options.tune_max_processes:
            reg.start_local()
            remaining_local -= 1

        while reg.running_processes > 0:
            name, objective = reg.get()
            print('\nTUNE: {:10.3f} {}\n'.format(objective, name[:70]))

            while remaining_random > 0 and reg.running_processes < options.tune_max_processes:
                reg.start_random()
                remaining_random -= 1

            while (remaining_local is None or remaining_local > 0) and \
                    reg.running_processes < options.tune_max_processes:
                try:
                    reg.start_local()
                    if remaining_local is not None:
                        remaining_local -= 1
                except StopIteration:
                    print('no new local search candidates')
                    break
    except KeyboardInterrupt:
        reg.terminate()

    print('')
    print('best result:')
    print('{:10.3f} {}'.format(reg.best_objective, str(reg.best_name)[:70]))
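
tune_queue() above relies on a ProcessRegistry whose definition is not shown. Based only on the calls made above, a minimal interface sketch could look like the stub below; every body here is a placeholder assumption, not the real implementation.

class ProcessRegistry(object):
    '''Illustrative stub of the interface tune_queue() expects.'''

    def __init__(self, main_fn, tune_options, maximize):
        self.main_fn = main_fn
        self.tune_options = tune_options
        self.maximize = maximize
        self.running_processes = 0
        self.best_objective = float('-inf') if maximize else float('inf')
        self.best_name = None

    def start_default(self):
        '''Launch one run with the unmodified default options.'''
        raise NotImplementedError

    def start_random(self):
        '''Launch one run with randomly sampled hyperparameters.'''
        raise NotImplementedError

    def start_local(self):
        '''Launch one run near the best result so far; raise StopIteration
        when no new local-search candidates are available.'''
        raise NotImplementedError

    def get(self):
        '''Block until some run finishes and return (name, objective).'''
        raise NotImplementedError

    def terminate(self):
        '''Kill all running worker processes.'''
        raise NotImplementedError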
Example #2
def queue_results(main_fn, options, name, q):
    try:
        # Create run dir and force options to become the passed Namespace object
        config.set_options(options)
        config.redirect_output()
        (train, test) = main_fn()
        results = dict(train)
        results.update(test)
        q.put((name, results))
    except Exception as e:
        import sys
        import traceback
        traceback.print_exc(file=sys.stderr)
        q.put((name, e))
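
queue_results() above takes a queue as its last argument and reports either a results dict or the raised exception on it, which suggests it is meant to run as the target of a separate worker process. A hedged usage sketch, assuming the standard-library multiprocessing module (the launcher function below is an illustration, not code from the example):

import multiprocessing

def run_one(main_fn, options, name):
    # Spawn a worker that runs the experiment and reports back on a queue.
    q = multiprocessing.Queue()
    proc = multiprocessing.Process(target=queue_results,
                                   args=(main_fn, options, name, q))
    proc.start()
    name, results = q.get()  # blocks until the worker puts its results
    proc.join()
    if isinstance(results, Exception):
        raise results  # the worker failed; re-raise in the parent
    return results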
Example #3
from stanza.research import config
if __name__ == '__main__':
    config.redirect_output()

from stanza.cluster import pick_gpu
parser = config.get_options_parser()
parser.add_argument(
    '--device',
    default=None,
    help='The device to use in Theano ("cpu" or "gpu[0-n]"). If None, '
    'pick a free-ish device automatically.')
options, extras = parser.parse_known_args()
if '-h' in extras or '--help' in extras:
    # If user is just asking for the options, don't scare them
    # by saying we're picking a GPU...
    pick_gpu.bind_theano('cpu')
else:
    pick_gpu.bind_theano(options.device)

from stanza.monitoring import progress
from stanza.research import evaluate, metrics, output
import datetime
import numbers
import learners
import color_instances

parser.add_argument('--learner',
                    default='Histogram',
                    choices=learners.LEARNERS.keys(),
                    help='The name of the model to use in the experiment.')
parser.add_argument(
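
Two ordering details in the example above are worth noting: config.redirect_output() runs before the heavier imports, presumably so that anything they print goes to the redirected output, and pick_gpu.bind_theano() runs before any module that imports Theano, since the device must be chosen before Theano is initialized. A hypothetical invocation of such a script (the script name is an assumption; --device and --learner are the options defined above):

python run.py --device cpu --learner Histogram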
Example #4
#!/usr/bin/env python
from stanza.research import config
config.redirect_output()

import datetime

from stanza.monitoring import progress
from stanza.research import evaluate, output

import metrics
import learners
import datasets

parser = config.get_options_parser()
parser.add_argument('--learner', default='Random', choices=learners.LEARNERS.keys(),
                    help='The name of the model to use in the experiment.')
parser.add_argument('--load', metavar='MODEL_FILE', default=None,
                    help='If provided, skip training and instead load a pretrained model '
                         'from the specified path. If None or an empty string, train a '
                         'new model.')
parser.add_argument('--train_size', type=int, default=-1,
                    help='The number of examples to use in training. This number should '
                         '*include* examples held out for validation. If negative, use the '
                         'whole training set.')
parser.add_argument('--validation_size', type=int, default=0,
                    help='The number of examples to hold out from the training set for '
                         'monitoring generalization error.')
parser.add_argument('--test_size', type=int, default=-1,
                    help='The number of examples to use in testing. '
                         'If negative, use the whole dev/test set.')
parser.add_argument('--data_source', default='cards_dev', choices=datasets.SOURCES.keys(),
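
The help text above says --train_size counts examples *including* those held out for validation. A minimal sketch of applying that split, assuming the options have been parsed and the examples loaded (load_training_examples() and the variable names are hypothetical):

options = config.options()
examples = load_training_examples()  # hypothetical loader, not shown in the example
if options.train_size >= 0:
    examples = examples[:options.train_size]  # --train_size includes validation examples
if options.validation_size > 0:
    validation = examples[-options.validation_size:]
    train = examples[:-options.validation_size]
else:
    validation = []
    train = examples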
Example #5
#!/usr/bin/env python
r'''
python run_experiment.py -R runs/simple_unicode \
                         --train_file data/unicode_train.json \
                         --validation_file data/unicode_val.json \
                         --eval_file data/unicode_dev.json \
                         --tokenizer character
'''
from stanza.research import config
config.redirect_output()  # NOQA: imports not at top of file

import datetime
import gzip
from itertools import islice
import json

from stanza.monitoring import progress
from stanza.research import evaluate, output, iterators
from stanza.research.instance import Instance

import learners
import metrics
import thutils
import tokenizers

parser = config.get_options_parser()
parser.add_argument(
    '--device',
    default='',
    help='The device to use in PyTorch ("cpu" or "gpu[0-n]"). If None, '
    'pick a free-ish device automatically.')
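
The --device option above is a plain string ("cpu" or "gpu[0-n]"); in this script the mapping onto an actual PyTorch device presumably happens in thutils, which is not shown. As a standalone illustration only (none of the code below comes from the example), such a string could be resolved like this:

import torch

def resolve_device(device_str):
    # Empty string: fall back to CUDA if available, otherwise CPU.
    if not device_str:
        return torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    # Map "gpu3" -> "cuda:3"; pass "cpu" or an explicit "cuda:..." through unchanged.
    if device_str.startswith('gpu'):
        index = device_str[3:] or '0'
        return torch.device('cuda:{}'.format(index))
    return torch.device(device_str)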