Example #1
def main():
    in_arg = get_input_args()
    device = "cuda" if in_arg.gpu else "cpu"

    trainloader, testloader, validloader, train_data = utils.create_loaders(in_arg.data_dir)
    model, device, criterion, optimizer = utils.set_up_model_params(in_arg.arch, in_arg.learning_rate, in_arg.hidden_units, device)
    utils.train_the_model(model, trainloader, validloader, criterion, optimizer, device, in_arg.epochs)
    if in_arg.validate:
        utils.validate_model(model, testloader, device)
    utils.save_model(model, optimizer, in_arg.save_dir, in_arg.arch, train_data)
Example #2
def post_model():
    '''
    curl --form env=@model.json localhost:8081/environments
    '''
    try:
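        # NOTE: pymongo.Connection is the legacy client API; recent PyMongo
        # releases use pymongo.MongoClient instead.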
        models = pymongo.Connection().sds.models
    except (pymongo.errors.AutoReconnect):
        raise HTTPResponse('Couldn\'t connect to SDS db\n', 500)
    try:
        model = json.loads(request.files.env.value)
    except Exception:
        raise HTTPResponse('You need to upload a valid json file\n', 400)
    if not utils.validate_model(model):
        raise HTTPResponse('Your model is invalid\n', 400)
    c = request.creds
    user, company = c.user, c.company
    model_obj = {
        'user': user,
        'company': company,
        'env': model['name'],
        'model': model
    }
    models.update({
        'company': company,
        'env': model['name']
    },
                  model_obj,
                  upsert=True)
Example #3
def normalize(zygrib_query):
    # convert the days / hour-resolution settings into an array of forecast hours
    hour_resolution = zygrib_query.pop('hours')
    days = zygrib_query.pop('days')
    hours = np.arange(0., days * 24. + hour_resolution, hour_resolution)
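    # e.g. days=2, hour_resolution=3 -> hours == [0., 3., 6., ..., 48.]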
    zygrib_query['hours'] = hours
    zygrib_query['type'] = 'gridded'
    model = zygrib_query.get('model', None)
    zygrib_query['model'] = utils.validate_model(model)
    variables = utils.validate_variables(zygrib_query.get('variables', []))
    zygrib_query['variables'] = variables
    return zygrib_query
Example #4
def parse_send_request(body):
    """
    Parses a saildoc-like send request and returns
    a dictionary of attributes from the query.
    """
    # the model and domain are colon separated.
    model_domain, = split_fields(body, 1)
    model, _ = model_domain.split(':', 1)
    # make sure the model exists
    model = utils.validate_model(model)
    if model == 'spot':
        return parse_spot_request(body)
    else:
        return parse_gridded_request(body)
Example #5
def post_model():
    '''
    curl --form env=@model.json localhost:8081/environments
    '''
    try:
        models = pymongo.Connection().sds.models
    except (pymongo.errors.AutoReconnect):
        raise HTTPResponse('Couldn\'t connect to SDS db\n', 500)
    try:
        model = json.loads(request.files.env.value)
    except Exception:
        raise HTTPResponse('You need to upload a valid json file\n', 400)
    if not utils.validate_model(model):
        raise HTTPResponse('Your model is invalid\n', 400)
    c = request.creds
    user, company = c.user, c.company
    model_obj = {'user': user, 'company': company, 'env': model['name'],
        'model': model}
    models.update({'company': company, 'env': model['name']}, model_obj, upsert=True)
Example #6
    # How much time between logging and printing the current results.
    # Make sure that this is a lot larger than the time to save the model!
    'log_every_seconds': 120,
}

# Load the train graphs if they have not been loaded before
if 'TRAIN_GRAPHS' not in locals() and 'TRAIN_GRAPHS' not in globals():
    TRAIN_GRAPHS, EDGE_PERMUTATIONS, molecule_names = utils.load_all_graphs(
        'train')
    TRAIN_TARGET_GRAPHS, _, _ = utils.load_all_graphs('train',
                                                      target_graph=True)

# Determine the train and validation ids.
num_graphs = len(TRAIN_GRAPHS)
np.random.seed(hyperpars['seed'])
permuted_ids = np.random.permutation(num_graphs)
train_ids = permuted_ids[:int(num_graphs *
                              (1 - hyperpars['validation_fraction']))]
valid_ids = np.setdiff1d(np.arange(num_graphs), train_ids)
model_save_path = '../Models/' + model_save_name + '.ckpt'

# Train the model
if 'train' in mode:
    utils.train_model(hyperpars, TRAIN_GRAPHS, TRAIN_TARGET_GRAPHS,
                      EDGE_PERMUTATIONS, train_ids, valid_ids, model_save_path)

# Evaluate the model predictions
if 'validate' in mode:
    utils.validate_model(hyperpars, TRAIN_GRAPHS, TRAIN_TARGET_GRAPHS,
                         EDGE_PERMUTATIONS, valid_ids, model_save_path)
Example #7
def val(args):
    X_train, y_train = load_train_tfidfdim(), load_target()
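    # LinearSVC and OneVsOneClassifier are presumably scikit-learn's
    # sklearn.svm.LinearSVC and sklearn.multiclass.OneVsOneClassifier.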
    svm = LinearSVC(C=args.C, verbose=args.verbose)
    clf = OneVsOneClassifier(svm)
    validate_model(clf, X_train, y_train)
Example #8
                              device,
                              fre=100,
                              sample_size=400,
                              rand=False,
                              batch_size=1)
        train_losses.append(tr_loss)

        # validation for one epoch
        print('[Validation]')
        with torch.no_grad():
            model.eval()
            val_loss = validate_model(model,
                                      val_loader,
                                      criterion,
                                      epoch,
                                      device,
                                      fre=100,
                                      sample_size=100,
                                      rand=False,
                                      batch_size=1)
            val_losses.append(val_loss)

        # save weights
        if save_model and epoch % save_frequency == 0:
            save_checkpoint(
                {
                    'epoch': epoch,
                    'state_dict': model.state_dict(),
                    'optimizer': optimizer.state_dict(),
                    'train_loss': tr_loss,
                    'valid_loss': val_loss
Example #9
            {
                'epoch': epochs,
                'model_state_dict': model.state_dict(),
                'optimizer_state_dict': optim.state_dict(),
                'loss': epoch_loss,
            }, save_path)

        # validation
        valid_slot_f1, valid_intent_accuracy, valid_sem_acc,\
            valid_total_loss, valid_slot_loss, valid_intent_loss \
            = validate_model(
                model,
                arg.batch_size,
                os.path.join(full_valid_path, arg.input_file),
                os.path.join(full_valid_path, arg.slot_file),
                os.path.join(full_valid_path, arg.intent_file),
                in_vocab,
                slot_vocab,
                intent_vocab,
                slot_loss_fn,
                intent_loss_fn
            )
        log_in_tensorboard(tb_log_writer, epochs, "valid", valid_total_loss,
                           valid_intent_loss, valid_slot_loss, valid_slot_f1,
                           valid_intent_accuracy, valid_sem_acc)

        # test set
        test_slot_f1, test_intent_accuracy, test_sem_acc,\
            test_total_loss, test_slot_loss, test_intent_loss \
            = validate_model(
                model,
                arg.batch_size,
Example #10
def val(args):
    X_train, y_train = load_train_tfidf(), load_target()
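    # MultinomialNB is presumably scikit-learn's sklearn.naive_bayes.MultinomialNB.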
    clf = MultinomialNB(alpha=args.alpha)
    validate_model(clf, X_train, y_train)