Example #1

import torch
import torch.nn.functional as F
from torch.autograd import Variable

import models  # project-local module that provides models.Tree


def load_tree_model(model_file, cuda_on=False,
                    soft_decision=True, stochastic=False,
                    breadth_first=False, fast=False,
                    ):
    """Load a tree model. """
    # load the model and set routers stochastic.
    map_location = None
    if not (cuda_on):
        map_location = 'cpu'

    tree_tmp = torch.load(model_file, map_location=map_location)
    tree_struct, tree_modules = tree_tmp.tree_struct, tree_tmp.update_tree_modules()
    for node in tree_modules:
        node['router'].stochastic = stochastic
        node['router'].soft_decision = soft_decision
        node['router'].dropout_prob = 0.0

    # Older checkpoints may not carry the 'extended' flag; default it to False.
    for node_meta in tree_struct:
        if 'extended' not in node_meta:
            node_meta['extended'] = False

    model = models.Tree(
        tree_struct, tree_modules,
        split=False, cuda_on=cuda_on, soft_decision=soft_decision,
        breadth_first=breadth_first,
    )
    if cuda_on:
        model.cuda()
    return model
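

# A minimal usage sketch for load_tree_model (illustrative only): the checkpoint
# path below is a hypothetical placeholder, not a file provided here.
def _example_load_tree_model():
    tree = load_tree_model('checkpoints/tree_final.pth',  # hypothetical path
                           cuda_on=torch.cuda.is_available(),
                           soft_decision=True,
                           stochastic=False)
    tree.eval()
    return tree

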
def compute_error_general_ensemble(model_file_list,
                                   data_loader,
                                   cuda_on=False,
                                   soft_decision=True,
                                   stochastic=False,
                                   breadth_first=False,
                                   fast=False,
                                   task="classification",
                                   name=''):
    """Load an ensemble of models and compute the average prediction. """

    # Load each model and configure its routers.
    model_list = []
    map_location = None
    if not cuda_on:
        map_location = 'cpu'

    for model_file in model_file_list:
        tree_tmp = torch.load(model_file, map_location=map_location)
        tree_struct, tree_modules = \
            tree_tmp.tree_struct, tree_tmp.update_tree_modules()
        for node in tree_modules:
            node['router'].stochastic = stochastic
            node['router'].soft_decision = soft_decision
            node['router'].dropout_prob = 0.0

        # Older checkpoints may not carry the 'extended' flag; default it to False.
        for node_meta in tree_struct:
            if 'extended' not in node_meta:
                node_meta['extended'] = False

        if task == "classification":
            model = models.Tree(
                tree_struct,
                tree_modules,
                split=False,
                cuda_on=cuda_on,
                soft_decision=soft_decision,
                breadth_first=breadth_first,
            )

        if cuda_on:
            model.cuda()

        model_list.append(model)

    # compute the error
    for model in model_list:
        model.eval()

    test_loss = 0
    correct = 0

    for data, target in data_loader:
        if cuda_on:
            data, target = data.cuda(), target.cuda()
        data, target = Variable(data, volatile=True), Variable(target)

        # compute the average prediction over different models
        output = 0.0
        for model in model_list:
            if fast:
                output += model.fast_forward_BF(data)
            else:
                output += model.forward(data)
        output /= len(model_list)

        if task == "classification":
            test_loss += F.nll_loss(output, target, size_average=False).data[0]
            pred = output.data.max(1, keepdim=True)[1]
            correct += pred.eq(target.data.view_as(pred)).cpu().sum()
        elif task == "regression":
            # print(test_loss)
            test_loss += F.mse_loss(output, target, size_average=False).data[0]

    # Normalise the loss and print:
    if task == "classification":
        test_loss /= len(data_loader.dataset)
        print(name + 'Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)'.format(
            test_loss, correct, len(data_loader.dataset), 100. * correct /
            len(data_loader.dataset)))
    elif task == "regression":
        test_loss = test_loss / 7.0 / len(data_loader.dataset)
        print('Average loss: {:.4f}'.format(test_loss))
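

# A minimal usage sketch for ensemble evaluation (illustrative only): the
# checkpoint paths are hypothetical, and the loader wraps random MNIST-shaped
# tensors purely to show the expected (data, target) interface.
def _example_ensemble_evaluation():
    from torch.utils.data import DataLoader, TensorDataset
    dummy_images = torch.randn(32, 1, 28, 28)
    dummy_labels = (torch.rand(32) * 10).long()
    dummy_loader = DataLoader(TensorDataset(dummy_images, dummy_labels),
                              batch_size=16)
    compute_error_general_ensemble(
        ['checkpoints/tree_run1.pth', 'checkpoints/tree_run2.pth'],  # hypothetical
        dummy_loader,
        cuda_on=torch.cuda.is_available(),
        task="classification",
        name='Ensemble test set: ')

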
def compute_error_general(model_file,
                          data_loader,
                          cuda_on=False,
                          soft_decision=True,
                          stochastic=False,
                          breadth_first=False,
                          fast=False,
                          task="classification",
                          name=''):
    """Load a model and perform stochastic inferenc
    Args:
        model_file (str): model parameters
        data_dataloader (torch.utils.data.DataLoader): data loader

    """
    # Load the model and configure the routers.
    map_location = None
    if not cuda_on:
        map_location = 'cpu'

    tree_tmp = torch.load(model_file, map_location=map_location)
    tree_struct, tree_modules = \
        tree_tmp.tree_struct, tree_tmp.update_tree_modules()

    for node in tree_modules:
        node['router'].stochastic = stochastic
        node['router'].soft_decision = soft_decision
        node['router'].dropout_prob = 0.0

    # Older checkpoints may not carry the 'extended' flag; default it to False.
    for node_meta in tree_struct:
        if 'extended' not in node_meta:
            node_meta['extended'] = False

    if task == "classification":
        model = models.Tree(
            tree_struct,
            tree_modules,
            split=False,
            cuda_on=cuda_on,
            soft_decision=soft_decision,
            breadth_first=breadth_first,
        )

    if cuda_on:
        model.cuda()

    # compute the error
    model.eval()
    test_loss = 0
    correct = 0
    for data, target in data_loader:
        if cuda_on:
            data, target = data.cuda(), target.cuda()
        data, target = Variable(data, volatile=True), Variable(target)

        if fast:
            output = model.fast_forward_BF(data)
        else:
            output = model.forward(data)

        if task == "classification":
            test_loss += F.nll_loss(output, target, size_average=False).data[0]
            pred = output.data.max(1, keepdim=True)[1]
            correct += pred.eq(target.data.view_as(pred)).cpu().sum()
        else:
            raise NotImplementedError("The specified task is not supported")

    # Normalise the loss and print:
    if task == "classification":
        test_loss /= len(data_loader.dataset)
        print(name + 'Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)'.format(
            test_loss, correct, len(data_loader.dataset), 100. * correct /
            len(data_loader.dataset)))
    elif task == "regression":
        test_loss = test_loss / 7.0 / len(data_loader.dataset)
        print('Average loss: {:.4f}'.format(test_loss))
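

# A minimal usage sketch for single-model evaluation (illustrative only): the
# checkpoint path is a hypothetical placeholder and `test_loader` is any
# torch.utils.data.DataLoader yielding (data, target) batches.
def _example_single_model_evaluation(test_loader):
    compute_error_general('checkpoints/tree_final.pth',  # hypothetical path
                          test_loader,
                          cuda_on=torch.cuda.is_available(),
                          soft_decision=True,
                          stochastic=False,
                          fast=False,
                          task="classification",
                          name='Test set: ')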