Ejemplo n.º 1
0
 def setUp(self):
     """Start a test server on an OS-chosen port with a fresh on-disk database."""
     # route server logging through the shared logging utility
     server.logger = utils_logging.getLogger(logging.DEBUG, "logs")
     # port=0 asks the OS for any free port, so parallel tests don't collide
     listener = server.sockets.PySocket(port=0)
     # uuid4().hex is dash-free, giving each test run a unique db filename
     db_path = f"./{uuid.uuid4().hex}.db"
     self.s = server.AEPsychServer(socket=listener, database_path=db_path)
Ejemplo n.º 2
0
 def setUp(self):
     """Start a thrift-wrapped test server backed by a fresh on-disk database."""
     # route server logging through the shared logging utility
     server.logger = utils_logging.getLogger(logging.DEBUG, "logs")
     wrapper = server.sockets.ThriftSocketWrapper(msg_queue=Queue())
     # uuid4().hex is dash-free, giving each test run a unique db filename
     db_path = f"./{uuid.uuid4().hex}.db"
     self.s = server.AEPsychServer(
         socket=wrapper, database_path=db_path, thrift=True
     )
Ejemplo n.º 3
0
from aepsych.utils import promote_0d, _dim_grid, get_jnd_multid
from botorch.acquisition import (
    qNoisyExpectedImprovement,
    qUpperConfidenceBound,
    NoisyExpectedImprovement,
)
from scipy.stats import norm

# this is pretty aggressive jitter setting but should protect us from
# crashes which are a bigger concern in data collection.
# we refit with stan afterwards anyway.
# NOTE(review): these assignments reach into gpytorch's private
# `_global_*` attributes rather than a public API — confirm they still
# exist on the pinned gpytorch version before upgrading.
gpytorch.settings.cholesky_jitter._global_float_value = 1e-3
gpytorch.settings.cholesky_jitter._global_double_value = 1e-3
gpytorch.settings.tridiagonal_jitter._global_value = 1e-3

logger = utils_logging.getLogger(logging.DEBUG)


def _prune_extra_acqf_args(acqf, extra_acqf_args):
    # prune extra args needed, ignore the rest
    # (this helps with API consistency)
    acqf_args_expected = signature(acqf).parameters.keys()
    return {k: v for k, v in extra_acqf_args.items() if k in acqf_args_expected}


def _prune_extra_acqf_args(acqf, extra_acqf_args):
    # prune extra args needed, ignore the rest
    # (this helps with API consistency)
    acqf_args_expected = signature(acqf).parameters.keys()
    return {k: v for k, v in extra_acqf_args.items() if k in acqf_args_expected}
Ejemplo n.º 4
0
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.

# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

import json
import logging
import socket
import sys

import aepsych.utils_logging as utils_logging
import numpy as np
import zmq

logger = utils_logging.getLogger(logging.INFO)


def SimplifyArrays(message):
    """Return a copy of ``message`` with numpy arrays converted to plain lists.

    Recurses into nested dicts; every ``np.ndarray`` is replaced with the
    output of ``ndarray.tolist()`` so the result is JSON-serializable.
    All other values pass through unchanged.

    Args:
        message (dict): Arbitrary message payload.

    Returns:
        dict: Same key structure with arrays flattened to lists.
    """
    # Fix: use isinstance() instead of `type(v) == np.ndarray` / `type(v) is
    # dict` — idiomatic, and also handles subclasses of ndarray/dict that the
    # exact-type comparison would silently pass through unconverted.
    return {
        k: v.tolist()
        if isinstance(v, np.ndarray)
        else SimplifyArrays(v)
        if isinstance(v, dict)
        else v
        for k, v in message.items()
    }


def createSocket(socket_type="pysocket", port=5555, msg_queue=None):
    # Factory for server socket wrappers, selected by ``socket_type``.
    # NOTE(review): only the "pysocket" branch is visible in this chunk — the
    # handling of other socket types (and the return of ``sock``) presumably
    # continues below; confirm against the full file before editing.
    logger.info(f"socket_type = {socket_type} port = {port}")

    if socket_type == "pysocket":
        sock = PySocket(port=port)
Ejemplo n.º 5
0
import torch
from aepsych.config import Config
from aepsych.factory.factory import default_mean_covar_factory
from aepsych.models.base import AEPsychMixin
from aepsych.utils import _process_bounds, promote_0d
from aepsych.utils_logging import getLogger
from botorch.fit import fit_gpytorch_model
from botorch.models.gpytorch import GPyTorchModel
from gpytorch.likelihoods import BernoulliLikelihood, Likelihood
from gpytorch.models import ApproximateGP
from gpytorch.variational import CholeskyVariationalDistribution, VariationalStrategy
from scipy.special import owens_t
from scipy.stats import norm
from torch.distributions import Normal

logger = getLogger()


class GPClassificationModel(AEPsychMixin, ApproximateGP, GPyTorchModel):
    """Probit-GP model with variational inference.

    From a conventional ML perspective this is a GP Classification model,
    though in the psychophysics context it can also be thought of as a
    nonlinear generalization of the standard linear model for 1AFC or
    yes/no trials.

    For more on variational inference, see e.g.
    https://docs.gpytorch.ai/en/v1.1.1/examples/04_Variational_and_Approximate_GPs/
    """

    _num_outputs = 1