Example #1
    def __init__(
        self,
        loc=None,
        dim=None,
        observed=False,
        name="Determ",
    ):
        """Construct Deterministic distribution


        """

        if loc is not None:

            # check the consistency of loc and dim
            self.__check_params(loc, dim)

            # default dimension is 1 unless dim is given
            param_dim = 1
            if dim is not None:
                param_dim = dim

            # shape = (batches, dimension)
            self_shape = (replicate.get_total_size(),
                          np.max([get_total_dimension(loc), param_dim]))

            loc_rep = self.__reshape_param(loc, self_shape)

            # build the distribution
            super(Deterministic, self).__init__(
                base_models.Deterministic(loc=loc_rep, name=name),
                observed=observed)

        else:

            super(Deterministic, self).__init__(observed=observed)
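
For context, a hypothetical usage sketch of this constructor, assuming the snippets belong to an InferPy-style package imported as `inf`, with `inf.replicate` supplying the batch size:

import inferpy as inf

# scalar variable: loc is broadcast to shape (1, 1)
x = inf.models.Deterministic(loc=5.0)

# vector variable replicated over 10 batches: shape (10, 3)
with inf.replicate(size=10):
    y = inf.models.Deterministic(loc=[1.0, 2.0, 3.0])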
Example #2
    def __init__(self,
                 loc=0,
                 scale=1,
                 dim=None,
                 observed=False,
                 name="Normal"):
        """Construct Normal distributions

        The parameters `loc` and `scale` must be shaped in a way that supports
        broadcasting (e.g. `loc + scale` is a valid operation). If dim is specified,
        it should be consistent with the lengths of `loc` and `scale`


        Args:
            loc (float): scalar or vector indicating the mean of the distribution at each dimension.
            scale (float): scalar or vector indicating the stddev of the distribution at each dimension.
            dim (int): optional scalar indicating the number of dimensions

        Raises
            ValueError: if the parameters are not consistent
            AttributeError: if any of the properties is changed once the object is constructed

        """

        self.__check_params(loc, scale, dim)

        # default dimension is 1 unless dim is given
        param_dim = 1
        if dim is not None:
            param_dim = dim

        # shape = (batches, dimension)
        self_shape = (replicate.get_total_size(),
                      np.max([get_total_dimension(loc),
                              get_total_dimension(scale),
                              param_dim]))

        loc_rep = self.__reshape_param(loc, self_shape)
        scale_rep = self.__reshape_param(scale, self_shape)

        # build the distribution
        super(Normal, self).__init__(
            base_models.Normal(loc=loc_rep, scale=scale_rep, name=name),
            observed=observed)
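
As above, a hedged usage sketch (assuming the same `inf` package layout) showing how `loc`, `scale` and `dim` interact:

import inferpy as inf

x = inf.models.Normal(loc=0., scale=1.)              # scalar: shape (1, 1)
y = inf.models.Normal(loc=[0., 1., 2.], scale=1.)    # scale broadcast: shape (1, 3)

with inf.replicate(size=100):
    z = inf.models.Normal(loc=0., scale=1., dim=5)   # shape (100, 5)

# inf.models.Normal(loc=[0., 1.], scale=[1., 1., 1.])  # would raise ValueError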
Example #3
    def __check_params(self, loc, dim):
        """private method that checks the consistency of the input parameters"""

        # loc cannot be a multidimensional array (for now)
        if np.ndim(loc) > 1:
            raise ValueError("loc cannot be a multidimensional array")

        dim_loc = get_total_dimension(loc)

        # loc can be a scalar or a vector of length dim
        if dim is not None and dim_loc > 1 and dim != dim_loc:
            raise ValueError("loc length is not consistent with value in dim")
Example #4
    def __check_params(self, loc, scale, dim):
        """private method that checks the consistency of the input parameters"""

        # loc and scale cannot be multidimensional arrays (for now)
        if np.ndim(loc) > 1 or np.ndim(scale) > 1:
            raise ValueError("loc and scale cannot be multidimensional arrays")

        dim_loc = get_total_dimension(loc)
        dim_scale = get_total_dimension(scale)

        # loc and scale lengths must be equal or must be scalars
        if dim_loc > 1 and dim_scale > 1 and dim_loc != dim_scale:
            raise ValueError(
                "loc and scale lengths must be equal or must be 1")

        # loc can be a scalar or a vector of length dim
        if dim is not None and dim_loc > 1 and dim != dim_loc:
            raise ValueError("loc length is not consistent with value in dim")

        if dim is not None and dim_scale > 1 and dim != dim_scale:
            raise ValueError(
                "scale length is not consistent with value in dim")
Example #5
def __reshape_param(self, param, self_shape, d=1):

    N = self_shape[0]   # batch size
    D = self_shape[1]   # dimension

    # tile param into a flat vector:
    # if param already provides D values per component, repeat it N times
    # (once per batch); otherwise repeat it D*N times
    k = N if get_total_dimension(param) / d == D else D * N
    param_vect = np.tile(param, k).tolist()

    # reshape
    all_num = all(np.isscalar(x) for x in param_vect)

    # convert any non-numeric entries (e.g. tensors) via param_to_tf
    if not all_num:
        param_vect = [param_to_tf(x) for x in param_vect]

    # target shape: (N, -1) when batched, (D,) otherwise, plus a trailing
    # component axis when d > 1
    if N > 1:
        real_shape = [N, -1]
    else:
        real_shape = [D]
    if d > 1:
        real_shape.append(d)

    if all_num:
        # purely numeric values: build a constant tensor directly
        param_np_mat = np.reshape(np.stack(param_vect), tuple(real_shape))
        param_tf_mat = tf.constant(param_np_mat, dtype="float32")
    else:
        if D == 1 and N == 1:
            param_tf_mat = param_vect[0]
        else:
            param_tf_mat = tf.reshape(tf.stack(param_vect), tuple(real_shape))

    return param_tf_mat
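
To make the tiling concrete, a small NumPy-only sketch of the numeric (`all_num`) branch with `d=1`, again using `np.size` in place of `get_total_dimension`:

import numpy as np

N, D = 2, 3                                   # self_shape = (batches, dimension)

def tile_numeric(param):
    k = N if np.size(param) == D else D * N   # D values per batch vs. a scalar
    return np.reshape(np.tile(param, k), (N, -1))

print(tile_numeric(0.5))                      # scalar -> (2, 3) constant matrix
print(tile_numeric([1., 2., 3.]))             # vector -> one copy per batch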
Example #6
    def constructor(self, *args, **kwargs):

        param_dist = {}
        args_list = list(args)

        # consume positional args in order; fall back to keyword args
        for p_name in params:
            if len(args_list) > 0:
                if p_name in kwargs:
                    raise ValueError("Wrong positional or keyword argument")

                param_dist.update({p_name : args_list[0]})
                args_list = args_list[1:]
            else:
                if kwargs.get(p_name) is not None:
                    param_dist.update({p_name: kwargs.get(p_name)})

        if len(param_dist) > 0:

            # allowed ndim range and per-component dimension for each parameter
            nd_range = {}
            d = {}
            for p, v in six.iteritems(param_dist):
                if v is not None:
                    nd_range.update({p: [0, 1] if is_simple.get(p) in [None, True] else [1, 2]})
                    d.update({p: 1 if is_simple.get(p) in [None, True]
                              else get_total_dimension(v if ndim(v) == 1 else v[0])})


            # check the number of dimensions
            self.__check_ndim(param_dist, nd_range)

            # get the final shape
            param_dim = 1
            if kwargs.get("dim") is not None:
                param_dim = kwargs.get("dim")

            self_shape = (inf.replicate.get_total_size(),
                          int(np.max([get_total_dimension(v) / d.get(p)
                                      for p, v in six.iteritems(param_dist)
                                      if p is not None and v is not None] +
                                     [param_dim])))


            # check that dimensions are consistent: every parameter must expand
            # by a factor of 1 or by the full dimension self_shape[1]
            p_expand = [p for p, v in six.iteritems(param_dist)
                        if p is not None and v is not None and get_total_dimension(v) > d.get(p)]
            f_expand = [get_total_dimension(param_dist.get(p)) / d.get(p) for p in p_expand]

            if len([x for x in f_expand if x not in [1, self_shape[1]]]) > 0:
                raise ValueError("Inconsistent parameter dimensions")

            # reshape the parameters to the final shape
            for p, v in six.iteritems(param_dist):
                if v is not None:
                    if isinstance(v, inf.models.RandomVariable) and self_shape == tuple(v.shape):
                        param_dist[p] = param_to_tf(v)
                    else:
                        param_dist[p] = self.__reshape_param(v, self_shape, d.get(p))

            # build the underlying TF object
            validate_args = kwargs.get("validate_args") if kwargs.get("validate_args") is not None else False
            allow_nan_stats = kwargs.get("allow_nan_stats") if kwargs.get("allow_nan_stats") is not None else True

            dist = getattr(ed.models, class_name)(validate_args=validate_args,
                                                  allow_nan_stats=allow_nan_stats,
                                                  **param_dist)

        else:
            dist = None

        observed = kwargs.get("observed") if kwargs.get("observed") is not None else False
        super(self.__class__, self).__init__(dist, observed=observed)
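
Finally, a self-contained sketch of just the positional/keyword merging loop at the top of this constructor, to show how `param_dist` gets populated (the `params` list is whatever the concrete distribution declares):

def merge_params(params, args, kwargs):
    param_dist = {}
    args_list = list(args)
    for p_name in params:
        if args_list:
            # a parameter given both positionally and by keyword is an error
            if p_name in kwargs:
                raise ValueError("Wrong positional or keyword argument")
            param_dist[p_name] = args_list.pop(0)
        elif kwargs.get(p_name) is not None:
            param_dist[p_name] = kwargs[p_name]
    return param_dist

print(merge_params(["loc", "scale"], (0.,), {"scale": 2.}))
# {'loc': 0.0, 'scale': 2.0}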