def prepare_constraints(distribution,
                        target_stats,
                        loc=0.0,
                        scale=1.0,
                        use="manual"):
    """Assemble scipy.optimize-style inequality constraints for fitting
    a distribution's parameters to target statistics."""
    # The generic validity constraint is always present:
    constraints = [{
        "type": "ineq",
        "fun": lambda p: fconstr(p, distribution, loc, scale, use)
    }]
    target_keys = target_stats.keys()
    # Extra constraints when a mode (or median, for beta) is targeted:
    if isequal_string(distribution.type, "gamma") \
            and np.any(np.in1d("mode", target_keys)):
        constraints.append({
            "type": "ineq",
            "fun": lambda p: fconstr_gamma_mode(p, distribution)
        })
    elif isequal_string(distribution.type, "beta") \
            and np.any(np.in1d(["mode", "median"], target_keys)):
        constraints.append({
            "type": "ineq",
            "fun": lambda p: fconstr_beta_mode_median(p, distribution)
        })
    return constraints
# Example 2
def eqtn_x0cr_r(Iext1, yc, a, b, d, x1_rest, x1_cr, x0_rest, x0_cr, zmode=np.array("lin")):
    """Return the pair (x0cr, r) mapping between rest and critical equilibria.

    Computed from the rest (x1_rest, x0_rest) and critical (x1_cr, x0_cr)
    equilibrium points, for either the linear ('lin') or sigmoidal ('sig')
    z-nullcline.  Raises via raise_value_error for any other zmode.
    NOTE(review): the long float constants in the 'sig' branch appear to be
    machine-generated (symbolic solve output) -- do not edit by hand.
    """
    # Correspondence with EpileptorDP2D
    b = b - d
    if isequal_string(str(zmode), 'lin'):
        return 0.25 * (x0_rest * (a * x1_cr ** 3 - a * x1_rest ** 3 - b * x1_cr ** 2 +
                                  b * x1_rest ** 2 + 4.0 * x1_cr - 4.0 * x1_rest) +
                       (x0_cr - x0_rest) * (Iext1 - a * x1_rest ** 3 + b * x1_rest ** 2 -
                                            4.0 * x1_rest + yc)) / (x0_cr - x0_rest), \
               0.25 * (a * x1_cr ** 3 - a * x1_rest ** 3 - b * x1_cr ** 2 + b * x1_rest ** 2 + 4.0 * x1_cr -
                       4.0 * x1_rest) / (x0_cr - x0_rest)
    elif isequal_string(str(zmode), 'sig'):
        return (-x0_cr*(3.2e+66*20000000000000.0**(10*x1_cr) + 4.74922109128249e+68*54365636569181.0**(10*x1_cr))
                *(3.2e+66*1.024e+133**x1_rest*(Iext1 - a*x1_rest**3 + b*x1_rest**2 + yc)
                + 4.74922109128249e+68*2.25551009738825e+137**x1_rest*(Iext1 - a*x1_rest**3 + b*x1_rest**2 + yc - 3.0))
                + x0_rest*(3.2e+66*20000000000000.0**(10*x1_rest) +
                4.74922109128249e+68*54365636569181.0**(10*x1_rest))*(3.2e+66*1.024e+133**x1_cr*(Iext1 - a*x1_cr**3 +
                b*x1_cr**2 + yc) + 4.74922109128249e+68*2.25551009738825e+137**x1_cr*(Iext1 - a*x1_cr**3 + b*x1_cr**2 +
                yc - 3.0)))/((3.2e+66*20000000000000.0**(10.0*x1_cr) +
                4.74922109128249e+68*54365636569181.0**(10.0*x1_cr))*(3.2e+66*20000000000000.0**(10.0*x1_rest) +
                4.74922109128249e+68*54365636569181.0**(10.0*x1_rest))*(-x0_cr + x0_rest)), \
                (-(3.2e+66 * 20000000000000.0 ** (10 * x1_cr) +
                   4.74922109128249e+68 * 54365636569181.0 ** (10 * x1_cr)) * (3.2e+66 * 1.024e+133 ** x1_rest * (
                Iext1 - a * x1_rest ** 3 + b * x1_rest ** 2 + yc) +
                4.74922109128249e+68 * 2.25551009738825e+137 ** x1_rest * (
                Iext1 - a * x1_rest ** 3 + b * x1_rest ** 2 + yc - 3.0)) + (
                3.2e+66 * 20000000000000.0 ** (10 * x1_rest) + 4.74922109128249e+68 * 54365636569181.0 ** (
                10 * x1_rest)) * (3.2e+66 * 1.024e+133 ** x1_cr * (Iext1 - a * x1_cr ** 3 + b * x1_cr ** 2 + yc) +
                                  4.74922109128249e+68 * 2.25551009738825e+137 ** x1_cr * 
                                  (Iext1 - a * x1_cr ** 3 + b * x1_cr ** 2 + yc - 3.0))) / \
               ((3.2e+66 * 20000000000000.0 ** (10.0 * x1_cr) + 4.74922109128249e+68 * 54365636569181.0 ** (
                       10.0 * x1_cr)) * (3.2e+66 * 20000000000000.0 ** (10.0 * x1_rest) +
                                         4.74922109128249e+68 * 54365636569181.0 ** (10.0 * x1_rest)) *
                (-x0_cr + x0_rest))
    else:
        raise_value_error('zmode is neither "lin" nor "sig"')
 def pdf_params(self, parametrization="lamda"):
     """Return the distribution's parameter(s) under the requested naming."""
     if isequal_string(parametrization, "scipy"):
         # scipy.stats naming
         return {"mu": self.mu}
     if isequal_string(parametrization, "numpy"):
         # numpy.random naming
         return {"lam": self.mu}
     return {"lamda": self.lamda}
# Example 4
 def pdf_params(self, parametrization="lamda"):
     """Return the distribution's parameter(s) under the requested naming."""
     if isequal_string(parametrization, "scale"):
         return {"scale": 1.0 / self.lamda}  # scale is the inverse rate
     if isequal_string(parametrization, "rate"):
         return {"rate": self.rate}
     return {"lamda": self.lamda}
 def pdf_params(self, parametrization="mu-sigma"):
     """Return mean/std under either the scipy/numpy or the native naming."""
     if isequal_string(parametrization, "scipy") or isequal_string(parametrization, "numpy"):
         return OrderedDict([("loc", self.mu), ("scale", self.sigma)])
     return OrderedDict([("mu", self.mu), ("sigma", self.sigma)])
# Example 6
 def pdf_params(self, parametrization="alpha-beta"):
     """Return shape parameters under the requested naming convention."""
     # "a-b", "scipy" and "numpy" all use the short names:
     if any(isequal_string(parametrization, alias)
            for alias in ("a-b", "scipy", "numpy")):
         return OrderedDict([("a", self.a), ("b", self.b)])
     return OrderedDict([("alpha", self.alpha), ("beta", self.beta)])
 def pdf_params(self, parametrization="a-b"):
     """Return uniform bounds under the requested naming convention."""
     if isequal_string(parametrization, "scipy"):
         # scipy.stats.uniform: loc = lower bound, scale = width
         return OrderedDict([("loc", self.a), ("scale", self.b - self.a)])
     if isequal_string(parametrization, "numpy"):
         return OrderedDict([("low", self.low), ("high", self.high)])
     return OrderedDict([("a", self.a), ("b", self.b)])
# Example 8
 def generate_model(self, model_configuration):
     """Build the configured model, enforcing the java/EpileptorModel pairing."""
     is_custom = isequal_string(self.model_name, EpileptorModel._ui_name)
     is_java = isequal_string(self.simulator, "java")
     if is_custom and not is_java:
         raise_value_error(
             "Custom EpileptorModel can be used only with java simulator!")
     if is_java and not is_custom:
         raise_value_error(
             "Only java EpileptorModel can be used with java simulator!")
     return model_build_dict[self.model_name](model_configuration)
# Example 9
def eqtn_x0(x1, z, zmode=np.array("lin"), z_pos=True, K=None, w=None, coupl=None):
    """Solve the z-nullcline for x0, under the linear or sigmoidal zmode."""
    if coupl is None:
        # No coupling term when K or w is missing or all-zero:
        if (K is None) or (w is None) or np.all(K == 0.0) or np.all(w == 0.0):
            coupl = 0.0
        else:
            from tvb_epilepsy.base.computations.calculations_utils import calc_coupling
            coupl = calc_coupling(x1, K, w)
    zmode_str = str(zmode)
    if isequal_string(zmode_str, 'lin'):
        # The z ** 7 correction applies only for z_pos == False:
        z_term = z + np.where(z_pos, 0.0, 0.1 * np.power(z, 7.0))
        return x1 - (z_term + coupl) / 4.0
    if isequal_string(zmode_str, 'sig'):
        return np.divide(3.0, 1.0 + np.power(np.exp(1), -10.0 * (x1 + 0.5))) - z - coupl
    raise_value_error('zmode is neither "lin" nor "sig"')
# Example 10
def assert_obj(obj, obj_name, obj_type):
    """Coerce obj to the declared container type, defaulting to OrderedDict."""
    wants_sequence = isequal_string(obj_type, "list") or isequal_string(obj_type, "tuple")
    if wants_sequence and not isinstance(obj, list):
        return []
    wants_mapping = isequal_string(obj_type, "dict") or isequal_string(obj_type, "OrderedDict")
    if wants_mapping and not isinstance(obj, dict):
        return OrderedDict()
    # If still not created, make an  OrderedDict() by default:
    if obj is None:
        logger.warning("\n Child object " + str(obj_name) +
                       " still not created!" +
                       "\nCreating an OrderedDict() by default!")
        return OrderedDict()
    return obj
# Example 11
def eqtn_fz(x1, z, x0, tau1, tau0, zmode=np.array("lin"), z_pos=True, K=None, w=None, coupl=None):
    """Right-hand side of the slow variable z equation, lin or sig zmode."""
    if coupl is None:
        # No coupling term when K or w is missing or all-zero:
        if (K is None) or (w is None) or np.all(K == 0.0) or np.all(w == 0.0):
            coupl = 0.0
        else:
            from tvb_epilepsy.base.computations.calculations_utils import calc_coupling
            coupl = calc_coupling(x1, K, w)
    tau = np.divide(tau1, tau0)
    zmode_str = str(zmode)
    if isequal_string(zmode_str, 'lin'):
        # The z ** 7 correction applies only for z_pos == False:
        z_term = np.where(z_pos, z, z + 0.1 * np.power(z, 7.0))
        return np.multiply((4 * (x1 - x0) - z_term - coupl), tau)
    if isequal_string(zmode_str, 'sig'):
        sigmoid = np.divide(3.0, (1 + np.power(np.exp(1), (-10.0 * (x1 + 0.5)))))
        return np.multiply(sigmoid - x0 - z - coupl, tau)
    raise_value_error('zmode is neither "lin" nor "sig"')
 def normalize_signals(self, signals, normalization=None):
     """Normalize the target signals according to the chosen scheme.

     Supported schemes: "zscore", "minmax" and "baseline-amplitude".
     Any other string is ignored with a warning; None (or a non-string)
     leaves the signals untouched.

     :param signals: numpy array of signals (modified and/or replaced)
     :param normalization: name of the normalization scheme, or None
     :return: the (possibly) normalized signals
     """
     if isinstance(normalization, basestring):
         if isequal_string(normalization, "zscore"):
             signals = zscore(signals, axis=None) / 3.0
         elif isequal_string(normalization, "minmax"):
             signals -= signals.min()
             signals /= signals.max()
         elif isequal_string(normalization, "baseline-amplitude"):
             # Subtract a low-percentile baseline, scale by the 95th percentile:
             signals -= np.percentile(signals, 5, 0)
             signals /= np.percentile(signals, 95)
         else:
             # BUG FIX: the warning previously listed only 'zscore' and
             # 'minmax', omitting the supported 'baseline-amplitude' scheme.
             self.logger.warn("Ignoring target signals' normalization " + normalization +
                              ",\nwhich is not one of the currently available " +
                              "'zscore', 'minmax' and 'baseline-amplitude'!")
     return signals
 def update_active_regions(self, statistical_model, methods=["e_values", "LSA"], reset=False, **kwargs):
     """Apply the selected active-region update methods in sequence,
     pairing each method with its corresponding threshold."""
     if reset:
         statistical_model.update_active_regions([])
     thresholds = ensure_list(kwargs.get("active_regions_th", None))
     for method, th in zip(*assert_arrays([ensure_list(methods), thresholds])):
         if isequal_string(method, "e_values"):
             statistical_model = self.update_active_regions_e_values(statistical_model, th)
         elif isequal_string(method, "x0_values"):
             statistical_model = self.update_active_regions_x0_values(statistical_model, th)
         elif isequal_string(method, "lsa"):
             statistical_model = self.update_active_regions_lsa(statistical_model, th)
         elif isequal_string(method, "seeg"):
             statistical_model = self.update_active_regions_seeg(statistical_model, th,
                                                                 seeg_inds=kwargs.get("seeg_inds"))
     return statistical_model
 def set_target_data_and_time(self, target_data_type, target_data, statistical_model, **kwargs):
     """Prepare target signals and the time axis for model inversion.

     Side effects: sets self.target_data_type and self.time, and may update
     self.dt, self.n_times and self.observation_shape; also writes the final
     dimensions into statistical_model.
     :return: (signals, time, statistical_model, target_data)
     """
     if isequal_string(target_data_type, "simulated"):
         signals, target_data = self.set_simulated_target_data(target_data, statistical_model, **kwargs)
         self.target_data_type = "simulated"
     else:  # isequal_string(target_data_type, "empirical"):
         signals = self.set_empirical_target_data(target_data, **kwargs)
         self.target_data_type = "empirical"
     # Optional automatic signal selection (default: enabled):
     if kwargs.get("auto_selection", True) is not False:
         if self.data_type == "lfp":
             signals = self.select_signals_lfp(signals, statistical_model.active_regions,
                                               kwargs.pop("auto_selection", "rois"), **kwargs)
         else:
             signals = self.select_signals_seeg(signals, statistical_model.active_regions,
                                                kwargs.pop("auto_selection", "rois-correlation-power"), **kwargs)
     self.time = self.set_time(target_data.get("time", None))
     # Optional temporal decimation:
     if kwargs.get("decimate", 1) > 1:
         # NOTE(review): the decimated 'time' is bound to a local and never
         # written back to self.time -- confirm whether that is intended.
         signals, time, self.dt, self.n_times = decimate_signals(signals, self.time, kwargs.get("decimate"))
         self.observation_shape = (self.n_times, self.n_signals)
     # Optional trimming of head/tail samples:
     if np.sum(kwargs.get("cut_signals_tails", (0, 0))) > 0:
         signals, self.time, self.n_times = cut_signals_tails(signals, self.time, kwargs.get("cut_signals_tails"))
         self.observation_shape = (self.n_times, self.n_signals)
     # TODO: decide about signals' normalization for the different (sensors', sources' cases)
     signals = self.normalize_signals(signals, kwargs.get("normalization", None))
     statistical_model.n_signals = self.n_signals
     statistical_model.n_times = self.n_times
     statistical_model.dt = self.dt
     return signals, self.time, statistical_model, target_data
# Example 15
def list_or_tuple_to_h5_model(h5_model, obj, path, container_path, obj_type):
    """Write a list or tuple into the h5 model.

    :return: (h5_model, leftover) -- leftover is None when obj was stored
        directly, otherwise a dict of elements to be written recursively.
    """
    # empty list or tuple get into metadata
    if len(obj) == 0:
        h5_model.add_or_update_metadata_attribute(path + "/type_str", obj_type)
        if isinstance(obj, list):
            h5_model.add_or_update_datasets_attribute(path, "[]")
        else:
            h5_model.add_or_update_datasets_attribute(path, "()")
        return h5_model, None
    # Try to store it as a ndarray of numbers or strings but not objects...
    temp = np.array(obj)
    if not (isequal_string(str(temp.dtype)[0], "O")):
        h5_model.add_or_update_metadata_attribute(path + "/type_str",
                                                  obj.__class__.__name__)
        if isinstance(obj, tuple):
            h5_model.add_or_update_metadata_attribute(path + "/transform_str",
                                                      "tuple(obj)")
        else:
            h5_model.add_or_update_metadata_attribute(path + "/transform_str",
                                                      "obj.tolist()")
        h5_model.add_or_update_datasets_attribute(path, temp)
        return h5_model, None
    else:
        # Object-dtype content: defer to recursive dict storage.
        h5_model.add_or_update_metadata_attribute(
            os.path.join(container_path[1:], "create_str"), "list()")
        if isinstance(obj, tuple):
            # BUG FIX: the transform string was "tuple(obj))" with an
            # unbalanced closing paren, which would break when later parsed.
            h5_model.add_or_update_metadata_attribute(
                os.path.join(container_path[1:], "transform_str"),
                "tuple(obj)")
        return h5_model, iterable_to_dict(obj)
# Example 16
def normalize_signals(signals, normalization=None):
    """Normalize signals by 'zscore', 'minmax' or 'baseline-amplitude'.

    Raises (via raise_value_error) for an unrecognized scheme; None or a
    non-string normalization leaves the signals unchanged.
    """
    if isinstance(normalization, basestring):
        if isequal_string(normalization, "zscore"):
            signals = zscore(signals, axis=None) / 3.0
        elif isequal_string(normalization, "minmax"):
            signals -= signals.min()
            signals /= signals.max()
        elif isequal_string(normalization, "baseline-amplitude"):
            # Percentile-of-percentile baseline and amplitude scaling:
            signals -= np.percentile(np.percentile(signals, 1, axis=0), 1)
            signals /= np.percentile(np.percentile(signals, 99, axis=0), 99)
        else:
            # BUG FIX: this branch raises, so the message must not claim the
            # normalization is being "ignored":
            raise_value_error("Unknown signals' normalization " + normalization +
                             ",\nwhich is not one of the currently available " +
                             "'zscore', 'minmax' and  'baseline-amplitude'!")

    return signals
# Example 17
 def build_simulator(self, model_configuration, connectivity, **kwargs):
     """Dispatch to the java or the TVB simulator builder."""
     if not isequal_string(self.simulator, "java"):
         return self.build_simulator_TVB(model_configuration, connectivity,
                                         **kwargs)
     return self.build_simulator_java_from_model_configuration(
         model_configuration, connectivity, **kwargs)
# Example 18
 def pdf_params(self, parametrization="alpha-beta"):
     """Return gamma parameters under the requested naming convention."""
     if isequal_string(parametrization, "shape-scale"):
         return OrderedDict([("shape", self.alpha), ("scale", self.theta)])
     if isequal_string(parametrization, "k-theta"):
         return OrderedDict([("k", self.k), ("theta", self.theta)])
     if isequal_string(parametrization, "shape-rate"):
         return OrderedDict([("shape", self.alpha), ("rate", self.beta)])
     if isequal_string(parametrization, "scipy"):
         # scipy.stats.gamma naming: a = shape, scale = theta
         return OrderedDict([("a", self.alpha), ("scale", self.theta)])
     return OrderedDict([("alpha", self.alpha), ("beta", self.beta)])
# Example 19
 def write_model_data_to_file(self, model_data, reset_path=False, **kwargs):
     """Serialize model_data in the format implied by the path's extension."""
     model_data_path = kwargs.get("model_data_path", self.model_data_path)
     if reset_path:
         self.model_data_path = model_data_path
     extension = model_data_path.split(".", -1)[-1]

     def _dump_pickle():
         with open(model_data_path, 'wb') as f:
             pickle.dump(model_data, f)

     # Extension -> writer, checked in the same order as before:
     for ext, writer in (("npy", lambda: np.save(model_data_path, model_data)),
                         ("mat", lambda: savemat(model_data_path, model_data)),
                         ("pkl", _dump_pickle),
                         ("R", lambda: rdump(model_data_path, model_data))):
         if isequal_string(extension, ext):
             writer()
             return
     # NOTE(review): dirname+basename reconstructs the original path
     # unchanged -- presumably an .h5 suffix swap was intended; confirm.
     H5Writer().write_dictionary(model_data, os.path.join(os.path.dirname(model_data_path),
                                                          os.path.basename(model_data_path)))
# Example 20
def eqtn_fx1(x1, z, y1, Iext1, slope, a, b, d, tau1, x1_neg=True, model="2d", x2=0.0):
    """Right-hand side of the x1 equation for the 2d or 6d Epileptor model."""
    if isequal_string(str(model), '2d'):
        # Correspondence with EpileptorDP2D
        b = b - d
        cubic_term = np.where(x1_neg, if_ydot0(x1, a, b),
                              else_ydot0_2d(x1, z, slope, d))
    else:
        cubic_term = np.where(x1_neg, if_ydot0(x1, a, b),
                              else_ydot0_6d(x2, z, slope))
    return np.multiply(y1 - z + Iext1 + np.multiply(x1, cubic_term), tau1)
# Example 21
 def sample(self, parameter=(), loc=0.0, scale=1.0, **kwargs):
     """Draw self.n_samples samples for a parameter, truncating to [low, high].

     parameter may be a StochasticParameterBase (shape/bounds/loc/scale taken
     from it) or a tuple of positional args for a named scipy/numpy
     distribution given via kwargs' "probability_distribution".
     :return: samples array, transposed so samples run along the last axis
     """
     # Reseed on every call for reproducibility:
     nr.seed(self.random_seed)
     if isinstance(parameter, StochasticParameterBase):
         parameter_shape = parameter.p_shape
         low = parameter.low
         high = parameter.high
         prob_distr = parameter
         loc = parameter.loc
         scale = parameter.scale
     else:
         parameter_shape = kwargs.pop("shape", (1, ))
         low = kwargs.pop("low", -CalculusConfig.MAX_SINGLE_VALUE)
         high = kwargs.pop("high", CalculusConfig.MAX_SINGLE_VALUE)
         prob_distr = kwargs.pop("probability_distribution", "uniform")
     low, high = self.check_for_infinite_bounds(low, high)
     low, high, n_outputs, parameter_shape = self.check_size(
         low, high, parameter_shape)
     self.adjust_shape(parameter_shape)
     out_shape = tuple([self.n_samples] + list(self.shape)[:-1])
     # Finite bounds on either side force truncated sampling via scipy:
     if np.any(low > -CalculusConfig.MAX_SINGLE_VALUE) or np.any(
             high < CalculusConfig.MAX_SINGLE_VALUE):
         if not (isequal_string(self.sampling_module, "scipy")):
             self.logger.warning(
                 "Switching to scipy for truncated distributions' sampling!"
             )
         self.sampling_module = "scipy"
         if isinstance(prob_distr, basestring):
             self.sampler = getattr(ss, prob_distr)(*parameter, **kwargs)
             samples = self._truncated_distribution_sampling(
                 {
                     "low": low,
                     "high": high
                 }, out_shape) * scale + loc
         elif isinstance(prob_distr, StochasticParameterBase):
             self.sampler = prob_distr._scipy()
             samples = self._truncated_distribution_sampling(
                 {
                     "low": low,
                     "high": high
                 }, out_shape)
     elif self.sampling_module.find("scipy") >= 0:
         if isinstance(prob_distr, basestring):
             self.sampler = getattr(ss, prob_distr)(*parameter, **kwargs)
             samples = self.sampler.rvs(size=out_shape) * scale + loc
         elif isinstance(prob_distr, StochasticParameterBase):
             self.sampler = prob_distr._scipy(**kwargs)
             samples = self.sampler.rvs(size=out_shape)
     elif self.sampling_module.find("numpy") >= 0:
         if isinstance(prob_distr, basestring):
             self.sampler = lambda size: getattr(nr, prob_distr)(
                 *parameter, size=size, **kwargs)
             samples = self.sampler(out_shape) * scale + loc
         elif isinstance(prob_distr, StochasticParameterBase):
             self.sampler = lambda size: prob_distr._numpy(size=size)
             samples = self.sampler(out_shape)
     # NOTE(review): if prob_distr is neither a basestring nor a
     # StochasticParameterBase, 'samples' is never assigned and the return
     # below raises NameError -- confirm callers always pass one of the two.
     return samples.T
# Example 22
def eqtn_jac_fz_2d(x1, z, tau1, tau0, zmode=np.array("lin"), z_pos=True, K=None, w=None):
    """Jacobian of the 2D z-equation w.r.t. (x1, z).

    :return: array of shape (n, 2n), the horizontal concatenation
        [d fz/d x1 | d fz/d z], each part an (n, n) block.
    """
    tau = np.divide(tau1, tau0)
    # d fz / d z starts from the -z term:
    jac_z = - np.ones(z.shape, dtype=z.dtype)
    if isequal_string(str(zmode), 'lin'):
        jac_x1 = 4.0 * np.ones(z.shape, dtype=z.dtype)
        if not (z_pos):
            # Derivative of the extra 0.1 * z ** 7 term:
            jac_z -= 0.7 * np.power(z, 6.0)
    elif isequal_string(str(zmode), 'sig'):
        # Derivative of the sigmoidal x1 term:
        jac_x1 = np.divide(30 * np.power(np.exp(1), (-10.0 * (x1 + 0.5))),
                           1 + np.power(np.exp(1), (-10.0 * (x1 + 0.5))))
    else:
        raise_value_error('zmode is neither "lin" nor "sig"')
    # Assuming that wii = 0
    jac_x1 += np.multiply(K, np.sum(w, 1))
    # Diagonal self-terms minus the off-diagonal coupling contributions:
    jac_x1 = np.diag(jac_x1.flatten()) - np.multiply(np.repeat(np.reshape(K, (x1.size, 1)), x1.size, axis=1), w)
    jac_x1 *= np.repeat(np.reshape(tau, (x1.size, 1)), x1.size, axis=1)
    jac_z *= tau
    jac_z = np.diag(jac_z.flatten())
    return np.concatenate([jac_x1, jac_z], axis=1)
 def update_active_regions(self,
                           probabilistic_model,
                           e_values=None,
                           x0_values=None,
                           lsa_propagation_strengths=None,
                           reset=False):
     """Update the model's active regions via the configured selection methods.

     :param probabilistic_model: model whose active regions are updated
     :param e_values: epileptogenicity values (default: empty list)
     :param x0_values: excitability values (default: empty list)
     :param lsa_propagation_strengths: LSA strengths (default: empty list)
     :param reset: when True, clear the active regions first
     """
     # Replace mutable default arguments with None sentinels; [] is still
     # the effective default, preserving backward compatibility:
     e_values = [] if e_values is None else e_values
     x0_values = [] if x0_values is None else x0_values
     if lsa_propagation_strengths is None:
         lsa_propagation_strengths = []
     if reset:
         probabilistic_model.update_active_regions([])
     for m in ensure_list(self.active_regions_selection_methods):
         if isequal_string(m, "E"):
             probabilistic_model = self.update_active_regions_e_values(
                 probabilistic_model, e_values, reset=False)
         elif isequal_string(m, "x0"):
             probabilistic_model = self.update_active_regions_x0_values(
                 probabilistic_model, x0_values, reset=False)
         elif isequal_string(m, "LSA"):
             probabilistic_model = self.update_active_regions_lsa(
                 probabilistic_model,
                 lsa_propagation_strengths,
                 reset=False)
     return probabilistic_model
# Example 24
 def convert_from_h5_model(self, obj=None, output_shape=None):
     """Rebuild a python object from the h5 model's datasets and metadata.

     :param obj: optional pre-created container to fill in
     :param output_shape: target shape when the result is a numpy array
     :return: the reconstructed object (dicts wrapped in OrderedDictDot
         when the target type is not itself a dict)
     """
     output_type = obj.__class__.__name__
     if isinstance(obj, dict):
         obj = sort_dict(obj)
     elif np.in1d(output_type, ["tuple", "list"]):
         obj = iterable_to_dict(obj)
     elif isequal_string(output_type, "numpy.ndarray"):
         if isequal_string(obj.dtype, "numpy.ndarray"):
             obj = iterable_to_dict(obj.tolist())
     else:
         obj, output_type = create_object("/", self.metadata_dict)[:2]
     if obj is None:
         obj = OrderedDict()
     if output_type is None:
         output_type = obj.__class__.__name__
     # Iterate over a snapshot of the keys, since entries are popped inside:
     for abs_path in list(self.datasets_dict.keys()):
         child_obj = self.datasets_dict.pop(abs_path)
         rel_path = abs_path.split("/", 1)[1]
         build_hierarchical_object_recursively(obj, rel_path, child_obj,
                                               "/", abs_path,
                                               self.metadata_dict)
     if np.in1d(output_type, ["tuple", "list"]):
         obj = dict_to_list_or_tuple(obj, output_type)
     elif isequal_string(output_type, "numpy.ndarray"):
         # BUG FIX: was np.array(dict.values()) -- calling .values() on the
         # dict *class* instead of the reconstructed object.
         obj = np.array(obj.values())
         if isinstance(output_shape, tuple):
             try:
                 obj = np.reshape(obj, output_shape)
             except Exception:
                 # Narrowed from a bare except; keep the best-effort reshape.
                 logger.warning(
                     "Failed to reshape read object to target shape " +
                     str(output_shape) + "!" +
                     "\nReturning array of shape " + str(obj.shape) + "!")
     else:
         obj = update_object(obj, "/", self.metadata_dict,
                             getORpop="pop")[0]
     if isinstance(obj, dict) and output_type.lower().find("dict") < 0:
         return OrderedDictDot(obj)
     else:
         return obj
# Example 25
def eqtn_fx1z_diff(x1, K, w, ix, jx, a, b, d, tau1, tau0, zmode=np.array("lin")):  # , z_pos=True
    """Jacobian entries of the combined x1-z flow over index sets ix (rows)
    and jx (columns), for the linear or sigmoidal zmode.

    :return: 2D array fx1z_diff with one entry per (xi in ix, xj in jx) pair
    """
    # TODO: for the extreme z_pos = False case where we have terms like 0.1 * z ** 7. See below eqtn_fz()
    # TODO: for the extreme x1_neg = False case where we have to solve for x2 as well
    x1, K, ix, jx, a, b, d, tau1, tau0 = assert_arrays([x1, K, ix, jx, a, b, d, tau1, tau0], (x1.size,))
    tau = np.divide(tau1, tau0)
    # Derivative of the coupling term w.r.t. the x1 components:
    dcoupl_dx = eqtn_coupling_diff(K, w, ix, jx)
    if isequal_string(str(zmode), 'lin'):
        dfx1_1_dx1 = 4.0 * np.ones(x1[ix].shape)
    elif isequal_string(str(zmode), 'sig'):
        # Derivative of the sigmoidal z-nullcline term:
        dfx1_1_dx1 = np.divide(30 * np.power(np.exp(1), (-10.0 * (x1[ix] + 0.5))),
                               np.power(1 + np.power(np.exp(1), (-10.0 * (x1[ix] + 0.5))), 2))
    else:
        raise_value_error('zmode is neither "lin" nor "sig"')
    # Derivative of the cubic x1 term:
    dfx1_3_dx1 = 3 * np.multiply(np.power(x1[ix], 2.0), a[ix]) + 2 * np.multiply(x1[ix], d[ix] - b[ix])
    fx1z_diff = np.empty_like(dcoupl_dx, dtype=dcoupl_dx.dtype)
    for xi in ix:
        for xj in jx:
            if xj == xi:
                # Diagonal entries include the local (cubic + nullcline) terms:
                fx1z_diff[xi, xj] = np.multiply(dfx1_3_dx1[xi] + dfx1_1_dx1[xi] - dcoupl_dx[xi, xj], tau[xi])
            else:
                # Off-diagonal entries come from coupling only:
                fx1z_diff[xi, xj] = np.multiply(- dcoupl_dx[xi, xj], tau[xi])
    return fx1z_diff
# Example 26
    def read_dictionary(self, path, type="dict"):
        """
        :param path: Path towards a dictionary H5 file
        :param type: requested container: "DictDot", "OrderedDictDot" or plain dict
        :return: dict
        """
        self.logger.info("Starting to read a dictionary from: %s" % path)
        dictionary = dict()
        h5_file = h5py.File(path, 'r', libver='latest')
        # Datasets first, then file-level attributes:
        for dataset in h5_file.keys():
            dictionary[dataset] = h5_file["/" + dataset][()]
        for attr in h5_file.attrs.keys():
            dictionary[attr] = h5_file.attrs[attr]
        h5_file.close()
        # NOTE: the parameter shadows the builtin `type`; kept for API compatibility.
        if isequal_string(type, "DictDot"):
            return DictDot(dictionary)
        if isequal_string(type, "OrderedDictDot"):
            return OrderedDictDot(dictionary)
        return dictionary
# Example 27
 def load_model_data_from_file(self, reset_path=False, **kwargs):
     """Load model data from the file format implied by the path's extension.

     Supported extensions: .R, .npy, .mat, .pkl, .h5; anything else raises
     via raise_not_implemented_error. Keys prefixed with "EPI" are stripped
     from the result before returning.
     """
     model_data_path = kwargs.get("model_data_path", self.model_data_path)
     if reset_path:
         self.model_data_path = model_data_path
     extension = model_data_path.split(".", -1)[-1]
     if isequal_string(extension, "R"):
         model_data = rload(model_data_path)
     elif isequal_string(extension, "npy"):
         model_data = np.load(model_data_path).item()
     elif isequal_string(extension, "mat"):
         model_data = loadmat(model_data_path)
     elif isequal_string(extension, "pkl"):
         # BUG FIX: the file must be opened for *reading* ('rb'); the
         # previous 'wb' truncated the file before pickle.load could run.
         with open(model_data_path, 'rb') as f:
             model_data = pickle.load(f)
     elif isequal_string(extension, "h5"):
         model_data = H5Reader().read_dictionary(model_data_path)
     else:
         raise_not_implemented_error("model_data file (" + model_data_path +
                                     ") that are not one of (.R, .npy, .mat, .pkl) cannot be read!")
     # Iterate over a snapshot of the keys, since entries are deleted inside:
     for key in list(model_data.keys()):
         if key[:3] == "EPI":
             del model_data[key]
     return model_data
# Example 28
def array_to_h5_model(h5_model, obj, path, container_path, obj_type):
    """Store a numpy array in the h5 model, deferring object-dtype arrays.

    :return: (h5_model, leftover) -- leftover is None when stored directly,
        otherwise a dict of elements to be written recursively.
    """
    if not isequal_string(str(obj.dtype)[0], "O"):
        # Plain numeric/string array: store it directly with its metadata.
        h5_model.add_or_update_metadata_attribute(path + "/type_str", obj_type)
        h5_model.add_or_update_metadata_attribute(
            path + "/transform_str", "np.reshape(obj, " + str(obj.shape) + ")")
        h5_model.add_or_update_datasets_attribute(path, obj)
        return h5_model, None
    # Object-dtype arrays are decomposed into a dict for recursive storage.
    h5_model.add_or_update_metadata_attribute(
        os.path.join(container_path, "create_str"), "list()")
    h5_model.add_or_update_metadata_attribute(
        os.path.join(container_path, "transform_str"),
        "np.reshape(obj, " + str(obj.shape) + ")")
    return h5_model, iterable_to_dict(obj)
 def set_simulated_target_data(self, target_data, statistical_model, **kwargs):
     """Build the target signals from simulated data (lfp or projected seeg).

     Side effects: sets self.signals_inds, self.data_type and
     self.observation_shape (and n_times/n_signals).
     :return: (signals, target_data) with target_data["signals"] filled in
     """
     self.signals_inds = range(self.number_of_regions)
     self.data_type = "lfp"
     signals = np.array([])
     if statistical_model.observation_model.find("seeg") >= 0:
         self.data_type = "seeg"
         self.signals_inds = range(self.gain_matrix.shape[0])
         if not(isequal_string(statistical_model.observation_model, "seeg_logpower")):
             # Look for a precomputed SEEG dataset inside target_data:
             signals = extract_dict_stringkeys(sort_dict(target_data), kwargs.get("seeg_dataset", "SEEG0"),
                                               modefun="find", two_way_search=True, break_after=1)
             if len(signals) > 0:
                 # NOTE: py2 idiom -- .values() returns a list here.
                 signals = signals.values()[0]
         if signals.size == 0:
             # Fall back to projecting source activity through the gain matrix:
             signals = np.array(target_data.get("lfp", target_data["x1"]))
             if isequal_string(statistical_model.observation_model, "seeg_logpower"):
                 signals = np.log(np.dot(self.gain_matrix[self.signals_inds], np.exp(signals.T))).T
             else:
                 signals = (np.dot(self.gain_matrix[self.signals_inds], signals.T)).T
     else:
         # if statistical_model.observation_expression == "x1z_offset":
         #     signals = ((target_data["x1"].T - np.expand_dims(self.x1EQ, 1)).T +
         #                (target_data["z"].T - np.expand_dims(self.zEQ, 1)).T) / 2.75
         #     # TODO: a better normalization
         # elif statistical_model.observation_expression == "x1_offset":
         #     # TODO: a better normalization
         #     signals = (target_data["x1"].T - np.expand_dims(self.x1EQ, 1)).T / 2.0
         # else: # statistical_models.observation_expression == "lfp"
         signals = np.array(target_data.get("lfp", target_data["x1"]))
     target_data["signals"] = np.array(signals)
     # Optional manual channel selection overrides the defaults above:
     manual_selection = kwargs.get("manual_selection", [])
     if len(manual_selection) > 0:
         self.signals_inds = manual_selection
         if len(self.signals_inds) < signals.shape[1]:
             signals = signals[:, self.signals_inds]
     self.observation_shape = signals.shape
     (self.n_times, self.n_signals) = self.observation_shape
     return signals, target_data
# Example 30
 def set_noise(self, sim_settings, **kwargs):
     """Configure simulation noise, honoring a user-supplied Noise instance."""
     # Check if the user provides a preconfigured noise instance to override
     noise = kwargs.get("noise", None)
     if not isinstance(noise, Noise):
         # No override: build a noise instance from the settings themselves.
         if isequal_string(sim_settings.noise_type, COLORED_NOISE):
             noise = self.generate_colored_noise(sim_settings.noise_intensity,
                                                 sim_settings.noise_ntau, **kwargs)
         else:
             noise = self.generate_white_noise(sim_settings.noise_intensity)
         sim_settings.noise_ntau = noise.ntau
         return noise, sim_settings
     # User override: copy its configuration back into the settings.
     self._check_noise_intesity_size(noise.nsig)
     sim_settings.noise_intensity = noise.nsig
     sim_settings.noise_type = WHITE_NOISE if noise.ntau == 0 else COLORED_NOISE
     sim_settings.noise_ntau = noise.ntau
     return noise, sim_settings