Exemplo n.º 1
0
def test_distributions_mgd_random_sample():
    """Seeded sampling from an MGD must reproduce the reference draw exactly."""
    mean = numpy.array([5., 1.])
    covariance = numpy.eye(2)
    dist = MultivariateGaussianDistribution(mean, covariance)

    expected = numpy.array([[5.441227, 0.66913],
                            [7.430771, 0.747908],
                            [5.10961, 2.582481]])

    # Fixed random_state must yield the known reference samples.
    assert_array_almost_equal(dist.sample(3, random_state=5), expected)
    # An unseeded draw of a different size must not match them.
    assert_raises(AssertionError, assert_array_almost_equal, dist.sample(5), expected)
Exemplo n.º 2
0
def test_distributions_mgd_random_sample():
	"""Sampling with a fixed seed is reproducible; without one it is not."""
	d = MultivariateGaussianDistribution(numpy.array([5., 1.]), numpy.eye(2))

	reference = numpy.array([
		[5.441227, 0.66913],
		[7.430771, 0.747908],
		[5.10961, 2.582481],
	])

	assert_array_almost_equal(d.sample(3, random_state=5), reference)
	assert_raises(AssertionError, assert_array_almost_equal, d.sample(5), reference)
Exemplo n.º 3
0
    def reset(self, n_comp=5):
        """Drop all accumulated samples/scores and reinitialize the model.

        The model is reset to a zero-mean Gaussian with unit variance per
        axis (wider variance, 5.0, on the third axis in the 3-D case).
        """
        self.n_comp = n_comp

        # Empty (0, dim) sample buffer.
        n_cols = 2 if self.dim == 2 else 3
        self.data = np.empty((0, n_cols))

        # L1-normalized weights over the (currently zero) samples.
        self.score = np.ones(np.shape(self.data)[0])
        self.score = self.score / np.linalg.norm(self.score, ord=1)

        # Broad diagonal prior for the Gaussian model.
        variances = [1.0, 1.0] if self.dim == 2 else [1.0, 1.0, 5.0]
        self.model = MultivariateGaussianDistribution(
            np.zeros(self.dim), np.diag(variances))
Exemplo n.º 4
0
 def get_insert_dist(self, n_features, initial_seq):
     """Build the emission distribution for insert states.

     Integer-valued observations get a uniform discrete distribution over
     the feature indices; continuous observations get a multivariate
     Gaussian fitted to the initial sequence.
     """
     first = initial_seq[0]
     # isinstance is checked first so np.issubdtype never sees a plain int.
     if isinstance(first, int) or np.issubdtype(first, np.integer):
         return DiscreteDistribution.from_samples(range(n_features))
     return MultivariateGaussianDistribution.from_samples(np.array(initial_seq))
Exemplo n.º 5
0
def randMVG(n=40):
    '''
        Return an n-dimensional multivariate Gaussian with standard-normal
        random means and an identity covariance matrix.
    '''
    return MultivariateGaussianDistribution(np.random.randn(n), np.eye(n, n))
Exemplo n.º 6
0
 def get_match_dist(self, index, n_features, initial_seq):
     """Build the emission distribution for the match state at ``index``.

     Integer observations get a uniform discrete distribution over feature
     indices.  Continuous observations get a Gaussian fit whose training set
     is the initial sequence with INITIAL_EMPHASIS copies of ``index``
     prepended, biasing the fit toward that position's value.
     """
     if isinstance(initial_seq[index], int):
         return DiscreteDistribution.from_samples(range(n_features))
         #return DiscreteDistribution.from_samples(np.concatenate(
         #    (np.repeat(index, INITIAL_EMPHASIS), range(n_features))))
     else:
         return MultivariateGaussianDistribution.from_samples(
             np.concatenate(
                 (np.tile(index,
                          (INITIAL_EMPHASIS, 1)), np.array(initial_seq))))
Exemplo n.º 7
0
    def reset(self, n_comp=5):
        """Clear stored data/scores and reinitialize the Gaussian model."""
        self.n_comp = n_comp
        # number of components

        # Empty (0, dim) data buffer.
        if self.dim == 2:
            self.data = np.array([[], []]).T
        else:
            self.data = np.array([[], [], []]).T
        self.score = np.ones(np.shape(self.data)[0])
        self.score = self.score / np.linalg.norm(self.score, ord=1)
        if self.dim == 2:
            self.model = MultivariateGaussianDistribution(
                np.zeros(self.dim), np.diag([1.0, 1.0]))
            # self.model is a class instance:
            # "class" :"Distribution"
            # "name" :"MultivariateGaussianDistribution",
        else:
            self.model = MultivariateGaussianDistribution(
                np.zeros(self.dim), np.diag([1.0, 1.0, 5.0]))
Exemplo n.º 8
0
def test_multivariate_log_probability():
	"""Vectorized log_probability must agree with per-row evaluation."""
	X = numpy.random.randn(100, 5)

	for dist in (
			MultivariateGaussianDistribution.from_samples(X),
			IndependentComponentsDistribution.from_samples(
				X, distributions=NormalDistribution)):
		batch = dist.log_probability(X)
		for i in range(100):
			assert_almost_equal(dist.log_probability(X[i]), batch[i])
Exemplo n.º 9
0
def test_multivariate_log_probability():
	"""Batch and single-sample log-probabilities must match row by row."""
	X = numpy.random.randn(100, 5)

	mgd = MultivariateGaussianDistribution.from_samples(X)
	batch = mgd.log_probability(X)
	for i, row in enumerate(X):
		assert_almost_equal(mgd.log_probability(row), batch[i])

	icd = IndependentComponentsDistribution.from_samples(
		X, distributions=NormalDistribution)
	batch = icd.log_probability(X)
	for i, row in enumerate(X):
		assert_almost_equal(icd.log_probability(row), batch[i])
Exemplo n.º 10
0
    # NOTE(review): fragment — skf, fl, fd and t are defined outside this view.
    # Builds a two-state (swing/stance) gait HMM per cross-validation fold.
    for train_index, test_index in skf:

        cl = HMM(name="Gait")
        distros = []
        hmm_states = []
        state_names = ['swing', 'stance']

        # Split feature rows by their binary label (1 -> positive/swing).
        positive_data = []
        negative_data = []
        for i in range(0, len(fl)):
            if fl[i] == 1:
                positive_data.append(fd[i])
            else:
                negative_data.append(fd[i])

        # One multivariate Gaussian emission model per state.
        # NOTE(review): `distros` collects State objects here, not the
        # distributions themselves — confirm that is intended.
        posdis = MGD.from_samples(positive_data)
        st = State(posdis, name='swing')
        distros.append(st)
        hmm_states.append(st)
        negdis = MGD.from_samples(negative_data)
        st2 = State(negdis, name='stance')
        distros.append(st2)
        hmm_states.append(st2)

        cl.add_states(hmm_states)
        cl.add_transition(cl.start, hmm_states[0], 0.5)
        cl.add_transition(cl.start, hmm_states[1], 0.5)

        # State-to-state transitions come from the precomputed matrix `t`.
        for i in range(0, 2):
            for j in range(0, 2):
                cl.add_transition(hmm_states[i], hmm_states[j], t[i][j])
    def build_dis_classifier(self):
        """Train and cross-validate one two-state (swing/stance) gait HMM per
        feature column, then pickle the classifiers and their statistics.

        NOTE(review): several lines below look buggy; flagged inline.
        """
        skf = StratifiedKFold(self.full_labels, n_folds=self.folds)
        classifier_array = []
        stats_array = []
        num_class = len(self.full_data[0])
        print (num_class)
        for cl in range(0, num_class):
            lel = -1  # fold-skip counter; negative disables skipping
            tp_total = 0.0
            tn_total = 0.0
            fp_total = 0.0
            fn_total = 0.0
            tests = 0
            for train_index, test_index in skf:
                if lel > 0:
                    lel -= 1
                    continue
                stats = []
                distros = []
                hmm_states = []
                state_names = ['swing', 'stance']
                swings = 0
                stances = 0
                # One Gaussian emission model per gait phase.
                for i in range(0, 2):
                    dis = MGD.from_samples(self.class_data[i])
                    st = State(dis, name=state_names[i])
                    distros.append(dis)
                    hmm_states.append(st)

                model = HMM()
                print(model.states)
                model.add_states(hmm_states)
                model.add_transition(model.start, hmm_states[0], 0.5)
                model.add_transition(model.start, hmm_states[1], 0.5)
                # Near-zero exit probabilities: end state is effectively unreachable.
                model.add_transition(hmm_states[1], model.end, 0.000000000000000001)
                model.add_transition(hmm_states[0], model.end, 0.000000000000000001)

                for i in range(0, 2):
                    for j in range(0, 2):
                        model.add_transition(hmm_states[i], hmm_states[j], self.t[i][j])
                model.bake()

                tp = 0.0
                tn = 0.0
                fp = 0.0
                fn = 0.0

                train_data = self.full_data[train_index, cl]
                train_class = self.full_labels[train_index, cl]
                test_data = self.full_data[test_index]
                test_class = self.full_labels[test_index]

                print(np.isfinite(train_data).all())
                print(np.isfinite(test_data).all())
                # NOTE(review): the four lines below call .any() on the data and
                # feed the resulting scalar to isnan/isinf; the intended form is
                # presumably np.isnan(train_data).any() etc. — confirm and fix.
                print(np.isnan(train_data.any()))
                print(np.isinf(train_data.any()))
                print(np.isnan(test_data.any()))
                print(np.isinf(test_data.any()))

                # NOTE(review): same inverted .any() pattern here, and the last
                # clause re-checks test_data instead of test_class.
                if (not np.isfinite(train_data.any())) or (not np.isfinite(test_data.any())) \
                        or (not np.isfinite(train_class.any())) or (not np.isfinite(test_data.any())):
                    rospy.logerr("NaN or Inf Detected")
                    exit()

                try:
                    rospy.logwarn("Training model #"+str(cl)+", fold #" + str(tests))
                    seq = np.array(train_data)
                    model.fit(seq, algorithm='baum-welch', verbose='True', n_jobs=8, max_iterations=150)

                except ValueError:
                    rospy.logwarn("Something went wrong, exiting")
                    # NOTE(review): rospy has no shutdown(); the API is
                    # rospy.signal_shutdown(reason) — this line would raise.
                    rospy.shutdown()
                    exit()

                # Optionally chunk the test stream into length-20 subsequences.
                seq = []
                if self.batch_test == 1:
                    s = 0
                    # for s in range(0, len(test_data)):
                    while s < len(test_data):
                        k = 0
                        seq_entry = []
                        while k < 20 and s < len(test_data):
                            seq_entry.append(test_data[s])
                            k += 1
                            s += 1
                        seq.append(seq_entry)
                else:
                    seq = np.array(test_data)

                # NOTE(review): comparing a numpy array to [] is elementwise /
                # ambiguous; prefer len(...) == 0 checks here.
                if seq == [] or test_data == []:
                    rospy.logerr("Empty testing sequence")
                    continue

                # Viterbi path includes the start and end states, hence the -2.
                log, path = model.viterbi(test_data)
                if (len(path) - 2) != len(test_data):
                    rospy.logerr(len(path))
                    rospy.logerr(path[0][1].name)
                    rospy.logerr(path[len(path) - 1][1].name)
                    rospy.logerr(len(test_data))
                    exit()

                tests += 1
                # Per-sample confusion counts; 'stance' is the positive class.
                for i in range(0, len(path) - 2):
                    if path[i + 1][1].name != 'Gait-start' and path[i + 1][1].name != 'Gait-end':
                        if path[i + 1][1].name == 'swing':  # prediction is 0
                            swings += 1
                            if test_class[i] == 0:  # class is 0
                                tn += 1.0
                            elif test_class[i] == 1:
                                fn += 1.0  # class is 1

                        elif path[i + 1][1].name == 'stance':  # prediction is 1
                            stances += 1
                            if test_class[i] == 1:  # class is 1
                                tp += 1.0
                            elif test_class[i] == 0:  # class is 0
                                fp += 1.0
                print (swings)
                print (stances)
                if (tp + fn) != 0.0:
                    rospy.logwarn("Sensitivity : " + str(tp / (tp + fn)))
                    # sensitivity = tp / (tp + fn)
                else:
                    rospy.logwarn("Sensitivity : 0.0")
                    # sensitivity = 0.0
                if (tn + fp) != 0.0:
                    rospy.logwarn("Specificity : " + str(tn / (tn + fp)))
                    # specificity = tn_total / (tn_total + fp_total)
                else:
                    rospy.logwarn("Specificity : 0.0")
                    # specificity = 0.0
                if (tn + tp + fn + fp) != 0.0:
                    rospy.logwarn("Accuracy : " + str((tn + tp) / (tn + tp + fn + fp)))
                    # accuracy = (tn + tp) / (tn + tp + fn + fp)
                else:
                    rospy.logwarn("Accuracy : 0.0")
                    # accuracy = 0.0

                tn_total += tn
                tp_total += tp
                fn_total += fn
                fp_total += fp

            # Convert fold totals into per-fold means.
            tp_total /= tests
            tn_total /= tests
            fp_total /= tests
            fn_total /= tests
            rospy.logerr("TP :" + str(tp_total))
            rospy.logerr("TN :" + str(tn_total))
            rospy.logerr("FP :" + str(fp_total))
            rospy.logerr("FN :" + str(fn_total))
            rospy.logerr("Tests :" + str(tests))
            if (tp_total + fn_total) != 0.0:
                sensitivity = tp_total / (tp_total + fn_total)
            else:
                sensitivity = 0.0
            if (tn_total + fp_total) != 0.0:
                specificity = tn_total / (tn_total + fp_total)
            else:
                specificity = 0.0
            if (tn_total + tp_total + fn_total + fp_total) != 0.0:
                accuracy = (tn_total + tp_total) / (tn_total + tp_total + fn_total + fp_total)
            else:
                accuracy = 0.0

            rospy.logwarn("----------------------------------------------------------")
            rospy.logerr("Total accuracy: " + str(accuracy))
            rospy.logerr("Total sensitivity: " + str(sensitivity))
            rospy.logerr("Total specificity: " + str(specificity))
            # NOTE(review): fn_total appears twice and tp_total is missing from
            # this stats row — looks like a copy/paste slip; confirm intent.
            stats = [tn_total * tests, fn_total * tests, fp_total * tests, fn_total * tests, tests,
                     accuracy, sensitivity, specificity]
            rospy.logwarn("-------------------DONE-------------------------")
            # NOTE(review): only the last fold's model is kept per column.
            classifier_array.append(model)
            stats_array.append(stats)

        # `datafile` is a path prefix defined outside this view.
        pickle.dump(classifier_array, open(datafile + "distributed_classifiers.p", 'wb'))
        pickle.dump(stats_array, open(datafile + "distributed_stats.p", 'wb'))
        scio.savemat(datafile + "distributed_stats.mat", {'stats': stats_array})
Exemplo n.º 12
0
for i in range(0, len(full_labels)):
    if prev == -1:
        prev = full_labels[i]
    t[prev][full_labels[i]] += 1.0
    prev = full_labels[i]
    sum_ += 1.0

t = t / sum
print t

# One Gaussian emission model per gait phase, wrapped in an HMM State.
# (Removed a redundant second `hmm_states = []` initialization that
# immediately re-cleared the list.)
distros = []
hmm_states = []
state_names = ['swing', 'stance']
for i in range(0, 2):
    dis = MGD.from_samples(class_data[i])
    st = State(dis, name=state_names[i])
    distros.append(dis)
    hmm_states.append(st)

# Stratified folds over the full label sequence for cross-validation.
skf = StratifiedKFold(full_labels, n_folds=folds)

# NOTE(review): this fragment appears truncated by extraction — the loop body
# ends mid-construction (`st` is never added to `hmm_states` in view).
for train_index, test_index in skf:

    model = HMM(name="Gait")
    hmm_states = []

    # Rebuild per-state emission models for this fold.
    for i in range(0, 2):
        # dis = MGD(np.array(class_means[i]).flatten(), np.array(class_cov[i]))
        dis = MGD.from_samples(class_data[i])
        st = State(dis, name=state_names[i])
 def __init__(self, n_trials=3, leave_one_out=1):
     """Variable initialization.

     Sets up feature/window buffers, the decoding-algorithm dispatch, the
     4-state gait HMM structures and ROS plumbing, then loads the patient's
     trained HMM from disk or trains a new one from local data.
     """
     self.patient = rospy.get_param("gait_phase_det/patient")
     self.verbose = rospy.get_param("gait_phase_det/verbose")
     self.n_trials = n_trials
     self.n_features = 2      # Raw data and 1st-derivative
     self.leave_one_out = leave_one_out
     self.rec_data = 0.0       # Number of recorded IMU data
     self.proc_data = 0.0      # Number of extracted features
     self.win_size = 3
     self.raw_win = [None] * self.win_size
     # self.fder_win = [0] * self.win_size
     self.ff = [[] for x in range(self.n_trials)]      # Training and test dataset
     self.labels = [[] for x in range(self.n_trials)]  # Reference labels from local data
     self.first_eval = True
     self.model_loaded = False
     algorithm = rospy.get_param("gait_phase_det/algorithm")
     rospy.loginfo('Decoding algorithm: {}'.format(algorithm))
     if algorithm not in DECODER_ALGORITHMS:
         raise ValueError("Unknown decoder {!r}".format(algorithm))
     # Dispatch tables: decoding routine and IMU callback per algorithm.
     self.decode = {
         "fov": self._run_fov,
         "bvsw": self._run_bvsw
     }[algorithm]
     self.imu_callback = {
         "fov": self._fov_callback,
         "bvsw": self._bvsw_callback
     }[algorithm]
     """HMM variables"""
     ''' State list:
         s1: Heel Strike (HS)
         s2: Flat Foot   (FF)
         s3: Heel Off    (HO)
         s4: Swing Phase (SP)'''
     self.model_name = "Gait"
     self.has_model = False
     self.must_train = False
     self.states = ['s1', 's2', 's3', 's4']
     self.n_states = len(self.states)
     self.state2phase = {"s1": "hs", "s2": "ff", "s3": "ho", "s4": "sp"}
     self.train_data = []
     self.mgds = {}
     self.dis_means = [[] for x in range(self.n_states)]
     self.dis_covars = [[] for x in range(self.n_states)]
     # Uniform initial state distribution.
     self.start_prob = [1.0/self.n_states]*self.n_states
     self.trans_mat = np.array([(0.9, 0.1, 0, 0), (0, 0.9, 0.1, 0), (0, 0, 0.9, 0.1), (0.1, 0, 0, 0.9)])    # Left-right model
     # self.trans_mat = np.array([0.8, 0.1, 0, 0.1], [0.1, 0.8, 0.1, 0], [0, 0.1, 0.8, 0.1], [0.1, 0, 0.1, 0.8])    # Left-right-left model
     self.log_startprob = []
     self.log_transmat = np.empty((self.n_states, self.n_states))
     self.max_win_len = 11       # ms (120 ms: mean IC duration for healthy subjects walking at comfortable speed)
     # Bounded-variable sliding-window Viterbi buffers.
     self.viterbi_path = np.empty((self.max_win_len+1, self.n_states))
     self.backtrack = [[None for x in range(self.n_states)] for y in range(self.max_win_len+1)]
     self.global_path = []
     self.work_buffer = np.empty(self.n_states)
     self.boundary = 1
     self.buff_len = 0
     # Map state name -> index for matrix lookups.
     self.states_pos = {}
     for i in range(len(self.states)):
         self.states_pos[self.states[i]] = i
     self.last_state = -1
     self.curr_state = -1
     self.conv_point = 0
     self.conv_found = False
     self.smp_freq = 100.0   # Hz
     self.fp_thresh = 1/self.smp_freq*4    # Threshold corresponds to 8 samples
     self.time_passed = 0.0
     self.obs = [[None for x in range(self.n_features)] for y in range(self.max_win_len)]
     self.model = HMM(name=self.model_name)
     """ROS init"""
     rospy.init_node('real_time_HMM', anonymous=True)
     rospack = rospkg.RosPack()
     self.packpath = rospack.get_path('hmm_gait_phase_classifier')
     self.init_subs()
     self.init_pubs()
     """HMM-training (if no model exists)"""
     try:
         '''HMM-model loading'''
         with open(self.packpath+'/log/HMM_models/'+self.patient+'.txt') as infile:
             json_model = json.load(infile)
             self.model = HMM.from_json(json_model)
             rospy.logwarn(self.patient + "'s HMM model was loaded.")
             self.has_model = True
     except IOError:
         # No serialized model: decide which local data source to train from.
         if os.path.isfile(self.packpath + "/log/mat_files/" + self.patient + "_proc_data1.mat"):
             """Training with data collected with FSR-based reference system"""
             self.data_ext = 'mat'
             self.must_train = True
         elif os.path.isfile(self.packpath + "/log/IMU_data/" + self.patient + "_labels.csv"):
             """Training with data collected with offline threshold-based gait phase detection method"""
             self.data_ext = 'csv'
             self.must_train = True
         else:
             rospy.logerr("Please collect data for training ({})!".format(self.patient))
     if self.must_train:
         rospy.logwarn("HMM model not trained yet for {}!".format(self.patient))
         rospy.logwarn("Training HMM with local data...")
         self.load_data()
         self.init_hmm()
         self.train_hmm()
         self.has_model = True
     if self.has_model:
         try:
             '''MGDs loading if model exists'''
             for st in self.states:
                 with open(self.packpath+'/log/HMM_models/'+self.patient+'_'+self.state2phase[st]+'.txt') as infile:
                     yaml_dis = yaml.safe_load(infile)
                     dis = MGD.from_yaml(yaml_dis)
                     self.mgds[st] = dis
                     rospy.logwarn(self.patient +"'s " + self.state2phase[st] + " MGC was loaded.")
                     '''Loading means and covariance matrix'''
                     self.dis_means[self.states_pos[st]] = self.mgds[st].parameters[0]
                     self.dis_covars[self.states_pos[st]] = self.mgds[st].parameters[1]
         except yaml.YAMLError as exc:
             # NOTE(review): str + exception raises TypeError; use str(exc).
             rospy.logwarn("Not able to load distributions: " + exc)
         """Transition and initial (log) probabilities matrices upon training"""
         trans_mat = self.model.dense_transition_matrix()[:self.n_states,:self.n_states]
         # NOTE: Python 2 print statement — this file/module targets Python 2.
         if self.verbose: print '**TRANSITION MATRIX (post-training)**\n'+ str(trans_mat)
         for i in range(self.n_states):
             self.log_startprob.append(ln(self.start_prob[i]))
             for j in range(self.n_states):
                 self.log_transmat[i,j] = ln(trans_mat[i][j])
         self.model_loaded = True
Exemplo n.º 14
0
    def sample(self, n=10):
        """Draw roughly n points: 80% from the current model plus two 10%
        perturbation clusters centered on the model draw's mean.

        Falls back to a single broad zero-mean Gaussian if sampling fails.
        """
        def draw(mean, variances, count):
            # One auxiliary Gaussian draw, stacked into an array.
            dist = MultivariateGaussianDistribution(mean, np.diag(variances))
            return np.stack(dist.sample(count))

        n_main = int(np.round(0.8 * n))
        n_side = int(np.round(0.1 * n))
        try:
            X1 = np.stack(self.model.sample(n_main))
            center = np.mean(X1, axis=0)
            if self.dim == 2:
                X2 = draw(center, [1.0, 1.0], n_side)
                X3 = draw(center, [1e-3, 1e-3], n_side)
            else:
                X2 = draw(center, [1.0, 1.0, 1e-3], n_side)
                X3 = draw(center, [1e-3, 1e-3, 10.0], n_side)
            X = np.concatenate((X1, X2, X3))
        except ValueError:
            print("exception caught on sampling")
            variances = [1.0, 1.0] if self.dim == 2 else [1.0, 1.0, 5.0]
            gmm = MultivariateGaussianDistribution(
                np.zeros(self.dim), np.diag(variances))
            X = gmm.sample(int(n))
        return X
Exemplo n.º 15
0
# looks like 3 is better than 4 components

# TODO kmeans + 4-5 components!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

# Partition X into three clusters: by k-means, or by fixed index ranges.
if kmeans:
    km = KMeans(n_clusters=3)
    y = km.fit_predict(X)
    X_1 = X[y == 0]
    X_2 = X[y == 1]
    X_3 = X[y == 2]

else:
    X_1 = X[2000:4000]
    X_2 = X[400:800]
    X_3 = X[7000:8000]
# One Gaussian emission model per cluster -> one HMM state each.
a = MultivariateGaussianDistribution.from_samples(X_1)
b = MultivariateGaussianDistribution.from_samples(X_2)
c = MultivariateGaussianDistribution.from_samples(X_3)
s1 = State(a, name="M1")
s2 = State(b, name="M2")
s3 = State(c, name="M3")

hmm = HiddenMarkovModel()
hmm.add_states(s1, s2, s3)
# Near-uniform entry probabilities (sum to 1.0).
hmm.add_transition(hmm.start, s1, 0.34)
hmm.add_transition(hmm.start, s3, 0.33)
hmm.add_transition(hmm.start, s2, 0.33)

# Sticky self-transition for s1; the s2/s3 rows are beyond this fragment.
hmm.add_transition(s1, s1, 0.9)
hmm.add_transition(s1, s2, 0.05)
hmm.add_transition(s1, s3, 0.05)
Exemplo n.º 16
0
    def _initDists(self, X, distribution=MultivariateGaussianDistribution):
        """Build the initial per-state emission distributions for the HMM.

        The `technique` constant below selects the strategy; only the
        selected branch runs.  Everything after the first `return dists`
        is author-marked "not completed" and is unreachable.
        """
        technique = "R_MV-GMM"  # mixture of multivariate gaussain distribution
        if (technique == "GMM"):
            # gaussian mixture model
            #// uvgd = NormalDistribution.from_samples(X)
            #// gmm = GeneralMixtureModel([uvgd.copy() for _ in range(self.nmix)])
            gmm = GeneralMixtureModel.from_samples(
                distributions=[NormalDistribution for _ in range(self.nmix)],
                X=X)
            dists = [gmm.copy() for _ in range(self.statesNumber)]
        elif (technique == "MV-GMM"):
            # multivariate gaussian mixture model
            #// mvgd = MultivariateGaussianDistribution.from_samples(X)
            #// gmm = GeneralMixtureModel([mvgd.copy() for _ in range(self.nmix)])
            gmm = GeneralMixtureModel.from_samples(distributions=[
                MultivariateGaussianDistribution for _ in range(self.nmix)
            ],
                                                   X=X,
                                                   n_components=3)
            dists = [gmm.copy() for _ in range(self.statesNumber)]
        elif (technique == "MVG"):
            # One Gaussian per k-means cluster.
            # NOTE(review): `y` from _initkmeans is not captured here, yet it is
            # used in the comprehension below — confirm against the helper.
            self._initkmeans(X=X, numClasses=self.statesNumber)
            dists = [
                MultivariateGaussianDistribution.from_samples(X=X[y == i])
                for i in range(self.statesNumber)
            ]
        elif (technique == "R_GMM"):
            # random gaussian mixture model
            randNormal = lambda: NormalDistribution(np.random.randint(1, 10), 1
                                                    )
            randGMM = lambda: GeneralMixtureModel(
                [randNormal() for _ in range(self.nmix)])
            dists = [randGMM() for _ in range(self.statesNumber)]
        elif (technique == "R_MV-GMM"):
            # random multivariate gaussian mixture model
            randGMM = lambda: GeneralMixtureModel(
                [randMVG() for _ in range(self.nmix)])
            dists = [randGMM() for _ in range(self.statesNumber)]
        return dists

        # --- Unreachable from here on (kept by the author as WIP notes). ---
        #* not completed:
        #! GMM-HMM-k
        y = self._initkmeans(X, self.statesNumber)
        # list(map(print, y))
        return [
            GeneralMixtureModel.from_samples(distribution,
                                             X=X[y == i],
                                             n_components=self.nmix)
            for i in range(self.statesNumber)
        ]

        #! Kmeans init
        if not isinstance(X, BaseGenerator):
            data_generator = SequenceGenerator(X, None, None)
        else:
            data_generator = X

        initialization_batch_size = len(data_generator)

        X_ = []
        data = data_generator.batches()
        for i in range(initialization_batch_size):
            batch = next(data)
            X_.extend(batch[0])

        X_concat = np.concatenate(X_)
        if X_concat.ndim == 1:
            X_concat = X_concat.reshape(X_concat.shape[0], 1)
        n, d = X_concat.shape
        clf = Kmeans(self.statesNumber, init="kmeans++",
                     n_init=1)  # init should be one of
        clf.fit(X_concat, max_iterations=None, batches_per_epoch=None)
        y = clf.predict(X_concat)
        if callable(distribution):
            if d == 1:
                dists = [
                    distribution.from_samples(X_concat[y == i][:, 0])
                    for i in range(self.statesNumber)
                ]
            elif distribution.blank().d > 1:
                dists = [
                    distribution.from_samples(X_concat[y == i])
                    for i in range(self.statesNumber)
                ]
            else:
                print("error")
        return dists
Exemplo n.º 17
0
    def __init__(self, dim , seed=None): #
        """Build two K-component Gaussian mixture models over `dim` dimensions.

        Both mixtures share the same component weights and the same means
        (means are drawn once for i==0 and reused for i==1); covariances are
        drawn per component from an inverse-Wishart at three scales.  The
        global NumPy random state is saved and restored around the draws.
        """
        K = 9
        theta0=[.5,.5]
        beta=np.ones(K)
        Psi = .1*np.diag(np.ones(dim))
        #mu0= np.zeros(dim)
        #lambd=.1,
        nu=dim+2.

        # Seed reproducibly without disturbing the caller's random state.
        rstate = np.random.get_state()
        np.random.seed(seed)

        unif_dist = UniformDistribution(0.,1.)

        self.theta0 = theta0
        beta_dist = DirichletDistribution(beta)

        self.dim = Psi.shape[0]

        self.dists = []

        #same weights for both
        weights = beta_dist.sample()

        mus = []
        for i,_ in enumerate(theta0):

            #weights = beta_dist.sample()
            #print(weights)
            mix = []
            for j,_ in enumerate(weights):

                # Cycle through three covariance scales by component index.
                if j%3==0:
                    Sigma = invwishart.rvs(df=nu, scale=Psi)

                elif j%3==1:
                    Sigma = invwishart.rvs(df=nu, scale=.01*Psi)
                else:
                    Sigma = invwishart.rvs(df=nu, scale=.0001*Psi)

                if i==0:
                    # First mixture draws fresh means and records them...
                    mu = unif_dist.sample(self.dim)
                    #mu =MultivariateGaussianDistribution(mu0,Sigma/lambd).sample()
                    mus.append(mu)
                else:
                    # ...second mixture reuses them component-for-component.
                    mu = mus[j]

                mix.append( MultivariateGaussianDistribution(mu, Sigma) )

            model = GeneralMixtureModel(mix, weights=weights)
            self.dists.append(model)

        for d in self.dists:
            print(d)

        # Restore the caller's random state.
        self.rstate = np.random.get_state()
        np.random.set_state(rstate)
Exemplo n.º 18
0
    X_333 = X[y == 8]

# NOTE(review): fragment — the matching `if` branch for this `else` is above
# the visible chunk (presumably an `if kmeans:` cluster split, cf. the
# three-cluster variant elsewhere in this collection).
else:
    # Fixed index ranges: three sub-segments per logical cluster.
    X_1 = X[2000:3000]
    X_11 = X[3000:4000]
    X_111 = X[4000:4500]

    X_2 = X[400:500]
    X_22 = X[500:600]
    X_222 = X[600:800]

    X_3 = X[7000:7300]
    X_33 = X[7300:7600]
    X_333 = X[7600:8000]

# One Gaussian emission model per sub-segment (nine in total).
a = MultivariateGaussianDistribution.from_samples(X_1)
aa = MultivariateGaussianDistribution.from_samples(X_11)
aaa = MultivariateGaussianDistribution.from_samples(X_111)

b = MultivariateGaussianDistribution.from_samples(X_2)
bb = MultivariateGaussianDistribution.from_samples(X_22)
bbb = MultivariateGaussianDistribution.from_samples(X_222)

c = MultivariateGaussianDistribution.from_samples(X_3)
cc = MultivariateGaussianDistribution.from_samples(X_33)
ccc = MultivariateGaussianDistribution.from_samples(X_333)

# HMM states (fragment ends before the remaining six states are wrapped).
s1 = State(a, name="M1")
s11 = State(aa, name="M11")
s111 = State(aaa, name="M111")
Exemplo n.º 19
0
    def sample(self, n=10):
        """Draw ~n points: 80% from the model plus two small perturbation
        clusters; fall back to a broad zero-mean Gaussian on failure."""
        try:
            X1 = np.stack(self.model.sample(int(np.round(0.8 * n))))
            # np.round rounds to the nearest integer
            # sample ~0.8*n points from the multivariate Gaussian model
            if self.dim == 2:
                mean = np.mean(X1, axis=0)
                # compute the mean from X1's sampled points
                std = np.diag([1.0, 1.0])
                gmm = MultivariateGaussianDistribution(mean, std)
                # build a new multivariate Gaussian distribution
                X2 = np.stack(gmm.sample(int(np.round(0.1 * n))))
                # sample ~0.1*n points from it

                mean = np.mean(X1, axis=0)
                std = np.diag([1e-3, 1e-3])  # the variances differ here
                gmm = MultivariateGaussianDistribution(mean, std)
                X3 = np.stack(gmm.sample(int(np.round(0.1 * n))))
                # and sample another ~0.1*n points

            else:
                mean = np.mean(X1, axis=0)
                std = np.diag([1.0, 1.0, 1e-3])
                gmm = MultivariateGaussianDistribution(mean, std)
                X2 = np.stack(gmm.sample(int(np.round(0.1 * n))))

                mean = np.mean(X1, axis=0)
                std = np.diag([1e-3, 1e-3, 10.0])
                gmm = MultivariateGaussianDistribution(mean, std)
                X3 = np.stack(gmm.sample(int(np.round(0.1 * n))))

            X = np.concatenate((X1, X2, X3))  # multi-scale sampling?

        except ValueError:
            print("exception caught on sampling")
            if self.dim == 2:
                mean = np.zeros(self.dim)
                std = np.diag([1.0, 1.0])
                gmm = MultivariateGaussianDistribution(mean, std)
                X = gmm.sample(int(n))
            else:
                mean = np.zeros(self.dim)
                std = np.diag([1.0, 1.0, 5.0])
                gmm = MultivariateGaussianDistribution(mean, std)
                X = gmm.sample(int(n))
        return X