    def compute_probability_vector(self, bolt_obj):
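        """Buffered MKL variant: queue incoming bolt objects until a full
        exploration run is available, then extract static and HMM-based dynamic
        features for each motion and classify every adjective with a merged
        static/dynamic (MKL) kernel."""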

        # Start the motion timer when the first object of a run arrives
        if len(self.bolt_object_list) == 0:
            self.movement_time = time.time()

        # First object - initialize variables to store
        # Also clears out the vectors for a new run
        if bolt_obj.state == bolt_obj.TAP:
            # Store results as they come in
            self.adjective_vectors_static = dict() 
            self.adjective_vectors_dynamic = dict() 
            self.all_motion_results = dict()
            self.mkl_results = dict()
        
        # Store dictionary of strings
        self.state_string = {bolt_obj.DISABLED:'disabled',
                    bolt_obj.THERMAL_HOLD:'thermal_hold',
                    bolt_obj.SLIDE:'slide',
                    bolt_obj.SQUEEZE:'squeeze',
                    bolt_obj.TAP:'tap',
                    bolt_obj.DONE:'done',
                    bolt_obj.SLIDE_FAST:'slide_fast',
                    bolt_obj.CENTER_GRIPPER:'center_gripper'
                    }   
        
        # store dictionary for detailed states
        self.detailed_states = {bolt_obj.DISABLED:'MOVE_ARM_START_POSITION',
                                bolt_obj.SQUEEZE:'SQUEEZE_SET_PRESSURE_SLOW',
                                bolt_obj.THERMAL_HOLD:'HOLD_FOR_10_SECONDS',
                                bolt_obj.SLIDE:'SLIDE_5CM',
                                bolt_obj.SLIDE_FAST:'MOVE_DOWN_5CM'
                                }

        # Buffer incoming objects until a full run is available (the five
        # buffered here plus the current one), then process the batch below
        if len(self.bolt_object_list) < 5:
            print len(self.bolt_object_list)
            self.bolt_object_list.append(bolt_obj)
            return
        else:
            self.bolt_object_list.append(bolt_obj)
            motion_end_time = time.time()
            print("Elapsed time was %g seconds" % (motion_end_time - self.movement_time))
            start_time = time.time()

            for bolt_obj_v in self.bolt_object_list:

                # Get the current motion 
                current_motion = self.state_string[bolt_obj_v.state] 
            
                # Check if state passed in should be processed
                if bolt_obj_v.state not in self.detailed_states:
                    print "Skipping state: %s" % current_motion 
                    continue 
                else:
                    # Get detailed state if exists
                    current_detailed_state = self.detailed_states[bolt_obj_v.state] 

                    # Check if the state is the disabled state
                    if bolt_obj_v.state == bolt_obj_v.DISABLED:
                        self.norm_bolt_obj = upenn_features.pull_detailed_state(bolt_obj_v,current_detailed_state)
                        continue 
                    else:
                        cur_bolt_object = upenn_features.pull_detailed_state(bolt_obj_v, current_detailed_state)

                    # Check that the normalization object has been created
                    if not self.norm_bolt_obj:
                        print "Warning: there is no normalization data"

                    # Start storing the bolt objects

                    # Build the static features 
                    static_feature_object, self.static_features_array = upenn_features.extract_static_features(cur_bolt_object, self.norm_bolt_obj)
                    static_feats = upenn_features.createFeatureVector(static_feature_object)
               
                    # Store all feature vectors into one large vector array
                    # Technically we don't need to store the static 
                    # features by adjective - but it's cleaner this way
                    for classifier in self.all_classifiers:

                        adj = classifier['adjective']

                        # Need to add .pkl to the name because the HMM chain
                        # dictionary uses 'adjective.pkl' as its key
                        hmm_adj_name = '.'.join((adj, 'pkl'))
                    
                        # Initialize the per-adjective lists on first use
                        if adj not in self.adjective_vectors_static:
                            self.adjective_vectors_static[adj] = list()
                            self.adjective_vectors_dynamic[adj] = list()
              
                        # Pull out chain associated with adjective
                        # ordering of sensors - [pac, pdc, electrodes, tac]
                        sensor_hmm = ['pac', 'pdc', 'electrodes', 'tac']
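                        # Each adjective-specific HMM chain scores its sensor
                        # channel (presumably a log-likelihood), so every motion
                        # contributes a 4-element dynamic feature vector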
                        hmm_features_phase = []
                        for sensor in sensor_hmm:
                            hmm_chain = self.hmm_chains[hmm_adj_name].chains[current_detailed_state][sensor]
                            hmm_data = cur_bolt_object.hmm[sensor]

                            # Fill in dynamic features
                            hmm_features_phase.append(hmm_chain.score(hmm_data))
            
                        # Store the feature vector by adjective away
                        self.adjective_vectors_static[adj].append(static_feats)
                        self.adjective_vectors_dynamic[adj].append(hmm_features_phase)

            # Check if all motions have been performed
            # If so - feed into classifier
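            # ("4" here is the number of motions with detailed states other
            # than DISABLED: squeeze, thermal hold, slide and slide_fast;
            # DISABLED only supplies the normalization object)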
            if len(self.adjective_vectors_dynamic[adj]) == 4:
                print 'All motions received! Computing adjective scores'
                for classifier in self.all_classifiers:
          
                    # Pull out which adjective we are working on        
                    adj = classifier['adjective'] 
            
                    # Load training vectors for kernel creation
                    train_dynamic = utilities.get_all_train_test_features(adj, self.training_dynamic, train=True)
                    train_static = utilities.get_all_train_test_features(adj, self.training_static, train=True)
                    
                    # Scale the training data
                    d_scaler = preprocessing.StandardScaler().fit(train_dynamic[0])
                    s_scaler = preprocessing.StandardScaler().fit(train_static[0])
                    train_dynamic_scaled = d_scaler.transform(train_dynamic[0])
                    train_static_scaled = s_scaler.transform(train_static[0]) 
 
                    # Pull out the feature vectors for static/dynamic
                    all_static_feats = np.hstack(self.adjective_vectors_static[adj])
                    all_dynamic_feats = np.hstack(self.adjective_vectors_dynamic[adj])
               
                    # Normalize the features using scaler
                    all_static_feats_scaled = classifier['static_scaler'].transform(all_static_feats)
                    all_dynamic_feats_scaled = classifier['dynamic_scaler'].transform(all_dynamic_feats)

                    # Create kernels for both static and dynamic
                    static_kernel = self.linear_kernel_test(all_static_feats_scaled, train_static_scaled, 1)
                    dynamic_kernel = self.linear_kernel_test(all_dynamic_feats_scaled, train_dynamic_scaled, 1)
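                    # linear_kernel_test presumably builds the test-vs-train
                    # Gram matrix from the scaled features, which is the input
                    # a classifier trained on a precomputed kernel expects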

                    # Merge the two kernels
                    alpha = classifier['alpha']
                    merged_kernel = (alpha)*static_kernel + (1-alpha)*dynamic_kernel
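                    # Multiple kernel learning: a convex combination of the
                    # static and dynamic kernels weighted by the stored
                    # per-adjective alpha (alpha = 1 would use static only)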

                    # Predict adjective with computed kernel
                    clf = classifier['classifier']
                    self.mkl_results[adj] = clf.predict(merged_kernel)

                # Store off the adjectives that returned true
                adjectives_found = []
                for adj in self.mkl_results:
                    if self.mkl_results[adj] == 1:
                        adjectives_found.append(Adj(adj))

                hardcode_adj = [Adj('hard'), Adj('metallic'), Adj('cool')]
                publish_string = AdjList()
                # Publish a hard-coded adjective list instead of the classifier
                # output (demo/debug override)
                #publish_string = adjectives_found
                publish_string = hardcode_adj
                # Print and publish results!
                print "Results from MKL classification"
                #print self.mkl_results
                print str(adjectives_found) 
                end_time = time.time()
                print("Elapsed time was %g seconds" % (end_time - start_time))
                self.adjectives_pub.publish(publish_string)
    def compute_probability_vector(self, bolt_obj):
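        """Streaming MKL variant: featurize each bolt object as it arrives,
        accumulate static and HMM-based dynamic features per adjective, and run
        the merged-kernel classifiers once all four motions have been seen."""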

        # First object - initialize variables to store
        # Also clears out the vectors for a new run
        if bolt_obj.state == bolt_obj.TAP:
            # Store results as they come in
            self.adjective_vectors_static = dict()
            self.adjective_vectors_dynamic = dict()
            self.all_motion_results = dict()
            self.mkl_results = dict()

        # Store dictionary of strings
        self.state_string = {
            bolt_obj.DISABLED: 'disabled',
            bolt_obj.THERMAL_HOLD: 'thermal_hold',
            bolt_obj.SLIDE: 'slide',
            bolt_obj.SQUEEZE: 'squeeze',
            bolt_obj.TAP: 'tap',
            bolt_obj.DONE: 'done',
            bolt_obj.SLIDE_FAST: 'slide_fast',
            bolt_obj.CENTER_GRIPPER: 'center_gripper'
        }

        # store dictionary for detailed states
        self.detailed_states = {
            bolt_obj.DISABLED: 'MOVE_ARM_START_POSITION',
            bolt_obj.SQUEEZE: 'SQUEEZE_SET_PRESSURE_SLOW',
            bolt_obj.THERMAL_HOLD: 'HOLD_FOR_10_SECONDS',
            bolt_obj.SLIDE: 'SLIDE_5CM',
            bolt_obj.SLIDE_FAST: 'MOVE_DOWN_5CM'
        }
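        # Only states listed above are featurized; tap, done and center_gripper
        # are ignored, and DISABLED just supplies the normalization object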

        # Get the current motion
        current_motion = self.state_string[bolt_obj.state]

        # Check if state passed in should be processed
        if bolt_obj.state not in self.detailed_states:
            return
        else:
            # Get detailed state if exists
            current_detailed_state = self.detailed_states[bolt_obj.state]

            # Check if the state is the disabled state
            if bolt_obj.state == bolt_obj.DISABLED:
                self.norm_bolt_obj = upenn_features.pull_detailed_state(
                    bolt_obj, current_detailed_state)
                return
            else:
                self.bolt_object = upenn_features.pull_detailed_state(
                    bolt_obj, current_detailed_state)

            # Check that the normalization object has been created
            if not self.norm_bolt_obj:
                print "Warning: there is no normalization data"

            # Build the static features
            static_feature_object, self.static_features_array = upenn_features.extract_static_features(
                self.bolt_object, self.norm_bolt_obj)
            static_feats = upenn_features.createFeatureVector(
                static_feature_object)

            # Store all feature vectors into one large vector array
            # Technically we don't need to store the static
            # features by adjective - but it's cleaner this way
            for classifier in self.all_classifiers:

                adj = classifier['adjective']

                # Need to add .pkl to the name because the HMM chain
                # dictionary uses 'adjective.pkl' as its key
                hmm_adj_name = '.'.join((adj, 'pkl'))

                # Initialize the per-adjective lists on first use
                if adj not in self.adjective_vectors_static:
                    self.adjective_vectors_static[adj] = list()
                    self.adjective_vectors_dynamic[adj] = list()

                # Pull out chain associated with adjective
                # ordering of sensors - [pac, pdc, electrodes, tac]
                sensor_hmm = ['pac', 'pdc', 'electrodes', 'tac']
                hmm_features_phase = []
                for sensor in sensor_hmm:
                    hmm_chain = self.hmm_chains[hmm_adj_name].chains[
                        current_detailed_state][sensor]
                    hmm_data = self.bolt_object.hmm[sensor]

                    # Fill in dynamic features
                    hmm_features_phase.append(hmm_chain.score(hmm_data))

                # Store the feature vector by adjective away
                self.adjective_vectors_static[adj].append(static_feats)
                self.adjective_vectors_dynamic[adj].append(hmm_features_phase)

            # Check if all motions have been performed
            # If so - feed into classifier
            if len(self.adjective_vectors_dynamic[adj]) == 4:
                print 'All motions received! Computing adjective scores'
                for classifier in self.all_classifiers:

                    # Pull out which adjective we are working on
                    adj = classifier['adjective']

                    # Load training vectors for kernel creation
                    train_dynamic = utilities.get_all_train_test_features(
                        adj, self.training_dynamic, train=True)
                    train_static = utilities.get_all_train_test_features(
                        adj, self.training_static, train=True)

                    # Scale the training data
                    d_scaler = preprocessing.StandardScaler().fit(
                        train_dynamic[0])
                    s_scaler = preprocessing.StandardScaler().fit(
                        train_static[0])
                    train_dynamic_scaled = d_scaler.transform(train_dynamic[0])
                    train_static_scaled = s_scaler.transform(train_static[0])

                    # Pull out the feature vectors for static/dynamic
                    all_static_feats = np.hstack(
                        self.adjective_vectors_static[adj])
                    all_dynamic_feats = np.hstack(
                        self.adjective_vectors_dynamic[adj])

                    # Normalize the features using scaler
                    all_static_feats_scaled = classifier[
                        'static_scaler'].transform(all_static_feats)
                    all_dynamic_feats_scaled = classifier[
                        'dynamic_scaler'].transform(all_dynamic_feats)

                    # Create kernels for both static and dynamic
                    static_kernel = self.linear_kernel_test(
                        all_static_feats_scaled, train_static_scaled, 1)
                    dynamic_kernel = self.linear_kernel_test(
                        all_dynamic_feats_scaled, train_dynamic_scaled, 1)

                    # Merge the two kernels
                    alpha = classifier['alpha']
                    merged_kernel = (alpha) * static_kernel + (
                        1 - alpha) * dynamic_kernel

                    # Predict adjective with computed kernel
                    clf = classifier['classifier']
                    self.mkl_results[adj] = clf.predict(merged_kernel)

                # Store off the adjectives that returned true
                adjectives_found = []
                for adj in self.mkl_results:
                    if self.mkl_results[adj] == 1:
                        adjectives_found.append(Adj(adj))

                publish_string = AdjList()
                publish_string = adjectives_found

                # Print and publish results!
                print "Results from MKL classification"
                #print self.mkl_results
                print str(adjectives_found)
                self.adjectives_pub.publish(publish_string)
    def compute_probability_vector(self, bolt_obj):
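        """Static-only variant: store one static feature vector per motion and
        classify each adjective directly from the scaled static features (no
        HMM/dynamic features, no kernel merging)."""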

        # First object - initialize variables to store
        # Also clears out the vectors for a new run
        if bolt_obj.state == bolt_obj.TAP:
            # Store results as they come in
            self.adjective_vectors = []
            self.all_motion_results = dict()
            self.results = dict()
        
        # Store dictionary of strings
        self.state_string = {bolt_obj.DISABLED:'disabled',
                    bolt_obj.THERMAL_HOLD:'thermal_hold',
                    bolt_obj.SLIDE:'slide',
                    bolt_obj.SQUEEZE:'squeeze',
                    bolt_obj.TAP:'tap',
                    bolt_obj.DONE:'done',
                    bolt_obj.SLIDE_FAST:'slide_fast',
                    bolt_obj.CENTER_GRIPPER:'center_gripper'
                    }   
        
        # store dictionary for detailed states
        self.detailed_states = {bolt_obj.DISABLED:'MOVE_ARM_START_POSITION',
                                bolt_obj.SQUEEZE:'SQUEEZE_SET_PRESSURE_SLOW',
                                bolt_obj.THERMAL_HOLD:'HOLD_FOR_10_SECONDS',
                                bolt_obj.SLIDE:'SLIDE_5CM',
                                bolt_obj.SLIDE_FAST:'MOVE_DOWN_5CM'
                                }

       
        # Get the current motion 
        current_motion = self.state_string[bolt_obj.state] 
        
        # Check if state passed in should be processed
        if bolt_obj.state not in self.detailed_states:
            return
        else:
            # Get detailed state if exists
            current_detailed_state = self.detailed_states[bolt_obj.state] 

            # Check if the state is the disabled state
            if bolt_obj.state == bolt_obj.DISABLED:
                self.norm_bolt_obj = upenn_features.pull_detailed_state(bolt_obj,current_detailed_state)
                return
            else:
                self.bolt_object = upenn_features.pull_detailed_state(bolt_obj, current_detailed_state)

            # Check that the normalization object has been created
            if not self.norm_bolt_obj:
                print "Warning: there is no normalization data"

            # Build the static features 
            static_feature_object, self.static_features_array = upenn_features.extract_static_features(self.bolt_object, self.norm_bolt_obj)
            static_feats = upenn_features.createFeatureVector(static_feature_object)
            
            # Add the current vector to all the features
            self.adjective_vectors.append(static_feats)

            print len(self.adjective_vectors)
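            # One static vector is stored per featurized motion; the four
            # motions with detailed states (other than DISABLED) trigger
            # classification below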
            # Check if all motions have been performed
            # If so - feed into classifier
            if len(self.adjective_vectors) == 4:
                print 'All motions received! Computing adjective scores'
                for classifier in self.all_classifiers:
         
                    # Pull out which adjective we are working on        
                    adj = classifier['adjective'] 
            
                    # Pull out the feature vectors for static/dynamic
                    all_static_features = np.hstack(self.adjective_vectors)
               
                    # Normalize the features using scaler
                    all_static_feats_scaled = classifier['scaler'].transform(all_static_features)

                    # Predict adjective with computed kernel
                    clf = classifier['classifier']
                    self.results[adj] = clf.predict(all_static_feats_scaled)

                # Store off the adjectives that returned true
                adjectives_found = []
                for adj in self.results:
                    if self.results[adj] == 1:
                        adjectives_found.append(Adj(adj))

                publish_string = AdjList()
                publish_string = adjectives_found

                # Print and publish results!
                print "Results from Static Only classification"
                #print self.results
                print str(adjectives_found)