Code example #1
File: worker.py Project: wuyou33/explicit_hybrid_mpc
 def ecc(self,node,location):
     """
     Implementation of [1] Algorithm 2 lines 4-16. Pass a tree root
     node and this grows the tree until its leaves are feasible partition
     cells. **Caution**: modifies ``node`` (passed by reference).
     
     [1] D. Malyuta, B. Acikmese, M. Cacan, and D. S. Bayard,
     "Partition-based feasible integer solution pre-computation for hybrid
     model predictive control," in 2019 European Control Conference
     (accepted), IFAC, jun 2019.
     
     Parameters
     ----------
     node : Tree
         Tree root. It suffices that the node holds the simplex vertices in
         node.data.vertices.
     location : string
         Location in tree of this node. String where '0' at index i means
         take left child, '1' at index i means take right child (at depth
         value i).
     """
     self.status_publisher.update(location=location)
     tools.info_print('ecc at location = %s'%(location))
     c_R = np.average(node.data.vertices,axis=0) # Simplex barycenter
     if not self.oracle.P_theta(theta=c_R,check_feasibility=True):
         raise RuntimeError('STOP, Theta contains infeasible regions')
     else:
         delta_hat,vx_inputs_and_costs = self.oracle.V_R(node.data.vertices)
         if delta_hat is None:
             S_1,S_2 = tools.split_along_longest_edge(node.data.vertices)[:2]
             child_left = NodeData(vertices=S_1)            
             child_right = NodeData(vertices=S_2)
             node.grow(child_left,child_right)
             self.status_publisher.update(simplex_count_increment=1)
             # Recursive call for each resulting simplex
             self.offload_child_computation(node.left,location+'0','ecc')
             self.offload_child_computation(node.right,location+'1','ecc',
                                            prioritize_self=True)
         else:
             # Assign feasible commutation to simplex
             Nvx = node.data.vertices.shape[0]
             vertex_costs = np.array([vx_inputs_and_costs[i][1]
                                      for i in range(Nvx)])
             vertex_inputs = np.array([vx_inputs_and_costs[i][0]
                                       for i in range(Nvx)])
             node.data = NodeData(vertices=node.data.vertices,
                                  commutation=delta_hat,
                                  vertex_costs=vertex_costs,
                                  vertex_inputs=vertex_inputs)
             self.offload_child_computation(node,location,'lcss',
                                            force='self')
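
The helper tools.split_along_longest_edge is not included in this listing. Below is a minimal numpy sketch of the behaviour assumed here (and in the lcss example further down): bisect the simplex at the midpoint of its longest edge and return the two child simplices plus the indices of the edge's endpoints. It is an illustration, not the project's implementation.

import itertools
import numpy as np

def split_along_longest_edge(V):
    """Bisect the simplex with vertex matrix V (one vertex per row) at the
    midpoint of its longest edge. Returns (S_1, S_2, (i, j)) where S_1 has
    vertex i and S_2 has vertex j replaced by the midpoint."""
    # find the pair of vertices that are furthest apart (the longest edge)
    i, j = max(itertools.combinations(range(V.shape[0]), 2),
               key=lambda ij: np.linalg.norm(V[ij[0]] - V[ij[1]]))
    v_mid = 0.5 * (V[i] + V[j])
    S_1, S_2 = V.copy(), V.copy()
    S_1[i] = v_mid  # child 1 keeps vertex j of the longest edge
    S_2[j] = v_mid  # child 2 keeps vertex i of the longest edge
    return S_1, S_2, (i, j)

# Example: the longest edge of this triangle runs from (1,0) to (0,2).
S_1, S_2, v_idx = split_along_longest_edge(np.array([[0., 0.], [1., 0.], [0., 2.]]))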
Code example #2
File: tools.py Project: wuyou33/explicit_hybrid_mpc
def delaunay(R):
    """
    Partition polytope R into simplices.
    
    Parameters
    ----------
    R : np.array
        Matrix whose rows are the polytope vertices.
        
    Returns
    -------
    root : Tree
        Binary tree whose leaves are the partition. Unless both children are
        leaves, the left child is a leaf and the right child has "None" data
        and is itself a parent.
    Nsx : int
        Number of simplices that R was partitioned into.
    vol : float
        Volume of R.
    """
    vol = 0.
    delaunay = scs.Delaunay(R)
    root = Tree(NodeData(vertices=R))
    cursor = root  # This is a temporary "pointer" that goes down the tree
    Nsx = delaunay.simplices.shape[0]  # Number of simplices
    if Nsx == 1:
        vol += simplex_volume(R)
    for i in range(Nsx - 1):
        left = NodeData(vertices=R[delaunay.simplices[i]])
        vol += simplex_volume(R[delaunay.simplices[i]])
        if i < Nsx - 2:
            right = None
        else:
            right = NodeData(vertices=R[delaunay.simplices[i + 1]])
            vol += simplex_volume(R[delaunay.simplices[i + 1]])
        cursor.grow(left, right)
        cursor = cursor.right
    return root, Nsx, vol
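
For comparison, here is a self-contained sketch of the same partitioning step using plain numpy/scipy, without the project's Tree, NodeData and simplex_volume helpers; the volume formula is the standard determinant formula and is an assumption about what simplex_volume computes.

import numpy as np
import scipy.spatial as scs
from math import factorial

def simplex_volume(S):
    """Volume of an n-dimensional simplex given as an (n+1, n) vertex array."""
    n = S.shape[1]
    return abs(np.linalg.det(S[1:] - S[0])) / factorial(n)

R = np.array([[0., 0.], [1., 0.], [1., 1.], [0., 1.]])  # unit square
triangulation = scs.Delaunay(R)
simplices = [R[idx] for idx in triangulation.simplices]
total_volume = sum(simplex_volume(S) for S in simplices)
print(len(simplices), total_volume)  # 2 simplices covering volume (area) 1.0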
Code example #3
    def _get_merged_child_type_cdfs(self, da):
        """Get merged child CDFs (i.e. lists of possible children, given parent IDs) for the
        given DA.

        All nodes  occurring in training data items that contain DAIs from the current DA are
        included. If `compatible_dais` is set, nodes that always occur with DAIs not in the
        current DA will be excluded.

        @param da: the current dialogue act
        """
        # get all nodes occurring in training data items containing the DAIs from the current DA
        merged_counts = defaultdict(Counter)
        for dai in da:
            try:
                for parent_id in self.child_type_counts[dai]:
                    merged_counts[parent_id].update(
                        self.child_type_counts[dai][parent_id])
            except KeyError:
                log_warn('DAI ' + unicode(dai) +
                         ' unknown, adding nothing to CDF.')

#         log_info('Node types: %d' % sum(len(c.keys()) for c in merged_counts.values()))

        # remove nodes that are not compatible with the current DA (their list of
        # minimum compatibility DAIs is not similar to the current DA)
        for _, counts in merged_counts.items():
            for node in counts.keys():
                if not self._compatible(
                        da, NodeData(t_lemma=node[1], formeme=node[0])):
                    del counts[node]

#         log_info('Node types after pruning: %d' % sum(len(c.keys()) for c in merged_counts.values()))
#         log_info('Compatible lemmas: %s' % ' '.join(set([n[1] for c in merged_counts.values()
#                                                          for n in c.keys()])))

        return self.cdfs_from_counts(merged_counts)
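
The method cdfs_from_counts is not shown in this listing. The following is a generic sketch of the counts-to-CDF conversion it presumably performs, turning each merged Counter into a cumulative-count list; the parent ID and (formeme, t_lemma) values are made up for illustration, and the project's actual data layout may differ.

from collections import Counter, defaultdict

def cdfs_from_counts(merged_counts):
    """Turn {parent_id: Counter} into {parent_id: [(child, cumulative_count), ...]}."""
    cdfs = {}
    for parent_id, counts in merged_counts.items():
        cdf, running_total = [], 0
        for child, count in counts.items():
            running_total += count
            cdf.append((child, running_total))  # last entry carries the grand total
        cdfs[parent_id] = cdf
    return cdfs

merged = defaultdict(Counter)
merged['n:obj'].update({('n:attr', 'price'): 3, ('adj:attr', 'cheap'): 1})
print(cdfs_from_counts(merged))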
Code example #4
 def generate_child(self, parent):
     """Generate one node, given its parent (plus the candidate generator must be
     initialized for the current DA)."""
     formeme, t_lemma, right = self.candgen.sample_child(parent)
     child = parent.create_child(right, NodeData(t_lemma, formeme))
     return child
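
candgen.sample_child is not part of this listing. Here is a minimal sketch of drawing one item from a cumulative-count CDF like the ones built in the previous example; the (formeme, t_lemma, right) triples are hypothetical and the project's item structure may differ.

import bisect
import random

def sample_from_cdf(cdf):
    """cdf: list of (item, cumulative_count) pairs with ascending cumulative counts."""
    totals = [cum for _, cum in cdf]
    r = random.uniform(0, totals[-1])
    idx = bisect.bisect_left(totals, r)  # first entry whose cumulative count covers r
    return cdf[idx][0]

cdf = [(('n:attr', 'price', True), 3), (('adj:attr', 'cheap', False), 4)]
# ~75% of draws return the first triple, ~25% the second
formeme, t_lemma, right = sample_from_cdf(cdf)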
Code example #5
File: worker.py Project: wuyou33/explicit_hybrid_mpc
 def lcss(self,node,location):
     """
     Implementation of [1] Algorithm 1 lines 4-20. Pass a tree root
     node and this grows the tree until its leaves are associated with an
     epsilon-suboptimal commutation*.
     **Caution**: modifies ``node`` (passed by reference).
     
     * Assuming that no leaf is closed due to a bad condition number.
     
     [1] D. Malyuta and B. Acikmese, "Approximate Mixed-integer
     Convex Multiparametric Programming," in Control Systems Letters (in
     review), IEEE.
     
     Parameters
     ----------
     node : Tree
         Tree root. It suffices that the node holds the simplex vertices in
         node.data.vertices.
     location : string
         Location in tree of this node. String where '0' at index i means take
         left child, '1' at index i means take right child (at depth value i).
     """
     def add(child_left,child_right):
         """
         Make the node a parent of child_left and child_right.
         
         Parameters
         ----------
         child_left : NodeData
             Data for the "left" child in the binary tree.
         child_right : NodeData
             Data for the "right" child in the binary tree.
         """
         node.grow(child_left,child_right)
         self.status_publisher.update(simplex_count_increment=1)
         self.offload_child_computation(node.left,location+'0','lcss')
         self.offload_child_computation(node.right,location+'1','lcss',
                                        prioritize_self=True)
 
     def update_vertex_costs(v_mid,v_combo_idx,delta,old_vertex_inputs,
                             old_vertex_costs):
         """
         Compute a new set of optimal costs at the simplex vertices.
         
         Parameters
         ----------
         v_mid : np.array
             New vertex at which the current simplex is to be split into two.
         v_combo_idx : tuple
             Tuple of two elements corresponding to row index of the two
             vertices constituting the longest edge. The first vertex is
             removed from S_1 and the second vertex is removed from S_2,
             substituted for v_mid.
         delta : np.array
             The commutation that is to be associated with the two new
             simplices.
         old_vertex_inputs : np.array
             Array of existing pre-computed vertex inputs.
         old_vertex_costs : np.array
             Array of existing pre-computed vertex costs.
         """
         u_opt_v_mid, V_delta_v_mid = self.oracle.P_theta_delta(
             theta=v_mid,delta=delta)[:2]
         vertex_inputs_S_1,vertex_inputs_S_2 = (old_vertex_inputs.copy(),
                                                old_vertex_inputs.copy())
         vertex_costs_S_1,vertex_costs_S_2 = (old_vertex_costs.copy(),
                                              old_vertex_costs.copy())
         vertex_inputs_S_1[v_combo_idx[0]] = u_opt_v_mid
         vertex_inputs_S_2[v_combo_idx[1]] = u_opt_v_mid
         vertex_costs_S_1[v_combo_idx[0]] = V_delta_v_mid
         vertex_costs_S_2[v_combo_idx[1]] = V_delta_v_mid
         return (vertex_inputs_S_1,vertex_inputs_S_2,
                 vertex_costs_S_1,vertex_costs_S_2)
 
     self.status_publisher.update(location=location)
     delta_epsilon_suboptimal = self.oracle.bar_E_delta_R(
         R=node.data.vertices,V_delta_R=node.data.vertex_costs)
     if delta_epsilon_suboptimal:
         # Close leaf
         node.data.is_epsilon_suboptimal = True
         node.data.timestamp = time.time()
         volume_closed = tools.simplex_volume(node.data.vertices)
         self.status_publisher.update(volume_filled_increment=volume_closed)
     else:
         delta_star,theta_star,new_vx_inputs_costs,cost_varies_little = (
             self.oracle.bar_D_delta_R(R=node.data.vertices,
                                       V_delta_R=node.data.vertex_costs,
                                       delta_ref=node.data.commutation))
         bar_D_delta_R_feasible = delta_star is not None
         if not bar_D_delta_R_feasible:
             # delta does not change in this case, so the same vertex inputs
             # and costs
             delta_star = node.data.commutation
             new_vertex_costs = node.data.vertex_costs
             new_vertex_inputs = node.data.vertex_inputs
         else:
             # extract the vertex inputs and costs associated with the better
             # commutation choice
             Nvx = node.data.vertices.shape[0]
             new_vertex_costs = np.array([new_vx_inputs_costs[i][1]
                                          for i in range(Nvx)])
             new_vertex_inputs = np.array([new_vx_inputs_costs[i][0]
                                           for i in range(Nvx)])
         if bar_D_delta_R_feasible and cost_varies_little:
             node.data.commutation = delta_star
             node.data.vertex_costs = new_vertex_costs
             node.data.vertex_inputs = new_vertex_inputs
             self.offload_child_computation(node,location,'lcss',
                                            force='self')
         else:
             S_1,S_2,v_idx = tools.split_along_longest_edge(
                 node.data.vertices)
             # Re-compute vertex costs
             v_mid = S_1[v_idx[0]]
             (vertex_inputs_S_1,vertex_inputs_S_2,
              vertex_costs_S_1,vertex_costs_S_2) = update_vertex_costs(
                  v_mid,v_idx,delta_star,new_vertex_inputs,new_vertex_costs)
             # Make children
             child_left = NodeData(vertices=S_1,commutation=delta_star,
                                   vertex_costs=vertex_costs_S_1,
                                   vertex_inputs=vertex_inputs_S_1)
             child_right = NodeData(vertices=S_2,commutation=delta_star,
                                    vertex_costs=vertex_costs_S_2,
                                    vertex_inputs=vertex_inputs_S_2)
             add(child_left,child_right)
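
The Tree class itself is not included in this listing. A minimal stand-in (not the project's implementation) shows how the location strings used by ecc and lcss encode a path from the root: '0' descends into the left child, '1' into the right child.

class Tree:
    """Minimal binary tree stand-in with the attributes referenced above."""
    def __init__(self, data, left=None, right=None):
        self.data, self.left, self.right = data, left, right

def node_at(root, location):
    """Follow a location string such as '01' down from the root."""
    node = root
    for step in location:
        node = node.left if step == '0' else node.right
    return node

root = Tree('root',
            left=Tree('L', left=Tree('LL'), right=Tree('LR')),
            right=Tree('R'))
print(node_at(root, '01').data)  # 'LR': left child of the root, then its right child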
Code example #6
    def get_all_successors(self, cand_tree):
        """Get all possible successors of a candidate tree, given CDFS and node number limits.

        NB: This assumes projectivity (will never create a non-projective tree).

        @param cand_tree: The current candidate tree to be expanded
        """
        # TODO possibly avoid creating TreeNode instances for iterating
        nodes = TreeNode(cand_tree).get_descendants(add_self=1, ordered=1)
        nodes_on_level = defaultdict(int)
        res = []
        if self.cur_limits is not None:
            # stop if maximum number of nodes is reached
            if len(nodes) >= self.cur_limits['total']:
                return []
            # remember number of nodes on all levels
            for node in nodes:
                nodes_on_level[node.get_depth()] += 1

        # try adding one node to all possible places
        for node_num, node in enumerate(nodes):
            # skip nodes that can't have more children
            parent_id = self._parent_node_id(node)
            if (len(node.get_children()) >= self.max_children.get(
                    parent_id, 0) or parent_id not in self.cur_cdfs):
                continue
            # skip nodes whose child level has already reached its maximum number of nodes
            if self.cur_limits is not None:
                child_depth = node.get_depth() + 1
                if nodes_on_level[child_depth] >= self.cur_limits[child_depth]:
                    continue
            # try all formeme/t-lemma/direction variants of a new child under the given parent node
            for formeme, t_lemma, right in map(lambda item: item[0],
                                               self.cur_cdfs[parent_id]):
                # place the child directly following/preceding the parent
                succ_tree = cand_tree.clone()
                succ_tree.create_child(node_num, right,
                                       NodeData(t_lemma, formeme))
                res.append(succ_tree)
                # if the parent already has some left/right children, try to place the new node
                # in all possible positions before/after their subtrees (for left/right child,
                # respectively)
                children_idxs = cand_tree.children_idxs(node_num,
                                                        left_only=not right,
                                                        right_only=right)
                for child_idx in children_idxs:
                    succ_tree = cand_tree.clone()
                    subtree_bound = succ_tree.subtree_bound(child_idx, right)
                    succ_tree.create_child(node_num,
                                           subtree_bound + (1 if right else 0),
                                           NodeData(t_lemma, formeme))
                    res.append(succ_tree)

        # if we have the tree classifier available, discard all successors that talk about something
        # not present in the current DA
        if self.classif and res:
            orig_len = len(res)
            is_subset = self.classif.is_subset_of_cur_da(res)
            res = [tree for tree, is_sub in zip(res, is_subset) if is_sub]
            final_len = len(res)
            if orig_len > final_len:
                log_debug('Tree classification reduced successors %d -> %d' %
                          (orig_len, final_len))
        # return all created successors
        return res
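
As an illustration of the projectivity-preserving placement tried above for a right child (directly after the parent, or just past an existing right child's subtree), here is a small self-contained sketch in terms of flat surface-order indices; it is not the project's TreeNode/subtree_bound API, only the positional idea.

def right_insertion_points(parent_pos, right_subtree_ends):
    """parent_pos: surface-order index of the parent node;
    right_subtree_ends: last surface-order index of each existing right
    child's subtree. Returns candidate insertion indices for a new right
    child that keep the tree projective."""
    points = [parent_pos + 1]                           # directly after the parent
    points += [end + 1 for end in right_subtree_ends]   # just past each subtree
    return sorted(set(points))

# Example: a parent at position 2 whose two right children span up to
# positions 4 and 7 admits insertion at positions 3, 5 and 8.
print(right_insertion_points(2, [4, 7]))  # [3, 5, 8]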