Example #1
 def solve(self, params=None):
   if not self._model:
     raise Error("Model is not defined.")
   if params and not isinstance(params, OptimizationParameters):
     raise TypeError("params must be an OptimizationParameters instance.")
   if not params:
     params = OptimizationParameters()
   self._optimization_params = params
   logging.info("Optimize model %s with %d rows and %d columns.",
                self._model.get_name(), self._model.get_num_rows(),
                self._model.get_num_columns())
   self._pipeline = pipeline.Pipeline()
   self._executor = executor_manager.get_instance_of(params.executor.executor,
                                                     self._pipeline)
   logging.info("Executor: %s", self._executor.__class__.__name__)
   try:
     self._driver = drivers.get_instance_of(
         params.driver.driver, self._model, params, self._pipeline)
   except Exception:
     logging.exception("Cannot create driver instance.")
     return
   logging.info("Driver: %s", self._driver.__class__.__name__)
   logging.info("Default backend solver is %d", params.driver.default_backend_solver)
   start_time = timeit.default_timer()
   try:
     self._executor.start()
     solution = self._driver.start()
   except KeyboardInterrupt:
     self._executor.stop()
     self._driver.stop()
     return
   except Exception:
     logging.exception("Driver failed.")
     self._executor.stop()
     return
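
A minimal calling sketch for this method. Only OptimizationParameters and solve() itself come from the snippet above; the solver class name and the way the model gets attached are assumptions made for illustration.

# Hypothetical wiring -- the FrontendSolver name and load_model() call are
# placeholders, not part of the snippet above.
params = OptimizationParameters()   # defaults; solve() builds one itself when omitted
solver = FrontendSolver()
solver.load_model(model)            # solve() raises Error while self._model is unset
solver.solve(params)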
Example #2
  @classmethod
  def build_from_mps(cls, decoder):
    """Builds a new model from MPS model.

    :param decoder: A :class:`~les.model.formats.mps.Decoder` instance.
    :returns: A :class:`MPModel` instance.
    """
    # TODO: fix this.
    logging.info("Read MPS format model from %s",
                 getattr(decoder._stream, "name", type(decoder._stream)))
    return (mp_model.MPModel()
            .set_name(decoder.get_name())
            .set_columns(decoder.get_columns_lower_bounds(),
                         decoder.get_columns_upper_bounds(),
                         decoder.get_columns_names())
            .set_objective(decoder.get_objective_coefficients(),
                           decoder.get_objective_name())
            .set_rows(decoder.get_rows_coefficients(),
                      decoder.get_rows_senses(),
                      decoder.get_rows_rhs(),
                      decoder.get_rows_names()))
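
The same builder chain can be exercised without an MPS decoder, which makes the expected argument order easier to see. This is only a sketch: the method names are the ones used above, but the concrete values and the encoding of row senses are assumptions.

# Illustrative data only; the argument order mirrors the decoder-driven call above.
model = (mp_model.MPModel()
         .set_name("TINY")
         .set_columns([0.0, 0.0],        # columns lower bounds
                      [1.0, 1.0],        # columns upper bounds
                      ["x1", "x2"])      # columns names
         .set_objective([3.0, 2.0],      # objective coefficients
                        "PROFIT")        # objective name
         .set_rows([[1.0, 2.0],          # rows coefficients
                    [3.0, 1.0]],
                   ["L", "L"],           # rows senses (encoding assumed)
                   [4.0, 5.0],           # rows right-hand sides
                   ["c1", "c2"]))        # rows names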
Example #3
 def __init__(self, model, optimization_parameters, pipeline):
   super(LocalEliminationDriver, self).__init__()
   if not model.is_binary():
     raise TypeError("Optimization can be applied only to binary integer "
                     "linear programming problems.")
   self._pipeline = pipeline
   self._optimization_params = optimization_parameters
   self._driver_params = (
       optimization_parameters.driver.local_elimination_driver_parameters)
   self._decomposer = decomposers.get_instance_of(
       self._driver_params.decomposer, model)
   logging.info("Decomposer: %s", self._decomposer.__class__.__name__)
   self._solution_table = solution_tables.get_instance_of(
       self._driver_params.solution_table)
   if not self._solution_table:
     logging.error("Cannot create solution table: %d",
                   self._driver_params.solution_table)
     return
   logging.info("Relaxation backend solvers are %s",
                self._driver_params.relaxation_backend_solvers)
   self._solver_id_stack = list(self._driver_params.relaxation_backend_solvers)
   self._solver_id_stack.append(optimization_parameters.driver.default_backend_solver)
   self._active_contexts = collections.OrderedDict()
   self._frozen_contexts = {}
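
A construction sketch that ties this back to Example #1: there the driver is obtained through drivers.get_instance_of(), but as far as this constructor is concerned the direct call below is equivalent. The model object is a placeholder and must satisfy model.is_binary().

params = OptimizationParameters()
p = pipeline.Pipeline()
# model is a placeholder for a binary MPModel instance; a non-binary model
# makes the constructor raise TypeError.
driver = LocalEliminationDriver(model, params, p)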
Example #4
  def decompose(self, initial_cols=[0], max_separator_size=0,
                merge_empty_blocks=True):
    '''Decomposes the model into submodels, starting from the given initial
    columns (column 0 by default).

    :param initial_cols: A list of column indices (integers).
    :param max_separator_size: An integer that represents the maximum allowed
      separator size; any non-zero value is not yet supported and raises
      :exc:`NotImplementedError`.
    :param merge_empty_blocks: ``True`` or ``False``, whether or not empty
      blocks should be merged.
    '''
    if max_separator_size:
      raise NotImplementedError()
    logging.info('Decompose model %s', self._model.get_name())

    self._used = []
    self._used2 = []
    self._p = []

    m = self._model.get_rows_coefficients()

    j_to_i_mapping = {}
    for j in range(m.shape[1]):
      j_to_i_mapping[j] = set()

    # TODO(d2rk): use interaction graph?
    g = networkx.Graph()
    g.add_nodes_from(range(m.shape[1]))
    # Connect every pair of columns that share a row and remember which rows
    # touch each column.
    for i in range(m.shape[0]):
      J_ = _get_indices(m, i)
      for j in range(len(J_) - 1):
        j_to_i_mapping[J_[j]].add(i)
        for j_ in range(j + 1, len(J_)):
          g.add_edge(J_[j], J_[j_])
      j_to_i_mapping[J_[-1]].add(i)

    def get_neighbors(nodes):
      neighbors = set()
      for node in nodes:
        neighbors.update(g.neighbors(node))
      neighbors.update(nodes)
      return neighbors
      
    def U(m_):
      # Return the rows whose nonzero columns all belong to the column set m_.
      u_ = set()
      for i in range(m.shape[0]):
        ok = True
        K_ = _get_indices(m, i)
        for j in K_:
          ok &= j in m_
        if ok:
          u_.add(i)
      return u_

    self._m = [set(initial_cols) | get_neighbors(set(initial_cols))]
    self._s = [set()]
    self._u = [set()]

    i = len(self._m)
    J = get_neighbors(self._m[i - 1])
    while True:
      M_ = J - self._m[i - 1] - self._s[i - 1]
      if not len(M_):
        break
      T = get_neighbors(M_)
      J_ = T - M_
      self._m.append(M_)
      self._u.append(set())
      self._s.append(J_ & J)
      self._m[i - 1] -= self._s[i]
      J = T
      i += 1
    
    for j in range(i):
      current = self._m[j] | self._s[j]
      if j + 1 < i:
        current.update(self._s[j + 1])
      self._u[j] = U(current)

    tree = DecompositionTree(self._model)

    # Union-find parents and per-column visited flags.
    for j in range(m.shape[1]):
      self._p.append(j)
      self._used.append(0)
      self._used2.append(0)
    
    self._layers = []
    self._layermodel = []
    for j in range(i):
      self._layers.append([])
      self._layermodel.append([])
    # Walk the layers from the last one back to the first, grouping columns
    # into connected components and slicing a submodel for each block.
    for j in range(i - 1, -1, -1):
      current = self._m[j] | self._s[j]
      separator = set() | self._s[j]
      if j + 1 < i:
        current.update(self._s[j + 1])
        separator.update(self._s[j + 1])
      for k in current:
        self._used[k] = 1
      for k in current - separator:
        T = get_neighbors([k])
        for _k in T:
          if self._used[_k]:
            self.unite_components(k, _k)
      for k in current:
        if not self._used2[k]:
          self._layers[j].append(set())
          for _k in current:
            if self.get_component(k) == self.get_component(_k):
              self._layers[j][-1].add(_k)
              self._used2[_k] = 1
          u = U(self._layers[j][-1])
          self._layermodel[j].append(self._model.slice(u, self._layers[j][-1]))
          tree.add_node(self._layermodel[j][-1])
          if j != i - 1:
            for _k in range(len(self._layers[j + 1])):
              shared = self._layers[j][-1] & self._layers[j + 1][_k]
              if shared:
                tree.add_edge(
                    self._layermodel[j][-1], self._layermodel[j + 1][_k],
                    [self.get_model().get_columns_names()[c] for c in shared])
      for k in separator:
        T = get_neighbors([k])
        for _k in T:
          if self._used[_k]:
            self.unite_components(k, _k)
    tree.set_root(self._layermodel[0][0])

    self._decomposition_tree = tree
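
The first loop of decompose() builds a column interaction graph: two columns become adjacent whenever some row has nonzero coefficients in both of them, and j_to_i_mapping remembers which rows touch each column. The snippet below reproduces just that step on a small dense matrix, independent of the project's _get_indices helper and sparse storage; it is illustrative only.

import networkx
import numpy as np

# 3 constraints over 4 columns; only the nonzero pattern matters here.
m = np.array([[1, 1, 0, 0],
              [0, 1, 1, 0],
              [0, 0, 1, 1]])

g = networkx.Graph()
g.add_nodes_from(range(m.shape[1]))
j_to_i_mapping = {j: set() for j in range(m.shape[1])}
for i in range(m.shape[0]):
  cols = np.flatnonzero(m[i]).tolist()  # columns used by row i
  for j in cols:
    j_to_i_mapping[j].add(i)
  for a in range(len(cols) - 1):
    for b in range(a + 1, len(cols)):
      g.add_edge(cols[a], cols[b])

print(sorted(g.edges()))  # [(0, 1), (1, 2), (2, 3)]
print(j_to_i_mapping)     # {0: {0}, 1: {0, 1}, 2: {1, 2}, 3: {2}}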
Example #5
  def decompose(self, initial_cols=[0], max_separator_size=0,
                merge_empty_blocks=True):
    '''Decomposes the model into submodels, starting from the given initial
    columns (column 0 by default).

    :param initial_cols: A list of column indices (integers).
    :param max_separator_size: An integer that represents the maximum allowed
      separator size; any non-zero value is not yet supported and raises
      :exc:`NotImplementedError`.
    :param merge_empty_blocks: ``True`` or ``False``, whether or not empty
      blocks should be merged.
    '''
    if max_separator_size:
      raise NotImplementedError()
    logging.info('Decompose model %s', self._model.get_name())

    m = self._model.get_rows_coefficients()

    j_to_i_mapping = {}
    for j in range(m.shape[1]):
      j_to_i_mapping[j] = set()

    # TODO(d2rk): use interaction graph?
    g = networkx.Graph()
    g.add_nodes_from(range(m.shape[1]))
    # Connect every pair of columns that share a row and remember which rows
    # touch each column.
    for i in range(m.shape[0]):
      J_ = _get_indices(m, i)
      for j in range(len(J_) - 1):
        j_to_i_mapping[J_[j]].add(i)
        for j_ in range(j + 1, len(J_)):
          g.add_edge(J_[j], J_[j_])
      j_to_i_mapping[J_[-1]].add(i)

    def get_neighbors(nodes):
      # Include the nodes themselves so the frontier always covers the current
      # block (as in the previous example); otherwise the peeling loop below
      # can revisit columns that were already assigned to a layer.
      neighbors = set(nodes)
      for node in nodes:
        neighbors.update(g.neighbors(node))
      return neighbors

    self._m = [set(initial_cols) | get_neighbors(set(initial_cols))]
    self._s = [set()]
    self._u = [set()]

    i = len(self._m)
    J = get_neighbors(self._m[i - 1])
    while True:
      M_ = J - self._m[i - 1] - self._s[i - 1]
      if not len(M_):
        break
      T = get_neighbors(M_)
      J_ = T - M_
      self._m.append(M_)
      self._u.append(set())
      self._s.append(J_ & J)
      self._m[i - 1] -= self._s[i]
      for j in self._m[i - 1]:
        self._u[i - 1].update(j_to_i_mapping[j])
      J = T
      i += 1
    for j in self._m[i - 1]:
      self._u[i - 1].update(j_to_i_mapping[j])

    self._build_decomposition_tree()
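
To see what the peeling loop computes, here is a self-contained rerun of its core logic (with the node-inclusive get_neighbors) on a path-shaped interaction graph 0-1-2-3, standing in for a staircase-structured model. Everything outside the while loop is illustrative scaffolding; note how the middle block ends up empty because both of its columns are separators, which is presumably the situation the merge_empty_blocks option addresses.

import networkx

# Path 0 - 1 - 2 - 3, e.g. the interaction graph of a staircase model.
g = networkx.Graph()
g.add_edges_from([(0, 1), (1, 2), (2, 3)])

def get_neighbors(nodes):
  neighbors = set(nodes)
  for node in nodes:
    neighbors.update(g.neighbors(node))
  return neighbors

m = [get_neighbors({0})]  # column blocks, seeded from column 0
s = [set()]               # separators between consecutive blocks

i = 1
J = get_neighbors(m[0])
while True:
  M_ = J - m[i - 1] - s[i - 1]
  if not M_:
    break
  T = get_neighbors(M_)
  m.append(M_)
  s.append((T - M_) & J)
  m[i - 1] -= s[i]
  J = T
  i += 1

blocks = [m[j] | s[j] | (s[j + 1] if j + 1 < i else set()) for j in range(i)]
print(blocks)  # [{0, 1}, {1, 2}, {2, 3}]
print(s)       # [set(), {1}, {2}]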