def encode(bkd, net, mdl, net_in, net_out, name, verbose=0):
    """ Encodes the network in the optimization model

    Codifies each neuron as a variable in the combinatorial problem,
    while each edge is considered as a constraint on the neurons
    connected.

    Parameters
    ----------
        bkd : :obj:`eml.backend.cplex_backend.CplexBackend`
            Backend Cplex
        net : :obj:`eml.net.describe.DNRNet`
            Network to embed
        mdl : :obj:`docplex.mp.model.Model`
            Model CPLEX
        net_in : list(:obj:`docplex.mp.linear.Var`)
            Input continuous variables
        net_out : :obj:`docplex.mp.linear.Var`
            Output continuous variables (a single variable or a list)
        name : string
            Name of the network
        verbose : int
            If higher than 0 notifies every neuron embedded

    Returns
    -------
        Descriptor : :obj:`eml.util.ModelDesc`
            Descriptor of the neural network

    """
    # Scalar to vector output: wrap a single output variable in a list.
    # BUG FIX: catch only TypeError (raised by len() on a non-sequence)
    # instead of a bare except that would hide unrelated errors
    try:
        len(net_out)
    except TypeError:
        net_out = [net_out]
    # Build a model descriptor
    desc = util.ModelDesc(net, mdl, name)
    # Process the network layer by layer
    for k, layer in enumerate(net.layers()):
        # Add the layer to the solver wrapper
        for i, neuron in enumerate(layer.neurons()):
            # Add the neuron to the descriptor
            if verbose >= 1:
                print('Adding neuron %s' % str(neuron.idx()))
            if k == 0:
                # First layer: bind to the supplied input variables
                x = net_in[i]
            elif k == net.nlayers() - 1:
                # Last layer: bind to the supplied output variables
                x = net_out[i]
            else:
                # Hidden layers: let _add_neuron build the variable
                x = None
            _add_neuron(bkd, desc, neuron, x=x)
    # Enforce basic input bounds, copied from the input variables
    in_layer = net.layer(0)
    neurons = list(in_layer.neurons())
    for i, var in enumerate(net_in):
        neurons[i].update_lb(var.lb)
        neurons[i].update_ub(var.ub)
    # Propagate bounds through the network (interval-based reasoning)
    process.ibr_bounds(net)
    # Return the network descriptor
    return desc
def fwd_bound_tighthening(bkd, net=None, desc=None, timelimit=None,
                          skip_layers=None, verbose=0):
    """ Tighten neuron activation bounds, layer by layer, via MILP

    Exactly one of ``net`` and ``desc`` must be provided.  When a
    network is given, a temporary model descriptor is built internally
    and the neurons are embedded on the fly; when a descriptor is
    given, the neurons are assumed to be embedded already.

    Parameters
    ----------
        bkd : :obj:`eml.backend.cplex_backend.CplexBackend`
            Cplex backend
        net : :obj:`eml.net.describe.DNRNet`
            Neural network of interest (default None)
        desc : :obj:`eml.util.ModelDesc`
            Model descriptor (default None)
        timelimit : int
            Time limit for the whole process (default None)
        skip_layers : list
            Indices of layers for which no bounds are computed
            (default None)
        verbose : int
            If higher than 0 prints more info on the process (default 0)

    Returns
    -------
        Total time : int
            Time used by the optimizer

    Raises
    ------
        ValueError
            If both or neither of net/desc are given
    """
    # Exactly one of net and desc must be supplied
    if (net is None) == (desc is None):
        raise ValueError(
            'Either a network or a network model descriptor should be passed ')
    # Build a temporary descriptor when only the network is given
    build_neurons = net is not None
    if build_neurons:
        mdl = bkd.new_model()
        desc = util.ModelDesc(net, mdl, name='_tmp')
    else:
        net = desc.ml_model()
    # Visit every neuron, tightening its bounds as we go
    ttime, nleft = 0, net.size()
    for layer in net.layers():
        skip = skip_layers is not None and layer.idx() in skip_layers
        for neuron in layer.neurons():
            # Embed the neuron, if needed
            if build_neurons:
                if verbose >= 1:
                    print('Adding neuron %s' % str(neuron.idx()))
                embed._add_neuron(bkd, desc, neuron)
            # Bounds are not computed for skipped layers
            if skip:
                continue
            # Split the remaining time budget evenly over the
            # remaining neurons
            tlim = None if timelimit is None else (timelimit - ttime) / nleft
            # Compute bounds
            if verbose >= 1:
                print('Computing bounds for %s' % str(neuron.idx()))
            ltime, _ = _neuron_bounds(bkd, desc, neuron,
                                      timelimit=tlim, verbose=verbose)
            ttime += ltime
            nleft -= 1
    # Return total time
    return ttime
def encode_backward_implications(bkd, tree, mdl, tree_in, tree_out, name):
    """ Encode the decision tree in the backend

    Given an input and an output the tree is embedded into the
    optimization problem, via one binary "path" variable per rule and
    big-M implication constraints on the input attributes.

    Parameters
    ----------
        bkd : :obj:`eml.backend.cplex_backend.CplexBackend`
            Cplex backend
        tree : :obj:`eml.tree.describe.DTNode`
            Decision tree
        mdl : :obj:`docplex.mp.model.Model`
            Cplex model
        tree_in : list(:obj:`docplex.mp.linear.Var`)
            Input continuous variables
        tree_out : :obj:`docplex.mp.linear.Var`
            Output continuous variable
        name : string
            Name of the tree

    Returns
    -------
        Model Descriptor : :obj:`eml.util.ModelDesc`
            Descriptor of the instance of EML

    Raises
    ------
        ValueError
            If the threshold is in the 'right' branch or the tree has
            an output vector
    """
    # BUG FIX: the docstring promised these errors, but the original
    # body never raised them
    if not tree.thr_left:
        raise ValueError(
            'Trees where the threshold goes in the right branch are not yet supported')
    # Scalar output only: unwrap a one-element output vector
    try:
        if len(tree_out) > 1:
            raise ValueError('Trees with vector output are not yet supported')
        tree_out = tree_out[0]
    except TypeError:  # tree_out is already a scalar variable
        pass
    # Build a model descriptor
    desc = util.ModelDesc(tree, mdl, name)
    # Obtain the decision tree in rule format
    rules = _extract_rules(tree)
    nrules = len(rules)
    # ------------------------------------------------------------------------
    # Introduce a binary variable for each rule (i.e. root-to-leaf path)
    Z = []
    for k in range(nrules):
        if desc.has('path', k):
            zvar = desc.get('path', k)
        else:
            zvar = bkd.var_bin(mdl, '%s_p[%d]' % (name, k))
            desc.store('path', k, zvar)
        Z.append(zvar)
    # Only one rule can be active at a time
    bkd.cst_eq(mdl, bkd.xpr_sum(mdl, Z), 1)
    # ------------------------------------------------------------------------
    # Class assignment: the output equals the class of the active rule
    coefs = [r[-1] for r in rules]
    bkd.cst_eq(mdl, tree_out, bkd.xpr_scalprod(mdl, coefs, Z))
    # ------------------------------------------------------------------------
    # Collapse conditions on the same attribute for each rule
    crules = []
    for k, r in enumerate(rules):
        res = {}
        for aname, atype, (th1, th2) in r[:-1]:
            if aname not in res:
                res[aname] = (th1, th2)
            else:
                # Intersect the (th1, th2] intervals
                oth1, oth2 = res[aname]
                res[aname] = (max(oth1, th1), min(oth2, th2))
        crules.append(res)
    # ------------------------------------------------------------------------
    # Process all conditions in all rules
    built = set()
    for k, r in enumerate(rules):
        for aname, atype, (th1, th2) in r[:-1]:
            # If the constraint has already been built, then do nothing
            if (aname, th1, th2) in built:
                continue
            # BUG FIX: remember the condition so duplicates are skipped
            # (the original never populated this set, making the test
            # above dead code)
            built.add((aname, th1, th2))
            # Identify all rules whose collapsed interval on this
            # attribute falls within (th1, th2].
            # BUG FIX: the upper comparison must be non-strict (as in
            # the sibling variant of this function), otherwise the very
            # rule the condition belongs to is excluded.  Also use a
            # fresh index name instead of shadowing the outer `k`.
            based = [j for j, cr in enumerate(crules)
                     if aname in cr
                     and th1 <= cr[aname][0] and cr[aname][1] <= th2]
            # Upper-bound implication (big M)
            if th2 != float('inf'):
                M = tree.ub(aname)
                th = th2
                coefs = [1] + [M - th] * len(based)
                terms = [tree_in[aname]] + [Z[j] for j in based]
                bkd.cst_leq(mdl, bkd.xpr_scalprod(mdl, coefs, terms), M)
            # Lower-bound implication (big m); strict bound obtained
            # via a backend-defined epsilon
            if th1 != -float('inf'):
                m = tree.lb(aname)
                th = th1 + bkd.const_eps(mdl)
                coefs = [1] + [m - th] * len(based)
                terms = [tree_in[aname]] + [Z[j] for j in based]
                bkd.cst_geq(mdl, bkd.xpr_scalprod(mdl, coefs, terms), m)
    # Return the descriptor
    return desc
def fwd_bound_tighthening(bkd, net=None, desc=None, timelimit=None,
                          skip_layers=None, verbose=0):
    """ Forward bound tightening via Mixed Integer Linear Programming

    Parameters
    ----------
        bkd : :obj:`eml.backend.cplex_backend.CplexBackend`
            Cplex backend
        net : obj:`eml.net.describe.DNRNet`
            Neural network of interest (default None)
        desc : :obj:`eml.util.ModelDesc`
            Model descriptor (default None)
        timelimit : int
            Time limit for the process (default None)
        skip_layers : list
            Skips bound tightening for the specified layers
            (default None)
        verbose : int
            if higher than 0 prints more info on the process (default 0)

    Returns
    -------
        Total time : int
            Time used to perform bound tightening by the optimizer

    Raises
    ------
        ValueError
            Neither a model descriptor nor a network were given in input
    """
    # Argument check: exactly one of the two model sources is required
    given_net, given_desc = net is not None, desc is not None
    if given_net == given_desc:
        raise ValueError(
            'Either a network or a network model descriptor should be passed ')
    if given_net:
        # Build a throw-away descriptor; neurons get embedded on the fly
        mdl = bkd.new_model()
        desc = util.ModelDesc(net, mdl, name='_tmp')
        build_neurons = True
    else:
        # Reuse the descriptor's model; neurons are assumed embedded
        net = desc.ml_model()
        build_neurons = False
    # Process the network layer by layer
    total_time = 0
    remaining = net.size()
    for layer in net.layers():
        for neuron in layer.neurons():
            # Embed the neuron in the model, if requested
            if build_neurons:
                if verbose >= 1:
                    print('Adding neuron %s' % str(neuron.idx()))
                embed._add_neuron(bkd, desc, neuron)
            # Skipped layers are embedded, but their bounds are not
            # recomputed
            if skip_layers is not None and layer.idx() in skip_layers:
                continue
            # Give each remaining neuron an even slice of the leftover
            # time budget
            if timelimit is not None:
                neuron_tlim = (timelimit - total_time) / remaining
            else:
                neuron_tlim = None
            # Compute bounds
            if verbose >= 1:
                print('Computing bounds for %s' % str(neuron.idx()))
            spent, _ = _neuron_bounds(bkd, desc, neuron,
                                      timelimit=neuron_tlim, verbose=verbose)
            total_time += spent
            remaining -= 1
    # Return total time
    return total_time
def encode_backward_implications(bkd, tree, mdl, tree_in, tree_out, name,
                                 verbose=0):
    """ Encode the decision tree in the backend

    Given an input and an output the tree is embedded into the
    optimization problem, via one binary "path" variable per rule and
    big-M implication constraints on the input attributes.

    Parameters
    ----------
        bkd : :obj:`eml.backend.cplex_backend.CplexBackend`
            Cplex backend
        tree : :obj:`eml.tree.describe.DTNode`
            Decision tree
        mdl : :obj:`docplex.mp.model.Model`
            Cplex model
        tree_in : list(:obj:`docplex.mp.linear.Var`)
            Input continuous variables
        tree_out : :obj:`docplex.mp.linear.Var`
            Output continuous variable
        name : string
            Name of the tree
        verbose : int
            Currently unused (default 0)

    Returns
    -------
        Model Descriptor : :obj:`eml.util.ModelDesc`
            Descriptor of the instance of EML

    Raises
    ------
        ValueError
            If the threshold is in the 'right' branch or the tree has
            an output vector
    """
    # Quick argument check
    if not tree.thr_left:
        raise ValueError(
            'Trees where the threshold goes in the right branch are not yet supported')
    # Scalar output only: unwrap a one-element output vector.
    # BUG FIX: the original bare `except: pass` swallowed the
    # ValueError raised just above for vector outputs, so the promised
    # error never reached the caller; catch only the TypeError raised
    # by len() on a scalar variable
    try:
        if len(tree_out) > 1:
            raise ValueError('Trees with vector output are not yet supported')
        tree_out = tree_out[0]
    except TypeError:
        pass
    # Build a model descriptor
    desc = util.ModelDesc(tree, mdl, name)
    sn = name  # shortcut to the model name
    # Obtain the decision tree in rule format
    rules = _extract_rules(tree)
    nrules = len(rules)
    # ------------------------------------------------------------------------
    # Introduce a binary variable for each rule (i.e. root-to-leaf path)
    Z = []
    for k in range(nrules):
        if desc.has('path', k):
            zvar = desc.get('path', k)
        else:
            zvar = bkd.var_bin(mdl, '%s_p[%d]' % (sn, k))
            desc.store('path', k, zvar)
        Z.append(zvar)
    # Only one rule can be active at a time
    bkd.cst_eq(mdl, bkd.xpr_sum(mdl, Z), 1)
    # ------------------------------------------------------------------------
    # Class assignment: the output equals the class of the active rule
    coefs = [r[-1] for r in rules]
    bkd.cst_eq(mdl, tree_out, bkd.xpr_scalprod(mdl, coefs, Z))
    # ------------------------------------------------------------------------
    # Collapse conditions on the same attribute for each rule
    crules = []
    for k, r in enumerate(rules):
        res = {}
        for aname, atype, (th1, th2) in r[:-1]:
            if aname not in res:
                res[aname] = (th1, th2)
            else:
                # Intersect the (th1, th2] intervals
                oth1, oth2 = res[aname]
                res[aname] = (max(oth1, th1), min(oth2, th2))
        crules.append(res)
    # ------------------------------------------------------------------------
    # Process all conditions in all rules
    built = set()
    for k, r in enumerate(rules):
        for aname, atype, (th1, th2) in r[:-1]:
            # If the constraint has already been built, then do nothing
            if (aname, th1, th2) in built:
                continue
            # BUG FIX: the original never populated `built`, so the
            # de-duplication test above was dead code and duplicate
            # constraints were posted
            built.add((aname, th1, th2))
            # Identify all rules whose collapsed interval on this
            # attribute falls within (th1, th2]; uses a fresh index
            # name instead of shadowing the outer `k`
            based = [j for j, cr in enumerate(crules)
                     if aname in cr
                     and th1 <= cr[aname][0] and cr[aname][1] <= th2]
            # Upper-bound implication (big M)
            if th2 != float('inf'):
                M = tree.ub(aname)
                th = th2
                coefs = [1] + [M - th] * len(based)
                terms = [tree_in[aname]] + [Z[j] for j in based]
                bkd.cst_leq(mdl, bkd.xpr_scalprod(mdl, coefs, terms), M)
            # Lower-bound implication (big m); strict bound obtained
            # via a backend-defined epsilon
            if th1 != -float('inf'):
                m = tree.lb(aname)
                th = th1 + bkd.const_eps(mdl)
                coefs = [1] + [m - th] * len(based)
                terms = [tree_in[aname]] + [Z[j] for j in based]
                bkd.cst_geq(mdl, bkd.xpr_scalprod(mdl, coefs, terms), m)
    # Return the descriptor
    return desc