    def transformer(self, nn, man, element, nlb, nub, relu_groups, refine, timeout_lp, timeout_milp, use_default_heuristic, testing):
        """
        transforms element with ffn_matmult_zono
        
        Arguments
        ---------
        man : ElinaManagerPtr
            man to which element belongs
        element : ElinaAbstract0Ptr
            abstract element onto which the transformer gets applied
        
        Return
        ------
        output : ElinaAbstract0Ptr
            abstract element after the transformer
        """
        offset, old_length = self.abstract_information
        man, destructive, element, start_offset, weights, num_vars, expr_offset, expr_size = self.get_arguments(man, element)
        element = ffn_matmult_zono(man, destructive, element, start_offset, weights, self.bias, num_vars, expr_offset, expr_size)
        #if self.refine == 'True':
        #    refine_after_affine(self, man, element, nlb, nub)
        lbi, ubi = add_bounds(man, element, nlb, nub, self.output_length, offset+old_length)
        # print("num candidates here ", num_candidates)
        if config.use_2relu or config.use_3relu or config.dyn_krelu:
            encode_krelu_cons(nn, man, element, start_offset, nn.ffn_counter + nn.conv_counter, num_vars, lbi, ubi, relu_groups, False, 'refinezono')
        else:
            relu_groups.append([])

        nlb.append(lbi)
        nub.append(ubi) 

        nn.last_layer = 'Affine'
        if testing:
            return remove_dimensions(man, element, offset, old_length), nlb[-1], nub[-1]
        return remove_dimensions(man, element, offset, old_length)
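The lists nlb, nub, and relu_groups above grow in lockstep, one entry per layer; when k-ReLU is disabled, an empty group is still appended so the per-layer indices stay aligned. A minimal standalone sketch of that bookkeeping (plain Python, no ELINA; names illustrative):

nlb, nub, relu_groups = [], [], []

def record_layer(lbi, ubi, krelu_cons=None):
    # One entry per layer in every list, even when no k-ReLU constraints exist.
    relu_groups.append(krelu_cons if krelu_cons is not None else [])
    nlb.append(lbi)
    nub.append(ubi)

record_layer([-1.0, 0.3], [0.5, 0.9])             # layer analyzed without k-ReLU
record_layer([-0.2, 0.1], [0.4, 0.6], ['cons'])   # layer with encoded constraints
assert len(nlb) == len(nub) == len(relu_groups) == 2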
Example #2
def calc_bounds(man,
                element,
                nn,
                nlb,
                nub,
                relu_groups,
                is_refine_layer=False,
                destroy=True,
                use_krelu=False):
    layerno = nn.calc_layerno()
    bounds = box_for_layer(man, element, layerno)
    num_neurons = get_num_neurons_in_layer(man, element, layerno)
    itv = [bounds[i] for i in range(num_neurons)]
    lbi = [x.contents.inf.contents.val.dbl for x in itv]
    ubi = [x.contents.sup.contents.val.dbl for x in itv]
    if is_refine_layer:
        nlb.append(lbi)
        nub.append(ubi)
    if use_krelu:
        encode_krelu_cons(nn, man, element, 0, layerno, num_neurons, lbi, ubi,
                          relu_groups, False, 'refinepoly')
    if destroy:
        elina_interval_array_free(bounds, num_neurons)
        return lbi, ubi
    return layerno, bounds, num_neurons, lbi, ubi
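calc_bounds has two return shapes: with destroy=True the ELINA interval array is freed and only the bound lists come back; with destroy=False the caller receives the raw array and must free it later. A stub sketch of that contract (the ELINA box is replaced by fixed values, so this only illustrates the calling convention, not real bound computation):

def calc_bounds_stub(destroy=True):
    bounds = [(-1.0, 0.5), (0.2, 0.8)]         # stands in for box_for_layer
    lbi = [l for l, _ in bounds]
    ubi = [u for _, u in bounds]
    if destroy:
        return lbi, ubi                         # interval array already released
    return 0, bounds, len(bounds), lbi, ubi     # caller must free bounds itself

lbi, ubi = calc_bounds_stub()
layerno, bounds, n, lbi2, ubi2 = calc_bounds_stub(destroy=False)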
Example #3
    def transformer(self, nn, man, element, nlb, nub, relu_groups, refine, timeout_lp, timeout_milp):
        """
        transforms element with ffn_matmult_zono
        
        Arguments
        ---------
        man : ElinaManagerPtr
            man to which element belongs
        element : ElinaAbstract0Ptr
            abstract element onto which the transformer gets applied
        
        Return
        ------
        output : ElinaAbstract0Ptr
            abstract element after the transformer
        """
        offset, old_length = self.abstract_information
        man, destructive, element, start_offset, weights, num_vars, expr_offset, expr_size = self.get_arguments(man, element)
        element = ffn_matmult_zono(man, destructive, element, start_offset, weights, self.bias, num_vars, expr_offset, expr_size)
        #if self.refine == 'True':
        #    refine_after_affine(self, man, element, nlb, nub)
        dimension = elina_abstract0_dimension(man, element)
        var_in_element = dimension.intdim + dimension.realdim
        bounds = elina_abstract0_to_box(man, element)
        lbi = []
        ubi = []
        for i in range(num_vars):
            inf = bounds[i+start_offset].contents.inf
            sup = bounds[i+start_offset].contents.sup
            lbi.append(inf.contents.val.dbl)
            ubi.append(sup.contents.val.dbl)

        candidate_vars = []
        widths = []
        for i in range(num_vars):
            if (lbi[i] < 0 and ubi[i] > 0):
                candidate_vars.append(i)
                widths.append(ubi[i] - lbi[i])
        widths = np.asarray(widths)
        num_candidates = len(candidate_vars)
        # print("num candidates here ", num_candidates)
        sorted_width_indices = np.argsort(widths)
        encode_krelu_cons(nn, man, element, start_offset, nn.ffn_counter + nn.conv_counter, num_vars, lbi, ubi, relu_groups, False, 'refinezono')

        nlb.append(lbi)
        nub.append(ubi) 

        elina_interval_array_free(bounds, var_in_element)
        nn.last_layer = 'Affine'
        return remove_dimensions(man, element, offset, old_length)
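The candidate selection in Example #3 picks exactly the neurons whose interval straddles zero, i.e. the ones ReLU leaves unstable, and ranks them by interval width. A self-contained NumPy sketch of that logic with made-up bounds:

import numpy as np

lbi = np.array([-1.0, 0.2, -0.5, -2.0, 1.0])   # illustrative lower bounds
ubi = np.array([ 0.5, 0.8,  0.3, -0.5, 3.0])   # illustrative upper bounds

# Unstable under ReLU iff the interval strictly contains zero.
candidate_vars = [i for i in range(len(lbi)) if lbi[i] < 0 and ubi[i] > 0]
widths = ubi[candidate_vars] - lbi[candidate_vars]
sorted_width_indices = np.argsort(widths)       # narrowest first, as above
print(candidate_vars, widths[sorted_width_indices])   # [0, 2] [0.8 1.5]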
Example #4
    def transformer(self, nn, man, element, nlb, nub, relu_groups, refine, timeout_lp, timeout_milp, use_area_heuristic, testing):
        """
        transformer for any intermediate fully connected layer with relu
        
        Arguments
        ---------
        man : ElinaManagerPtr
            man to which element belongs
        element : ElinaAbstract0Ptr
            abstract element onto which the transformer gets applied
        
        Return
        ------
        output : ElinaAbstract0Ptr
            abstract element after the transformer 
        """
        ffn_handle_intermediate_relu_layer(man, element, *self.get_arguments(), use_area_heuristic)
        layerno, bounds, num_neurons, lbi, ubi = calc_bounds(man, element, nn, nlb, nub, relu_groups, is_refine_layer=True, destroy=False)
        candidate_vars = [i for i, (l, u) in enumerate(zip(lbi, ubi)) if l < 0 and u > 0]
        #print("lbi ", timeout_milp, "ubi ", timeout_lp)
        if refine:
            if layerno <= 1:
                use_milp = config.use_milp
            else:
                use_milp = 0

            if use_milp:
                timeout = timeout_milp
            else:
                timeout = timeout_lp

            if nn.is_ffn():
                resl, resu, indices = get_bounds_for_layer_with_milp(nn, nn.specLB, nn.specUB, layerno, layerno, num_neurons, nlb, nub, relu_groups, use_milp,  candidate_vars, timeout)

                for j in indices:
                    update_bounds_for_neuron(man, element, layerno, j, resl[j], resu[j])

                nlb[-1] = resl
                nub[-1] = resu

            encode_krelu_cons(nn, man, element, 0, layerno, num_neurons, lbi, ubi, relu_groups, False, 'refinepoly')

        elina_interval_array_free(bounds, num_neurons)
        nn.ffn_counter+=1
        if testing:
            return element, nlb[-1], nub[-1]
        return element
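The refinement policy in Example #4 reserves MILP for the earliest layers (layerno <= 1), presumably because exact solving is only affordable near the input, and falls back to the cheaper LP elsewhere, each with its own timeout. A standalone sketch of that selection (values illustrative):

def pick_solver(layerno, use_milp_config, timeout_lp, timeout_milp):
    # MILP only on the first layers; LP with its own time limit otherwise.
    use_milp = use_milp_config if layerno <= 1 else 0
    return use_milp, (timeout_milp if use_milp else timeout_lp)

assert pick_solver(0, 1, 10, 60) == (1, 60)   # early layer: MILP
assert pick_solver(3, 1, 10, 60) == (0, 10)   # deep layer: LP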
Example #5
    def transformer(self, nn, man, element, nlb, nub, relu_groups, refine, timeout_lp, timeout_milp, use_area_heuristic, testing):
        """
        transformer for a convolutional layer, if that layer is an intermediate of the network
        
        Arguments
        ---------
        man : ElinaManagerPtr
            man to which element belongs
        element : ElinaAbstract0Ptr
            abstract element onto which the transformer gets applied
        
        Return
        ------
        output : ElinaAbstract0Ptr
            abstract element after the transformer 
        """
        if self.has_relu:
            conv_handle_intermediate_relu_layer(man, element, *self.get_arguments(), use_area_heuristic)
        else:
            conv_handle_intermediate_affine_layer(man, element, *self.get_arguments(), use_area_heuristic)
        layerno, bounds, num_neurons, lbi, ubi = calc_bounds(man, element, nn, nlb, nub, relu_groups, is_refine_layer=True, destroy=False)
        candidate_vars = [i for i, (l, u) in enumerate(zip(lbi, ubi)) if l < 0 and u > 0]

        if refine:
            use_milp = config.use_milp
            if use_milp:
                timeout = timeout_milp
            else:
                timeout = timeout_lp
            #numconvslayers = sum('Conv2D' in l for l in nn.layertypes)
            #if numconvslayers-nn.conv_counter <= 1:
            if nn.is_ffn():

                resl, resu, indices = get_bounds_for_layer_with_milp(nn, nn.specLB, nn.specUB, layerno, layerno, num_neurons, nlb, nub, relu_groups, use_milp, candidate_vars, timeout)

                nlb[-1] = resl
                nub[-1] = resu

                for j in indices:
                    update_bounds_for_neuron(man, element, layerno, j, resl[j], resu[j])

            encode_krelu_cons(nn, man, element, 0, layerno, num_neurons, lbi, ubi, relu_groups, False, 'refinepoly')

        elina_interval_array_free(bounds, num_neurons)
        nn.conv_counter+=1
        if testing:
            return element, nlb[-1], nub[-1]
        return element
Example #6
def refine_relu_with_solver_bounds(nn, self, man, element, nlb, nub,
                                   relu_groups, timeout_lp, timeout_milp,
                                   use_default_heuristic, domain):
    """
    refines the relu transformer

    Arguments
    ---------
    self : Object
        will be a DeepzonoNode or DeeppolyNode
    man : ElinaManagerPtr
        manager which is responsible for element
    element : ElinaAbstract0Ptr
        the element in which the results after affine transformation are stored
    nlb: list of list of doubles
        contains the lower bounds for all neurons up to layer layerno
    nub: list of list of doubles
        contains the upper bounds for all neurons up to layer layerno
    timeout_lp: double
        time limit for refinement with LP
    timeout_milp: double
        time limit for refinement with MILP
    Return
    ------
     the updated abstract element
    """
    layerno = nn.calc_layerno()
    predecessor_index = nn.predecessors[layerno + 1][0] - 1
    if domain == 'deepzono':
        offset, length = self.abstract_information
    else:
        offset = 0
        length = get_num_neurons_in_layer(man, element, predecessor_index)
    lbi = nlb[predecessor_index]
    ubi = nub[predecessor_index]
    first_FC = -1
    timeout = timeout_milp
    for i in range(nn.numlayer):
        if nn.layertypes[i] == 'FC':
            first_FC = i
            break

    if nn.activation_counter == 0:
        if domain == 'deepzono':
            encode_krelu_cons(nn, man, element, offset, predecessor_index,
                              length, lbi, ubi, relu_groups, False,
                              'refinezono')
            element = relu_zono_layerwise(man, True, element, offset, length,
                                          use_default_heuristic)
            return element
        else:
            lower_bound_expr, upper_bound_expr = encode_krelu_cons(
                nn, man, element, offset, predecessor_index, length, lbi, ubi,
                relu_groups, False, 'refinepoly')
            handle_relu_layer(*self.get_arguments(man, element),
                              use_default_heuristic)
            #if config.refine_neurons == True:
            update_relu_expr_bounds(man, element, layerno, lower_bound_expr,
                                    upper_bound_expr, lbi, ubi)

    else:

        if predecessor_index == first_FC:
            use_milp = 1
        else:
            use_milp = 0
            timeout = timeout_lp
        use_milp = use_milp and config.use_milp
        candidate_vars = []
        for i in range(length):
            if ((lbi[i] < 0 and ubi[i] > 0) or (lbi[i] > 0)):
                candidate_vars.append(i)
        #TODO handle residual layers here
        if config.refine_neurons:
            resl, resu, indices = get_bounds_for_layer_with_milp(
                nn, nn.specLB, nn.specUB, predecessor_index, predecessor_index,
                length, nlb, nub, relu_groups, use_milp, candidate_vars,
                timeout)
            nlb[predecessor_index] = resl
            nub[predecessor_index] = resu

        lbi = nlb[predecessor_index]
        ubi = nub[predecessor_index]

        if domain == 'deepzono':
            encode_krelu_cons(nn, man, element, offset, predecessor_index,
                              length, lbi, ubi, relu_groups, False,
                              'refinezono')
            if config.refine_neurons:
                j = 0
                for i in range(length):
                    if ((j < len(indices)) and (i == indices[j])):

                        element = relu_zono_refined(man, True, element,
                                                    i + offset, resl[i],
                                                    resu[i])
                        j = j + 1
                    else:
                        element = relu_zono(man, True, element, i + offset)
                return element

            else:

                element = relu_zono_layerwise(man, True, element, offset,
                                              length, use_default_heuristic)
                return element
        else:
            if config.refine_neurons:
                for j in indices:
                    update_bounds_for_neuron(man, element, predecessor_index,
                                             j, resl[j], resu[j])
            lower_bound_expr, upper_bound_expr = encode_krelu_cons(
                nn, man, element, offset, predecessor_index, length, lbi, ubi,
                relu_groups, False, 'refinepoly')
            handle_relu_layer(*self.get_arguments(man, element),
                              use_default_heuristic)
            update_relu_expr_bounds(man, element, layerno, lower_bound_expr,
                                    upper_bound_expr, lbi, ubi)
    return element
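Two small index conventions drive the function above: the predecessor of layer layerno is looked up at predecessors[layerno + 1] with its stored id shifted down by one (the off-by-one pattern suggests entry 0 describes the network input and the stored ids are 1-based, though that reading is an assumption here), and MILP is reserved for the first fully connected layer, found by a linear scan. A standalone sketch of both on toy data:

predecessors = [[0], [1], [2], [3]]    # toy feed-forward chain, 1-based ids
layerno = 2
predecessor_index = predecessors[layerno + 1][0] - 1   # index into nlb/nub
assert predecessor_index == 2

layertypes = ['Conv2D', 'Conv2D', 'FC', 'FC']          # illustrative sequence
first_FC = next((i for i, t in enumerate(layertypes) if t == 'FC'), -1)
assert first_FC == 2                                    # the MILP target layer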
Example #7
def refine_relu_with_solver_bounds(nn, self, man, element, nlb, nub, relu_groups, timeout_lp, timeout_milp, use_default_heuristic):
    """
    refines the relu transformer

    Arguments
    ---------
    self : Object
        will be a DeepzonoNode, but could be any object
    man : ElinaManagerPtr
        manager which is responsible for element
    element : ElinaAbstract0Ptr
        the element in which the results after affine transformation are stored
    nlb: list of list of doubles
        contains the lower bounds for all neurons up to layer layerno
    nub: list of list of doubles
        contains the upper bounds for all neurons up to layer layerno
    timeout_lp: double
        time limit for refinement with LP
    timeout_milp: double
        time limit for refinement with MILP
    Return
    ------
     the updated abstract element
    """
    
    offset, length = self.abstract_information
    layerno = nn.calc_layerno()
    lbi = nlb[-1]
    ubi = nub[-1]
    if layerno == 0 or nn.last_layer == 'Conv2D':
        element = relu_zono_layerwise(man, True, element, offset, length, use_default_heuristic)
    else:
        is_conv = False
        timeout = timeout_milp
        for i in range(nn.numlayer):
            if nn.layertypes[i] == 'Conv2D':
                is_conv = True
                break

        if is_conv:
            use_milp = 1
        else:
            if layerno <= 3:
                use_milp = 1
            else:
                use_milp = 0
                timeout = timeout_lp
        use_milp = use_milp and config.use_milp
        candidate_vars = []
        for i in range(length):
            if (lbi[i] < 0 and ubi[i] > 0) or lbi[i] > 0:
                candidate_vars.append(i)
        #TODO handle residual layers here
        resl, resu, indices = get_bounds_for_layer_with_milp(nn, nn.specLB, nn.specUB, layerno, layerno, length, nlb, nub, relu_groups, use_milp,  candidate_vars, timeout)
        nlb[-1] = resl
        nub[-1] = resu

        lbi = nlb[layerno]
        ubi = nub[layerno]
        if config.use_2relu or config.use_3relu or config.dyn_krelu:
            encode_krelu_cons(nn, man, element, offset, layerno, length, lbi, ubi, relu_groups, True, 'refinezono')
        j = 0
        for i in range(length):
            if (j < len(indices)) and (i == indices[j]):
                element = relu_zono_refined(man, True, element, i + offset, resl[i], resu[i])
                j += 1
            else:
                element = relu_zono(man, True, element, i + offset)

    return element
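The final loop interleaves two transformers in one pass: indices is sorted, so a single cursor j suffices to apply the solver-refined ReLU exactly at the improved neurons and the default ReLU everywhere else. A standalone sketch of that merge, with the ELINA transformers replaced by tags:

def dispatch(length, indices):
    ops, j = [], 0
    for i in range(length):
        if j < len(indices) and i == indices[j]:
            ops.append(('refined', i))   # relu_zono_refined in the original
            j += 1
        else:
            ops.append(('plain', i))     # relu_zono in the original
    return ops

assert dispatch(4, [1, 3]) == [('plain', 0), ('refined', 1),
                               ('plain', 2), ('refined', 3)]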