Example 1 (score: 0)
    def resort_working_array(self, chosen_values_arr, num):
        """Reorder the candidate items for slot *num*, best candidate first.

        chosen_values_arr -- values already fixed for the sequence being built
        num               -- index of the working array (parameter slot)

        Each item gets a ``weights`` vector; candidates are then sorted by
        that vector lexicographically (in place).
        """
        for item in self.__working_arr[num]:
            data_node = self.__pairs.get_node_info(item)

            new_combs = []
            for i in range(0, self.__n):
                # numbers of new combinations to be created if this item is
                # appended to array
                new_combs.append(
                    set([
                        pairs_storage.key(z) for z in xuniqueCombinations(
                            chosen_values_arr + [item], i + 1)
                    ]) - self.__pairs.get_combs()[i])
            # weighting the node
            # node that creates most of new pairs is the best
            item.weights = [-len(new_combs[-1])]
            # less used outbound connections most likely to produce more new
            # pairs while search continues
            item.weights += [len(data_node.out)]
            item.weights += [len(x) for x in reversed(new_combs[:-1])]
            item.weights += [-data_node.counter]  # less used node is better
            # otherwise we will prefer node with most of free inbound
            # connections; somehow it works out better ;)
            item.weights += [-len(data_node.in_)]

        # Key-based sort gives the same lexicographic ordering as the old
        # cmp-based form (cmp(a.weights, b.weights)) but also works on
        # Python 3, where the cmp argument to list.sort was removed.
        self.__working_arr[num].sort(key=lambda a: a.weights)
Example 2 (score: 0)
 def resort_working_array( self, chosen_values_arr, num ):
     """Reorder candidates for slot *num* so the item producing the most
     new combinations sorts first (in-place sort by weight vector)."""
     for item in self.__working_arr[num]:
         data_node = self.__pairs.get_node_info( item )

         new_combs = []
         for i in range(0, self.__n):
             # numbers of new combinations to be created if this item is appended to array
             new_combs.append( set([pairs_storage.key(z) for z in xuniqueCombinations( chosen_values_arr+[item], i+1)]) - self.__pairs.get_combs()[i] )
         # weighting the node
         item.weights =  [ -len(new_combs[-1]) ]    # node that creates most of new pairs is the best
         item.weights += [ len(data_node.out) ] # less used outbound connections most likely to produce more new pairs while search continues
         item.weights += [ len(x) for x in reversed(new_combs[:-1])]
         item.weights += [ -data_node.counter ]  # less used node is better
         item.weights += [ -len(data_node.in_) ] # otherwise we will prefer node with most of free inbound connections; somehow it works out better ;)

     # key-based sort is equivalent to the removed cmp(a.weights, b.weights)
     # comparator and is Python 3 compatible
     self.__working_arr[num].sort( key=lambda a: a.weights )
def get_max_comb_number(arr, n):
    """Return the total number of distinct n-wise value combinations.

    arr -- a sequence of per-parameter value lists
    n   -- the combination order (e.g. 2 for pairwise)

    For every way of choosing n parameters, the number of value tuples is
    the product of the chosen domain sizes; the result is the sum of those
    products over all parameter combinations.
    """
    sizes = [len(x) for x in arr]
    total = 0
    for combo in xuniqueCombinations(sizes, n):
        # Explicit product loop avoids reduce(), which is not a builtin on
        # Python 3 (it moved to functools).
        product = 1
        for size in combo:
            product *= size
        total += product
    return total
Example 4 (score: 0)
def get_max_comb_number( arr, n ):
    """Return the total number of distinct n-wise value combinations for
    *arr* (a sequence of per-parameter value lists): the sum, over every
    choice of n parameters, of the product of their domain sizes."""
    sizes = [len(x) for x in arr]
    total = 0
    for combo in xuniqueCombinations( sizes, n ):
        # explicit product avoids reduce(), not a builtin on Python 3
        product = 1
        for size in combo:
            product *= size
        total += product
    return total
Example 5 (score: 0)
def get_max_comb_number_from_dict( dict, n ):
    """Return the total number of distinct n-wise value combinations for
    the value lists held in *dict* (a mapping of parameter -> value list).

    NOTE(review): the parameter name shadows the builtin ``dict``; kept
    as-is to preserve the public signature for keyword callers.
    """
    items = [len(x) for x in dict.values()]
    # print items  -- debug leftover, disabled to match the sibling helpers
    total = 0
    for combo in xuniqueCombinations( items, n ):
        # explicit product avoids reduce(), not a builtin on Python 3
        product = 1
        for size in combo:
            product *= size
        total += product
    return total
 def count_new_combs(self, seq):
     """Return how many previously-unseen n-wise combinations *seq* adds."""
     produced = set(key(combo) for combo in xuniqueCombinations(seq, self.__n))
     unseen = produced - self.__combs_arr[-1]
     return len(unseen)
 def add_sequence(self, seq):
     """Register every sub-combination of *seq* of size 1 through n."""
     for size in range(1, self.__n + 1):
         for combination in xuniqueCombinations(seq, size):
             self.add(combination)
Example 8 (score: 0)
 def count_new_combs(self, seq):
     """Count the n-wise combinations in *seq* not yet in the store."""
     seen = self.__combs_arr[-1]
     fresh = set(key(c) for c in xuniqueCombinations(seq, self.__n)) - seen
     return len(fresh)
Example 9 (score: 0)
 def add_sequence(self, seq):
     """Add all combinations of *seq*, from singletons up to size n."""
     size = 1
     while size <= self.__n:
         for combination in xuniqueCombinations(seq, size):
             self.add(combination)
         size += 1