Example #1
 def generate_general_reduced_rules(self, counter, i, j):
     """generate_general_reduced_rules
     Generates the middle rules, starting at relation i and ending at relation j
     :param counter: The counter used to avoid collisions between non-terminals
     :param i: index of the first relation
     :param j: index of the last relation
     :return: A couple (rules, counter) containing the generated rules and the
     new counter value
     """
     rules = []
     rules.append(
         DuplicationRule("C", "A" + str(counter), "D" + str(counter)))
     temp_counter = counter
     temp = stack(["end"] + self.part1[:i], counter + 1, "A" + str(counter),
                  "C")
     counter = temp[1]
     rules = rules + temp[0]
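     # Unstack each combination of part0 symbols between i and j, going from the
     # "D" non-terminal to the matching "Cback" non-terminal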
     for p in self.get_part0_combinations(i, j + 1):
         temp = unstack(
             p, self.part0, temp_counter, "D" + str(temp_counter),
             "Cback" + str(temp_counter + self.n_relations() + 1))
         counter = temp[1]
         rules = rules + temp[0]
     counter = max(counter, temp_counter)
     counter += 1
     part1_temp = [p + "_IN" for p in self.part1[j + 1:]]
     temp = stack(part1_temp, counter,
                  "Cback" + str(temp_counter + self.n_relations() + 1), "C")
     counter = temp[1]
     rules = rules + temp[0]
     return (rules, counter)
Example #2
def slidingTopK(h, K, M, mask=None, stride=1):
    """ Performs KNN on each input pixel with a window of MxM.
	ONLY STRIDE==1 WORKS FOR NOW...
	"""
    if stride != 1:
        raise NotImplementedError
    # form index set that follows the reflection padding of input vector
    index = torch.arange(h.shape[-2] * h.shape[-1]).reshape(
        1, 1, h.shape[-2], h.shape[-1]).float()
    index = utils.conv_pad(index, M, mode='reflect')
    hp = utils.conv_pad(h, M, mode='reflect')
    hs = utils.stack(hp, M, stride)  # (B,I,J,C,M,M)
    B, I, J = hs.shape[:3]
    hbs = utils.batch_stack(hs)  # (BIJ, C, M, M)
    ibs = utils.batch_stack(utils.stack(index, M, stride))
    cpx = (M - 1) // 2
    pad = (int(np.floor((stride - 1) / 2)), int(np.ceil((stride - 1) / 2)))
    v = hbs[..., (cpx - pad[0]):(cpx + pad[1] + 1),
            (cpx - pad[0]):(cpx + pad[1] + 1)]
    S = v.shape[-1]
    print(f"forming adjacency matrix...")
    G = graphAdj(v, hbs, mask)  # (BIJ, SS, MM)
    ibs = ibs.reshape(B * I * J, 1, M * M)
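    # indices of the K nearest neighbours inside each MxM window: (BIJ, SS, K)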
    edge = torch.topk(G, K, largest=False).indices
    edge = edge + torch.arange(0, B * I * J, device=h.device).reshape(
        -1, 1, 1) * M * M
    edge = torch.index_select(ibs.reshape(-1, 1), 0, edge.flatten())
    edge = edge.reshape(B * I * J, S * S, K).permute(0, 2,
                                                     1).reshape(-1, K, S, S)
    edge = utils.unbatch_stack(edge, (I, J))
    edge = utils.unstack(edge)
    return edge.long()
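The snippet above relies on project-specific helpers (utils.conv_pad, utils.stack, utils.batch_stack, graphAdj) that are not shown. As a rough, self-contained illustration of the same sliding-window top-K idea (not the author's pipeline), the sketch below gathers every pixel's MxM neighbourhood with torch.nn.functional.unfold and keeps the K nearest channel vectors; unlike slidingTopK it returns window-local rather than global pixel indices, and the name sliding_topk_dense is made up for this example.

import torch
import torch.nn.functional as F

def sliding_topk_dense(h, K, M):
    """Toy stand-in: for every pixel of h (B, C, H, W), return the indices of
    its K nearest neighbours (squared L2 distance on the channel vectors)
    inside an MxM window. Indices are local to the window."""
    B, C, H, W = h.shape
    pad = (M - 1) // 2
    hp = F.pad(h, (pad, pad, pad, pad), mode='reflect')
    # neighbourhoods: (B, C*M*M, H*W) -> (B, H*W, M*M, C)
    nbr = F.unfold(hp, M).reshape(B, C, M * M, H * W).permute(0, 3, 2, 1)
    center = h.reshape(B, C, H * W).permute(0, 2, 1).unsqueeze(2)  # (B, H*W, 1, C)
    dist = ((nbr - center) ** 2).sum(-1)                           # (B, H*W, M*M)
    return torch.topk(dist, K, largest=False).indices              # (B, H*W, K)

edge = sliding_topk_dense(torch.randn(1, 3, 16, 16), K=8, M=7)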
Example #3
 def _my_reshape(inp):
     # inp [num_samples, num_randoms, generation_dim, bins, num_classification]
     # inp_reshaped [num_classification, num_samples * num_randoms * generation_dim * bins]
     inp_reshaped = unstack(inp, axis=2)  # generation_dim * [num_samples, num_randoms, bins, num_classification]
     inp_reshaped = np.concatenate(inp_reshaped,
                                   axis=2)  # [num_samples, num_randoms, generation_dim * bins, num_classification]
     inp_reshaped = unstack(inp_reshaped,
                            axis=1)  # num_randoms * [num_samples, generation_dim * bins, num_classification]
     inp_reshaped = np.concatenate(inp_reshaped,
                                   axis=1)  # [num_samples, num_randoms * generation_dim * bins, num_classification]
     inp_reshaped = unstack(inp_reshaped,
                            axis=0)  # num_samples * [num_randoms * generation_dim * bins, num_classification]
     inp_reshaped = np.concatenate(inp_reshaped,
                                   axis=0)  # [num_samples * num_randoms * generation_dim * bins, num_classification]
     inp_reshaped = np.transpose(inp_reshaped,
                                 (1, 0))  # [num_classification, num_samples * num_randoms * generation_dim * bins]
     return inp_reshaped
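Assuming unstack(a, axis=k) simply splits the array into a list of slices along axis k (the helper itself is not shown here), the whole unstack/concatenate chain above only moves data around, so it collapses to a single reshape followed by a transpose. A minimal sketch checking that equivalence on a toy array:

import numpy as np

def unstack(a, axis=0):
    # assumed behaviour of the missing helper: split into slices along `axis`
    return [np.squeeze(s, axis=axis) for s in np.split(a, a.shape[axis], axis=axis)]

inp = np.random.rand(2, 3, 4, 5, 6)  # (num_samples, num_randoms, generation_dim, bins, num_classification)

x = np.concatenate(unstack(inp, axis=2), axis=2)
x = np.concatenate(unstack(x, axis=1), axis=1)
x = np.concatenate(unstack(x, axis=0), axis=0)
x = np.transpose(x, (1, 0))

assert np.array_equal(x, inp.reshape(-1, inp.shape[-1]).T)  # same result as _my_reshape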
Example #4
 def consume_tree(self, first: str, last: str,
                  counter: int) -> Tuple[List[ReducedRule], int]:
     """consume_tree
     Consumes the tree to create the grammar rules between two non-terminals
     :param first: The first symbol in the grammar to use
     :param last: The last symbol in the grammar to use
     :param counter: A counter to prevent duplication of non-terminals
     :type first: str
     :type last: str
     :type counter: int
     :return: A couple which contains first the generated rules and then
                 the next counter to use
     :rtype: A couple of a list of ReducedRule and an int
     """
     rules = []
     if not self.head.is_str():
         # It means we have a function...
         return utils.unstack(self.head.get_function().part0, [], counter,
                              first, last)
     elif self.head.get_str() == ".":
         # When concatenation
         temp_nt = [first]
         # Creates intermediate non-terminals
         for _ in range(len(self.sons)):
             temp_nt.append("C" + str(counter))
             counter += 1
         # Consume each son separately and join them with the intermediate
         # non-terminals
         for i in range(len(self.sons)):
             temp = self.sons[i].consume_tree(temp_nt[i], temp_nt[i + 1],
                                              counter)
             rules += temp[0]
             counter = temp[1]
         # Link the last intermediate non-terminal to the last non-terminal
         rules.append(DuplicationRule(temp_nt[-1], "T", last))
         return (rules, counter)
     elif self.head.get_str() == "|":
         # Or node
         # Each son is consumed separately and they all have the same first
         # and last non-terminals
         for son in self.sons:
             temp = son.consume_tree(first, last, counter)
             rules += temp[0]
             counter = temp[1]
         return (rules, counter)
     elif self.head.get_str() == "*":
         # Kleene star
         # We make the first symbol go directly to the last one, to simulate
         # the empty case
         rules.append(DuplicationRule(first, "T", last))
         # Normally there is just one son
         for son in self.sons:
             # We should end where we begin
             temp = son.consume_tree(last, last, counter)
             rules += temp[0]
             counter = temp[1]
         return (rules, counter)
     return (rules, counter)
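As a concrete illustration of the concatenation branch (with hypothetical names): for three sons, first="S", last="F" and counter=0, the intermediate non-terminals are ["S", "C0", "C1", "C2"]; son i is consumed between temp_nt[i] and temp_nt[i + 1], and the final DuplicationRule("C2", "T", "F") links the last intermediate non-terminal to last.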
Example #5
 def generate_palindrome_rules(self, counter, susie=False):
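     """generate_palindrome_rules
     Generates the palindrome rules: stacks part1 around "Cforward", then
     unstacks part0 from "Cbackward"
     :param counter: counter used to avoid duplicated non-terminals
     :param susie: if True, the unstacking ends on "Cend" instead of
     "Cbackward"
     :return: A couple (rules, counter) containing the generated rules and the
     new counter value
     """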
     rules = []
     temp = stack(self.part1, counter, "Cforward", "Cforward")
     counter = temp[1]
     rules = rules + temp[0]
     last = "Cbackward"
     if susie:
         last = "Cend"
     temp = unstack(self.part0, self.part0, counter, "Cbackward", last)
     counter = temp[1]
     rules = rules + temp[0]
     return (rules, counter)
Example #6
 def generate_palindrome_rules(self, counter, susie):
     rules = []
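     # With fewer than two inputs, stack part1 directly around "Cforward";
     # otherwise go through an intermediate "C_inter" non-terminal first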
     if self.n_inputs < 2:
         temp = stack(self.part1, counter, "Cforward", "Cforward")
         counter = temp[1]
         rules = rules + temp[0]
     else:
         c_temp = "C_inter" + str(counter)
         counter += 1
         temp = unstack(self.part1[self.n_inputs - 2::-1], self.part1,
                        counter, "Cforward", c_temp)
         counter = temp[1]
         rules = rules + temp[0]
         temp = stack(self.part1, counter, c_temp, "Cforward")
         counter = temp[1]
         rules = rules + temp[0]
     last = "Cbackward"
     if susie:
         last = "Cend"
     temp = unstack(self.part0, self.part0, counter, "Cbackward", last)
     counter = temp[1]
     rules = rules + temp[0]
     return (rules, counter)
Example #7
 def generate_left_reduced_rules(self, counter):
     """generate_left_reduced_rules
     Generates the reduced left rules as described in the paper.
     :param counter: counter used to make sure we do not duplicate
     non-terminals. It MUST be updated after the function.
     :return: A couple (rules, counter) containing the generated rules and the
     new counter value
     """
     rules = []
     for i in range(1, self.n_relations() + 1):
         temp = unstack(self.part0[0:i], self.part0, counter, "C",
                        "Cback" + str(counter + self.n_relations() + 1))
         counter = temp[1]
         rules = rules + temp[0]
         temp = stack(self.part1[i:], counter, "Cback" + str(counter), "C")
         counter = temp[1]
         rules = rules + temp[0]
     return (rules, counter)
Example #8
 def generate_right_reduced_rules(self, counter):
     """generate_right_reduced_rules
     Generates the reduced right rules as described in the paper.
     :param counter: counter used to make sure we do not duplicate
     non-terminals. It MUST be updated after the function.
     :return: A couple (rules, counter) containing the generated rules and the
     new counter value
     """
     rules = []
     for i in range(1, self.n_relations()):
         rules.append(
             DuplicationRule("C", "A" + str(counter), "D" + str(counter)))
         temp_counter = counter
         temp = stack(["end"] + self.part1[:i], counter, "A" + str(counter),
                      "C")
         counter = temp[1]
         rules = rules + temp[0]
         temp = unstack(self.part0[i:self.n_relations()], self.part0,
                        temp_counter, "D" + str(temp_counter), "C")
         counter = temp[1]
         rules = rules + temp[0]
         counter = max(counter, temp_counter)
         counter += 1
     return (rules, counter)
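All of the generators above follow the same bookkeeping contract: stack(...) and unstack(...) each return a couple (rules, next_counter); the caller concatenates the rules and threads the counter forward so freshly generated non-terminals never collide. The stubs below are not the real helpers, only a minimal, runnable sketch of that assumed contract:

from typing import List, Tuple

def stack(symbols, counter, start, end) -> Tuple[List[str], int]:
    # stub: pretend each pushed symbol consumes one fresh counter value
    rules = ["%s: push %s (uses C%d)" % (start, s, counter + i)
             for i, s in enumerate(symbols)] + ["-> %s" % end]
    return rules, counter + len(symbols)

def unstack(symbols, all_symbols, counter, start, end) -> Tuple[List[str], int]:
    # stub with the same (rules, next_counter) shape as the real helper
    rules = ["%s: pop %s (uses C%d)" % (start, s, counter + i)
             for i, s in enumerate(symbols)] + ["-> %s" % end]
    return rules, counter + len(symbols)

# the accumulation pattern shared by every generate_* method above
rules, counter = [], 0
temp = stack(["r1", "r2"], counter, "Cforward", "Cforward")
rules, counter = rules + temp[0], temp[1]
temp = unstack(["r1-", "r2-"], ["r1-", "r2-"], counter, "Cbackward", "Cend")
rules, counter = rules + temp[0], temp[1]
print(len(rules), counter)  # 6 rules, counter advanced to 4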