def from_coordinates(cls, x1, y1, x2, y2):
    """Create segment from its end points.

    Parameters
    ----------
    x1: int or float
        Coordinate x of the start of segment.
    y1: int or float
        Coordinate y of the start of segment.
    x2: int or float
        Coordinate x of the end of segment.
    y2: int or float
        Coordinate y of the end of segment.

    Returns
    -------
    segment
        New instance built from the equivalent (start, angle, length)
        representation.
    """
    # Convert the two-point representation into the internal
    # (start, angle, length) parametrization used by __init__.
    length = segment_length(x1, y1, x2, y2)
    angle = segment_angle(x1, y1, x2, y2)
    return cls(start=(x1, y1), angle=angle, length=length)
def set_param(self, param_name, value):
    """Set parameter of figure.

    Parameters
    ----------
    param_name: str
        Name of parameter to set value. Must be in
        ['x1', 'y1', 'x2', 'y2', 'length', 'angle'].
    value: int or float
        Value to set.

    Returns
    -------
    self
        Returned for call chaining.

    Raises
    ------
    IncorrectParamValue: if param is incorrect.
    """
    if param_name == 'x1':
        self._base = (float(value), self._base[1])
    elif param_name == 'y1':
        self._base = (self._base[0], float(value))
    elif param_name == 'length':
        self._length = float(value)
    elif param_name == 'angle':
        self._angle = float(value)
    elif param_name in ('x2', 'y2'):
        # The end point is not stored directly: patch it into the
        # (x1, y1, x2, y2) representation, then re-derive the stored
        # length/angle parametrization.
        base_repr = list(self.get_base_representation())
        if param_name == 'x2':
            base_repr[2] = value
        else:
            base_repr[3] = value  # y2
        length = segment_length(*base_repr)
        angle = segment_angle(*base_repr)
        self._length, self._angle = length, angle
    else:
        raise IncorrectParamValue(
            f'Incorrect name of parameter: {param_name}.')

    return self
def check(self, x, y):
    """Check if given coordinates places in zone of binding.

    Parameters
    ----------
    x, y: int or float
        Coordinates of cursor.

    Returns
    -------
    checking_result: float or None
        None if cursor is out of binding zone.
        Distance between cursor and point of binding.
    """
    anchor_x, anchor_y = self._coordinates()
    dist = segment_length(x, y, anchor_x, anchor_y)
    # Inside the binding radius the distance itself is the result;
    # outside it the cursor misses the binding entirely.
    return dist if dist <= self._radius else None
def forward(self, pattern, pattern_len, graph, graph_len):
    """Run the two-stream (pattern / graph) encoder and prediction head.

    Parameters
    ----------
    pattern, graph : torch.Tensor
        Batched pattern and graph inputs; exact shapes depend on the
        embedding modules -- assumed (batch, seq, ...), TODO confirm.
    pattern_len, graph_len : torch.Tensor
        Per-example valid lengths used for gating, segmenting and masking.

    Returns
    -------
    pred
        Output of ``self.predict_net`` over the two encoded streams.
    """
    # data, target, *mems
    # nn.DataParallel does not allow size(0) tensors to be broadcasted.
    # So, have to initialize size(0) mems inside the model forward.
    # Moreover, have to return new_mems to allow nn.DataParallel to piece
    # them together.
    bsz = pattern_len.size(0)  # NOTE(review): unused local — confirm it can be dropped
    # Gate that zeroes out graph positions filtered by the pattern;
    # may be None, in which case no masking is applied.
    gate = self.get_filter_gate(pattern, pattern_len, graph, graph_len)
    zero_mask = (gate == 0).unsqueeze(-1) if gate is not None else None
    pattern_emb, graph_emb = self.get_emb(pattern, pattern_len, graph, graph_len)
    if zero_mask is not None:
        # In-place: gated-out graph positions contribute nothing downstream.
        graph_emb.masked_fill_(zero_mask, 0.0)
    pattern_emb = self.p_emb_proj(pattern_emb).mul_(self.emb_scale)
    graph_emb = self.g_emb_proj(graph_emb).mul_(self.emb_scale)
    # Split each sequence into tgt_len-sized segments for the
    # Transformer-XL-style memory recurrence below.
    # NOTE(review): this `segment_length(lengths, tgt_len)` has a different
    # signature than the geometric segment_length(x1, y1, x2, y2) used
    # elsewhere — presumably a different helper; verify imports.
    pattern_segments = segment_data(pattern_emb, self.tgt_len)
    pattern_seg_lens = segment_length(pattern_len, self.tgt_len)
    graph_segments = segment_data(graph_emb, self.tgt_len)
    graph_seg_lens = segment_length(graph_len, self.tgt_len)
    pattern_outputs = list()
    for i, (pattern_seg, pattern_seg_len) in enumerate(
            zip(pattern_segments, pattern_seg_lens)):
        if i == 0:
            # Mems must be created per-forward (see DataParallel note above);
            # thereafter each segment consumes the mems of the previous one.
            pattern_mems = self.init_mems(len(self.p_net), pattern_seg)
        pattern_output, pattern_mems = self.encoder_forward(
            pattern_seg, pattern_seg_len, self.p_net, self.p_params,
            mems=pattern_mems)
        pattern_outputs.append(pattern_output)
    # Re-join segments along the time axis and trim segment padding back
    # to the original embedded length.
    pattern_output = torch.cat(pattern_outputs, dim=1)[:, :pattern_emb.size(1)]
    # some segments may only have padded elements, we need to set them as 0 manually
    pattern_mask = (batch_convert_len_to_mask(
        pattern_len, max_seq_len=pattern_output.size(1)) == 0).unsqueeze(-1)
    pattern_output.masked_fill_(pattern_mask, 0.0)
    # Same segmented encoding for the graph stream, with its own net/params.
    graph_outputs = list()
    for i, (graph_seg, graph_seg_len) in enumerate(zip(graph_segments,
                                                       graph_seg_lens)):
        if i == 0:
            graph_mems = self.init_mems(len(self.g_net), graph_seg)
        graph_output, graph_mems = self.encoder_forward(graph_seg,
                                                        graph_seg_len,
                                                        self.g_net,
                                                        self.g_params,
                                                        mems=graph_mems)
        graph_outputs.append(graph_output)
    graph_output = torch.cat(graph_outputs, dim=1)[:, :graph_emb.size(1)]
    # some segments may only have padded elements, we need to set them as 0 manually
    graph_mask = (batch_convert_len_to_mask(
        graph_len, max_seq_len=graph_output.size(1)) == 0).unsqueeze(-1)
    graph_output.masked_fill_(graph_mask, 0.0)
    if self.add_enc:
        # Optionally concatenate extra (positional/structural?) encodings
        # along the feature dim; gated graph positions are zeroed the same way.
        pattern_enc, graph_enc = self.get_enc(pattern, pattern_len, graph, graph_len)
        if zero_mask is not None:
            graph_enc.masked_fill_(zero_mask, 0.0)
        pattern_output = torch.cat([pattern_enc, pattern_output], dim=2)
        graph_output = torch.cat([graph_enc, graph_output], dim=2)
    pred = self.predict_net(pattern_output, pattern_len, graph_output, graph_len)
    return pred