import six
from chainer import cuda
# NOTE: `to_device` and `_concat_arrays` below are assumed to be the helpers
# from chainer.dataset.convert (`_concat_arrays` is a private function there);
# adjust this import if your project ships its own versions of these helpers.
from chainer.dataset.convert import _concat_arrays, to_device


def concat_examples(batch, device=None):
    if len(batch) == 0:
        raise ValueError('batch is empty')

    # Resolve the transfer function from the integer device id
    # (None = keep arrays as they are, negative = CPU, otherwise GPU).
    if device is None:
        def to_device(x):
            return x
    elif device < 0:
        to_device = cuda.to_cpu
    else:
        def to_device(x):
            return cuda.to_gpu(x, device, cuda.Stream.null)

    result = [
        to_device(_concat_arrays([s[0] for s in batch], -1)),  # ws
        to_device(_concat_arrays([s[1] for s in batch], -1)),  # ps
        to_device(_concat_arrays([s[2] for s in batch], -1)),  # ss
        [s[3] for s in batch],  # ls
    ]
    if len(batch[0]) == 7:
        result.append([to_device(s[4]) for s in batch])  # cat_ts
        result.append([to_device(s[5]) for s in batch])  # dep_ts
        result.append(
            to_device(_concat_arrays([s[6] for s in batch], None)))  # weights
    return tuple(result)

def concat_examples(batch, device=None, padding=None,
                    indices_concat=None, indices_to_device=None):
    if len(batch) == 0:
        raise ValueError('batch is empty')

    first_elem = batch[0]
    elem_size = len(first_elem)
    if indices_concat is None:
        indices_concat = range(elem_size)
    if indices_to_device is None:
        indices_to_device = range(elem_size)

    result = []
    if not isinstance(padding, tuple):
        padding = [padding] * elem_size

    for i in six.moves.range(elem_size):
        res = [example[i] for example in batch]
        if i in indices_concat:
            res = _concat_arrays(res, padding[i])
        if i in indices_to_device:
            if i in indices_concat:
                # The field was concatenated into one array: send it at once.
                res = to_device(device, res)
            else:
                # The field is still a list: send each per-example array.
                res = [to_device(device, r) for r in res]
        result.append(res)

    return tuple(result)

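# Minimal usage sketch for the converter above, assuming NumPy inputs and the
# chainer.dataset.convert helpers imported at the top of this file.  The toy
# batch and variable names are illustrative only: field 0 is concatenated and
# moved to the device, while field 1 stays a host-side list of arrays.
import numpy as np

_toy_batch = [
    (np.zeros((3, 32, 32), dtype=np.float32), np.array([i], dtype=np.int32))
    for i in range(4)
]
_imgs, _labels = concat_examples(
    _toy_batch, device=-1, indices_concat=[0], indices_to_device=[0])
# _imgs has shape (4, 3, 32, 32); _labels is a list of four (1,) arrays.
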
def list_examples(batch, device=None, padding=None):
    if len(batch) == 0:
        raise ValueError('batch is empty')
    if padding is not None:
        raise NotImplementedError

    first_elem = batch[0]

    if isinstance(first_elem, tuple):
        # Keep each field as a list of per-example arrays instead of
        # concatenating them into a single batched array.
        result = []
        for i in six.moves.range(len(first_elem)):
            result.append(
                [to_device(device, example[i]) for example in batch])
        return tuple(result)
    elif isinstance(first_elem, dict):
        result = {}
        for key in first_elem:
            result[key] = [
                to_device(device, example[key]) for example in batch]
        return result
    else:
        raise NotImplementedError

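# Hedged sketch of `list_examples` on a dict-style batch: every value stays a
# Python list of per-example arrays, so variable-length fields need no
# padding.  The keys and shapes below are made up for illustration.
import numpy as np

_dict_batch = [
    {'tokens': np.arange(5, dtype=np.int32), 'label': np.array(0, np.int32)},
    {'tokens': np.arange(7, dtype=np.int32), 'label': np.array(1, np.int32)},
]
_out = list_examples(_dict_batch, device=-1)
# _out['tokens'] is a list of two arrays of lengths 5 and 7; nothing is
# concatenated, which is exactly what this converter is for.
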
def convert_sg(batch, device):
    if len(batch) == 0:
        raise ValueError('batch is empty')

    result = [
        # First field: concatenated into one array and sent to the device.
        to_device(device,
                  _concat_arrays([example[0] for example in batch], None)),
        # Second field: kept as a plain Python list on the host.
        [example[1] for example in batch],
    ]
    return tuple(result)

def batch_without_padding(name, batch, device=None):
    """Batch a single attribute without padding (increases ndim by 1).

    Args:
        name (str): property name of the graph data attribute to batch.
        batch (list[BaseGraphData]): list of base graph data.
        device (chainer.backend.Device, optional): device. Defaults to None.

    Returns:
        The concatenated array of the ``name`` attribute, sent to ``device``.
    """
    feat = _concat_arrays([getattr(example, name) for example in batch], None)
    return device.send(feat)

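# Hedged usage sketch: `batch_without_padding` reads one attribute per call,
# so it pairs naturally with functools.partial to build per-attribute
# converters.  SimpleNamespace stands in for BaseGraphData here (only
# attribute access is needed), and the '@numpy' device keeps the data on the
# host; both are assumptions made purely for illustration.
import functools
from types import SimpleNamespace

import chainer
import numpy as np

_graphs = [SimpleNamespace(x=np.ones((4, 3), np.float32)) for _ in range(2)]
_convert_x = functools.partial(batch_without_padding, 'x')
_batched_x = _convert_x(_graphs, chainer.get_device('@numpy'))
# _batched_x has shape (2, 4, 3): the per-graph arrays are stacked along a
# new leading axis (ndim increased by one, no padding applied).
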
def concat_examples(batch, device=None):
    # batch: img, mask, label, scale
    if len(batch) == 0:
        raise ValueError('batch is empty')

    first_elem = batch[0]
    result = []
    for i in six.moves.range(len(first_elem)):
        array = _concat_arrays([example[i] for example in batch], None)
        if i == 0:  # img
            # Only the image array is moved to the device; the mask, label
            # and scale arrays stay on the host.
            result.append(to_device(device, array))
        else:
            result.append(array)
    return tuple(result)

def concat_examples(batch, device=None, padding=None):
    if len(batch) == 0:
        raise ValueError('batch is empty')

    first_elem = batch[0]
    result = []
    if not isinstance(padding, tuple):
        padding = [padding] * len(first_elem)

    for i in six.moves.range(len(first_elem)):
        res = _concat_arrays([example[i] for example in batch], padding[i])
        if i in [0, 1]:  # img, bbox
            # Only the image and bounding-box arrays are moved to the device.
            res = to_device(device, res)
        result.append(res)
    return tuple(result)

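# Hedged sketch of the padding behaviour, assuming NumPy inputs: the two toy
# examples below carry different numbers of bounding boxes, so the bbox and
# label fields are padded with zeros up to the largest count before batching.
import numpy as np

_toy_batch = [
    (np.zeros((3, 8, 8), np.float32),
     np.array([[0, 0, 4, 4], [1, 1, 5, 5]], np.float32),
     np.array([1, 2], np.int32)),
    (np.zeros((3, 8, 8), np.float32),
     np.array([[2, 2, 6, 6]], np.float32),
     np.array([3], np.int32)),
]
_imgs, _bboxes, _labels = concat_examples(_toy_batch, device=-1, padding=0)
# _bboxes has shape (2, 2, 4): the second example's missing row is zero-padded.
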