def init_op(self, inputs):
    """Set up a Transpose node: the output shape is the input shape reversed."""
    self.op = 'Transpose'
    # Reversing the shape tuple is exactly np.zeros(shape).transpose().shape,
    # without allocating a probe array of the full size.
    shape = tuple(reversed(inputs[0].shape))
    self.output = intake.array(name=self.op, shape=shape,
                               dtype=inputs[0].dtype, producer=self)
def init_op(self, inputs):
    """Set up a RandomNormal node whose shape broadcasts the two inputs.

    Output dtype is the project-wide default (config.DTYPE).
    """
    self.op = 'RandomNormal'
    # np.broadcast_shapes applies numpy's broadcasting rules directly on the
    # shape tuples -- same result as adding two zero arrays, but without
    # allocating them (and it raises the same error on incompatible shapes).
    shape = np.broadcast_shapes(inputs[0].shape, inputs[1].shape)
    self.output = intake.array(name=self.op, shape=shape,
                               dtype=config.DTYPE, producer=self)
def init_op(self, inputs):
    """Set up a Clip node: output mirrors the data input's shape.

    inputs[1] and inputs[2] are the lower/upper bounds and must agree in shape.
    """
    self.op = 'Clip'
    # Validate with an explicit exception instead of `assert`, which is
    # silently stripped under `python -O`; style matches the Solve op.
    if inputs[1].shape != inputs[2].shape:
        raise ValueError(
            'Clip bound shapes {} and {} not compatible.'
            .format(inputs[1].shape, inputs[2].shape))
    shape = inputs[0].shape
    self.output = intake.array(name=self.op, shape=shape,
                               dtype=upcast(inputs), producer=self)
def init_op(self, inputs):
    """Set up an ArcTan2 node: output shape follows the first input."""
    self.op = 'ArcTan2'
    # Read the shape from the `inputs` argument like every other init_op
    # here, instead of reaching for self.inputs (presumably the same
    # sequence after check_type -- this removes the inconsistency).
    shape = inputs[0].shape
    self.output = intake.array(name=self.op, shape=shape,
                               dtype=config.DTYPE, producer=self)
def init_op(self, inputs):
    """Set up a Ravel node: output is 1-D with all input elements."""
    self.op = 'Ravel'
    # Ravel always yields a 1-D array of prod(shape) elements (prod of an
    # empty shape is 1, matching ravel of a 0-d array), so compute the
    # shape directly instead of allocating a zeros probe.
    shape = (int(np.prod(inputs[0].shape)),)
    self.output = intake.array(name=self.op, shape=shape,
                               dtype=inputs[0].dtype, producer=self)
def init_op(self, inputs, shape, dtype=config.DTYPE):
    """Register an Eye node: an identity matrix of the given shape and dtype."""
    self.op = 'Eye'
    self.shape = shape
    # Unlike most ops, the output array carries the fixed name 'eye_mat'
    # rather than the op name.
    self.output = intake.array(
        name='eye_mat',
        shape=self.shape,
        dtype=dtype,
        producer=self,
    )
def init_op(self, inputs, axis):
    """Set up a Concatenate node joining the two inputs along `axis`."""
    self.op = 'Concatenate'
    self.axis = axis
    # Probe the result shape with uninitialised arrays (np.empty): this
    # keeps numpy's own axis/shape validation and error messages while
    # skipping the pointless zero-fill of np.zeros.
    self.shape = np.concatenate(
        (np.empty(inputs[0].shape), np.empty(inputs[1].shape)),
        axis=axis).shape
    self.output = intake.array(name=self.op, shape=self.shape,
                               dtype=upcast(inputs), producer=self)
def init_op(self, inputs, at_idx):
    """Set up an ArrayAccess node: output shape is the input indexed by `at_idx`."""
    self.op = 'ArrayAccess'
    # Apply the whole index sequence in one tuple-indexing step: identical
    # to the old separate 1-/2-index branches and generalises to any number
    # of indices. np.empty avoids filling the probe array.
    shape = np.empty(inputs[0].shape)[tuple(at_idx)].shape
    # NOTE(review): dtype is not forwarded, so the output falls back to
    # intake.array's default -- confirm that is intentional.
    self.output = intake.array(name=self.op, shape=shape, producer=self)
    self.at_idx = at_idx
def __init__(self, inputs, at_idx=None, slices=None):
    """Create an Assign node recording a write into inputs[0].

    The assignment target region is addressed either by integer indices
    (`at_idx`) or by slice objects (`slices`). The node wires itself into
    the computation graph, including extra 'helper' edges so that readers
    of the previous value are ordered before this assignment.

    Parameters:
        inputs:  sequence of operand arrays; inputs[0] is the target.
        at_idx:  optional sequence of 1 or 2 integer indices.
        slices:  optional sequence of 1 or 2 slice objects.
    """
    self.op = 'Assign'
    self.inputs = check_type(*inputs)
    self.slices = slices
    if slices is not None:
        # Determine the shape of the written region by slicing a probe
        # array of the target's shape (1- or 2-axis slicing supported).
        if len(slices) == 1:
            slice_shape = np.zeros(inputs[0].shape)[slices[0]].shape
        else:
            slice_shape = np.zeros(inputs[0].shape)[slices[0],
                                                    slices[1]].shape
        # Output keeps the target's full shape/dtype; slice_shape records
        # only the region being assigned.
        self.output = intake.array(name=self.op, dtype=inputs[0].dtype,
                                   shape=inputs[0].shape, producer=self,
                                   slice_shape=slice_shape)
        # NOTE(review): this repr always formats two slices; with
        # len(slices) == 1 it would raise at repr time -- confirm callers
        # never pass a single slice here.
        self.__repr__ = lambda: '{}[{}, {}]'.format(
            self.inputs[0].name, slice_to_str(slices[0]),
            slice_to_str(slices[1])) + '\\n' + str(self.output.shape)
    else:
        self.output = intake.array(name=self.op, dtype=inputs[0].dtype,
                                   shape=inputs[0].shape, producer=self)
    self.at_idx = at_idx
    if at_idx:
        # Integer-index form of the debug repr (1 or 2 indices).
        if len(at_idx) == 1:
            self.__repr__ = lambda: '{}[{}]'.format(
                self.inputs[0].name, at_idx[0]) + '\\n' + str(
                self.output.shape)
        else:
            self.__repr__ = lambda: '{}[{}, {}]'.format(
                self.inputs[0].name, at_idx[0], at_idx[1]) + '\\n' + str(
                self.output.shape)
    # special thing for assign operator... have to add the others
    # as dependency: every existing successor of the target's last
    # producer gets a 'helper' edge to this node.
    successors = graph.successors(inputs[0].last_producer)
    for s in successors:
        if s != self.inputs[0].last_producer:
            graph.add_edge(s, self, edge_type='helper')
    # Caller info is only collected when some debug/grouping mode is on,
    # since gathering it walks the call stack.
    if config.debug or config.group_class or config.group_func:
        self.caller_info = get_caller_info('expander.py', 'intake.py')
    else:
        self.caller_info = None
    self.add_to_graph()
    # Record this assignment on the target array itself.
    self.inputs[0].assignment.append(self)
def init_op(self, inputs, order):
    """Set up a Norm node: 2-norm of a vector, producing a scalar output."""
    self.op = 'Norm'
    # Guard clauses: only the Euclidean norm of (column) vectors is
    # implemented so far.
    if order != 2:
        raise NotImplementedError('Only 2 order norm supported currently.')
    if inputs[0].ndim > 1 and inputs[0].shape[1] > 1:
        raise NotImplementedError('Only vectors supported.')
    self.output = intake.array(name=self.op, shape=(),
                               dtype=upcast(inputs), producer=self)
def init_op(self, inputs):
    """Set up a Dot node (matrix product): n x m . m x p -> n x p."""
    self.op = 'Dot'
    # Let numpy derive the result shape from two zero-filled probe arrays,
    # so every input combination np.dot accepts (vector/matrix mixes) is
    # covered by numpy's own rules.
    lhs_probe = np.zeros(inputs[0].shape)
    rhs_probe = np.zeros(inputs[1].shape)
    shape = np.dot(lhs_probe, rhs_probe).shape
    self.output = intake.array(name=self.op, shape=shape,
                               dtype=upcast(inputs), producer=self)
def init_op(self, inputs):
    """Set up a Solve node (A x = b): A must be square and match b's rows.

    Raises ValueError when the LHS/RHS shapes are incompatible.
    """
    self.op = 'Solve'
    if not (inputs[0].shape[0] == inputs[0].shape[1] == inputs[1].shape[0]):
        raise ValueError(
            'LHS shape {} and RHS shape {} not compatible for solve.'
            .format(inputs[0].shape, inputs[1].shape))
    self.output = intake.array(name=self.op, shape=inputs[1].shape,
                               dtype=upcast(inputs), producer=self)
    # Register in the graph only after validation and output creation, so
    # an incompatible-shape error does not leave a half-initialised node
    # behind in the graph.
    self.add_to_graph()
def init_op(self, inputs):
    """Generic unary init: output mirrors the input's shape.

    Subclasses (e.g. sin and friends) may pre-set self.dtype to force the
    output dtype; otherwise the input's dtype is carried through.
    """
    # TODO check why this happens and what it does
    shape = self.inputs[0].shape
    dtype = getattr(self, 'dtype', self.inputs[0].dtype)
    self.output = intake.array(name=self.op, shape=shape, dtype=dtype,
                               producer=self)
def init_op(self, inputs, keys):
    """Set up a View node: output shape is the input sliced by `keys`."""
    self.op = 'View'
    self.slices = keys
    # Slice a probe array with the whole key tuple at once: identical to
    # the old 1-/2-key branches and generalises to any number of keys
    # (the old `else` branch silently ignored keys beyond the second).
    # np.empty avoids filling the probe array.
    new_shape = np.empty(inputs[0].shape)[tuple(self.slices)].shape
    # NOTE(review): dtype is not forwarded, so the view falls back to
    # intake.array's default dtype -- confirm that is intended.
    self.output = intake.array(name=self.op, shape=new_shape, producer=self)
def init_op(self, inputs):
    """Set up a Min node: a full reduction to a scalar of the input's dtype."""
    self.op = 'Min'
    scalar_shape = ()
    self.output = intake.array(name=self.op, shape=scalar_shape,
                               dtype=inputs[0].dtype, producer=self)
def init_op(self, inputs):
    """Generic init: the subclass supplies self.op and shape_op().

    The output dtype is the upcast of all input dtypes.
    """
    result_shape = self.shape_op(inputs)
    self.output = intake.array(name=self.op, shape=result_shape,
                               dtype=upcast(inputs), producer=self)
def init_op(self, inputs, shape):
    """Set up a Reshape node: output takes the requested shape, input's dtype."""
    self.op = 'Reshape'
    self.output = intake.array(
        name=self.op,
        shape=shape,
        dtype=inputs[0].dtype,
        producer=self,
    )
def init_op(self, inputs):
    """Set up an Any node with a boolean output."""
    self.op = 'Any'
    # NOTE(review): the output keeps the input's full shape (suggesting an
    # elementwise truth test) rather than reducing to a scalar like
    # np.any -- confirm that is the intended semantics.
    bool_shape = inputs[0].shape
    self.output = intake.array(name=self.op, shape=bool_shape, dtype=bool,
                               producer=self)