Example #1
0
            def build(self):
                """Concatenate every input node, then feed the merged tensor
                through a single searchable Dense(1) node."""
                # Fixed merge point joining all inputs into one tensor.
                concat_node = ConstantNode()
                concat_node.set_op(Concatenate(self, self.input_nodes))

                # Variable node with a single candidate op: Dense(1).
                dense_node = VariableNode()
                dense_node.add_op(Dense(1))
                self.connect(concat_node, dense_node)

                return self
Example #2
0
    def __init__(self,
                 input_shape,
                 output_shape,
                 batch_size=None,
                 seed=None,
                 *args,
                 **kwargs):
        """Create the search space's input nodes.

        Parameters:
            input_shape: a single shape ``tuple`` (one input tensor) or a
                ``list`` of shape tuples (one input tensor per entry).
            output_shape: shape of the expected output(s); stored as-is.
            batch_size: optional static batch size; when ``input_shape`` is a
                list this may be a list with one entry per input.
            seed: seed for the space's private RNG (reproducible sampling).

        Raises:
            InputShapeOfWrongType: if ``input_shape`` is neither a tuple nor
                a list of tuples.
        """
        super().__init__()

        # Dedicated RNG so architecture sampling is reproducible per seed.
        self._random = np.random.RandomState(seed)

        self.input_shape = input_shape
        if type(input_shape) is tuple:
            # we have only one input tensor here
            op = Tensor(
                keras.layers.Input(input_shape,
                                   name="input_0",
                                   batch_size=batch_size))
            self.input_nodes = [ConstantNode(op=op, name="Input_0")]

        elif type(input_shape) is list and all(
                map(lambda x: type(x) is tuple, input_shape)):
            # we have a list of input tensors here
            self.input_nodes = list()
            for i in range(len(input_shape)):
                # BUG FIX: the original rebound the outer ``batch_size``
                # name inside the loop, so when a list of batch sizes was
                # passed, input 0 consumed it and every later input saw
                # ``None``. Use a per-iteration local instead.
                bs = batch_size[i] if type(batch_size) is list else None
                op = Tensor(
                    keras.layers.Input(input_shape[i],
                                       name=f"input_{i}",
                                       batch_size=bs))
                inode = ConstantNode(op=op, name=f"Input_{i}")
                self.input_nodes.append(inode)
        else:
            raise InputShapeOfWrongType(input_shape)

        for node in self.input_nodes:
            self.graph.add_node(node)

        self.output_shape = output_shape
        self.output_node = None

        self._model = None
    def build(self):
        """Build a symmetric auto-encoder plus a regressor branching off
        the bottleneck.

        The layer widths follow ``units`` (128 -> 8 -> 128): the shrinking
        half is the encoder, the growing half the decoder. The bottleneck
        variable node is kept in ``latente_space`` so the regressor can
        branch from the latent representation instead of the decoder output.
        Assumes ``self.output_shape`` is a list of at least two shape tuples
        (index 0: reconstruction output, index 1: regression output) — TODO
        confirm against the constructor/caller.
        """
        inp = self.input_nodes[0]

        # auto-encoder
        # Symmetric width schedule; index 4 (width 8) is the bottleneck.
        units = [128, 64, 32, 16, 8, 16, 32, 64, 128]
        prev_node = inp
        # d tracks direction: 1 while widths shrink (encoder), -1 once they
        # start growing again (decoder). The short-circuit on ``d == 1``
        # also prevents units[i + 1] from being evaluated at the last index.
        d = 1
        for i in range(len(units)):
            vnode = VariableNode()
            vnode.add_op(Identity())
            if d == 1 and units[i] < units[i + 1]:
                # First width increase: this node is the latent space.
                # Offer even widths from 2 up to the bottleneck width.
                d = -1
                for u in range(min(2, units[i]), max(2, units[i]) + 1, 2):
                    vnode.add_op(Dense(u, tf.nn.relu))
                latente_space = vnode
            else:
                # Offer every even width between this layer's width and its
                # neighbour's in the current direction.
                for u in range(min(units[i], units[i + d]),
                               max(units[i], units[i + d]) + 1, 2):
                    vnode.add_op(Dense(u, tf.nn.relu))
            self.connect(prev_node, vnode)
            prev_node = vnode

        # Reconstruction head (auto-encoder output).
        out2 = ConstantNode(op=Dense(self.output_shape[0][0], name="output_0"))
        self.connect(prev_node, out2)

        # regressor
        # Branches off the bottleneck node, not the decoder output.
        prev_node = latente_space
        # prev_node = inp
        for _ in range(self.num_layers):
            vnode = VariableNode()
            # Candidate widths 16, 32, ..., 128 (ReLU only — no Identity,
            # so every regressor layer is a real Dense layer).
            for i in range(16, 129, 16):
                vnode.add_op(Dense(i, tf.nn.relu))

            self.connect(prev_node, vnode)
            prev_node = vnode

        # Regression head.
        out1 = ConstantNode(op=Dense(self.output_shape[1][0], name="output_1"))
        self.connect(prev_node, out1)

        return self
Example #4
0
        def build(self):
            """Wire input -> searchable Dense -> output.

            Fix: now returns ``self`` like the other ``build`` variants so
            the call can be chained; previously it implicitly returned
            ``None``.
            """
            input_node = self.input[0]

            # One variable node choosing between Identity (skip) and a
            # Dense layer of any width from 1 to 999.
            dense = VariableNode()
            dense.add_op(Identity())
            for i in range(1, 1000):
                dense.add_op(Dense(i))
            self.connect(input_node, dense)

            output_node = ConstantNode(Dense(self.output_shape[0]))
            self.connect(dense, output_node)

            return self
Example #5
0
    def build(self):
        """Attach one generated variable node per input, merge them when
        there are several inputs, and finish with a task-appropriate Dense
        output layer."""
        if type(self.input_shape) is list:
            # Multi-input: one searchable branch per input, merged by a
            # fixed Concatenate node.
            branches = []
            for idx in range(len(self.input_shape)):
                branch = self.gen_vnode()
                self.connect(self.input_nodes[idx], branch)
                branches.append(branch)
            tail = ConstantNode(Concatenate(self, branches))
        else:
            # Single input: one searchable branch, no merge needed.
            tail = self.gen_vnode()
            self.connect(self.input_nodes[0], tail)

        # Linear output for regression, softmax for classification.
        head_activation = None if self.regression else "softmax"
        out = ConstantNode(Dense(self.output_shape[0],
                                 activation=head_activation))
        self.connect(tail, out)

        return self
Example #6
0
    def build(self):
        """Stack ``num_layers`` searchable Dense layers and cap the chain
        with a task-appropriate output node."""
        tail = self.input_nodes[0]

        for _layer in range(self.num_layers):
            # Each layer may be skipped (Identity) or be a ReLU Dense whose
            # width is drawn from the ``num_units`` range arguments.
            layer_node = VariableNode()
            layer_node.add_op(Identity())
            for width in range(*self.num_units):
                layer_node.add_op(Dense(width, tf.nn.relu))
            self.connect(tail, layer_node)
            tail = layer_node

        # No activation for regression, softmax for classification.
        head = Dense(self.output_shape[0],
                     activation=None if self.regression else "softmax")
        out_node = ConstantNode(head)
        self.connect(tail, out_node)

        return self
Example #7
0
    def build(self):
        """Build a stack of dense cells with searchable skip connections to
        the three most recent merge points, optional dropout, and a
        task-appropriate Dense head."""
        head = self.input_nodes[0]

        # Skip-connection candidates: the input plus at most the three most
        # recent cell outputs (the deque silently drops the oldest).
        skip_candidates = collections.deque([head], maxlen=3)

        for _cell in range(self.num_layers):
            dense_node = VariableNode()
            self.add_dense_to_(dense_node)
            self.connect(head, dense_node)

            # Merge the cell output with any enabled skip connections,
            # projecting operands to a common shape before the add.
            merge_node = ConstantNode()
            merge_node.set_op(
                AddByProjecting(self, [dense_node], activation="relu"))

            # Each candidate gets a searchable on/off gate: Zero() disables
            # the skip, Connect(...) enables it.
            for anchor in skip_candidates:
                gate = VariableNode()
                gate.add_op(Zero())
                gate.add_op(Connect(self, anchor))
                self.connect(gate, merge_node)

            head = merge_node
            # Register this cell's merge point for the following cells.
            skip_candidates.append(head)

        # NOTE(review): ``>= 0.0`` adds a Dropout node even at rate 0 —
        # presumably intentional (keeps graph structure fixed); confirm.
        if self.dropout >= 0.0:
            drop_node = ConstantNode(op=Dropout(rate=self.dropout))
            self.connect(head, drop_node)
            head = drop_node

        out_node = ConstantNode(
            Dense(self.output_shape[0],
                  activation=None if self.regression else "softmax"))
        self.connect(head, out_node)

        return self