Beispiel #1
0
 def build(self, *args, **kwargs):
     """ Build the network graph and initialize the Keras Model base.

     Runs ``self.build_config()``, obtains the graph's input/output
     tensors from ``self._build()``, initializes ``Model`` with them,
     and compiles using the loss/optimizer found in ``self.config``
     (optimizer falls back to ``'sgd'``; loss may resolve to ``None``).

     NOTE(review): ``*args``/``**kwargs`` are accepted but unused here —
     presumably kept for a base-class signature; confirm.
     """
     self.build_config()
     input_nodes, output_nodes = self._build()
     # Initialize the Model base class directly with the graph endpoints.
     Model.__init__(self, input_nodes, output_nodes)
     # self.get(key, config, default) looks like a config lookup helper;
     # 'sgd' is the documented fallback optimizer.
     self.compile(loss=self.get('loss', self.config, None),
                  optimizer=self.get('optimizer', self.config, 'sgd'))
Beispiel #2
0
    def __init__(
        self,
        in_shape: Tuple[int, ...],
        num_layers: int,
        layer_multiplier: float,
        bn_momentum: float,
        dropout: float,
        leaky_relu_alpha: float,
        optimizer: Optimizer,
        name: str = None,
    ):
        """Build and compile an MLP+LSTM binary discriminator.

        The input is flattened (when it has more than one feature axis),
        passed through the MLP interim stack, fed to a single-step LSTM,
        and reduced to one sigmoid unit.
        """
        Discriminator.__init__(self, in_shape, 1)

        # Keep hyper-parameters around for later inspection.
        self.num_layers = num_layers
        self.layer_multiplier = layer_multiplier
        self.bn_momentum = bn_momentum
        self.dropout = dropout
        self.leaky_relu_alpha = leaky_relu_alpha

        inp = Input(in_shape)
        # Flatten only multi-axis inputs; rank-2 tensors pass through.
        net = Flatten()(inp) if len(inp.shape) > 2 else inp
        net = self.create_mlp_interim(net, num_layers, leaky_relu_alpha, dropout)
        # Insert a singleton time axis so the LSTM sees a length-1 sequence.
        net = Reshape((1, *net.shape[1:]))(net)
        net = LSTM(numpy.prod(self.in_shape), dropout=dropout, unroll=True)(net)
        net = Dense(1, activation="sigmoid")(net)

        Model.__init__(
            self,
            inputs=inp,
            outputs=net,
            name=name or self.__class__.__name__,
        )

        self.compile(
            loss=["binary_crossentropy"],
            optimizer=optimizer,
            metrics=["accuracy"],
        )
Beispiel #3
0
 def __init__(self, treatment, **kwargs):
     """Response model built on top of a trained Treatment model.

     Parameters
     ----------
     treatment : Treatment
         A previously trained treatment model.
     **kwargs
         Must contain 'inputs' and 'outputs' for the Model base class.

     Raises
     ------
     TypeError
         If ``treatment`` is not a ``Treatment`` instance.
     """
     # Validate *before* any base-class initialization so a bad argument
     # cannot leave a half-initialized Model behind.
     if not isinstance(treatment, Treatment):
         # Original message was built with backslash continuations inside
         # the literal, embedding long runs of interior whitespace; use
         # implicit concatenation for a clean single-spaced message.
         raise TypeError(
             "Expected a treatment model of type Treatment. "
             "Got a model of type %s. Remember to train your "
             "treatment model first." % type(treatment)
         )
     self.treatment = treatment
     Model.__init__(self, kwargs['inputs'], kwargs['outputs'])
     # NOTE(review): this re-runs the (cooperative) base __init__ after the
     # explicit Model.__init__ above — kept as in the original; confirm the
     # MRO actually requires both calls.
     super(Response, self).__init__(**kwargs)
Beispiel #4
0
    def __init__(self, img_shape=None, inputs=None, outputs=None):
        """Initialize a 2D or 3D convolutional model.

        Parameters
        ----------
        img_shape : tuple
            Image shape including a trailing channel axis; its length minus
            one selects 2D or 3D layer classes.
        inputs, outputs
            When ``outputs`` is given, the Model is wrapped directly around
            these tensors instead of building a new graph.

        Raises
        ------
        ValueError
            If ``img_shape`` is neither 2D nor 3D (plus channels).
        """
        # Spatial rank: img_shape is (spatial..., channels).
        self.dims = len(img_shape) - 1
        if self.dims == 3:
            self.conv = Conv3D
            self.pool = AveragePooling3D
        elif self.dims == 2:
            self.conv = Conv2D
            self.pool = AveragePooling2D
        else:
            # Fail fast; the original left self.conv/self.pool unset and
            # crashed later with an AttributeError inside __build.
            raise ValueError(
                "img_shape must describe 2D or 3D images, got %d spatial "
                "dimensions" % self.dims
            )

        # Use `is None` for the sentinel check (PEP 8); the second branch
        # was a redundant `elif outputs != None`.
        if outputs is None:
            # Fresh session: build the graph from scratch.
            model = self.__build(img_shape)
            Model.__init__(self, model.inputs, model.outputs)
        else:
            # Re-wrap existing tensors (e.g. a resumed/loaded model).
            Model.__init__(self, inputs, outputs)
Beispiel #5
0
    def __init__(
        self,
        latent_size: int,
        num_classes: int,
        out_shape: Tuple[int, ...],
        num_layers: int,
        layer_multiplier: float,
        bn_momentum: float,
        leaky_relu_alpha: float,
        dropout: float,
        name: str = None,
    ):
        """Class-conditional MLP+LSTM generator.

        A latent vector is modulated by a learned class embedding, pushed
        through the MLP interim stack and a single-step LSTM, then mapped
        with tanh to a tensor of ``out_shape``.
        """
        EmbeddingGenerator.__init__(self, latent_size, out_shape, num_classes,
                                    name)

        # Retain hyper-parameters for later inspection.
        self.num_layers = num_layers
        self.layer_multiplier = layer_multiplier
        self.bn_momentum = bn_momentum
        self.leaky_relu_alpha = leaky_relu_alpha
        self.dropout = dropout

        z_in = Input((latent_size, ))
        class_in = Input((1, ), dtype="int32")

        # Embed the class id into the latent space and gate the noise with it.
        class_vec = Flatten()(Embedding(num_classes, latent_size)(class_in))
        gated = multiply([z_in, class_vec])

        net = self.create_mlp_interim(
            gated,
            num_layers,
            layer_multiplier,
            bn_momentum,
            leaky_relu_alpha,
            dropout,
        )
        # Length-1 sequence axis for the (unrolled) LSTM step.
        net = Reshape((1, *net.shape[1:]))(net)
        net = LSTM(numpy.prod(out_shape), dropout=dropout, unroll=True)(net)
        net = Dense(numpy.prod(out_shape), activation="tanh")(net)
        net = Reshape(out_shape)(net)

        Model.__init__(
            self,
            inputs=[z_in, class_in],
            outputs=net,
            name=name or self.__class__.__name__,
        )
Beispiel #6
0
    def __init__(self, img_rows, img_cols, n_classes, out_activation="softmax",
                 complexity_factor=1, l1_reg=None, l2_reg=None,
                 base_model=None, logger=None, **kwargs):
        """Segmentation model over single-channel 2D images.

        Either builds a fresh graph via ``self.init_model`` or, when
        ``base_model`` is given, resumes by wrapping its input/output.
        """
        self.img_shape = (img_rows, img_cols, 1)
        self.n_classes = n_classes
        # Channel-width scaling factor (square root of complexity_factor).
        self.cf = np.sqrt(complexity_factor)

        # Number of pixels cropped off the input relative to the output,
        # per spatial axis: [[top, bottom], [left, right]].
        self.label_crop = np.array([[0, 0], [0, 0]])

        # Build model and init base keras Model class.
        if base_model:
            # Resumed training: reuse the existing graph's endpoints.
            Model.__init__(self, base_model.input, base_model.output)
        else:
            # New training session: construct inputs/outputs from scratch.
            Model.__init__(self, *self.init_model(out_activation, l1_reg,
                                                  l2_reg, **kwargs))
Beispiel #7
0
    def __init__(
        self,
        in_shape: Tuple[int, ...],
        num_classes: int,
        num_layers: int,
        layer_multiplier: float,
        bn_momentum: float,
        dropout: float,
        leaky_relu_alpha: float,
        optimizer: Optimizer,
        name: str = None,
    ):
        """Two-headed MLP discriminator: real/fake score plus class label.

        A shared MLP trunk feeds both a sigmoid discrimination head and a
        softmax classification head over ``self.num_classes`` classes.
        """
        LabelingDiscriminator.__init__(self, in_shape, num_classes)

        # Hyper-parameters retained for later inspection.
        self.num_layers = num_layers
        self.layer_multiplier = layer_multiplier
        self.bn_momentum = bn_momentum
        self.dropout = dropout
        self.leaky_relu_alpha = leaky_relu_alpha

        inp = Input(self.in_shape)
        flattened = Flatten()(inp)

        # Shared trunk feeding both output heads.
        trunk = self.create_mlp_interim(
            flattened, num_layers, leaky_relu_alpha, dropout
        )

        validity = Dense(1, activation="sigmoid")(trunk)
        class_probs = Dense(self.num_classes, activation="softmax")(trunk)

        Model.__init__(
            self,
            inputs=inp,
            outputs=[validity, class_probs],
            name=name or self.__class__.__name__,
        )

        # Losses pair up with the two heads, in order.
        self.compile(
            loss=["binary_crossentropy", "sparse_categorical_crossentropy"],
            optimizer=optimizer,
            metrics=["accuracy"],
        )
Beispiel #8
0
    def __init__(
        self,
        in_shape: Tuple[int, ...],
        num_classes: int,
        num_layers: int,
        layer_multiplier: float,
        bn_momentum: float,
        dropout: float,
        leaky_relu_alpha: float,
        optimizer: Optimizer,
        name: str = None,
    ):
        """Class-conditional MLP+LSTM discriminator.

        The flattened data tensor is gated by a learned class embedding,
        run through the MLP interim stack and a single-step LSTM, and
        reduced to one sigmoid real/fake unit.
        """
        EmbeddingDiscriminator.__init__(self, in_shape, num_classes)

        # Keep hyper-parameters for later inspection.
        self.num_layers = num_layers
        self.layer_multiplier = layer_multiplier
        self.bn_momentum = bn_momentum
        self.dropout = dropout
        self.leaky_relu_alpha = leaky_relu_alpha

        data_in = Input(in_shape)
        class_in = Input((1,), dtype="int32")

        # Embed the class id into a vector the size of the flattened input.
        class_vec = Flatten()(
            Embedding(self.num_classes, numpy.prod(self.in_shape))(class_in)
        )
        flat_in = Flatten()(data_in)
        gated = multiply([flat_in, class_vec])

        net = self.create_mlp_interim(gated, num_layers, leaky_relu_alpha, dropout)
        # Length-1 sequence axis for the (unrolled) LSTM step.
        net = Reshape((1, *net.shape[1:]))(net)
        net = LSTM(numpy.prod(self.in_shape), dropout=dropout, unroll=True)(net)
        net = Dense(1, activation="sigmoid")(net)

        Model.__init__(
            self,
            inputs=[data_in, class_in],
            outputs=net,
            name=name or self.__class__.__name__,
        )

        self.compile(
            loss=["binary_crossentropy"],
            optimizer=optimizer,
            metrics=["accuracy"],
        )
Beispiel #9
0
    def __init__(self,
                 inputs,
                 start_img_shape=(8, 8, 8, 1),
                 outputs=None,
                 **kwargs):
        """Initialize a 2D or 3D transpose-convolution (generator) model.

        Parameters
        ----------
        inputs
            Input tensor(s); passed to ``__build`` or used directly when
            ``outputs`` is supplied.
        start_img_shape : tuple
            Shape of the first feature map including a trailing channel
            axis; its length minus one selects 2D or 3D layer classes.
        outputs
            When given, wraps the Model directly around ``inputs``/``outputs``
            instead of building a new graph.

        Raises
        ------
        ValueError
            If ``start_img_shape`` is neither 2D nor 3D (plus channels).
        """
        self.start_img_shape = start_img_shape
        # Spatial rank: start_img_shape is (spatial..., channels).
        self.dims = len(self.start_img_shape) - 1

        if self.dims == 3:
            self.conv = Conv3DTranspose
            self.upsample = UpSampling3D
        elif self.dims == 2:
            self.conv = Conv2DTranspose
            self.upsample = UpSampling2D
        else:
            # Fail fast; the original left self.conv/self.upsample unset and
            # crashed later with an AttributeError inside __build.
            raise ValueError(
                "start_img_shape must describe 2D or 3D images, got %d "
                "spatial dimensions" % self.dims
            )

        # `is None` for the sentinel check (PEP 8); the second branch was a
        # redundant `elif outputs != None`.
        if outputs is None:
            # Fresh session: build the graph from scratch.
            model = self.__build(inputs)
            Model.__init__(self, model.inputs, model.outputs)
        else:
            # Re-wrap existing tensors (e.g. a resumed/loaded model).
            Model.__init__(self, inputs, outputs)
Beispiel #10
0
    def __init__(
        self,
        latent_size: int,
        out_shape: Tuple[int, ...],
        num_layers: int,
        layer_multiplier: float,
        bn_momentum: float,
        leaky_relu_alpha: float,
        dropout: float,
        name: str = None,
    ):
        """Unconditional MLP+LSTM generator.

        A latent vector is pushed through the MLP interim stack and a
        single-step LSTM, then mapped with tanh to ``out_shape``.
        """
        Generator.__init__(self, latent_size, out_shape)

        # Retain hyper-parameters for later inspection.
        self.num_layers = num_layers
        self.layer_multiplier = layer_multiplier
        self.bn_momentum = bn_momentum
        self.leaky_relu_alpha = leaky_relu_alpha
        self.dropout = dropout

        z_in = Input((latent_size, ))
        net = self.create_mlp_interim(
            z_in,
            num_layers,
            layer_multiplier,
            bn_momentum,
            leaky_relu_alpha,
            dropout,
        )
        # Length-1 sequence axis for the (unrolled) LSTM step.
        net = Reshape((1, *net.shape[1:]))(net)
        net = LSTM(numpy.prod(out_shape), dropout=dropout, unroll=True)(net)
        net = Dense(numpy.prod(out_shape), activation="tanh")(net)
        net = Reshape(out_shape)(net)

        Model.__init__(
            self,
            inputs=z_in,
            outputs=net,
            name=name or self.__class__.__name__,
        )
 def __init__(self):
     # NOTE(review): this passes the `Sequential` *class object* (not an
     # instance) as the second positional argument to Model.__init__ —
     # confirm that the project's Model base actually expects a class here.
     Model.__init__(self, Sequential)
Beispiel #12
0
 def __init__(self):
     """Delegate directly to Model.__init__ with no extra arguments."""
     Model.__init__(self)
Beispiel #13
0
 def __init__(self, input_shape):
     """Create an input of `input_shape`, run it through self.build, and
     wrap the resulting input/output pair as a Model.

     The output attribute is named `decoded` — presumably this is an
     autoencoder-style model; confirm against self.build's definition.
     """
     self.input_image = Input(shape=input_shape)
     self.decoded = self.build(self.input_image)
     Model.__init__(self, self.input_image, self.decoded)
Beispiel #14
0
 def __init__(self, inputs, outputs, name):
     """Initialize both bases: Model with the graph tensors, Wrapper with
     the given name."""
     Model.__init__(self, inputs=inputs, outputs=outputs)
     Wrapper.__init__(self, name)
Beispiel #15
0
 def __init__(self, generator, discriminator):
     """Combine a generator and discriminator into one adversarial model.

     The discriminator is frozen (`trainable = False`) before building,
     so training the combined model updates only the generator.
     """
     discriminator.trainable = False
     model = self.__build(generator, discriminator)
     Model.__init__(self, model.inputs, model.outputs)
     # NOTE(review): `opt` is not defined anywhere in this method —
     # presumably a module-level optimizer; verify it exists in the
     # enclosing module or this raises NameError.
     self.compile(loss='binary_crossentropy', optimizer=opt)
Beispiel #16
0
 def __init__(self, config):
     # Pass the configuration object straight through to the Model base.
     Model.__init__(self, config)