Example #1
    def __init__(self, dataset_dir, listfile=None, listfile_data=None):
        """ Reader for the phenotype classification task.

        :param dataset_dir:   Directory where timeseries files are stored.
        :param listfile:      Path to a listfile. If this parameter is left `None` then
                              `dataset_dir/listfile.csv` will be used.
        :param listfile_data: Contents of the listfile read by the caller,
                              one comma-separated record per line.
        """
        Reader.__init__(self, dataset_dir, listfile)
        # Each record is "name,period_length,y1,...,y25"; parse it into
        # (name, float period length, list of 25 integer labels).
        self._data = [line.split(',') for line in listfile_data]
        self._data = [(mas[0], float(mas[1]), list(map(int, mas[2:])))
                      for mas in self._data]
        self._X = []
        self._header = []

        label_struct = utils.read_hierarchical_labels(
            '../../data/phenotyping/label_list.txt',
            '../../data/phenotyping/label_struct.json')
        # Map each super-label id to the set of leaf-label ids under it.
        label_mapper = {}
        for super_label in label_struct.keys():
            label_mapper[super_label] = set(label_struct[super_label])

        for index in range(len(self._data)):
            # self._data[index] = (name, period length t, label list y)
            name = self._data[index][0]
            (X, header) = self._read_timeseries(name)
            self._X.append(X)
            self._header.append(header)
            # Add two extra label slots (indices 25 and 26) for the two
            # super-labels, and set a super-label to 1 whenever any of its
            # leaf labels is positive.
            self._data[index][2].append(0)
            self._data[index][2].append(0)
            for i in range(25):
                if self._data[index][2][i] == 1:
                    if i in label_mapper[25]:
                        self._data[index][2][25] = 1
                    if i in label_mapper[26]:
                        self._data[index][2][26] = 1
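
A minimal usage sketch for the reader above. The class name PhenotypingReader, the listfile path, and the header-skipping step are assumptions for illustration; the record format (name,period_length,y1,...,y25) follows the parsing code.

    # Hypothetical usage; the class name and paths are assumptions.
    with open('../../data/phenotyping/train_listfile.csv') as f:
        lines = [line.strip() for line in f.readlines()[1:]]  # skip header row (assumption)
    reader = PhenotypingReader('../../data/phenotyping/train',
                               listfile_data=lines)
    # Each parsed record now carries 27 labels: the 25 leaf phenotypes
    # plus the 2 derived super-labels at indices 25 and 26.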
Example #2
    def __init__(self, dim, batch_norm, dropout, rec_dropout, task,
                 target_repl=False, deep_supervision=False, num_classes=1,
                 depth=1, input_dim=76, **kwargs):

        print("==> not used params in network class:", list(kwargs.keys()))

        self.dim = dim
        self.batch_norm = batch_norm
        self.dropout = dropout
        self.rec_dropout = rec_dropout
        self.depth = depth

        if task in ['decomp', 'ihm', 'ph']:
            final_activation = 'sigmoid'
        elif task in ['los']:
            if num_classes == 1:
                final_activation = 'relu'
            else:
                final_activation = 'softmax'
        else:
            raise ValueError("Wrong value for task")

        # Input layers and masking
        X = Input(shape=(None, input_dim), name='X')
        inputs = [X]
        mX = Masking()(X)

        if deep_supervision:
            M = Input(shape=(None,), name='M')
            inputs.append(M)

        # Configurations
        is_bidirectional = True
        if deep_supervision:
            is_bidirectional = False

        # Main part of the network
        for i in range(depth - 1):
            num_units = dim
            if is_bidirectional:
                num_units = num_units // 2

            gru = GRU(units=num_units,
                      activation='tanh',
                      return_sequences=True,
                      recurrent_dropout=rec_dropout,
                      dropout=dropout)

            if is_bidirectional:
                mX = Bidirectional(gru)(mX)
            else:
                mX = gru(mX)

        # Output module of the network
        return_sequences = (target_repl or deep_supervision)
        L_lv1 = GRU(units=dim,
                    activation='tanh',
                    return_sequences=True,
                    dropout=dropout,
                    recurrent_dropout=rec_dropout)(mX)

        L = L_lv1

        if dropout > 0:
            L = Dropout(dropout)(L)

        label_struct = utils.read_hierarchical_labels(
            '../../data/phenotyping/label_list.txt',
            '../../data/phenotyping/label_struct.json')
        # Only two hierarchy levels are supported.
        num_superclass = len(label_struct)
        y_lv1 = {}
        y_lv2 = {}
        for class_lv1 in label_struct.keys():
            # Level-1 head: probability of the super-label, read off the
            # last time step of the shared GRU output.
            y_lv1[class_lv1] = Dense(1, activation=final_activation)(
                Lambda(lambda x: x[:, -1, :])(L))
            # Level-2 branch: a per-super-label GRU stacked on L_lv1, with
            # one Dense(1) head per leaf label under this super-label.
            L_lv2_gru = GRU(units=dim,
                            activation='tanh',
                            return_sequences=return_sequences,
                            dropout=dropout,
                            recurrent_dropout=rec_dropout)(L_lv1)
            if dropout > 0:
                L_lv2_gru = Dropout(dropout)(L_lv2_gru)
            y_lv2[class_lv1] = {}
            for class_lv2 in label_struct[class_lv1]:
                y_lv2[class_lv1][class_lv2] = Dense(1, activation=final_activation)(L_lv2_gru)
        
        # Map each super-label id to the set of leaf-label ids under it.
        label_mapper = {}
        for super_label in label_struct.keys():
            label_mapper[super_label] = set(label_struct[super_label])
        # Combine the levels: P(leaf) = sum over the covering super-labels
        # of P(super-label) * P(leaf | super-label branch).
        y_final = []
        for i in range(25):
            if (i in label_mapper[25]) and (i not in label_mapper[26]):
                y_final.append(Multiply()([y_lv1[25], y_lv2[25][i]]))
            elif (i not in label_mapper[25]) and (i in label_mapper[26]):
                y_final.append(Multiply()([y_lv1[26], y_lv2[26][i]]))
            elif (i in label_mapper[25]) and (i in label_mapper[26]):
                y_final.append(Add()([Multiply()([y_lv1[25], y_lv2[25][i]]),
                                      Multiply()([y_lv1[26], y_lv2[26][i]])]))
        # Append the two super-label probabilities as outputs 25 and 26,
        # matching the extra label slots created by the reader above.
        y_final.append(y_lv1[25])
        y_final.append(y_lv1[26])


        y = Concatenate()(y_final)
        outputs = [y]

        super(Network, self).__init__(inputs=inputs, outputs=outputs)
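
For context, a sketch of how this network might be built and compiled, assuming the class is named Network and subclasses keras.models.Model (inferred from the super(Network, self).__init__(inputs=..., outputs=...) call). The concatenated output has 27 units: the 25 combined leaf probabilities plus the 2 super-label probabilities appended at the end.

    # Hypothetical usage; hyperparameter values are illustrative.
    model = Network(dim=256, batch_norm=False, dropout=0.3,
                    rec_dropout=0.0, task='ph', depth=2)
    model.compile(optimizer='adam', loss='binary_crossentropy')
    # X has shape (n_samples, n_timesteps, 76); y has shape (n_samples, 27).
    # model.fit(X, y, batch_size=8, epochs=20)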
Example #3
    def __init__(self,
                 dim,
                 batch_norm,
                 dropout,
                 rec_dropout,
                 task,
                 target_repl=False,
                 deep_supervision=False,
                 num_classes=1,
                 depth=1,
                 input_dim=76,
                 **kwargs):

        print("==> not used params in network class:", list(kwargs.keys()))

        self.dim = dim
        self.batch_norm = batch_norm
        self.dropout = dropout
        self.rec_dropout = rec_dropout
        self.depth = depth

        if task in ['decomp', 'ihm', 'ph']:
            final_activation = 'sigmoid'
        elif task in ['los']:
            if num_classes == 1:
                final_activation = 'relu'
            else:
                final_activation = 'softmax'
        else:
            raise ValueError("Wrong value for task")

        # Input layers and masking
        X = Input(shape=(None, input_dim), name='X')
        inputs = [X]
        mX = Masking()(X)

        if deep_supervision:
            M = Input(shape=(None, ), name='M')
            inputs.append(M)

        # Configurations
        is_bidirectional = True
        if deep_supervision:
            is_bidirectional = False

        # Output module of the network
        return_sequences = (target_repl or deep_supervision)
        L_lv1 = GRU(units=dim,
                    activation='tanh',
                    return_sequences=True,
                    dropout=dropout,
                    recurrent_dropout=rec_dropout,
                    name="L_lv1")(mX)

        L = L_lv1

        label_struct = utils.read_hierarchical_labels(
            '../../data/phenotyping/label_list.txt',
            '../../data/phenotyping/label_struct.json')
        # Only two hierarchy levels are supported.
        num_superclass = len(label_struct)

        output_lv1 = Lambda(lambda x: x[:, -1, :])(L)
        # if dropout > 0:
        #     output_lv1 = Dropout(dropout)(output_lv1)
        output_lv1 = Dense(num_superclass,
                           activation=final_activation,
                           name="output_lv1")(output_lv1)

        L_lv2_gru = GRU(units=dim,
                        activation='tanh',
                        return_sequences=return_sequences,
                        dropout=dropout,
                        recurrent_dropout=rec_dropout,
                        name="L_lv2_gru")(L_lv1)

        # if dropout > 0:
        #     L_lv2_gru = Dropout(dropout)(L_lv2_gru)

        L_lv2_2 = GRU(units=dim,
                      activation='tanh',
                      return_sequences=return_sequences,
                      dropout=dropout,
                      recurrent_dropout=rec_dropout,
                      name="L_lv2_2")(mX)

        output_lv2 = Dense(25, activation=final_activation, name="output_lv2")(
            Concatenate()([L_lv2_gru, L_lv2_2]))

        y = Concatenate()([output_lv2, output_lv1])
        outputs = [y]

        super(Network, self).__init__(inputs=inputs, outputs=outputs)
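
All three examples depend on utils.read_hierarchical_labels, whose implementation is not shown. Below is a sketch consistent with the call sites (the on-disk JSON layout is an assumption): the helper returns a dict mapping each super-label index (25 and 26 here) to the list of leaf-label indices it covers.

    import json

    def read_hierarchical_labels(label_list_path, label_struct_path):
        """Hypothetical reconstruction inferred from the call sites above.
        label_list_path would carry human-readable label names (unused here);
        the JSON layout of label_struct_path is an assumption."""
        with open(label_struct_path) as f:
            raw = json.load(f)
        # e.g. {"25": [0, 1, 4], "26": [2, 3, 5]} -> {25: [0, 1, 4], 26: [2, 3, 5]}
        return {int(k): [int(i) for i in v] for k, v in raw.items()}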