Example 1
            "filters": 32,
            "kernel_size": 3,
            "choice_pooling": {
                "pooling": True,
                "pool_size": 2
            },
            "neurones": 100,
            "batch_size": 50,
            "optimizer": "adam",
            "activation": "sigmoid",
            "nc": args.nc,
            "dropout": 0,
            "bi": args.bi,
            "cost": args.cost
        }
        create_model(params, rootw=rootw, wn=args.initw, typem=args.typem)
    """
    print(X_train.shape, y_train.shape)
    print(y_train[::40], np.mean(y_train, axis=0))
    print(X_train.dtype, y_train.dtype)
    # for yi in y_train:
    #    print(yi)
    # , validation_data=(X_val, y_val[::, 0], y_val[::, 1])
    # Scale the second target column by args.incweightT for rows whose first
    # target column is 0.
    if args.incweightT is not None:
        print(np.mean(y_train, axis=0))
        y_train[y_train[::, 0] == 0, 1] *= args.incweightT
        print(np.mean(y_train, axis=0))


    print("Accuracy: %.2f%%" % (scores))
"""
Example 2
# Assumes Keras layer imports (Sequential, Conv1D, MaxPooling1D, LSTM, Dense,
# TimeDistributed, AveragePooling1D, Flatten) and a project-local create_model
# helper available in the enclosing module.
def model(typem=1, base=False, nc=1, window_length=None):
    init = 1
    if base:
        init = 5

    if typem == 1:

        if window_length is None:
            lenv = 200
        else:
            lenv = window_length
        """
        model = Sequential()
        # model.add(Embedding(top_words, embedding_vecor_length,
        #                     input_length=max_review_length))
        model.add(Conv1D(filters=32, kernel_size=3, padding='same',
                         activation='relu', input_shape=(lenv, init)))
        model.add(MaxPooling1D(pool_size=2))
        model.add(LSTM(100))
        model.add(Dense(1, activation='linear'))
        model.compile(loss='mse', optimizer='adam')  # , metrics=['accuracy'])
        """
        params = {
            "filters": 32,
            "kernel_size": 3,
            "choice_pooling": {
                "pooling": True,
                "pool_size": 2
            },
            "neurones": 100,
            "batch_size": 50,
            "optimizer": "adam",
            "activation": "sigmoid",
            "nc": nc,
            "dropout": 0,
            "bi": False
        }
        ntwk = create_model(params, create_only=True)
    elif typem == 7:
        model = Sequential()
        # model.add(Embedding(top_words, embedding_vecor_length, input_length=max_review_length))
        model.add(
            Conv1D(filters=32,
                   kernel_size=5,
                   padding='same',
                   activation='relu',
                   input_shape=(96, init)))
        """
        model.add(MaxPooling1D(pool_size=4)) # 16
        model.add(Conv1D(filters=64, kernel_size=5, padding='same',
                         activation='relu'))
        model.add(MaxPooling1D(pool_size=4)) #4
        model.add(Conv1D(filters=64, kernel_size=5, padding='same',
                                 activation='relu'))

        #model.add(LSTM(100))
        #model.add(Dense(1, activation='linear'))
        """
        model.add(MaxPooling1D(pool_size=4))
        model.add(
            Conv1D(filters=32,
                   kernel_size=5,
                   padding='same',
                   activation='relu'))
        model.add(MaxPooling1D(pool_size=4))
        model.add(
            Conv1D(filters=32,
                   kernel_size=5,
                   padding='same',
                   activation='relu'))
        model.add(TimeDistributed(Dense(1, activation='sigmoid')))
        model.add(AveragePooling1D(pool_size=6))
        model.add(Flatten())
        ntwk = model
        lenv = 96
    else:
        raise ValueError("unsupported typem: %r" % (typem,))
    ntwk.summary()
    return ntwk
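In the typem == 7 branch the network is fully convolutional: Conv1D with padding='same' keeps all 96 time steps, each MaxPooling1D(pool_size=4) divides the length by 4 (96 to 24, then 24 to 6), TimeDistributed(Dense(1, activation='sigmoid')) maps each of the six remaining steps to a single value, and AveragePooling1D(pool_size=6) followed by Flatten leaves one output per sample. The shape check below assumes standalone Keras and a single input feature (init = 1); note that this branch never calls compile, so the model would still need compiling before training.

import numpy as np
from keras.models import Sequential
from keras.layers import (Conv1D, MaxPooling1D, Dense, TimeDistributed,
                          AveragePooling1D, Flatten)

# Rebuild the typem == 7 stack with init = 1 and verify the output shape.
net = Sequential()
net.add(Conv1D(32, 5, padding='same', activation='relu', input_shape=(96, 1)))
net.add(MaxPooling1D(pool_size=4))                        # 96 -> 24 steps
net.add(Conv1D(32, 5, padding='same', activation='relu'))
net.add(MaxPooling1D(pool_size=4))                        # 24 -> 6 steps
net.add(Conv1D(32, 5, padding='same', activation='relu'))
net.add(TimeDistributed(Dense(1, activation='sigmoid')))  # (6, 1)
net.add(AveragePooling1D(pool_size=6))                    # (1, 1)
net.add(Flatten())                                        # (1,)

dummy = np.zeros((8, 96, 1), dtype=np.float32)
print(net.predict(dummy).shape)                           # expected (8, 1)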
Example 3
# Assumes the same Keras layer imports and create_model helper as Example 2.
def model(typem=1,
          window_length=None,
          base=False,
          idu=False,
          activation="linear"):
    init = 1
    if base:
        init = 5
    print(init)
    if typem in [1, 3]:

        if window_length is None:
            lenv = 200
        else:
            lenv = window_length
        model = Sequential()
        model.add(
            Conv1D(filters=32,
                   kernel_size=3,
                   padding='same',
                   activation='relu',
                   input_shape=(lenv, init)))
        model.add(MaxPooling1D(pool_size=2))
        model.add(LSTM(100))
        model.add(Dense(1, activation=activation))
        model.compile(loss='mse', optimizer='adam')  # , metrics=['accuracy'])
        ntwk = model

        if idu:
            params = {
                "filters": 32,
                "kernel_size": 3,
                "choice_pooling": {
                    "pooling": True,
                    "pool_size": 2
                },
                "neurones": 100,
                "batch_size": 50,
                "optimizer": "adam",
                "activation": "sigmoid",
                "nc": 2,
                "dropout": 0,
                "bi": False
            }
            # args is not defined in this scope; use the local typem parameter.
            ntwk = create_model(params, create_only=True, typem=typem)
            lenv = 160

    elif typem == 7:
        model = Sequential()
        model.add(
            Conv1D(filters=32,
                   kernel_size=5,
                   padding='same',
                   activation='relu',
                   input_shape=(96, init)))
        """
        model.add(MaxPooling1D(pool_size=4)) # 16
        model.add(Conv1D(filters=64, kernel_size=5, padding='same',
                         activation='relu'))
        model.add(MaxPooling1D(pool_size=4)) #4
        model.add(Conv1D(filters=64, kernel_size=5, padding='same',
                                 activation='relu'))

        # model.add(LSTM(100))
        # model.add(Dense(1, activation='linear'))
        """
        model.add(MaxPooling1D(pool_size=4))
        model.add(
            Conv1D(filters=32,
                   kernel_size=5,
                   padding='same',
                   activation='relu'))
        model.add(MaxPooling1D(pool_size=4))
        model.add(
            Conv1D(filters=32,
                   kernel_size=5,
                   padding='same',
                   activation='relu'))
        model.add(TimeDistributed(Dense(1, activation='sigmoid')))
        model.add(AveragePooling1D(pool_size=6))
        model.add(Flatten())
        ntwk = model
        lenv = 96

    else:
        raise ValueError("unsupported typem: %r" % (typem,))
    ntwk.summary()
    return ntwk, lenv
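Example 3 also returns the expected window length, so a caller can shape its input to match. The usage sketch below is illustrative only; the sample counts, the random data and the training settings are not taken from the original project.

import numpy as np

# Illustrative call: with typem=1 the network is compiled with MSE/Adam inside
# model(), so fit() can be used directly; init is 1 because base=False.
ntwk, lenv = model(typem=1, window_length=200, base=False, activation="linear")

n_samples, n_features = 1000, 1
X_train = np.random.rand(n_samples, lenv, n_features).astype(np.float32)
y_train = np.random.rand(n_samples, 1).astype(np.float32)

ntwk.fit(X_train, y_train, epochs=2, batch_size=50, validation_split=0.1)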