Code Example #1
File: alt2train.py  Project: quwei913/ASSC
              kernel_constraint=max_norm(params['maxnorm']),
              use_bias=True)(x)

    model = Model(top_model.input, x)
    model.summary()
    if load_path:
        model.load_weights(filepath=load_path, by_name=False)
    model_json = model.to_json()
    with open(
            os.path.join(model_dir, log_name, 'model.json').replace('\\', '/'),
            "w") as json_file:
        json_file.write(model_json)

    ################### ADAM COMPILATION ##############
    model.compile(optimizer=opt(lr=params['lr'],
                                epsilon=None,
                                decay=params['lr_decay']),
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])  # Model compilation. Textbook behavior, at last.
    ##################################################

    ################# SGD COMPILATION ################

    #sgd = optimizers.SGD(lr=params['lr'], decay=params['lr_decay'], momentum=0.9, nesterov=True)
    #model.compile(optimizer= sgd, loss= 'categorical_crossentropy', metrics=['accuracy'] )
    ##################################################

    print("model compilation: Done")
    modelcheckpnt = ModelCheckpoint(filepath=checkpoint_name,
                                    monitor='val_acc',
Code Example #2
    def objective(args, params=params):

        from keras.losses import categorical_crossentropy
        # for i in range(len(args)):
        #     print(i)
        #for each in args:
        #    print(each)
        log_name = "hyperopt" + '_' + str(datetime.now()).replace(':', '-')
        model_dir = os.path.join(os.getcwd(), '..',
                                 'models').replace('\\', '/')
        fold_dir = os.path.join(os.getcwd(), '..', 'data').replace('\\', '/')
        log_dir = os.path.join(os.getcwd(), '..', 'logs').replace('\\', '/')

        if not os.path.exists(
                os.path.join(model_dir, log_name).replace('\\', '/')):
            new_dir = (os.path.join(model_dir, log_name).replace('\\', '/'))
            print(new_dir)
            os.makedirs(new_dir)
        if not os.path.exists(
                os.path.join(log_dir, log_name).replace('\\', '/')):
            new_dir = os.path.join(log_dir, log_name).replace('\\', '/')
            print(new_dir)
            os.makedirs(new_dir)

        checkpoint_name = os.path.join(
            model_dir, log_name,
            'weights.{epoch:04d}-{val_clf_acc:.4f}.hdf5').replace('\\', '/')

        results_file = os.path.join(os.getcwd(), '..',
                                    'results.csv').replace('\\', '/')

        #params['dropout_rate'] = args[1]
        #params['lr'] = args[2]
        #params['hp_lambda'] = args[0]

        # params = {
        #     'dropout_rate': args[1],  # 0.45, #.5
        #     'lr': args[2],  # .0001
        #     'hp_lambda': args[0],
        # }
        current_learning_rate = params['lr']

        K.clear_session()
        top_model = eegnet(**params)
        x = Flatten()(top_model.output)

        clf = Dense(
            params['num_classes'],
            activation='softmax',
            kernel_initializer=initializers.he_normal(seed=random_seed),
            name='clf',
            use_bias=True)(x)

        dann_in = GradientReversal(hp_lambda=params['hp_lambda'])(x)
        dsc = Dense(
            1,
            activation='sigmoid',
            kernel_initializer=initializers.he_normal(seed=random_seed),
            name='dsc',
            use_bias=True)(dann_in)

        model = Model(top_model.input, [clf, dsc])

        model.compile(optimizer=opt(lr=params['lr'],
                                    epsilon=None,
                                    decay=params['lr_decay']),
                      loss={
                          'clf': 'categorical_crossentropy',
                          'dsc': 'binary_crossentropy'
                      },
                      metrics=['accuracy'])

        modelcheckpnt = ModelCheckpoint(filepath=checkpoint_name,
                                        monitor='val_clf_acc',
                                        save_best_only=True,
                                        mode='max')
        print("model Checkpoints: Loaded")
        tensdir = log_dir + "/" + "hyperopt-{}".format(args) + "/"
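        # Convert the TensorBoard log path to Windows-style backslashes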
        tensdir = tensdir.replace('/', "\\")
        tensbd = TensorBoard(
            log_dir=tensdir,
            batch_size=batch_size,
            write_grads=True,
        )
        print("Tensorboard initialization: Done")
        patlogDirectory = log_dir + '/' + log_name + '/'
        trainingCSVdirectory = log_dir + '/' + log_name + '/' + 'training.csv'
        csv_logger = CSVLogger(trainingCSVdirectory)
        print("csv logger: Activated")
        # if args.classweights:
        #     params['class_weight'] = compute_weight(dummytrainY, np.unique(dummytrainY))
        # else:
        #     params['class_weight'] = dict(zip(np.r_[0:params['num_classes']], np.ones(params['num_classes'])))

        print("model dot fit: Started")

        def step_decay(global_epoch_counter):
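            # As written, this returns a constant rate; the staircase decay below is disabled.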
            lrate = params['lr']
            # if global_epoch_counter > 10:
            #     lrate = params['lr'] / 10
            #     if global_epoch_counter > 20:
            #         lrate = params['lr'] / 100
            # if global_epoch_counter>30:
            #     lrate=params['lr']/1000
            return lrate

        lrate = LearningRateScheduler(step_decay)

        params['gamma'] = args

        def f_hp_decay(global_epoch_counter=global_epoch_counter,
                       params=params):

            print("global_epoch_counter")
            print(global_epoch_counter)

            gamma = params['gamma']
            p = (global_epoch_counter - 1) / params['epochs']
            hp_lambda = (2 / (1 + (math.e**(-gamma * p)))) - 1
            #hp_lambda = hp_lambda * (params['hp_decay_const'] ** global_epoch_counter)
            params['hp_lambda'] = hp_lambda
            return hp_lambda

        hprate = hpRateScheduler(f_hp_decay, params)

        try:

            datagen = BalancedAudioDataGenerator()

            flow = datagen.flow(trainX, [trainY, trainDom],
                                target_label=1,
                                batch_size=params['batch_size'],
                                shuffle=True,
                                seed=params['random_seed'])
            model.fit_generator(
                flow,
                steps_per_epoch=len(trainDom[trainDom == 0]) //
                flow.chunk_size,
                #steps_per_epoch=4,
                epochs=params['epochs'],
                validation_data=(valX, [valY, valDom]),
                callbacks=[
                    modelcheckpnt,
                    log_metrics(valX, [valY, valDom], pat_val, patlogDirectory,
                                global_epoch_counter, params),
                    csv_logger,
                    tensbd,
                    lrate,
                    hprate,
                ],
            )

        except KeyboardInterrupt:
            print("Keyboard Interrupt")
            results_log(results_file=results_file,
                        log_dir=log_dir,
                        log_name=log_name,
                        params=params)

        y_pred = model.predict(valX)[1]
        print("y pred worked")
        loss = K.eval(
            K.mean(
                categorical_crossentropy(K.variable(valDom),
                                         K.variable(y_pred))))
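        # Negated: hyperopt minimizes the objective, so larger domain loss scores better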
        loss = -loss
        print(params)
        # params['']
        return loss
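For reference, the schedule computed in f_hp_decay above matches the gradient-reversal warm-up used in DANN (Ganin & Lempitsky, 2015): lambda = 2 / (1 + e^(-gamma * p)) - 1, where p is training progress in [0, 1]. A minimal standalone sketch of the curve (the helper name hp_lambda_schedule and the values epochs=50, gamma=10.0 are illustrative, not the project's tuned settings):

    import math

    def hp_lambda_schedule(epoch, epochs=50, gamma=10.0):
        # p grows from 0 toward 1 over training; lambda ramps from 0 toward 1
        p = (epoch - 1) / epochs
        return (2.0 / (1.0 + math.e ** (-gamma * p))) - 1.0

    for epoch in (1, 10, 25, 50):
        print(epoch, round(hp_lambda_schedule(epoch), 3))

Starting lambda near zero lets the feature extractor settle on the classification task before domain-adversarial pressure is applied.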
Code Example #3
              use_bias=True)(x)

    #model = Model(top_model.Input, x)  # Here two models are joined: the top model, and the part after the following Dense layer.
    model = Model(inputs=top_model.inputs, outputs=x)
    model.summary()  # Model summary
    if load_path:  # If path for loading model was specified
        model.load_weights(filepath=load_path, by_name=False)
    plot_model(model, to_file='model.png',
               show_shapes=True)  # Render the model to an image file
    model_json = model.to_json()  # Write the model to JSON, keeping a copy as a precaution
    with open(
            os.path.join(model_dir, log_name, 'model.json').replace('\\', '/'),
            "w") as json_file:
        json_file.write(model_json)
    model.compile(optimizer=opt(**params),
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])  # Model compilation. Textbook behavior, at last.

    ####### Define Callbacks #######

    ### Save model checkpoints based on validation accuracy #########
    modelcheckpnt = ModelCheckpoint(filepath=checkpoint_name,
                                    monitor='val_acc',
                                    save_best_only=False,
                                    mode='max')
    ### Instantiate TensorBoard ######
    tensbd = TensorBoard(
        log_dir=os.path.join(log_dir, log_name),
        batch_size=batch_size,
Code Example #4
File: heartnet_v3.py  Project: mHealthBuet/heartnet
def heartnet(load_path,
             activation_function='relu',
             bn_momentum=0.99,
             bias=False,
             dropout_rate=0.5,
             dropout_rate_dense=0.0,
             eps=1.1e-5,
             kernel_size=5,
             l2_reg=0.0,
             l2_reg_dense=0.0,
             lr=0.0012843784,
             lr_decay=0.0001132885,
             maxnorm=10000.,
             padding='valid',
             random_seed=1,
             subsam=2,
             num_filt=(8, 4),
             num_dense=20,
             FIR_train=False,
             trainable=True,
             type=1):

    input = Input(shape=(2500, 1))

    coeff_path = '/media/taufiq/Data/heart_sound/feature/filterbankcoeff60.mat'
    coeff = tables.open_file(coeff_path)
    b1 = coeff.root.b1[:]
    b1 = np.hstack(b1)
    b1 = np.reshape(b1, [b1.shape[0], 1, 1])

    b2 = coeff.root.b2[:]
    b2 = np.hstack(b2)
    b2 = np.reshape(b2, [b2.shape[0], 1, 1])

    b3 = coeff.root.b3[:]
    b3 = np.hstack(b3)
    b3 = np.reshape(b3, [b3.shape[0], 1, 1])

    b4 = coeff.root.b4[:]
    b4 = np.hstack(b4)
    b4 = np.reshape(b4, [b4.shape[0], 1, 1])

    input1 = Conv1D_linearphaseType(
        1,
        60,
        use_bias=False,
        # kernel_initializer=initializers.he_normal(random_seed),
        weights=[b1[31:]],
        padding='same',
        trainable=FIR_train,
        type=type)(input)
    input2 = Conv1D_linearphaseType(
        1,
        60,
        use_bias=False,
        # kernel_initializer=initializers.he_normal(random_seed),
        weights=[b2[31:]],
        padding='same',
        trainable=FIR_train,
        type=type)(input)
    input3 = Conv1D_linearphaseType(
        1,
        60,
        use_bias=False,
        # kernel_initializer=initializers.he_normal(random_seed),
        weights=[b3[31:]],
        padding='same',
        trainable=FIR_train,
        type=type)(input)
    input4 = Conv1D_linearphaseType(
        1,
        60,
        use_bias=False,
        # kernel_initializer=initializers.he_normal(random_seed),
        weights=[b4[31:]],
        padding='same',
        trainable=FIR_train,
        type=type)(input)

    #Conv1D_gammatone

    # input1 = Conv1D_gammatone(kernel_size=81,filters=1,fsHz=1000,use_bias=False,padding='same')(input)
    # input2 = Conv1D_gammatone(kernel_size=81,filters=1,fsHz=1000,use_bias=False,padding='same')(input)
    # input3 = Conv1D_gammatone(kernel_size=81,filters=1,fsHz=1000,use_bias=False,padding='same')(input)
    # input4 = Conv1D_gammatone(kernel_size=81,filters=1,fsHz=1000,use_bias=False,padding='same')(input)

    t1 = branch(input1, num_filt, kernel_size, random_seed, padding, bias,
                maxnorm, l2_reg, eps, bn_momentum, activation_function,
                dropout_rate, subsam, trainable)
    t2 = branch(input2, num_filt, kernel_size, random_seed, padding, bias,
                maxnorm, l2_reg, eps, bn_momentum, activation_function,
                dropout_rate, subsam, trainable)
    t3 = branch(input3, num_filt, kernel_size, random_seed, padding, bias,
                maxnorm, l2_reg, eps, bn_momentum, activation_function,
                dropout_rate, subsam, trainable)
    t4 = branch(input4, num_filt, kernel_size, random_seed, padding, bias,
                maxnorm, l2_reg, eps, bn_momentum, activation_function,
                dropout_rate, subsam, trainable)

    merged = Concatenate(axis=-1)([t1, t2, t3, t4])
    print(kernel_size)
    merged = DenseNet(merged,
                      depth=7,
                      nb_dense_block=4,
                      growth_rate=4,
                      kernel_size=kernel_size,
                      nb_filter=16,
                      dropout_rate=dropout_rate)
    # merged = DCT1D()(merged)
    merged = Flatten()(merged)
    merged = Dense(num_dense,
                   activation=activation_function,
                   kernel_initializer=initializers.he_normal(seed=random_seed),
                   use_bias=bias,
                   kernel_constraint=max_norm(maxnorm),
                   kernel_regularizer=l2(l2_reg_dense))(merged)
    # merged = BatchNormalization(epsilon=eps,momentum=bn_momentum,axis=-1) (merged)
    # merged = Activation(activation_function)(merged)
    merged = Dropout(rate=dropout_rate_dense, seed=random_seed)(merged)
    merged = Dense(2, activation='softmax')(merged)

    model = Model(inputs=input, outputs=merged)

    if load_path:  # If path for loading model was specified
        model.load_weights(filepath=load_path, by_name=False)

    optimizer = opt(lr=lr, decay=lr_decay)
    model.compile(optimizer=optimizer,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    return model
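A hypothetical call, for orientation (load_path=None builds the network from scratch; the other values shown are the function's own defaults; note the hard-coded coeff_path means this only runs where filterbankcoeff60.mat exists):

    model = heartnet(load_path=None,   # no pretrained weights to restore
                     kernel_size=5,
                     num_filt=(8, 4),
                     FIR_train=False)  # keep the FIR front-end filters frozen
    model.summary()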
Code Example #5
File: alttrain.py  Project: sushmit0109/ASSC
        top_model.input, x
    )  # Here two models are joined: the top model, and the part after the following Dense layer.

    # model = Model(inputs=EEG_input, outputs=x)

    model.summary()  # Model summary
    if load_path:  # If path for loading model was specified
        model.load_weights(filepath=load_path, by_name=False)
    #plot_model(model, to_file='model.png', show_shapes=True)  # Render the model to an image file
    model_json = model.to_json()  # Write the model to JSON, keeping a copy as a precaution
    with open(
            os.path.join(model_dir, log_name, 'model.json').replace('\\', '/'),
            "w") as json_file:
        json_file.write(model_json)
    model.compile(optimizer=opt(lr=0.001, epsilon=None, decay=0.0),
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])  # Model compilation. Textbook behavior, at last.
    print("model compilation: Done")
    ####### Define Callbacks #######

    ### Save model checkpoints based on validation accuracy #########
    modelcheckpnt = ModelCheckpoint(filepath=checkpoint_name,
                                    monitor='val_acc',
                                    save_best_only=False,
                                    mode='max')
    print("model Checkpoints: Loaded")

    ### Instantiate TensorBoard ######
Code Example #6
                name='dsc',
                use_bias=True)(dann_in)

    model = Model(top_model.input, [clf, dsc])
    # model.summary()
    if load_path:
        model.load_weights(filepath=load_path, by_name=False)
    model_json = model.to_json()
    with open(
            os.path.join(model_dir, log_name, 'model.json').replace('\\', '/'),
            "w") as json_file:
        json_file.write(model_json)

    ################### ADAM COMPILATION ##############
    model.compile(
        optimizer=opt(lr=params['lr'], epsilon=None, decay=params['lr_decay']),
        loss={
            'clf': 'categorical_crossentropy',
            'dsc': 'binary_crossentropy'
        },
        metrics=['accuracy']
        # loss_weights=[1,.5], ### Weighting the classifier loss by 1 and discriminator loss by .5
    )  # Model compilation. Textbook behavior, at last.
    ##################################################

    ################# SGD COMPILATION ################

    #sgd = optimizers.SGD(lr=params['lr'], decay=params['lr_decay'], momentum=0.9, nesterov=True)
    #model.compile(optimizer= sgd, loss= 'categorical_crossentropy', metrics=['accuracy'] )
    ##################################################
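The commented-out loss_weights line above shows how the two heads could be balanced. A minimal sketch of the same compile call with the weighting enabled (weight values taken from that comment; opt and params are assumed to be defined as in the example):

    model.compile(
        optimizer=opt(lr=params['lr'], epsilon=None, decay=params['lr_decay']),
        loss={'clf': 'categorical_crossentropy',
              'dsc': 'binary_crossentropy'},
        loss_weights={'clf': 1.0, 'dsc': 0.5},  # discriminator contributes half as much
        metrics=['accuracy'])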