Example #1
def test_pred1_batB_seql1_user_confmat(pred1_batB_seql1_cls3_preds_confmat):
    Yt, Yp, confmat = [
        pred1_batB_seql1_cls3_preds_confmat[k] for k in ['Yt', 'Yp', 'confmat']
    ]
    assert_almost_equal(nu.confusion_matrix_forcategorical(Yt, Yp), confmat)

    yt, yp = [pred1_batB_seql1_cls3_preds_confmat[k] for k in ['yt', 'yp']]
    nclasses = Yt.shape[-1]
    assert_almost_equal(nu.confusion_matrix(yt, yp, nclasses), confmat)

    assert True
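
The `nu` helpers used above are project-specific. As a point of reference, a
confusion matrix over categorical (one-hot) arrays could be computed with plain
NumPy as in the sketch below; this illustrates the assumed behavior (rows =
true class, columns = predicted class), not the actual implementation of
`nu.confusion_matrix_forcategorical`.

import numpy as np

def confmat_forcategorical(Yt, Yp):
    # Yt, Yp: one-hot arrays of shape (..., nclasses).
    nclasses = Yt.shape[-1]
    yt = Yt.reshape(-1, nclasses).argmax(axis=-1)
    yp = Yp.reshape(-1, nclasses).argmax(axis=-1)
    conf = np.zeros((nclasses, nclasses), dtype=np.int64)
    np.add.at(conf, (yt, yp), 1)  # count (true, predicted) pairs
    return conf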
Example #2
def test_predP_batB_seqlQ_user_confmat(predP_batB_seqlQ_cls3_preds_confmat):
    Yt, Yp, confmat = [
        predP_batB_seqlQ_cls3_preds_confmat[k] for k in ['Yt', 'Yp', 'confmat']
    ]
    print("\nTEST", Yt.shape, Yp.shape, confmat.shape)
    print()
    assert_almost_equal(nu.confusion_matrix_forcategorical(Yt, Yp), confmat)

    yt, yp = [predP_batB_seqlQ_cls3_preds_confmat[k] for k in ['yt', 'yp']]
    nclasses = Yt.shape[-1]
    print(yt.shape, yp.shape)
    assert_almost_equal(nu.confusion_matrix(yt, yp, nclasses), confmat)

    assert True
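
The fixture name appears to encode the tensor shapes: P predictions, batch
size B, sequence length Q, 3 classes. Under that reading, Yt and Yp are
one-hot with shape (P, B, Q, 3) and yt, yp are the matching class-index
arrays. A hypothetical construction (the concrete sizes are made up):

import numpy as np

rng = np.random.default_rng(0)
yt = rng.integers(0, 3, size=(4, 2, 5))  # class indices, shape (P, B, Q)
Yt = np.eye(3)[yt]                       # one-hot, shape (P, B, Q, 3)
assert (Yt.argmax(axis=-1) == yt).all()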
Example #3
def test_pred1_batB_seqlQ_generic_confmat(pred1_batB_seqlQ_cls3_preds_confmat):
    Yt, Yp, confmat = [
        pred1_batB_seqlQ_cls3_preds_confmat[k] for k in ['Ytg', 'Ypg', 'confmatg']
    ]
    print("\nTEST", Yt.shape, Yp.shape, confmat.shape)
    print()
    assert_almost_equal(
        nu.confusion_matrix_forcategorical(Yt, Yp, keepdims=True), confmat
    )

    yt, yp = [pred1_batB_seqlQ_cls3_preds_confmat[k] for k in ['ytg', 'ypg']]
    nclasses = Yt.shape[-1]
    print(yt.shape, yp.shape)
    assert_almost_equal(nu.confusion_matrix(yt, yp, nclasses, keepdims=True), confmat)

    assert True
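
A hedged reading of this "generic" variant: with keepdims=True the confusion
matrix presumably keeps the leading prediction/batch/sequence axes, yielding
one (nclasses, nclasses) matrix per step instead of a single aggregated
matrix, and the 'g' suffix on the fixture keys ('Ytg', 'Ypg', 'confmatg')
marks that shape-preserving path. The test only asserts that the categorical
and index-based helpers agree with the fixture's expected result.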
Example #4
def predict_on_inputs_provider(  # pylint: disable=too-many-locals,too-many-statements
        model, inputs_provider, export_to, init, tran):
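    """Predict on chunks from inputs_provider, aggregate per labelpath, and
    export raw and viterbi-smoothed confusion matrices to the HDF5 file at
    export_to; init and tran parameterize the viterbi smoothing."""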
    def _save(paths, datas):
        with hFile(export_to, 'a') as f:
            for path, data in zip(paths, datas):
                if path not in f.keys():
                    f.create_dataset(path,
                                     data=data,
                                     compression='lzf',
                                     fletcher32=True)

            f.flush()

    currn = None
    ctrue = []
    cpred = []

    tot_conf = None
    tot_conf_vp = None
    for xy, (_, chunking) in inputs_provider.flow(
            indefinitely=False,
            only_labels=False,
            with_chunking=True,
    ):

        ctrue.append(xy[1])
        cpred.append(model.predict_on_batch(xy[0]))

        if currn is None:
            currn = chunking.labelpath
            continue

        if chunking.labelpath != currn:
            t = np.concatenate(ctrue[:-1])
            p = np.concatenate(cpred[:-1])

            if sub != 'keepzero':  # 'sub' comes from activity_name, outside this function
                # where the true label is class 0, force the prediction to class 0
                z = t[:, 0].astype(bool)
                p[z, 0] = 1.
                p[z, 1:] = 0.

            # raw confusion
            conf = nu.confusion_matrix_forcategorical(
                t, nu.to_categorical(p.argmax(axis=-1), nclasses=t.shape[-1]))

            # viterbi decoded - no scaling
            vp = lu.viterbi_smoothing(p, init, tran)
            conf_vp = nu.confusion_matrix_forcategorical(
                t, nu.to_categorical(vp, nclasses=t.shape[-1]))

            _save(
                paths=["{}/{}".format(_p, currn) for _p in ('raw', 'viterbi')],
                datas=[conf, conf_vp],
            )

            print(currn, end=' ')
            nu.print_prec_rec(*nu.normalize_confusion_matrix(conf),
                              onlydiag=True)

            if tot_conf is None:
                tot_conf = conf
                tot_conf_vp = conf_vp
            else:
                tot_conf += conf
                tot_conf_vp += conf_vp

            currn = chunking.labelpath
            ctrue = ctrue[-1:]
            cpred = cpred[-1:]

    # last chunking
    t = np.concatenate(ctrue)
    p = np.concatenate(cpred)

    if sub != 'keepzero':  # 'sub' comes from activity_name, outside this function
        # where the true label is class 0, force the prediction to class 0
        z = t[:, 0].astype(bool)
        p[z, 0] = 1.
        p[z, 1:] = 0.

    conf = nu.confusion_matrix_forcategorical(
        t, nu.to_categorical(p.argmax(axis=-1), nclasses=t.shape[-1]))

    vp = lu.viterbi_smoothing(p, init, tran)
    conf_vp = nu.confusion_matrix_forcategorical(
        t, nu.to_categorical(vp, nclasses=t.shape[-1]))

    _save(
        paths=["{}/{}".format(_p, currn) for _p in ('raw', 'viterbi')],
        datas=[conf, conf_vp],
    )

    print(currn, end=' ')
    nu.print_prec_rec(*nu.normalize_confusion_matrix(conf), onlydiag=True)

    if tot_conf is None:  # only one labelpath was seen in the entire flow
        tot_conf, tot_conf_vp = conf, conf_vp
    else:
        tot_conf += conf
        tot_conf_vp += conf_vp

    # print out total-statistics
    _save(
        paths=["{}/{}".format(_p, 'final') for _p in ('raw', 'viterbi')],
        datas=[tot_conf, tot_conf_vp],
    )

    print("\nFINAL - RAW", end=' ')
    nu.print_prec_rec(*nu.normalize_confusion_matrix(tot_conf), onlydiag=False)

    print("\nFINAL - VITERBI", end=' ')
    nu.print_prec_rec(*nu.normalize_confusion_matrix(tot_conf_vp),
                      onlydiag=False)
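
lu.viterbi_smoothing is a project-specific helper. For reference, a minimal
log-space Viterbi decoder over framewise class posteriors could look like the
sketch below; the interface (posteriors of shape (T, C), initial distribution
init of shape (C,), transition matrix tran of shape (C, C), returning a (T,)
path of class indices) is an assumption, not the project's implementation.

import numpy as np

def viterbi_decode(posteriors, init, tran):
    eps = 1e-12  # avoid log(0)
    logp = np.log(posteriors + eps)  # (T, C) framewise log-scores
    logi = np.log(init + eps)        # (C,) initial log-probabilities
    logt = np.log(tran + eps)        # (C, C) transition log-probabilities

    T, C = logp.shape
    delta = np.empty((T, C))                  # best log-score ending in each state
    back = np.zeros((T, C), dtype=np.int64)   # backpointers

    delta[0] = logi + logp[0]
    for t in range(1, T):
        scores = delta[t - 1][:, None] + logt  # (from, to) transition scores
        back[t] = scores.argmax(axis=0)
        delta[t] = scores.max(axis=0) + logp[t]

    path = np.empty(T, dtype=np.int64)
    path[-1] = delta[-1].argmax()
    for t in range(T - 2, -1, -1):  # backtrack the best path
        path[t] = back[t + 1, path[t + 1]]
    return path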
Example #5
def predict_on_inputs_provider(model, inputs_provider, subsampling,
                               export_to_dir, init, tran, priors):
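    """Predict on chunks from inputs_provider, aggregate per labelpath, and
    export raw, viterbi, and prior-scaled-viterbi confusion matrices to
    confs.h5 under export_to_dir."""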
    export_to = os.path.join(export_to_dir, "confs.h5")

    def _save(paths, datas):
        with hFile(export_to, 'a') as f:
            for path, data in zip(paths, datas):
                if path not in f.keys():
                    f.create_dataset(path,
                                     data=data,
                                     compression='lzf',
                                     fletcher32=True)

            f.flush()

    currn = None
    ctrue = []
    cpred = []

    tot_conf = None
    tot_conf_vp = None
    tot_conf_svp = None
    for xy, (_, chunking) in inputs_provider.flow(
            indefinitely=False,
            only_labels=False,
            with_chunking=True,
    ):

        ctrue.append(xy[1])
        cpred.append(model.predict_on_batch(xy[0]))

        if currn is None:
            currn = chunking.labelpath
            continue

        if chunking.labelpath != currn:
            t = np.concatenate(ctrue[:-1])
            p = np.concatenate(cpred[:-1])

            if subsampling != 'nosub':
                # where the true label is class 0, force the prediction to class 0
                z = t[:, 0].astype(bool)
                p[z, 0] = 1.
                p[z, 1:] = 0.

            # raw confusion
            conf = nu.confusion_matrix_forcategorical(
                t, nu.to_categorical(p.argmax(axis=-1), nclasses=t.shape[-1]))

            # viterbi decoded - no scaling
            vp = viterbi(p, init, tran, priors=None)
            conf_vp = nu.confusion_matrix_forcategorical(
                t, nu.to_categorical(vp, nclasses=t.shape[-1]))

            # viterbi decoded - scaling
            vp = viterbi(p, init, tran, priors=priors)
            conf_svp = nu.confusion_matrix_forcategorical(
                t, nu.to_categorical(vp, nclasses=t.shape[-1]))

            _save(
                paths=[
                    "{}/{}".format(_p, currn)
                    for _p in ('raw', 'viterbi', 'sviterbi')
                ],
                datas=[conf, conf_vp, conf_svp],
            )

            print(currn, end=' ')
            nu.print_prec_rec(*nu.normalize_confusion_matrix(conf),
                              onlydiag=True)

            if tot_conf is None:
                tot_conf = conf
                tot_conf_vp = conf_vp
                tot_conf_svp = conf_svp
            else:
                tot_conf += conf
                tot_conf_vp += conf_vp
                tot_conf_svp += conf_svp

            currn = chunking.labelpath
            ctrue = ctrue[-1:]
            cpred = cpred[-1:]

    # last chunking (mirrors the per-labelpath block above; without this, the
    # final labelpath's accumulated chunks would be dropped)
    t = np.concatenate(ctrue)
    p = np.concatenate(cpred)

    if subsampling != 'nosub':
        # where the true label is class 0, force the prediction to class 0
        z = t[:, 0].astype(bool)
        p[z, 0] = 1.
        p[z, 1:] = 0.

    conf = nu.confusion_matrix_forcategorical(
        t, nu.to_categorical(p.argmax(axis=-1), nclasses=t.shape[-1]))

    vp = viterbi(p, init, tran, priors=None)
    conf_vp = nu.confusion_matrix_forcategorical(
        t, nu.to_categorical(vp, nclasses=t.shape[-1]))

    vp = viterbi(p, init, tran, priors=priors)
    conf_svp = nu.confusion_matrix_forcategorical(
        t, nu.to_categorical(vp, nclasses=t.shape[-1]))

    _save(
        paths=[
            "{}/{}".format(_p, currn)
            for _p in ('raw', 'viterbi', 'sviterbi')
        ],
        datas=[conf, conf_vp, conf_svp],
    )

    print(currn, end=' ')
    nu.print_prec_rec(*nu.normalize_confusion_matrix(conf), onlydiag=True)

    if tot_conf is None:  # only one labelpath was seen in the entire flow
        tot_conf, tot_conf_vp, tot_conf_svp = conf, conf_vp, conf_svp
    else:
        tot_conf += conf
        tot_conf_vp += conf_vp
        tot_conf_svp += conf_svp

    # totals across all labelpaths
    _save(
        paths=[
            "{}/{}".format(_p, 'final')
            for _p in ('raw', 'viterbi', 'sviterbi')
        ],
        datas=[tot_conf, tot_conf_vp, tot_conf_svp],
    )

    print("\nFINAL - RAW", end=' ')
    nu.print_prec_rec(*nu.normalize_confusion_matrix(tot_conf), onlydiag=False)

    print("\nFINAL - VITERBI", end=' ')
    nu.print_prec_rec(*nu.normalize_confusion_matrix(tot_conf_vp),
                      onlydiag=False)

    print("\nFINAL - VITERBI - SCALED", end=' ')
    nu.print_prec_rec(*nu.normalize_confusion_matrix(tot_conf_svp),
                      onlydiag=False)
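
The only change from the previous variant is the third, prior-scaled decode.
In hybrid-HMM practice, network posteriors are divided by class priors to get
scaled likelihoods before decoding; if this project's viterbi follows that
convention, passing priors would act roughly like the hypothetical sketch:

scaled = p / (priors + 1e-12)  # hypothetical: posteriors -> pseudo-likelihoods
path = viterbi(scaled, init, tran, priors=None)

This is an assumption about viterbi's behavior (it may scale in the log
domain instead); the actual function is not shown here.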