def main():
    import pandas as pd
    '''
    Set up the command-line arguments accepted at run time.
    '''
    ps = support.get_base_args()
    ps.add_argument('--metric', '-m', default='', help='Metric function')
    ps.add_argument('--leaf', '-l', default='', help='Leaf class')
    args = ps.parse_args()

    df = pd.read_csv(args.input,
                     sep=args.separator,
                     header=args.header,
                     index_col=args.indexcol)
    x = df[df.columns[:-1]].values

    ## Decide which functions to use based on the command-line arguments
    ## metric
    if args.metric == 'div':
        mt = entropy.deviation_org
    elif args.metric == 'infgain':
        mt = entropy.infgain
    elif args.metric == 'gini':
        mt = entropy.gini
    else:
        mt = None

    ##leaf
    if args.leaf == 'zeror':
        lf = ZeroRule
    elif args.leaf == 'linear':
        lf = Linear
    else:
        lf = None

    ## Regression or classification
    if not args.regression:
        ## Classification
        y, clz = support.clz_to_prob(df[df.columns[-1]])
        if mt is None:
            mt = entropy.gini
        if lf is None:
            lf = ZeroRule
        plf = DecisionStump(metric=mt, leaf=lf)
        support.report_classifier(plf, x, y, clz, args.crossvalidate)

    else:
        y = df[df.columns[-1]].values.reshape((-1, 1))
        if mt is None:
            mt = entropy.deviation_org
        if lf is None:
            lf = Linear
        plf = DecisionStump(metric=mt, leaf=lf)
        support.report_regressor(plf, x, y, args.crossvalidate)
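The classification branch above depends on support.clz_to_prob to turn the label column into per-class probability rows before the DecisionStump is fit. That helper is not part of this listing; the following is only a minimal sketch of what such a conversion could look like (the name clz_to_prob_sketch and the one-hot encoding are assumptions, not the actual support module).

import numpy as np

def clz_to_prob_sketch(labels):
    # Hypothetical stand-in for support.clz_to_prob: encode each label as a
    # one-hot probability row and also return the ordered list of classes.
    clz = sorted(set(labels))
    y = np.zeros((len(labels), len(clz)))
    for i, label in enumerate(labels):
        y[i, clz.index(label)] = 1.0
    return y, clz

# Example: clz_to_prob_sketch(['a', 'b', 'a'])
# -> (array([[1., 0.], [0., 1.], [1., 0.]]), ['a', 'b'])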
def main():
    import pandas as pd
    ps = support.get_base_args()
    ps.add_argument('--metric', '-m', default='', help='Metric function')
    ps.add_argument('--leaf', '-l', default='', help='Leaf class')
    ps.add_argument('--depth', '-d', type=int, default=5, help='Max Tree Depth')
    args = ps.parse_args()

    df = pd.read_csv(args.input, sep=args.separator, header=args.header, index_col=args.indexcol)
    x = df[df.columns[:-1]].values

    ## Specify the splitting (branching) metric function
    if args.metric == 'div':
        mt = entropy.deviation_org
    elif args.metric == 'infgain':
        mt = entropy.infgain
    elif args.metric == 'gini':
        mt = entropy.gini
    else:
        mt = None

    ## Specify the leaf function
    if args.leaf == 'zeror':
        lf = ZeroRule
    elif args.leaf == 'linear':
        lf = Linear
    else:
        lf = None

    ## Regression or classification
    if not args.regression:
        y, clz = support.clz_to_prob(df[df.columns[-1]])
        if mt is None:
            mt = entropy.gini
        if lf is None:
            lf = ZeroRule
        plf = DecisionTree(metric=mt, leaf=lf, max_depth=args.depth)
        support.report_classifier(plf, x, y, clz, args.crossvalidate)

    else:
        y = df[df.columns[-1]].values.reshape((-1, 1))
        if mt is None:
            mt = entropy.deviation_org
        if lf is None:
            lf = Linear
        plf = DecisionTree(metric=mt, leaf=lf, max_depth=args.depth)
        support.report_regressor(plf, x, y, args.crossvalidate)
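Every example starts from support.get_base_args() and later reads args.input, args.separator, args.header, args.indexcol, args.regression and args.crossvalidate. The real helper is not reproduced here; the sketch below shows one plausible parser exposing those attributes (the flags, short options and defaults are guesses, not the actual support module).

import argparse

def get_base_args_sketch():
    # Hypothetical equivalent of support.get_base_args(); flags and defaults
    # are assumptions inferred from how args is used in the examples above.
    ps = argparse.ArgumentParser()
    ps.add_argument('--input', '-i', default='', help='Input CSV file')
    ps.add_argument('--separator', '-s', default=',', help='Column separator')
    ps.add_argument('--header', type=int, default=None, help='Header row index')
    ps.add_argument('--indexcol', type=int, default=None, help='Index column')
    ps.add_argument('--regression', '-r', action='store_true', help='Treat the task as regression')
    ps.add_argument('--crossvalidate', '-c', action='store_true', help='Score with cross-validation')
    return ps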
def main():
    import pandas as pd
    ps = support.get_base_args()
    ps.add_argument('--epochs',
                    '-p',
                    type=int,
                    default=20,
                    help='Num of Epochs')
    ps.add_argument('--learningrate',
                    '-l',
                    type=float,
                    default=0.01,
                    help='Learning rate')
    ps.add_argument('--earlystop',
                    '-a',
                    action='store_true',
                    help='Early stopping')
    ps.add_argument('--stoppingvalue',
                    '-v',
                    type=float,
                    default=0.01,
                    help='Early stopping value')
    args = ps.parse_args()

    df = pd.read_csv(args.input,
                     sep=args.separator,
                     header=args.header,
                     index_col=args.indexcol)
    x = df[df.columns[:-1]].values

    if not args.regression:
        print('Not supported')
    else:
        y = df[df.columns[-1]].values.reshape((-1, 1))
        if args.earlystop:
            plf = Linear(epochs=args.epochs,
                         lr=args.learningrate,
                         earlystop=args.stoppingvalue)
        else:
            plf = Linear(epochs=args.epochs, lr=args.learningrate)
        support.report_regressor(plf, x, y, args.crossvalidate)
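The Linear model above is configured with epochs, lr and, when --earlystop is given, an earlystop threshold. Its implementation is not included in this listing; the sketch below illustrates one plausible reading of those parameters, plain gradient-descent linear regression that stops once the epoch-to-epoch improvement in mean squared error falls below the threshold (the class name, attributes and update rule are assumptions).

import numpy as np

class LinearSketch:
    # Hypothetical stand-in for the Linear model used above.
    def __init__(self, epochs=20, lr=0.01, earlystop=None):
        self.epochs = epochs
        self.lr = lr
        self.earlystop = earlystop  # stop when MSE improves by less than this

    def fit(self, x, y):
        n, d = x.shape
        self.w = np.zeros((d, 1))
        self.b = 0.0
        prev = None
        for _ in range(self.epochs):
            err = x.dot(self.w) + self.b - y       # residuals, shape (n, 1)
            self.w -= self.lr * x.T.dot(err) / n   # gradient-descent step for w
            self.b -= self.lr * err.mean()         # gradient-descent step for the bias
            mse = float((err ** 2).mean())
            if (self.earlystop is not None and prev is not None
                    and prev - mse < self.earlystop):
                break                               # improvement too small: stop early
            prev = mse
        return self

    def predict(self, x):
        return x.dot(self.w) + self.b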
def main():
    import pandas as pd
    ps = support.get_base_args()
    # Pruning-related options are added here; the code below expects the
    # parser to expose args.pruning, args.test, args.ratio, args.critical
    # and args.depth.
    args = ps.parse_args()

    df = pd.read_csv(args.input,
                     sep=args.separator,
                     header=args.header,
                     index_col=args.indexcol)
    x = df[df.columns[:-1]].values

    if not args.regression:
        y, clz = support.clz_to_prob(df[df.columns[-1]])
        mt = entropy.gini
        lf = ZeroRule
        plf = PrunedTree(prunfnc=args.pruning,
                         pruntest=args.test,
                         splitratio=args.ratio,
                         critical=args.critical,
                         metric=mt,
                         leaf=lf,
                         max_depth=args.depth)
        plf.fit(x, y)
        support.report_classifier(plf, x, y, clz, args.crossvalidate)
    else:
        y = df[df.columns[-1]].values.reshape((-1, 1))
        mt = entropy.deviation
        lf = Linear
        plf = PrunedTree(prunfnc=args.pruning,
                         pruntest=args.test,
                         splitratio=args.ratio,
                         critical=args.critical,
                         metric=mt,
                         leaf=lf,
                         max_depth=args.depth)
        plf.fit(x, y)
        support.report_regressor(plf, x, y, args.crossvalidate)
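PrunedTree receives a pruning function name (prunfnc), a flag for evaluating on held-out data (pruntest), a splitratio and a critical value, none of which are defined in this listing. The helper below only sketches the data split that reduced-error-style pruning implies, reserving part of the training rows so the grown tree can later be evaluated, and pruned, on data it was not fit on; the function name and behaviour are illustrative, and critical presumably serves as the threshold used by whichever pruning function is chosen.

import numpy as np

def split_for_pruning(x, y, splitratio=0.5, seed=0):
    # Illustrative helper (not from the original code): hold back a fraction
    # of the training data for evaluating candidate prunes on unseen rows.
    rng = np.random.default_rng(seed)
    idx = rng.permutation(len(x))
    n_fit = int(len(x) * splitratio)
    return x[idx[:n_fit]], y[idx[:n_fit]], x[idx[n_fit:]], y[idx[n_fit:]]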