Code example #1
def main():
    import pandas as pd
    # Set up the command-line arguments at run time
    ps = support.get_base_args()
    ps.add_argument('--metric', '-m', default='', help='Metric function')
    ps.add_argument('--leaf', '-l', default='', help='Leaf class')
    args = ps.parse_args()

    df = pd.read_csv(args.input,
                     sep=args.separator,
                     header=args.header,
                     index_col=args.indexcol)
    x = df[df.columns[:-1]].values

    ## Choose which functions to use based on the parsed arguments
    ##metric
    if args.metric == 'div':
        mt = entropy.deviation_org
    elif args.metric == 'infgain':
        mt = entropy.infgain
    elif args.metric == 'gini':
        mt = entropy.gini
    else:
        mt = None

    ##leaf
    if args.leaf == 'zeror':
        lf = ZeroRule
    elif args.leaf == 'linear':
        lf = Linear
    else:
        lf = None

    ## Regression or classification?
    if not args.regression:
        ## classification
        y, clz = support.clz_to_prob(df[df.columns[-1]])
        if mt is None:
            mt = entropy.gini
        if lf is None:
            lf = ZeroRule
        plf = DecisionStump(metric=mt, leaf=lf)
        support.report_classifer(plf, x, y, clz, args.crossvalidate)

    else:
        y = df[df.columns[-1]].values.reshape((-1, 1))
        if mt is None:
            mt = entropy.deviation_org
        if lf is None:
            lf = Linear
        plf = DecisionStump(metric=mt, leaf=lf)
        support.report_regressor(plf, x, y, args.crossvalidate)
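Every one of these examples builds on support.get_base_args() for the shared dataset options (input, separator, header, indexcol, regression, crossvalidate), which is not listed here. The sketch below is an assumption reconstructed only from the attributes the examples read; the real helper's flag names, short options and defaults may differ.

import argparse

def get_base_args():
    # Hypothetical reconstruction of support.get_base_args() -- not the original code.
    ps = argparse.ArgumentParser()
    ps.add_argument('--input', '-i', default='data.csv', help='Input CSV file')
    ps.add_argument('--separator', '-s', default=',', help='Column separator')
    ps.add_argument('--header', type=int, default=None, help='Header row index')
    ps.add_argument('--indexcol', type=int, default=None, help='Index column')
    ps.add_argument('--regression', '-r', action='store_true', help='Treat the task as regression')
    ps.add_argument('--crossvalidate', '-c', action='store_true', help='Evaluate with cross-validation')
    return ps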
Code example #2
def main():
    import pandas as pd
    ps = support.get_base_args()
    ps.add_argument('--metric', '-m', default='', help='Metric function')
    ps.add_argument('--leaf', '-l', default='', help='Leaf class')
    ps.add_argument('--depth', '-d', type=int, default=5, help='Max Tree Depth')
    args = ps.parse_args()

    df = pd.read_csv(args.input, sep=args.separator, header=args.header, index_col=args.indexcol)
    x = df[df.columns[:-1]].values

    ## Choose the split (metric) function
    if args.metric == 'div':
        mt = entropy.deviation_org
    elif args.metric == 'infgain':
        mt = entropy.infgain
    elif args.metric == 'gini':
        mt = entropy.gini
    else:
        mt = None

    ## Choose the leaf model
    if args.leaf == 'zeror':
        lf = ZeroRule
    elif args.leaf == 'linear':
        lf = Linear
    else:
        lf = None

    ## Regression or classification?
    if not args.regression:
        y, clz = support.clz_to_prob(df[df.columns[-1]])
        if mt is None:
            mt = entropy.gini
        if lf is None:
            lf = ZeroRule
        plf = DecisionTree(metric=mt, leaf=lf, max_depth=args.depth)
        support.report_classifer(plf, x, y, clz, args.crossvalidate)

    else:
        y = df[df.columns[-1]].values.reshape((-1, 1))
        if mt is None:
            mt = entropy.deviation_org
        if lf is None:
            lf = Linear
        plf = DecisionTree(metric=mt, leaf=lf, max_depth=args.depth)
        support.report_regressor(plf, x, y, args.crossvalidate)
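The metric functions come from an entropy module that is not listed either. As a rough, assumed sketch of what such metrics typically compute: gini and infgain score a block of class-probability rows (as produced by support.clz_to_prob), and deviation_org scores a regression target by its spread. The actual implementations in the source may differ.

import numpy as np

def gini(y):
    # Gini impurity over the class-probability rows (assumed layout).
    p = y.sum(axis=0) / y.sum()
    return 1.0 - np.sum(p ** 2)

def infgain(y):
    # Shannon entropy, the quantity behind an information-gain split score.
    p = y.sum(axis=0) / y.sum()
    p = p[p > 0]
    return -np.sum(p * np.log2(p))

def deviation_org(y):
    # Standard deviation of the regression target.
    return float(y.std())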
Code example #3
def main():
    import pandas as pd
    ps = support.get_base_args()
    ps.add_argument('--epochs',
                    '-p',
                    type=int,
                    default=20,
                    help='Num of Epochs')
    ps.add_argument('--learningrate',
                    '-l',
                    type=float,
                    default=0.01,
                    help='Learning rate')
    ps.add_argument('--earlystop',
                    '-a',
                    action='store_true',
                    help='Early stopping')
    ps.add_argument('--stoppingvalue',
                    '-v',
                    type=float,
                    default=0.01,
                    help='Early stopping value')
    args = ps.parse_args()

    df = pd.read_csv(args.input,
                     sep=args.separator,
                     header=args.header,
                     index_col=args.indexcol)
    x = df[df.columns[:-1]].values

    if not args.regression:
        print('Not supported')
    else:
        y = df[df.columns[-1]].values.reshape((-1, 1))
        if args.earlystop:
            plf = Linear(epochs=args.epochs,
                         lr=args.learningrate,
                         earlystop=args.stoppingvalue)
        else:
            plf = Linear(epochs=args.epochs, lr=args.learningrate)
        support.report_regressor(plf, x, y, args.crossvalidate)
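Code example #3 drives a Linear model with epochs, lr and an optional earlystop threshold, but the class itself is not listed. Below is a minimal gradient-descent sketch with that interface; it is an assumption, and the original class may differ in initialization, the exact stopping rule, and other details.

import numpy as np

class Linear:
    def __init__(self, epochs=20, lr=0.01, earlystop=None):
        self.epochs = epochs        # number of passes over the data
        self.lr = lr                # gradient-descent step size
        self.earlystop = earlystop  # stop once the MSE improvement falls below this value
        self.beta = None

    def fit(self, x, y):
        x = np.hstack([np.ones((len(x), 1)), x])        # prepend a bias column
        self.beta = np.zeros((x.shape[1], y.shape[1]))
        prev = np.inf
        for _ in range(self.epochs):
            err = x @ self.beta - y
            self.beta -= self.lr * x.T @ err / len(x)   # gradient step on the mean squared error
            mse = float(np.mean(err ** 2))
            if self.earlystop is not None and prev - mse < self.earlystop:
                break
            prev = mse
        return self

    def predict(self, x):
        x = np.hstack([np.ones((len(x), 1)), x])
        return x @ self.beta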
Code example #4
        # Send each subset of the data down the corresponding side of the split:
        # recurse into child subtrees, otherwise fit the leaf model directly.
        if len(l) > 0:
            if isinstance(self.left, PrunedTree):
                self.left.fit_leaf(x[l], y[l])
            else:
                self.left.fit(x[l], y[l])
        if len(r) > 0:
            if isinstance(self.right, PrunedTree):
                self.right.fit_leaf(x[r], y[r])
            else:
                self.right.fit(x[r], y[r])


if __name__ == '__main__':
    import pandas as pd
    np.random.seed(1)
    ps = support.get_base_args()
    ps.add_argument('--depth',
                    '-d',
                    type=int,
                    default=5,
                    help='max tree depth')
    ps.add_argument('--test',
                    '-t',
                    action='store_true',
                    help='test split for pruning')
    ps.add_argument('--pruning',
                    '-p',
                    default='critical',
                    help='pruning algorithm')
    ps.add_argument('--ratio',
                    '-a',
Code example #5

class Yamamura(BaseFunction):
    def __init__(self, P=10, r=1.0, K=100.0):
        self.P = P   # strength of the saturating loss term
        self.r = r   # intrinsic growth rate
        self.K = K   # carrying capacity
        self.numberofvalues = 1

    def get_values(self, x, t):
        # Logistic growth minus a loss term that saturates for large x.
        return self.r * x * (1 - x / self.K) - self.P * x**2 / (1 + x**2)


if __name__ == '__main__':

    ps = get_base_args()
    args = ps.parse_args()
    graph_n = args.equation_number
    ad = args.argdict
    sp = args.spoint
    ep = args.epoint
    hl = args.hline
    vl = args.vline
    llist = args.line
    pp = args.pointplot
    initial_value = args.initial_value
    saveoff = support.offsaving(args.savefigoff)

    graph = {1: Exponential, 2: Logistic, 6: Alley, 4: Yamamura}

    if ad: