Example #1
import os
import random
import stat

import numpy
from os import chmod
from os.path import isfile


def get_perfo(filename):
    '''
    Workaround for calling a Perl script from Python.
    Dirty, but it works.
    '''
    # NOTE: assumes a module-level download() helper and PREFIX path
    # (see the sketch after this example).
    tempfile = str(random.randint(1, numpy.iinfo('i').max)) + '.txt'
    if not isfile(PREFIX + 'conlleval.pl'):
        download(
            'http://www-etud.iro.umontreal.ca/~mesnilgr/atis/conlleval.pl')
        chmod('conlleval.pl', stat.S_IRWXU)  # give the execute permissions
    if len(PREFIX) > 0:
        chmod(PREFIX + 'conlleval.pl',
              stat.S_IRWXU)  # give the execute permissions
        cmd = PREFIX + 'conlleval.pl < %s | grep accuracy > %s' % (filename,
                                                                   tempfile)
    else:
        cmd = './conlleval.pl < %s | grep accuracy > %s' % (filename, tempfile)
    print(cmd)
    os.system(cmd)
    out = open(tempfile).readlines()[0].split()
    os.remove(tempfile)
    precision = float(out[6][:-2])
    recall = float(out[8][:-2])
    f1score = float(out[10])
    return {'p': precision, 'r': recall, 'f1': f1score}
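All of the conlleval snippets here assume a module-level download() helper and a PREFIX path that are not shown. A minimal sketch of what they might look like, assuming download() saves the file under PREFIX with its original name:

import urllib.request

PREFIX = ''  # directory that holds conlleval.pl; '' means the current directory


def download(url):
    # Fetch the file over HTTP and store it under PREFIX with its original name.
    filename = url.split('/')[-1]
    urllib.request.urlretrieve(url, PREFIX + filename)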
Example #2
import locale
import stat
import subprocess
from os import chmod
from os.path import isfile


def get_perf(filename):
    ''' Run the conlleval.pl Perl script to obtain
    precision/recall and the F1 score. '''
    # NOTE: assumes the same module-level download() helper and PREFIX path
    # sketched after Example #1.
    _conlleval = PREFIX + 'conlleval.pl'
    if not isfile(_conlleval):
        download(
            'http://www-etud.iro.umontreal.ca/~mesnilgr/atis/conlleval.pl')
        chmod(_conlleval, stat.S_IRWXU)  # give execute permission

    proc = subprocess.Popen(["perl", _conlleval],
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE)
    stdout, _ = proc.communicate(open(filename).read().encode('utf-8'))

    encoding = locale.getdefaultlocale()[1] or 'utf-8'  # fall back if the locale is unset

    out = []
    for line in stdout.decode(encoding).split('\n'):
        if 'accuracy' in line:
            out = line.split()
            break
    precision = 0
    recall = 0
    f1score = 0

    if len(out) > 0:
        precision = float(out[6][:-2])
        recall = float(out[8][:-2])
        f1score = float(out[10])

    return {'p': precision, 'r': recall, 'f1': f1score}
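For reference, conlleval.pl expects one token gold-tag predicted-tag triple per line, with a blank line between sentences. A hypothetical usage sketch (the toy words/gold/pred data is illustrative, not part of the original):

# Toy IOB-tagged data: one sentence (illustrative only).
words = [['remind', 'me', 'to', 'call', 'mom']]
gold = [['O', 'O', 'O', 'O', 'B-contact']]
pred = [['O', 'O', 'O', 'O', 'B-contact']]

# Write predictions in the space-separated format conlleval.pl expects.
with open('current.valid.txt', 'w') as f:
    for ws, gs, ps in zip(words, gold, pred):
        for w, g, p in zip(ws, gs, ps):
            f.write('%s %s %s\n' % (w, g, p))
        f.write('\n')  # blank line marks a sentence boundary

print(get_perf('current.valid.txt'))  # e.g. {'p': ..., 'r': ..., 'f1': ...}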
Example #3
def get_perf(filename):
    ''' Run the conlleval.pl Perl script to obtain
    precision/recall and the F1 score. '''
    # Same imports and module-level assumptions as Example #2.
    _conlleval = PREFIX + 'conlleval.pl'
    if not isfile(_conlleval):
        download('http://www-etud.iro.umontreal.ca/~mesnilgr/atis/conlleval.pl')
        chmod(_conlleval, stat.S_IRWXU)  # give execute permission

    proc = subprocess.Popen(["perl", _conlleval], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    stdout, _ = proc.communicate(open(filename).read().encode('utf-8'))

    encoding = locale.getdefaultlocale()[1] or 'utf-8'  # fall back if the locale is unset

    out = []
    for line in stdout.decode(encoding).split('\n'):
        if 'accuracy' in line:
            out = line.split()
            break
    precision = 0
    recall = 0
    f1score = 0

    if len(out) > 0:
        precision = float(out[6][:-2])
        recall    = float(out[8][:-2])
        f1score   = float(out[10])

    return {'p':precision, 'r':recall, 'f1':f1score}
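Note the design trade-off: Examples #2 and #3 pipe the file contents straight to the Perl interpreter through subprocess, which avoids the temporary file, the shell pipeline, and the grep dependency of Examples #1 and #4, and also behaves safely when filename contains shell metacharacters.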
Example #4
def get_perfo(filename):
    '''
    Workaround for calling a Perl script from Python.
    Dirty, but it works.
    '''
    # Same imports and module-level assumptions as Example #1.
    tempfile = str(random.randint(1, numpy.iinfo('i').max)) + '.txt'
    if not isfile(PREFIX + 'conlleval.pl'):
        download('http://www-etud.iro.umontreal.ca/~mesnilgr/atis/conlleval.pl') 
        chmod('conlleval.pl', stat.S_IRWXU) # give the execute permissions
    if len(PREFIX) > 0:
        chmod(PREFIX + 'conlleval.pl', stat.S_IRWXU) # give the execute permissions
        cmd = PREFIX + 'conlleval.pl < %s | grep accuracy > %s'%(filename,tempfile)
    else:
        cmd = './conlleval.pl < %s | grep accuracy > %s'%(filename,tempfile)
    print(cmd)
    os.system(cmd)
    out = open(tempfile).readlines()[0].split()
    os.remove(tempfile)
    precision = float(out[6][:-2])
    recall    = float(out[8][:-2])
    f1score   = float(out[10])
    return {'p':precision, 'r':recall, 'f1':f1score}
Example #5
    # Fragment of a validation loop; outputs, t, validation_loss and epoch
    # come from the surrounding (not shown) code.
    # Compute the accuracy
    _, predicted = torch.max(outputs.data, 1)
    validation_acc = predicted.eq(t).sum().numpy() / len(predicted)
    print('epoch: ', epoch, 'val loss: ', float(validation_loss), 'val acc: ', float(validation_acc))
    with open('validation_loss.csv', 'a') as f:
        f.write(str(epoch) + ',' + str(float(validation_loss)) + ',' + str(float(validation_acc)) + '\n')

## Load the data
if os.path.isfile('states.npy') and os.path.isfile('actions.npy'):
    states = np.load('states.npy')
    actions = np.load('actions.npy')
else:
    downloaded_dir_path = './data/'
    isExists = os.path.exists(downloaded_dir_path)
    if not isExists:  # download the game-record data if it is not there yet
        load.download()
    total_matches = load.match_counter()
    states, actions = load.load_and_save(total_matches)  # load, preprocess, and save the data

## Normalize inputs to [0, 1]
states = states / 2
"""
states[i]=
[[0.  1.  1.  1.  1.  1.  1.  0. ]
 [0.  1.  1.  1.  1.  1.  0.  0. ]
 [0.5 0.  1.  1.  1.  1.  1.  1. ]
 [0.  0.5 0.5 1.  0.5 0.5 0.5 1. ]
 [0.  1.  0.5 1.  1.  0.5 1.  1. ]
 [1.  1.  1.  0.5 0.5 1.  1.  1. ]
 [0.  0.  0.5 0.5 0.5 0.5 0.  0. ]
 [0.  0.5 0.5 0.5 0.5 0.5 0.  0. ]]
"""
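Examples #5 and #6 depend on a project-specific load module that is not shown. A rough sketch of the interface they appear to assume (docstring stubs, not the original implementation):

# load.py -- assumed interface, not the original implementation
def download():
    """Fetch the raw game-record files into ./data/."""

def match_counter():
    """Return the number of downloaded matches."""

def load_and_save(total_matches=None):
    """Parse the records into (states, actions) NumPy arrays, save them
    as states.npy / actions.npy, and return both."""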
Example #6
def main():
    # Load and preprocess the data
    if os.path.isfile('states.npy') and os.path.isfile('actions.npy'):
        states = np.load('states.npy')
        actions = np.load('actions.npy')
    else:
        load.download()  # download the data files
        states, actions = load.load_and_save()  # load, preprocess, and save the data

    test_x = states[:TEST_DATA_SIZE].copy()  # already shuffled
    train_x = states[TEST_DATA_SIZE:].copy()
    del states  # free the memory early
    test_y = actions[:TEST_DATA_SIZE].copy()
    train_y = actions[TEST_DATA_SIZE:].copy()
    del actions

    model = L.Classifier(network.AgentNet(), lossfun=softmax_cross_entropy)
    model_name = 'model_epo{:04}.npz'.format(START_EPOCH)
    if os.path.isfile(model_name):  # resume from a saved model
        serializers.load_npz(model_name, model)

    optimizer = optimizers.Adam()
    optimizer.setup(model)

    # Initialize the log file
    log_start_line = 'epoch,loss\n'
    with open(LOSS_LOG_BASE + '{}to{}.csv'.format(START_EPOCH + 1, EPO_MAX),
              'w') as f_all:
        f_all.write(log_start_line)

    # List that accumulates the per-epoch loss
    logs = []

    # Training loop
    for epoch in range(EPO_MAX):
        for i in range(100):
            percent = i / 100.0 * 100.0  # progress over the 100 minibatches

            sys.stderr.write('\r' + get_str_bar(percent))
            sys.stderr.flush()
            index = np.random.choice(train_x.shape[0],
                                     MINIBATCH_SIZE,
                                     replace=False)
            x = chainer.Variable(train_x[index].reshape(
                MINIBATCH_SIZE, 1, 8, 8).astype(np.float32))
            t = chainer.Variable(train_y[index].astype(np.int32))
            optimizer.update(model, x, t)
        # Draw the final segment of the progress bar
        percent = (i + 1) / 100.0 * 100.0  # 100% once the last minibatch finishes
        sys.stderr.write('\r' + get_str_bar(percent) + '\n\n')
        sys.stderr.flush()

        # Evaluation
        index = np.random.choice(test_x.shape[0],
                                 EVALUATION_SIZE,
                                 replace=False)
        x = chainer.Variable(test_x[index].reshape(EVALUATION_SIZE, 1, 8,
                                                   8).astype(np.float32))
        t = chainer.Variable(test_y[index].astype(np.int32))
        tmp_loss = model(x, t).data

        print('> Epoch: {} / {},  Loss: {}\n\n'.format(epoch, EPO_MAX,
                                                       tmp_loss))

        # Append this epoch's loss to the cumulative log
        with open(
                LOSS_LOG_BASE + '{}to{}.csv'.format(START_EPOCH + 1, EPO_MAX),
                'a') as f_all:
            f_all.write('{},{}\n'.format(epoch, tmp_loss))

        # Keep the loss for the periodic log snapshot
        logs.append(tmp_loss)

        # Every SAVE_TMG epochs, save the model and a snapshot of the loss log
        if (epoch + 1) % SAVE_TMG == 0:
            model_f_name = 'model_epo{:04}.npz'.format(epoch)
            serializers.save_npz(model_f_name, model)
            print('\n  -> Saved model file: [{}]\n'.format(model_f_name))

            loss_tmp_file = LOSS_LOG_BASE + '{}to{}.csv'.format(
                START_EPOCH, epoch)
            with open(loss_tmp_file, 'w') as f_tmp:
                f_tmp.write('epoch,loss\n')
                for log_i in range(len(logs)):
                    f_tmp.write('{},{}\n'.format(log_i, logs[log_i]))
            print('  -> Wrote loss log file: [{}]\n'.format(loss_tmp_file))

        # Save a rolling backup of the model
        serializers.save_npz('model_backup.npz', model)

    print('\n>>> Finished.\n')
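The get_str_bar() helper used for the progress bar above is not shown. A minimal sketch of a compatible implementation, assuming percent is a value in [0, 100]:

def get_str_bar(percent, width=40):
    # Render a textual progress bar such as '[####----------] 10.0%'.
    filled = int(width * percent / 100.0)
    return '[' + '#' * filled + '-' * (width - filled) + '] {:.1f}%'.format(percent)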
Example #7
def get_proxies():
    # Assumes a download() helper that fetches proxies.txt from some source.
    download("proxies.txt")
    with open("proxies.txt", "r") as file:
        proxies = [line.strip() for line in file]
    return proxies
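A hypothetical usage sketch, assuming each line of proxies.txt is a host:port entry and using the third-party requests library (both assumptions, not part of the original):

import random
import requests  # third-party HTTP client (assumption)

proxies = get_proxies()
proxy = random.choice(proxies)  # e.g. '203.0.113.7:8080'
response = requests.get('https://example.com',
                        proxies={'http': 'http://' + proxy,
                                 'https': 'http://' + proxy},
                        timeout=10)
print(response.status_code)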