Example #1
import time
import argparse

import numpy as np

import chainer
from chainer.dataset import convert
from chainer import serializers
from chainer.datasets import get_cifar10
from chainer.datasets import get_cifar100

from utils.get_model import get_model

from mllogger import MLLogger

logger = MLLogger(init=False)


def main():
    parser = argparse.ArgumentParser(description='Chainer CIFAR example:')
    parser.add_argument('--model', default='c3f2')
    parser.add_argument('--batchsize', '-b', type=int, default=64)
    parser.add_argument('--learnrate', '-l', type=float, default=0.05)
    parser.add_argument('--epoch', '-e', type=int, default=300)
    parser.add_argument('--gpu', '-g', type=int, default=0)
    parser.add_argument('--N', type=int, default=9)
    parser.add_argument('--k', type=int, default=10)
    parser.add_argument('--out', '-o', default='result')
    parser.add_argument('--debug', action='store_true')
    parser.add_argument('--resume', '-r', default='')
    args = parser.parse_args()
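
    # Continuation sketch (not part of the original example): load CIFAR-10 as
    # suggested by the imports above and build plain SerialIterators; how this
    # script actually selects between get_cifar10()/get_cifar100() and calls
    # get_model() is not shown, so those steps are omitted here.
    if args.gpu >= 0:
        chainer.cuda.get_device_from_id(args.gpu).use()

    train, test = get_cifar10()
    train_iter = chainer.iterators.SerialIterator(train, args.batchsize)
    test_iter = chainer.iterators.SerialIterator(test, args.batchsize,
                                                 repeat=False, shuffle=False)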
Example #2
import os
import time

import numpy as np

import chainer
from chainer import Variable, optimizers, serializers, iterators, cuda
from chainer.dataset import convert

from utils.generic import get_args, get_model, write_prediction
from utils.dataset import SceneDatasetCV
from utils.plot import plot_trajectory_eval
from utils.summary_logger import SummaryLogger
from utils.scheduler import AdamScheduler
from utils.evaluation import Evaluator_Direct

from mllogger import MLLogger
logger = MLLogger(init=False)

if __name__ == "__main__":
    """
    Evaluation with Cross-Validation
    """
    args = get_args()

    np.random.seed(args.seed)
    start = time.time()
    logger.initialize(args.root_dir)
    logger.info(vars(args))
    save_dir = logger.get_savedir()
    logger.info("Written to {}".format(save_dir))
    summary = SummaryLogger(args, logger,
                            os.path.join(args.root_dir, "summary.csv"))
Example #3
import time
import json
import argparse
import joblib
from box import Box

import numpy as np
import cv2
import sys
sys.path.append('/home/manhh/Research/trajectory_prediction_3d_grids/fpl/')

from utils.dataset import SceneDatasetForAnalysis
from utils.plot import draw_line, draw_dotted_line, draw_x

from mllogger import MLLogger
logger = MLLogger(init=False)


def get_traj_type(hip_dist, front_ratio):
    # 0: front, 1: back, 2: cross, 3: other
    if hip_dist < 0.25:
        traj_type = 2
    elif front_ratio > 0.75:
        traj_type = 0
    elif front_ratio < 0.25:
        traj_type = 1
    else:
        traj_type = 3

    return traj_type
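

# Usage sketch (illustrative values, not from the original source): hip_dist
# and front_ratio are assumed to be per-trajectory statistics in [0, 1].
assert get_traj_type(0.1, 0.5) == 2   # small hip distance -> cross
assert get_traj_type(0.4, 0.9) == 0   # mostly facing front
assert get_traj_type(0.4, 0.1) == 1   # mostly facing back
assert get_traj_type(0.4, 0.5) == 3   # neither -> other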
Example #4
import numpy as np
import cv2
import matplotlib.pyplot as plt

from sklearn.neighbors import NearestNeighbors
from sklearn.decomposition import PCA

import chainer
from chainer import cuda, Variable
from chainer.dataset import convert
from chainer import serializers
from chainer.datasets import get_cifar10

from mllogger import MLLogger
from models import small
from models import medium
logger = MLLogger()


def get_color_map_nipy(gradation_num):
    colors = []
    for idx in [int(x * 255 / gradation_num) for x in range(gradation_num)]:
        colors.append(plt.cm.nipy_spectral(idx)[0:3])
    return (np.array(colors)[::-1, (2, 1, 0)] * 255).astype(np.int)
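

# Usage sketch (not part of the original example): one BGR colour per class,
# e.g. for the 10 CIFAR-10 classes; entries are 0-255 ints suitable for cv2.
_palette = get_color_map_nipy(10)
assert _palette.shape == (10, 3)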


def plot_nn(data, label, nn_result, nn_result2, k):
    colors = get_color_map_nipy(10)
    size = 32
    canvas = np.zeros((size * 100, size * k * 2, 3), dtype=np.uint8)

    # (lay out the 32x32 images vertically)
Example #5
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2017 Takuma Yagi <*****@*****.**>
#
# Distributed under terms of the MIT license.

import argparse
from mllogger import MLLogger
from arghelper import LoadFromJson
from logging import DEBUG
logger = MLLogger("outputs_test", level=DEBUG,
                  init=False)  # Create outputs/yymmdd_HHMMSS/

parser = argparse.ArgumentParser(conflict_handler='resolve')
parser.add_argument('--lr', type=float, default=0.1)
parser.add_argument('--momentum', type=float, default=0.9)
parser.add_argument('--dataset', type=str, default='MNIST')
parser.add_argument('--decay_step', type=int, nargs='+', default=[100, 200])
parser.add_argument('--option', type=str, default=None)
parser.add_argument('--cond', type=str, action=LoadFromJson)
args = parser.parse_args()

logger.initialize()
logger.info('Logger test')
logger.info(vars(args))
save_dir = logger.get_savedir()
logger.save_args(args)
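
# Invocation sketch (assumptions, not from the original example): LoadFromJson
# is assumed to read the JSON file passed to --cond and merge its keys into
# args; the script and file names below are hypothetical.
#   python test_logger.py --lr 0.01 --decay_step 50 100 --cond cond.json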
Example #6
import torch
from torch import nn, optim
from torch.autograd import Variable
from models.tcn import TCN

import chainer
from chainer import iterators
from chainer.dataset import convert

from utils.generic import get_args, write_prediction
from utils.dataset import SceneDatasetCV
from utils.summary_logger import SummaryLogger
from utils.scheduler import AdamScheduler
from utils.evaluation import Evaluator

from mllogger import MLLogger
logger = MLLogger(init=False)


def rmse(data, target):
    temp = (data - target)**2
    rmse = torch.mean(torch.sqrt(temp[:, 0, :] + temp[:, 1, :]))
    return rmse
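

# Usage sketch (shapes assumed for illustration): trajectories of shape
# (batch, 2, horizon) holding x/y offsets per future time step.
_pred = torch.zeros(8, 2, 10)
_gt = torch.ones(8, 2, 10)
assert abs(rmse(_pred, _gt).item() - 2 ** 0.5) < 1e-6  # every step is off by (1, 1)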


if __name__ == "__main__":
    """
    Training with Cross-Validation
    """
    args = get_args()

    # Prepare logger
Example #7
import os
import time

import joblib

import numpy as np

import chainer
from chainer import Variable, optimizers, serializers, iterators, cuda
from chainer.dataset import convert

from utils.generic import get_args, get_model, write_prediction
from utils.dataset import SceneDatasetCV
from utils.summary_logger import SummaryLogger
from utils.scheduler import AdamScheduler
from utils.evaluation import Evaluator

from mllogger import MLLogger
logger = MLLogger(init=False)

if __name__ == "__main__":
    """
    Training with Cross-Validation
    """
    args = get_args()

    # Prepare logger
    np.random.seed(args.seed)
    start = time.time()
    logger.initialize(args.root_dir)
    logger.info(vars(args))
    save_dir = logger.get_savedir()
    logger.info("Written to {}".format(save_dir))
    summary = SummaryLogger(args, logger,
                            os.path.join(args.root_dir, "summary.csv"))