Example #1
def train(cfg, tub_names, new_model_path, base_model_path=None):
    """
    use the specified data in tub_names to train an artificial neural network
    saves the trained model to new_model_path
    """
    X_keys = ['cam/image_array']
    y_keys = ['user/angle', 'user/throttle']

    new_model_path = os.path.expanduser(new_model_path)

    kl = KerasLinear()
    if base_model_path is not None:
        base_model_path = os.path.expanduser(base_model_path)
        kl.load(base_model_path)

    print('tub_names', tub_names)
    if not tub_names:
        tub_names = os.path.join(cfg.DATA_PATH, '*')
    tubgroup = TubGroup(tub_names)
    train_gen, val_gen = tubgroup.get_train_val_gen(X_keys, y_keys,
                                                    batch_size=cfg.BATCH_SIZE,
                                                    train_frac=cfg.TRAIN_TEST_SPLIT)

    total_records = len(tubgroup.df)
    total_train = int(total_records * cfg.TRAIN_TEST_SPLIT)
    total_val = total_records - total_train
    print('train: %d, validation: %d' % (total_train, total_val))
    steps_per_epoch = total_train // cfg.BATCH_SIZE
    print('steps_per_epoch', steps_per_epoch)

    kl.train(train_gen,
             val_gen,
             saved_model_path=new_model_path,
             steps=steps_per_epoch,
             train_split=cfg.TRAIN_TEST_SPLIT)
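
To make the split and step bookkeeping above concrete, here is the same arithmetic with hypothetical config values:

# Same split/steps arithmetic as in train() above, with made-up numbers.
BATCH_SIZE = 128          # stands in for cfg.BATCH_SIZE
TRAIN_TEST_SPLIT = 0.8    # stands in for cfg.TRAIN_TEST_SPLIT
total_records = 10000     # stands in for len(tubgroup.df)

total_train = int(total_records * TRAIN_TEST_SPLIT)   # 8000
total_val = total_records - total_train               # 2000
steps_per_epoch = total_train // BATCH_SIZE           # 62
print(total_train, total_val, steps_per_epoch)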
Example #2
def train(cfg, tub_names, new_model_path, base_model_path=None):
    """
    use the specified data in tub_names to train an artificial neural network
    saves the trained model to new_model_path
    """
    X_keys = ['cam/image_array']
    y_keys = ['user/angle', 'user/throttle']

    new_model_path = os.path.expanduser(new_model_path)

    kl = KerasLinear()
    if base_model_path is not None:
        base_model_path = os.path.expanduser(base_model_path)
        kl.load(base_model_path)

    print('tub_names', tub_names)
    if not tub_names:
        tub_names = os.path.join(cfg.DATA_PATH, '*')

    tubgroup = TubGroup(tub_names)
    train_gen, val_gen = tubgroup.get_train_val_gen(
        X_keys,
        y_keys,
        batch_size=cfg.BATCH_SIZE,
        train_frac=cfg.TRAIN_TEST_SPLIT)
    first_batch = next(train_gen)  # the generators are not subscriptable, so pull one batch
    print(first_batch)
    print(type(train_gen))
    print(len(first_batch))
Example #3
File: manage.py, Project: mkaiserpm/robopi
def train(cfg, tub_names, model_name):
    '''
    use the specified data in tub_names to train an artificial neural network
    saves the trained model as model_name
    '''
    X_keys = ['cam/image_array']
    y_keys = ['user/angle', 'user/throttle']

    def rt(record):
        record['user/angle'] = dk.utils.linear_bin(record['user/angle'])
        return record

    kl = KerasCategorical()
    print('tub_names', tub_names)
    if not tub_names:
        tub_names = os.path.join(cfg.DATA_PATH, '*')
    tubgroup = TubGroup(tub_names)
    train_gen, val_gen = tubgroup.get_train_val_gen(X_keys, y_keys, record_transform=rt,
                                                    batch_size=cfg.BATCH_SIZE,
                                                    train_frac=cfg.TRAIN_TEST_SPLIT)

    model_path = os.path.expanduser(model_name)

    total_records = len(tubgroup.df)
    total_train = int(total_records * cfg.TRAIN_TEST_SPLIT)
    total_val = total_records - total_train
    print('train: %d, validation: %d' % (total_train, total_val))
    steps_per_epoch = total_train // cfg.BATCH_SIZE
    print('steps_per_epoch', steps_per_epoch)

    kl.train(train_gen,
             val_gen,
             saved_model_path=model_path,
             steps=steps_per_epoch,
             train_split=cfg.TRAIN_TEST_SPLIT)
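
The rt transform above relies on dk.utils.linear_bin, which one-hot encodes the steering angle for the categorical model. A rough stand-in for illustration only (the real implementation lives in donkeycar; the stock version uses 15 bins):

import numpy as np

def linear_bin_sketch(angle, n_bins=15):
    # Rough stand-in for dk.utils.linear_bin: map a steering angle in
    # [-1, 1] to a one-hot vector over n_bins buckets.
    idx = int(round((angle + 1.0) / (2.0 / (n_bins - 1))))
    one_hot = np.zeros(n_bins)
    one_hot[min(max(idx, 0), n_bins - 1)] = 1.0
    return one_hot

print(linear_bin_sketch(0.0))   # centre bin (index 7) is set to 1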
Example #4
def my_train(cfg, tub_names, model_name):
    '''
    use the specified data in tub_names to train an artificial neural network
    saves the trained model as model_name
    '''
    X_keys = ['cam/image_array']
    y_keys = ['user/angle', 'user/throttle']

    def rt(record):
        record['user/angle'] = dk.utils.linear_bin(record['user/angle'])
        return record

    kl = KerasCategorical()  # categorical pilot, matching the linear_bin transform above

    print('tub_names', tub_names)
    if not tub_names:
        tub_names = os.path.join(cfg.DATA_PATH, '*')
    tubgroup = TubGroup(tub_names)
    train_gen, val_gen = tubgroup.get_train_val_gen(X_keys, y_keys, record_transform=rt,
                                                    batch_size=cfg.BATCH_SIZE,
                                                    train_frac=cfg.TRAIN_TEST_SPLIT)

    model_path = os.path.expanduser(model_name)

    total_records = len(tubgroup.df)
    total_train = int(total_records * cfg.TRAIN_TEST_SPLIT)
    total_val = total_records - total_train
    print('train: %d, validation: %d' % (total_train, total_val))
    steps_per_epoch = total_train // cfg.BATCH_SIZE
    print('steps_per_epoch', steps_per_epoch)

    kl.train(train_gen,
             val_gen,
             saved_model_path=model_path,
             steps=steps_per_epoch,
             train_split=cfg.TRAIN_TEST_SPLIT)
Example #5
def train(cfg, tub_names, new_model_path, base_model_path=None):
    """
    引数 tub_names 似て指定されたパスに格納されている tub データを学習データとして
    トレーニングを行い、引数 new_model_path にて指定されたパスへ学習済みモデルファイルを格納する。
    引数:
        cfg                個別車両設定オブジェクト、`config.py`がロードされたオブジェクト。
        tub_names          学習データとして使用するtubディレクトリのパスを指定する。
        new_model_path     トレーニング後モデルファイルとして保管するパスを指定する。
        base_model_path    ファインチューニングを行う場合、ベースとなるモデルファイルを指定する。
    戻り値
        なし
    """

    # モデルの入力データとなる項目
    X_keys = ['cam/image_array']
    # モデルの出力データとなる項目
    y_keys = ['user/angle', 'user/throttle']

    # トレーニング後モデルファイルとして保管するパスをフルパス化
    new_model_path = os.path.expanduser(new_model_path)

    # トレーニング後モデルファイルとして保管するパスをフルパス化
    kl = KerasLinear()
    # ファインチューニングを行う場合は base_model_path にベースモデルファイルパスが指定されている
    if base_model_path is not None:
        # ベースモデルファイルパスをフルパス化
        base_model_path = os.path.expanduser(base_model_path)
        # ベースモデルファイルを読み込む
        kl.load(base_model_path)

    print('tub_names', tub_names)
    # 引数tub_names 指定がない場合
    if not tub_names:
        # config.py 上に指定されたデータファイルパスを使用
        tub_names = os.path.join(cfg.DATA_PATH, '*')
    # Tub データ群をあらわすオブジェクトを生成
    tubgroup = TubGroup(tub_names)
    # トレーニングデータGenerator、評価データGeneratorを生成
    train_gen, val_gen = tubgroup.get_train_val_gen(X_keys, y_keys,
                                                    batch_size=cfg.BATCH_SIZE,
                                                    train_frac=cfg.TRAIN_TEST_SPLIT)

    # 全学習データ件数を取得
    total_records = len(tubgroup.df)
    # トレーニングデータ件数の取得
    total_train = int(total_records * cfg.TRAIN_TEST_SPLIT)
    # 評価データ件数の取得
    total_val = total_records - total_train
    print('train: %d, validation: %d' % (total_train, total_val))
    # 1epochごとのステップ数の取得
    steps_per_epoch = total_train // cfg.BATCH_SIZE
    print('steps_per_epoch', steps_per_epoch)

    # トレーニングの開始
    kl.train(train_gen,
             val_gen,
             saved_model_path=new_model_path,
             steps=steps_per_epoch,
             train_split=cfg.TRAIN_TEST_SPLIT)
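
A minimal invocation sketch for the function above; the paths are hypothetical and dk.load_config is assumed to be available as in the standard donkeycar car templates:

import donkeycar as dk

cfg = dk.load_config(config_path='/home/pi/mycar/config.py')
train(cfg,
      tub_names='/home/pi/mycar/data/tub_1,/home/pi/mycar/data/tub_2',
      new_model_path='/home/pi/mycar/models/pilot',
      base_model_path=None)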
Example #6
def train(cfg,
          tub_names,
          new_model_path,
          base_model_path=None,
          model_class=''):
    """
    use the specified data in tub_names to train an artificial neural network
    saves the trained model to new_model_path
    """
    X_keys = ['cam/image_array']
    y_keys = ['user/angle', 'user/throttle']

    new_model_path = os.path.expanduser(new_model_path)

    import tensorflow as tf
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True  # dynamically grow the memory used on the GPU
    # config.log_device_placement=True
    sess = tf.Session(config=config)
    tf.keras.backend.set_session(sess)

    model_module, model_class_name = model_class.rsplit('.', 1)
    print("loading {} from {}...".format(model_class_name, model_module))

    module = importlib.import_module(model_module)
    model_cls = getattr(module, model_class_name)

    kl = model_cls()
    if base_model_path is not None:
        base_model_path = os.path.expanduser(base_model_path)
        kl.load(base_model_path)

    print('tub_names', tub_names)
    if not tub_names:
        tub_names = os.path.join(cfg.DATA_PATH, '*')
    tubgroup = TubGroup(tub_names)

    top_view_transform = TopViewTransform(cfg.CAMERA_RESOLUTION)
    train_gen, val_gen = tubgroup.get_train_val_gen(
        X_keys,
        y_keys,
        batch_size=cfg.BATCH_SIZE,
        train_frac=cfg.TRAIN_TEST_SPLIT,
        train_record_transform=top_view_transform,
        val_record_transform=top_view_transform)

    total_records = len(tubgroup.df)
    total_train = int(total_records * cfg.TRAIN_TEST_SPLIT)
    total_val = total_records - total_train
    print('train: %d, validation: %d' % (total_train, total_val))
    steps_per_epoch = total_train // cfg.BATCH_SIZE
    print('steps_per_epoch', steps_per_epoch)

    kl.train(train_gen,
             val_gen,
             saved_model_path=new_model_path,
             steps=steps_per_epoch,
             train_split=cfg.TRAIN_TEST_SPLIT)
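
This variant selects the pilot class at runtime from a dotted path (and uses the TensorFlow 1.x ConfigProto/Session API to enable GPU memory growth). A call might look like the following; the paths are made up and KerasLinear is simply the stock donkeycar pilot used as an example:

# Hypothetical call showing the dotted model_class argument; cfg is assumed to
# be loaded as in the earlier examples.
train(cfg,
      tub_names='data/tub_1,data/tub_2',
      new_model_path='models/topview_pilot',
      base_model_path=None,
      model_class='donkeycar.parts.keras.KerasLinear')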
Example #7
    def plot_predictions(self, cfg, tub_paths, model_path):
        """
        Plot model predictions for angle and throttle against data from tubs.

        """
        from donkeycar.parts.datastore import TubGroup
        from donkeycar.parts.keras import KerasLinear
        import pandas as pd
        from matplotlib import pyplot as plt

        tg = TubGroup(tub_paths)

        model_path = os.path.expanduser(model_path)
        model = KerasLinear()
        model.load(model_path)

        gen = tg.get_batch_gen(None, batch_size=len(tg.df),
                               record_transform=None, shuffle=True, df=tg.df)
        arr = next(gen)

        user_angles = []
        user_throttles = []
        pilot_angles = []
        pilot_throttles = []

        for tub in tg.tubs:
            num_records = tub.get_num_records()
            for iRec in tub.get_index(shuffled=False):
                record = tub.get_record(iRec)

                img = record["cam/image_array"]
                user_angle = float(record["user/angle"])
                user_throttle = float(record["user/throttle"])
                pilot_angle, pilot_throttle = model.run(img)

                user_angles.append(user_angle)
                user_throttles.append(user_throttle)
                pilot_angles.append(pilot_angle)
                pilot_throttles.append(pilot_throttle)

        angles_df = pd.DataFrame({'user_angle': user_angles, 'pilot_angle': pilot_angles})
        throttles_df = pd.DataFrame({'user_throttle': user_throttles, 'pilot_throttle': pilot_throttles})

        fig = plt.figure()

        title = "Model Predictions\nTubs: {}\nModel: {}".format(tub_paths, model_path)
        fig.suptitle(title)

        ax1 = fig.add_subplot(211)
        ax2 = fig.add_subplot(212)

        angles_df.plot(ax=ax1)
        throttles_df.plot(ax=ax2)

        ax1.legend(loc=4)
        ax2.legend(loc=4)

        plt.show()
Example #8
File: train.py, Project: ida-zrt/mycar
def train(cfg, tub_names, new_model_path, base_model_path=None):
    """
    use the specified data in tub_names to train an artificial neural network
    saves the trained model to new_model_path
    """
    X_keys = ['cam/image_array']
    y_keys = ['user/angle', 'user/throttle']

    def train_record_transform(record):
        """ convert categorical steering to linear and apply image augmentations """
        record['user/angle'] = dk.util.data.linear_bin(record['user/angle'])
        # TODO add augmentation that doesn't use opencv
        return record

    def val_record_transform(record):
        """ convert categorical steering to linear """
        record['user/angle'] = dk.util.data.linear_bin(record['user/angle'])
        return record

    new_model_path = os.path.expanduser(new_model_path)

    kl = KerasCategorical()
    if base_model_path is not None:
        base_model_path = os.path.expanduser(base_model_path)
        kl.load(base_model_path)

    print('tub_names', tub_names)
    if not tub_names:
        tub_names = os.path.join(cfg.DATA_PATH, '*')
    tubgroup = TubGroup(tub_names)
    train_gen, val_gen = tubgroup.get_train_val_gen(
        X_keys,
        y_keys,
        train_record_transform=train_record_transform,
        val_record_transform=val_record_transform,
        batch_size=cfg.BATCH_SIZE,
        train_frac=cfg.TRAIN_TEST_SPLIT)

    total_records = len(tubgroup.df)
    total_train = int(total_records * cfg.TRAIN_TEST_SPLIT)
    total_val = total_records - total_train
    print('train: %d, validation: %d' % (total_train, total_val))
    steps_per_epoch = total_train // cfg.BATCH_SIZE
    print('steps_per_epoch', steps_per_epoch)

    kl.train(train_gen,
             val_gen,
             saved_model_path=new_model_path,
             steps=steps_per_epoch,
             train_split=cfg.TRAIN_TEST_SPLIT)

    kl.model.save(new_model_path + '.h5', save_format='h5')
Example #9
File: base.py, Project: cfox570/dk
    def show_histogram(self, tub_paths, record_name, out):
        '''
        Produce a histogram of record type frequency in the given tub
        '''
        from matplotlib import pyplot as plt
        from donkeycar.parts.datastore import TubGroup

        output = out or os.path.basename(tub_paths)
        tg = TubGroup(tub_paths=tub_paths)

        if record_name is not None:
            tg.df[record_name].hist(bins=50)
        else:
            tg.df.hist(bins=50)

        try:
            if out is not None:
                filename = output
            else:
                if record_name is not None:
                    filename = output + '_hist_%s.png' % record_name.replace(
                        '/', '_')
                else:
                    filename = output + '_hist.png'
            plt.savefig(filename)
            print('saving image to:', filename)
        except Exception as e:
            print(e)
        plt.show()
Example #10
def train(cfg, tub_names, model_name):
    '''
    use the specified data in tub_names to train an artificial neural network
    saves the trained model as model_name
    '''
    X_keys = ['cam/image_array']
    y_keys = ['user/angle', 'user/throttle']

    #use these offsets from the current frame as points to learn the future
    #steering values.
    frames = [0, 20, 40, 120]

    new_y_keys = []
    for iFrame in frames:
        for key in y_keys:
            new_y_keys.append(key + "_" + str(iFrame))

    y_keys = new_y_keys
    
    kl = KerasLinear(num_outputs=len(y_keys))

    tubgroup = TubGroup(tub_names)
    train_gen, val_gen = tubgroup.get_train_val_gen(X_keys, y_keys,
                                                    batch_size=cfg.BATCH_SIZE,
                                                    train_frac=cfg.TRAIN_TEST_SPLIT)
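
Example #10 (and the fuller Example #15 below) trains a single linear model to predict steering and throttle at several future frame offsets. A quick check, not from the original project, of how the offset loop expands the label keys:

frames = [0, 20, 40, 120]
y_keys = ['user/angle', 'user/throttle']
expanded = [key + '_' + str(frame) for frame in frames for key in y_keys]
print(expanded)
# ['user/angle_0', 'user/throttle_0', 'user/angle_20', 'user/throttle_20',
#  'user/angle_40', 'user/throttle_40', 'user/angle_120', 'user/throttle_120']
print(len(expanded))  # 8, which becomes KerasLinear(num_outputs=8)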
Example #11
def train(cfg, tub_names, model_name, resnet50, tensorboardlog_path):
    '''
    use the specified data in tub_names to train an artificial neural network
    saves the trained model as model_name
    '''
    X_keys = ['cam/image_array']
    y_keys = ['user/angle', 'user/throttle']

    def rt(record):
        record['user/angle'] = dk.utils.linear_bin(record['user/angle'])
        return record

    if resnet50:
        print('Using resnet50')
        kl = KerasCategorical(cfg=cfg, modelType=ModelType.RESNET50)
    else:
        print('Using categorical')
        kl = KerasCategorical(cfg=cfg)

    if tensorboardlog_path:
        kl.activate_tensorboard_log(tensorboardlog_path)

    print('tub_names', tub_names)
    if not tub_names:
        tub_names = os.path.join(cfg.DATA_PATH, '*')
    tubgroup = TubGroup(tub_names)
    train_gen, val_gen = tubgroup.get_train_val_gen(
        X_keys,
        y_keys,
        record_transform=rt,
        batch_size=cfg.BATCH_SIZE,
        train_frac=cfg.TRAIN_TEST_SPLIT)

    model_path = os.path.expanduser(model_name)

    total_records = len(tubgroup.df)
    total_train = int(total_records * cfg.TRAIN_TEST_SPLIT)
    total_val = total_records - total_train
    print('train: %d, validation: %d' % (total_train, total_val))
    steps_per_epoch = total_train // cfg.BATCH_SIZE
    print('steps_per_epoch', steps_per_epoch)

    kl.train(train_gen,
             val_gen,
             saved_model_path=model_path,
             steps=steps_per_epoch,
             train_split=cfg.TRAIN_TEST_SPLIT)
Example #12
File: base.py, Project: cyrilix/donkey
    def plot_predictions(cfg, tub_paths, model_path):
        '''
        Plot model predictions for angle and throttle against data from tubs.

        '''
        from donkeycar.parts.datastore import TubGroup
        from donkeycar.parts.keras import KerasCategorical

        tg = TubGroup(tub_paths)

        model_path = os.path.expanduser(model_path)
        model = KerasCategorical()
        model.load(model_path)

        gen = tg.get_batch_gen(batch_size=len(tg.df), shuffle=False)
        arr = next(gen)
        """
Example #13
    def test_angle_distribution_large_speed(self):
        filter_method = drive_record_filter_include_all
        tubgroup = TubGroup("../data/log_w_6,../data/log_w_7")
        dataset = DriveDataSet.from_tubgroup(tubgroup.df,
                                             filter_method=filter_method,
                                             fake_image=True)

        plt = Plot.angle_distribution(dataset.angles())
        plt.savefig("angle/angle_distribution_original_faster_max_speed.jpg")
Example #14
def train(cfg, tub_names, model_name):

    X_keys = ['cam/image_array']
    y_keys = ['user/angle', 'user/throttle']

    def rt(record):
        record['user/angle'] = donkeycar.utils.utils.linear_bin(
            record['user/angle'])
        return record

    def combined_gen(gens):
        import itertools
        combined_gen = itertools.chain()
        for gen in gens:
            combined_gen = itertools.chain(combined_gen, gen)
        return combined_gen

    kl = KerasCategorical()
    logger.info('tub_names %s', tub_names)
    if not tub_names:
        tub_names = os.path.join(cfg.DATA_PATH, '*')
    tubgroup = TubGroup(tub_names)
    train_gen, val_gen = tubgroup.get_train_val_gen(
        X_keys,
        y_keys,
        record_transform=rt,
        batch_size=cfg.BATCH_SIZE,
        train_frac=cfg.TRAIN_TEST_SPLIT)

    model_path = os.path.expanduser(model_name)

    total_records = len(tubgroup.df)
    total_train = int(total_records * cfg.TRAIN_TEST_SPLIT)
    total_val = total_records - total_train
    logger.info('train: %d, validation: %d' % (total_train, total_val))
    steps_per_epoch = total_train // cfg.BATCH_SIZE
    logger.info('steps_per_epoch %s', steps_per_epoch)

    kl.train(train_gen,
             val_gen,
             saved_model_path=model_path,
             steps=steps_per_epoch,
             train_split=cfg.TRAIN_TEST_SPLIT)
Example #15
def train(cfg, tub_names, model_name):
    '''
    use the specified data in tub_names to train an artificial neural network
    saves the trained model as model_name
    '''
    X_keys = ['cam/image_array']
    y_keys = ['user/angle', 'user/throttle']

    #use these offsets from the current frame as points to learn the future
    #steering values.
    frames = [0, 20, 40, 120]

    new_y_keys = []
    for iFrame in frames:
        for key in y_keys:
            new_y_keys.append(key + "_" + str(iFrame))

    y_keys = new_y_keys

    kl = KerasLinear(num_outputs=len(y_keys))

    tubgroup = TubGroup(tub_names)
    train_gen, val_gen = tubgroup.get_train_val_gen(
        X_keys,
        y_keys,
        batch_size=cfg.BATCH_SIZE,
        train_frac=cfg.TRAIN_TEST_SPLIT)

    model_path = os.path.expanduser(model_name)

    total_records = len(tubgroup.df)
    total_train = int(total_records * cfg.TRAIN_TEST_SPLIT)
    total_val = total_records - total_train
    print('train: %d, validation: %d' % (total_train, total_val))
    steps_per_epoch = total_train // cfg.BATCH_SIZE
    print('steps_per_epoch', steps_per_epoch)

    kl.train(train_gen,
             val_gen,
             saved_model_path=model_path,
             steps=steps_per_epoch,
             train_split=cfg.TRAIN_TEST_SPLIT)
Example #16
def train(cfg, tub_names, model_name):
    '''
    use the specified data in tub_names to train an artificial neural network
    saves the trained model as model_name
    '''
    X_keys = ['cam/image_array']
    y_keys = ['user/angle', 'user/throttle']

    def rt(record):
        record['user/angle'] = dk.utils.linear_bin(record['user/angle'])
        return record
    
    
    tubgroup = TubGroup(tub_names)
    model_path = os.path.expanduser(model_name)
    total_records = len(tubgroup.df)
    total_train = int(total_records * cfg.TRAIN_TEST_SPLIT)
    total_val = total_records - total_train
    print('train: %d, validation: %d' % (total_train, total_val))
    steps_per_epoch = total_train // cfg.BATCH_SIZE
    print('steps_per_epoch', steps_per_epoch)
    
    if (hasattr(cfg, 'ENGINE') and cfg.ENGINE == "mxnet"):
        import donkeycar.parts.mxnetpart as mxp
        df = tubgroup.df
        train_df = df.sample(frac=cfg.TRAIN_TEST_SPLIT, random_state=200)
        val_df = df.drop(train_df.index)
        m1 = mxp.MxnetLinear()
        train_iter, val_iter = m1.get_train_val_iter(train_df, val_df, cfg.BATCH_SIZE)
        m1.train(train_iter, val_iter, saved_model_path=model_path, steps=steps_per_epoch, train_split=cfg.TRAIN_TEST_SPLIT)
    else:
        kl = KerasCategorical()
        train_gen, val_gen = tubgroup.get_train_val_gen(
            X_keys, y_keys,
            record_transform=rt,
            batch_size=cfg.BATCH_SIZE,
            train_frac=cfg.TRAIN_TEST_SPLIT)
        kl.train(train_gen,
                 val_gen,
                 saved_model_path=model_path,
                 steps=steps_per_epoch,
                 train_split=cfg.TRAIN_TEST_SPLIT)
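
The branch above switches between an MXNet pilot and the Keras pilot based on a cfg.ENGINE attribute. A hypothetical config excerpt for that fork (only ENGINE is specific to it; the other values are ordinary donkeycar settings):

# Hypothetical config.py / myconfig.py excerpt for the ENGINE switch above.
DATA_PATH = '~/mycar/data'
BATCH_SIZE = 128
TRAIN_TEST_SPLIT = 0.8
ENGINE = 'mxnet'   # any other value (or omitting it) falls back to KerasCategorical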
Example #17
    def show_histogram(self, tub_paths, record_name):
        """
        Produce a histogram of record type frequency in the given tub
        """
        from matplotlib import pyplot as plt
        from donkeycar.parts.datastore import TubGroup

        tg = TubGroup(tub_paths)
        if record_name is not None:
            tg.df[record_name].hist(bins=50)
        else:
            tg.df.hist(bins=50)
        plt.show()
Example #18
File: base.py, Project: vivekchand/donkey
    def show_histogram(self, tub_paths, record_name):
        '''
        Produce a histogram of record type frequency in the given tub
        '''
        from matplotlib import pyplot as plt
        from donkeycar.parts.datastore import TubGroup

        tg = TubGroup(tub_paths=tub_paths)
        if record_name is not None:
            tg.df[record_name].hist(bins=50)
        else:
            tg.df.hist(bins=50)

        plt.savefig(
            os.path.basename(tub_paths) + '_hist_%s.png' % record_name)
        plt.show()
Example #19
def estimate_frames():
    args = docopt(__doc__)
    tubs = args["--tub"]
    tg = TubGroup(tubs)
    #print(tg.df)
    model_path = "C:\\Users\\moritz\\Documents\\donkeycar\\d2\\mymodel_tmp2"
    model = keras.models.load_model(model_path)

    for index, frame in tg.df.iterrows():

        img = PIL.Image.open(frame["cam/image_array"])
        arr = np.array(img)
        n = np.uint8(arr)
        img_arr = n.reshape((1, ) + n.shape)
        angle_binned, throttle = model.predict(np.uint8(img_arr))
        angle_unbinned = dk.utils.linear_unbin(angle_binned)
        print("angle_unbinned", angle_unbinned, "throttle", throttle)
        print("user/angle", frame["user/angle"], "user/throttle",
              frame["user/throttle"])
Example #20
def create_real_dataset(filter_method):
    tubgroup = TubGroup(
        "data/log_20,data/log_21,data/log_23,data/log_1,data/log_2,data/log_3,data/log_4,data/log_w_6,data/log_w_7"
    )

    print("splitting train / validation 0.9/0.1")

    train_df = tubgroup.df.sample(frac=0.9, random_state=200)
    val_df = tubgroup.df.drop(train_df.index)

    train_data_set = DriveDataSet.from_tubgroup(train_df,
                                                filter_method=filter_method,
                                                fake_image=False)
    val_data_set = DriveDataSet.from_tubgroup(val_df,
                                              filter_method=filter_method,
                                              fake_image=False)

    print("dataset created")

    return train_data_set, val_data_set
Example #21
def augment(tub_names, new_data_dir, args):
    new_data_dir = os.path.expanduser(new_data_dir)

    tubgroup = TubGroup(tub_names)

    # If tub directory does not exist, create directory
    if not os.path.exists(new_data_dir):
        os.makedirs(new_data_dir)

    # If directory does not contain meta.json, copy one from the first source tub
    if not os.path.exists(os.path.join(new_data_dir, 'meta.json')):
        copyfile(src=tubgroup.tubs[0].meta_path,
                 dst=os.path.join(new_data_dir, 'meta.json'))

    new_tub = Tub(new_data_dir)

    for tub in tubgroup.tubs:
        for ix in tub.get_index(shuffled=False):
            record = tub.get_record(ix)
            for augmented_record in augment_single_record(record, args):
                new_tub.put_record(augmented_record)
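
augment_single_record is defined elsewhere in that project and is not shown here. A minimal hypothetical version, yielding the original record plus a horizontally flipped copy with the steering angle negated, could look like this:

import numpy as np

def augment_single_record(record, args):
    # Hypothetical sketch only; the real helper in the source project may apply
    # different augmentations. args is accepted but ignored here.
    yield record
    flipped = dict(record)
    flipped['cam/image_array'] = np.fliplr(record['cam/image_array'])
    flipped['user/angle'] = -float(record['user/angle'])
    yield flipped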
Example #22
def clean_slow_frames():
    args = docopt(__doc__)
    tubs = args["--tub"]
    tg = TubGroup(tubs)
    print(tg.df)
    tg.df[tg.df["user/throttle"] < 0.1]
    json_files_to_clean = tg.df[
        tg.df["user/throttle"] < 0.1]["cam/image_array"].map(
            lambda x: "\\".join(x.split("\\")[:-1]) + "\\" + "record_" + x.
            replace("_cam-image_array_.jpg", "").rsplit("\\")[-1] + ".json")
    images_to_clean = tg.df[tg.df["user/throttle"] < 0.1]["cam/image_array"]
    for file in json_files_to_clean:
        f = "C:\\Users\\moritz\\Documents\\donkeycar\\d2\\data\\tmp\\" + file.split(
            "\\")[-2] + file.split("\\")[-2]
        if not os.path.exists(f):
            os.mkdir(f)
        shutil.move(file, f)
    for file in images_to_clean:
        f = "C:\\Users\\moritz\\Documents\\donkeycar\\d2\\data\\tmp\\" + file.split(
            "\\")[-2] + file.split("\\")[-2]
        if not os.path.exists(f):
            os.mkdir(f)
        shutil.move(file, f)
Example #23
def test_tubgroup_types(tubs):
    """ Get TubGroup types """
    list_of_tubs = tubs[1]
    str_of_tubs = ','.join(list_of_tubs)
    t = TubGroup(str_of_tubs)
    assert sorted(t.types) == sorted(['image_array', 'float', 'float'])
Example #24
def create_real_dataset(filter_method):
    tubgroup = TubGroup("../data/aws")
    return DriveDataSet.from_tubgroup(tubgroup.df,
                                      filter_method=filter_method,
                                      fake_image=False)
Example #25
File: train.py, Project: AM5800/picar
import keras
from donkeycar.parts.datastore import TubGroup
from model import create_model

verbose = 2
train_split = .8

X_keys = ['cam/image_array']
y_keys = ['user/angle']

tubgroup = TubGroup(
    r"..\..\..\picar_sync\wide-36.6-day,..\..\..\picar_sync\wide-36.6-evening-ccw-recovery"
)
train_gen, val_gen = tubgroup.get_train_val_gen(X_keys,
                                                y_keys,
                                                batch_size=10,
                                                train_frac=train_split)

save_best = keras.callbacks.ModelCheckpoint('model_{val_loss:.4f}.hdf5',
                                            monitor='val_loss',
                                            verbose=verbose,
                                            save_best_only=True,
                                            mode='min')

early_stop = keras.callbacks.EarlyStopping(monitor='val_loss',
                                           min_delta=.0005,
                                           patience=5,
                                           verbose=verbose,
                                           mode='auto')
steps_per_epoch = 100
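
The script stops after defining the callbacks. Assuming create_model() returns a compiled Keras model, the generators and callbacks would typically be wired together roughly as follows (epoch count and validation steps are arbitrary):

# Plausible continuation, not part of the original file.
model = create_model()
model.fit_generator(train_gen,
                    steps_per_epoch=steps_per_epoch,
                    epochs=100,
                    verbose=verbose,
                    validation_data=val_gen,
                    validation_steps=steps_per_epoch // 4,
                    callbacks=[save_best, early_stop])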
Example #26
def playback(cfg, tub_names, model_name=None):
    if not tub_names:
        tub_names = os.path.join(cfg.DATA_PATH, '*')
    tubgroup = TubGroup(tub_names)
    
    if model_name is not None:
        from donkeycar.parts.keras import KerasCategorical
        kl = KerasCategorical()
        kl.load(model_name)
        pilot_angles = []
        pilot_throttles  = []
        
    
    
    print('tub_names', tub_names)
    
    tub_paths = utils.expand_path_arg(tub_names)
    print('TubGroup:tubpaths:', tub_paths)
    user_angles = []
    user_throttles  = []
    
        
    tubs = [Tub(path) for path in tub_paths]
    for tub in tubs:
        num_records = tub.get_num_records()
        print(num_records)
        for iRec in tub.get_index(shuffled=False):
            record = tub.get_record(iRec)
            # record = tubs.get_record(random.randint(1, num_records + 1))
            img = record["cam/image_array"]
            user_angle = float(record["user/angle"])
            user_throttle = float(record["user/throttle"])
            user_angles.append(user_angle)
            user_throttles.append(user_throttle)
            if model_name is not None:
                pilot_angle, pilot_throttle = kl.run(img)
                pilot_angles.append(pilot_angle)
                pilot_throttles.append(pilot_throttle)
            
    
    record = tubs[0].get_record(random.randint(1,num_records+1))
    user_angle = float(record["user/angle"])
    user_throttle = float(record["user/throttle"])
    print(img.shape)
    print('-----')
    print(user_angle)
    print(user_throttle)
    plt.figure()
    plt.imshow(img)
    plt.plot([80,80+10*user_throttle*np.cos(user_angle)],[120,120+100*user_throttle*np.sin(user_angle)])
    
    plt.figure()
    plt.plot(user_angles)
    plt.plot(user_throttles)
    
    
    
    
    fig = plt.figure()
    ax1 = plt.subplot2grid((2,2),(0,0))
    record = tubs[0].get_record(1)
    img = record["cam/image_array"]
    imPlot = ax1.imshow(img,animated=True)
    
    
    floorImg = lookAtFloorImg(img)
    print(floorImg)
    ax3 = plt.subplot2grid((2,2),(0,1))
    imPlot2 = ax3.imshow(floorImg,animated=True)
    
    ax2 = plt.subplot2grid((2,2),(1,0), colspan=2)
    line1, = ax2.plot(user_angles)
    line2, = ax2.plot(user_throttles)
    if model_name is not None:
        line4, = ax2.plot(pilot_angles)
        line5, = ax2.plot(pilot_throttles)
    line3, = ax2.plot([0,0],[-1,1])
    
    
    
    def animate(i):
        record = tubs[0].get_record(i)
        img = record["cam/image_array"]
        imPlot.set_array(img)
        imPlot2.set_array(lookAtFloorImg(img))
        line3.set_data([i,i],[-1,1])
        #print(i)
        #sys.stdout.flush()
        return imPlot,


    # Init only required for blitting to give a clean slate.
    def init():
        record = tubs[0].get_record(1)
        img = record["cam/image_array"]
        imPlot.set_array(img)
        line3.set_data([0,0],[-1,1])
        return imPlot,
    
    
    
    ani = animation.FuncAnimation(fig, animate, np.arange(1, tubs[0].get_num_records()),
                              interval=100, blit=False)
                              
    plt.show()
Example #27
            self.model.predict(image_ready[None, :, :, :], batch_size=1))
        fig = plt.figure(figsize=(6, 4))
        show(fig, (1, 1, 1),
             "Predicted steering angle : {}".format(steering_angle), image)
        savefig(parameter['saved_images_folder'] + name)

    def load_N_make_prediction(self, path, name):
        image = load_image(path[0])
        self.make_prediction(image, name)


if __name__ == "__main__":

    X_keys = ['cam/image_array']
    y_keys = ['user/angle', 'user/throttle']
    tubgroup = TubGroup("./data/tub_29_18-09-09")
    train_gen, val_gen = tubgroup.get_train_val_gen(X_keys,
                                                    y_keys,
                                                    batch_size=32,
                                                    train_frac=0.8)
    print("Fetching data ...")
    steering_angles = []
    for _, user_data in train_gen:
        steering_angles.append(user_data[0])

    print("%f frames.".format(len(steering_angles)))

    if 0:
        i = get_random_image_id(image_paths)
        save_3_views(image_paths[i], steering_angles[i], '3views.png')
Example #28
def train(cfg, tub_names, model_name, model_type):
    '''
    use the specified data in tub_names to train an artificial neural network
    saves the trained model as model_name
    '''

    X_keys = ['cam/image_array']
    y_keys = ['user/angle', 'user/throttle']

    binning = dk.utils.linear_bin
    if model_type == "hres_cat":
        binning = dk.utils.linear_bin_hres

    def rt(record):
        record['user/angle'] = binning(record['user/angle'])
        return record

    kl = KerasCategorical()
    if model_type == 'linear':
        kl = KerasLinear()
    if model_type == 'categorical':
        kl = KerasCategorical()
    if model_type == 'hres_cat':
        kl = KerasHresCategorical()
    print('tub_names', tub_names)
    if not tub_names:
        tub_names = os.path.join(cfg.DATA_PATH, '*')
    tubgroup = TubGroup(tub_names)

    train_gen, val_gen = tubgroup.get_train_val_gen(
        X_keys,
        y_keys,
        record_transform=rt,
        batch_size=cfg.BATCH_SIZE,
        train_frac=cfg.TRAIN_TEST_SPLIT)

    if model_type == 'linear':
        train_gen, val_gen = tubgroup.get_train_val_gen(
            X_keys,
            y_keys,
            batch_size=cfg.BATCH_SIZE,
            train_frac=cfg.TRAIN_TEST_SPLIT)

    model_path = os.path.expanduser(model_name)

    total_records = len(tubgroup.df)
    total_train = int(total_records * cfg.TRAIN_TEST_SPLIT)
    total_val = total_records - total_train
    print('train: %d, validation: %d' % (total_train, total_val))
    steps_per_epoch = total_train // cfg.BATCH_SIZE
    print('steps_per_epoch', steps_per_epoch)

    print(val_gen)
    history, save_best = kl.train(train_gen=train_gen,
                                  val_gen=val_gen,
                                  saved_model_path=model_path,
                                  steps=steps_per_epoch,
                                  train_split=cfg.TRAIN_TEST_SPLIT)

    plt.plot(history.history['loss'])
    plt.plot(history.history['val_loss'])
    plt.title('model loss : %f' % save_best.best)
    plt.ylabel('loss')
    plt.xlabel('epoch')
    plt.legend(['train', 'test'], loc='upper left')
    plt.savefig(model_path + '_' + model_type +
                '_loss_%f.png' % save_best.best)
Example #29
def test_tubgroup_inputs(tubs):
    """ Get TubGroup inputs """
    list_of_tubs = tubs[1]
    str_of_tubs = ','.join(list_of_tubs)
    t = TubGroup(str_of_tubs)
    assert sorted(t.inputs) == sorted(['cam/image_array', 'angle', 'throttle'])
Example #30
def test_tubgroup_get_num_records(tubs):
    """ Get number of records in TubGroup """
    list_of_tubs = tubs[1]
    str_of_tubs = ','.join(list_of_tubs)
    t = TubGroup(str_of_tubs)
    assert t.get_num_records() == 25
Example #31
def test_tubgroup_load(tubs):
    """ Load TubGroup from existing tubs dir """
    list_of_tubs = tubs[1]
    str_of_tubs = ','.join(list_of_tubs)
    t = TubGroup(str_of_tubs)
    assert t is not None
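
Examples #23 and #29 to #31 all depend on a tubs pytest fixture from donkeycar's own test suite, which is not shown here. A rough stand-in, with keys, types and record counts chosen only to satisfy the assertions above (e.g. 5 tubs of 5 records each for get_num_records() == 25), might look like:

import os
import numpy as np
import pytest
from donkeycar.parts.datastore import Tub

@pytest.fixture
def tubs(tmpdir):
    # Hypothetical replacement for the real fixture; the actual one differs.
    inputs = ['cam/image_array', 'angle', 'throttle']
    types = ['image_array', 'float', 'float']
    paths = []
    for i in range(5):
        path = os.path.join(str(tmpdir), 'tub_%d' % i)
        tub = Tub(path, inputs=inputs, types=types)
        for _ in range(5):
            tub.put_record({'cam/image_array': np.zeros((120, 160, 3), dtype=np.uint8),
                            'angle': 0.0,
                            'throttle': 0.0})
        paths.append(path)
    return None, paths   # the tests only use tubs[1], the list of tub paths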