def convert_bag_file(bag_file_path, output_dir, examples_per_file):
    """Convert a ROS bag of camera frames + /steering messages into ExampleSets.

    Each camera frame is paired with the first steering message whose
    timestamp follows the frame's, cropped to its lower half, resized to the
    network input shape, and appended to either the test split (~30%) or the
    train split (~70%). Accumulated examples are flushed to `output_dir`
    via save_next_filename() every `examples_per_file` training examples.
    """
    print "Processing bag file:", bag_file_path

    # Fraction of examples routed to the test split.
    test_proportion = 0.3

    examples = ExampleSet()
    bridge = CvBridge()
    bag = rosbag.Bag(bag_file_path)
    # Steering-message iterator, consumed in lockstep with the image stream.
    # NOTE(review): .next() here raises an uncaught StopIteration if the bag
    # contains no /steering messages at all — confirm whether that can happen.
    iter_obj = iter(bag.read_messages(topics=["/steering"]))
    topic2, msg, t2 = iter_obj.next()

    # Select whichever supported camera topic this bag actually recorded.
    topics = bag.get_type_and_topic_info()[1].keys()
    if '/camera/image_rect' in topics:
        camera_topic = '/camera/image_rect'
    elif '/camera/image_raw' in topics:
        camera_topic = '/camera/image_raw'
    else:
        print "ERROR: camera topic not recognized"
        sys.exit(2)

    for topic1, img, t1 in bag.read_messages(topics=[camera_topic]):
        try:
            cv_image = bridge.imgmsg_to_cv2(img, 'bgr8')

            # use lower half of picture
            halfHeight = cv_image.shape[0] // 2
            cv_image = cv_image[halfHeight:, :]

            # resize
            # NOTE(review): input_shape is a module-level global (rows, cols);
            # cv2.resize takes (width, height), hence the index swap.
            cv_image = cv2.resize(cv_image, (input_shape[1], input_shape[0]))

            # Advance the steering iterator until its timestamp passes the
            # image's, so `msg` holds the first steering reading after t1.
            # If the steering stream runs out, the last message is reused for
            # all remaining frames.
            while (t1 >= t2):
                try:
                    topic2, msg, t2 = iter_obj.next()
                except StopIteration:
                    print "caught StopIteration for steering message iterator"
                    break

            # Parse the angle from the message's string form; assumes the
            # angle is the last whitespace-separated token — TODO confirm
            # against the /steering message definition.
            angle = float(str(msg).split()[-1])
            ex = Example(cv_image, angle)

            # Flush a full file's worth of training examples to disk and
            # start a fresh set.
            if len(examples.train) >= examples_per_file:
                save_next_filename(examples, output_dir)
                examples = ExampleSet()

            L = examples.test if random.random() < test_proportion else examples.train
            L.append(ex)

        except CvBridgeError as e:
            print e

    bag.close()
    # Write whatever remains (possibly a partial, smaller-than-limit set).
    save_next_filename(examples, output_dir)
    def batch_generator(isValidation=False):
        """Yield (inputs, labels) minibatches loaded lazily from example-set files.

        Validation makes a single pass over the data; training repeats for
        `epochs` passes, reshuffling files and examples each time.
        """
        for _pass in range(1 if isValidation else epochs):
            remaining = list(exampleSetFiles_const)
            random.shuffle(remaining)

            while remaining:
                pool = []
                # Buffer a bounded number of examples in memory at a time.
                while remaining and len(pool) < n_examples_to_load:
                    loaded = ExampleSet.load(
                        os.path.join(exampleSetDir, remaining.pop()))
                    pool += loaded.test if isValidation else loaded.train

                if not isValidation:
                    random.shuffle(pool)

                inputs = format_inputs(pool)

                # Category bin vector per example, derived from its angle.
                labels = np.array([defineCategory(ex.angle) for ex in pool])

                if not isValidation:
                    for idx in range(len(inputs)):
                        if random.random() < 0.4:  # 40% of images are flipped
                            inputs[idx] = cv2.flip(inputs[idx], 1)
                            # Reverse the bins to match the mirrored image.
                            labels[idx] = labels[idx][::-1]

                for start in range(0, len(inputs), batch_size):
                    yield (inputs[start:start + batch_size],
                           labels[start:start + batch_size])
    def batch_generator(isValidation = False):
        """Yield (inputs, labels) minibatches loaded lazily from example-set files.

        NOTE(review): this immediately rebinds the `batch_generator` defined
        directly above with logically identical code — a copy/paste
        duplication; one of the two definitions should be removed.
        """
        # Validation gets one pass; training repeats for `epochs` passes.
        gen_epochs = 1 if isValidation else epochs
        for epoch in range(gen_epochs):
            exampleSetFiles = list(exampleSetFiles_const)
            random.shuffle(exampleSetFiles)

            while len(exampleSetFiles) > 0:
                D = []
                # Buffer up to n_examples_to_load examples from disk.
                while len(exampleSetFiles) > 0 and len(D) < n_examples_to_load:
                    data = ExampleSet.load(os.path.join(exampleSetDir, exampleSetFiles.pop()))
                    D += data.test if isValidation else data.train

                if not isValidation: random.shuffle(D)

                X = format_inputs(D)

                # create output bins
                labels = np.array([defineCategory(ex.angle) for ex in D])

                if not isValidation:
                    for i in range(len(X)):
                        if random.random() < 0.4: # 40% of images are flipped
                            X[i] = cv2.flip(X[i], 1)
                            # Reverse the bins to match the mirrored image;
                            # assumes the category layout is left/right
                            # symmetric — TODO confirm with defineCategory.
                            labels[i] = labels[i][::-1]

                for i in range(0, len(X), batch_size):
                    xs = X[i: i + batch_size]
                    ys = labels[i: i + batch_size]
                    yield (xs, ys)
    # NOTE(review): everything from here down reads sys.argv and builds a
    # model — it does not belong inside convert_bag_file and duplicates the
    # training code in main() below; this looks like a file-concatenation
    # artifact that should be removed or moved to module scope.
    config = train_profiles[sys.argv[3]]
    epochs = config.epochs
    categories = config.categories

    model = make_model()
    model.summary()

    # Example-set directory comes from the first CLI argument; only the
    # serialized .pkl.lz4 files inside it are considered.
    exampleSetDir = sys.argv[1]
    exampleSetFiles_const = tuple(f for f in os.listdir(exampleSetDir)
                                  if '.pkl.lz4' in f)
    n_training_examples = 0
    n_test_examples = 0
    cnt = collections.Counter()  # per-category training label histogram
    for f in exampleSetFiles_const:
        data = ExampleSet.load(os.path.join(exampleSetDir, f))
        n_training_examples += len(data.train)
        n_test_examples += len(data.test)
        for ex in data.train:
            # Index of the example's category bin (argmax of its bin vector).
            i = np.argmax(defineCategory(ex.angle))
            cnt[i] += 1

    print "total training examples:", n_training_examples
    print "training label counts:", cnt

    def batch_generator(isValidation=False):
        """Truncated duplicate of batch_generator.

        NOTE(review): this third redefinition stops after shuffling the file
        list — the loading/augmentation/yield logic present in the earlier
        definitions is missing, and there is no `yield`, so this version is a
        plain function returning None. It appears to be a concatenation/paste
        artifact and should be deleted.
        """
        gen_epochs = 1 if isValidation else epochs
        for epoch in range(gen_epochs):
            exampleSetFiles = list(exampleSetFiles_const)
            random.shuffle(exampleSetFiles)
# Beispiel #5  -- stray example-separator text and a stray "0" left over from
# the source this file was scraped/concatenated from; commented out so the
# module parses. (original lines: `Beispiel #5` / `0`)
# 0
def main():
    """ROS node entry point: train the steering model from saved ExampleSets.

    Reads configuration from private ROS parameters, counts the available
    examples, trains via a lazily-loading batch generator, evaluates on the
    test split, and always saves the model — even on failure or interrupt.
    """
    global categories, n_examples_to_load, batch_size

    rospy.init_node("nn_training")

    startTime = time.time()

    # Required private parameters (no defaults, so a missing parameter is an
    # error at startup rather than a silent misconfiguration).
    model_path = rospy.get_param("~model_output_path")
    exampleSetDir = rospy.get_param("~example_set_dir")
    epochs = int(rospy.get_param("~epochs"))

    # Space-separated list of positive steering categories, parsed to floats
    # and then expanded — presumably mirroring to negative angles; see
    # expand_categories, TODO confirm.
    categories = rospy.get_param("~positive_nonzero_categories")
    categories = string.strip(categories).split(" ")
    categories = [float(x) for x in categories]
    categories = expand_categories(categories)

    model = make_model()
    model.summary()

    # All serialized example-set files in the directory.
    exampleSetFiles_const = tuple(f for f in os.listdir(exampleSetDir)
                                  if '.pkl.lz4' in f)
    n_training_examples = 0
    n_test_examples = 0
    cnt = collections.Counter()  # per-category training label histogram
    for f in exampleSetFiles_const:
        data = ExampleSet.load(os.path.join(exampleSetDir, f))
        n_training_examples += len(data.train)
        n_test_examples += len(data.test)
        for ex in data.train:
            i = np.argmax(defineCategory(ex.angle))
            cnt[i] += 1

    print "total training examples:", n_training_examples
    print "training label counts:", cnt

    def batch_generator(isValidation=False):
        """Yield (inputs, labels) minibatches loaded lazily from the example
        files: one pass for validation, `epochs` passes for training."""
        gen_epochs = 1 if isValidation else epochs
        for epoch in range(gen_epochs):
            exampleSetFiles = list(exampleSetFiles_const)
            random.shuffle(exampleSetFiles)

            while len(exampleSetFiles) > 0:
                D = []
                # Buffer up to n_examples_to_load examples in memory at once.
                while len(exampleSetFiles) > 0 and len(D) < n_examples_to_load:
                    data = ExampleSet.load(
                        os.path.join(exampleSetDir, exampleSetFiles.pop()))
                    D += data.test if isValidation else data.train

                if not isValidation: random.shuffle(D)

                X = format_inputs(D)

                # create output bins
                labels = np.array([defineCategory(ex.angle) for ex in D])

                if not isValidation:
                    for i in range(len(X)):
                        if random.random() < 0.4:  # 40% of images are flipped
                            X[i] = cv2.flip(X[i], 1)
                            # Reverse the bins to match the mirrored image;
                            # assumes a left/right-symmetric category layout —
                            # TODO confirm.
                            labels[i] = labels[i][::-1]

                for i in range(0, len(X), batch_size):
                    xs = X[i:i + batch_size]
                    ys = labels[i:i + batch_size]
                    yield (xs, ys)

    try:
        # ceil() so the trailing partial batch still counts as a step.
        n_minibatches = int(math.ceil(float(n_training_examples) / batch_size))
        model.fit_generator(batch_generator(),
                            steps_per_epoch=n_minibatches,
                            epochs=epochs,
                            verbose=1)
        print "elapsed time:", time.time() - startTime

        n_minibatches = int(math.ceil(float(n_test_examples) / batch_size))
        loss, acc = model.evaluate_generator(batch_generator(True),
                                             steps=n_minibatches)
        print "validation loss:", loss, "| validation accuracy:", acc

    finally:
        # Persist the model even if training raised or was interrupted.
        model.save(model_path)
        print "\nsaved model to", model_path
def main():
    global categories, n_examples_to_load, batch_size

    rospy.init_node("nn_training")

    startTime = time.time()

    model_path = rospy.get_param("~model_output_path", None)
    exampleSetDir = rospy.get_param("~example_set_dir", None)
    epochs = int(rospy.get_param("~epochs", None))

    categories = rospy.get_param("~positive_nonzero_categories", None)
    categories = string.strip(categories).split(" ")
    categories = [float(x) for x in categories]
    categories = expand_categories(categories)

    model = make_model()
    model.summary()

    exampleSetFiles_const = tuple(f for f in os.listdir(exampleSetDir) if '.pkl.lz4' in f)
    n_training_examples = 0
    n_test_examples = 0
    cnt = collections.Counter()
    for f in exampleSetFiles_const:
        data = ExampleSet.load(os.path.join(exampleSetDir, f))
        n_training_examples += len(data.train)
        n_test_examples += len(data.test)
        for ex in data.train:
            i = np.argmax(defineCategory(ex.angle))
            cnt[i] += 1

    print "total training examples:", n_training_examples
    print "training label counts:", cnt

    def batch_generator(isValidation = False):
        gen_epochs = 1 if isValidation else epochs
        for epoch in range(gen_epochs):
            exampleSetFiles = list(exampleSetFiles_const)
            random.shuffle(exampleSetFiles)

            while len(exampleSetFiles) > 0:
                D = []
                while len(exampleSetFiles) > 0 and len(D) < n_examples_to_load:
                    data = ExampleSet.load(os.path.join(exampleSetDir, exampleSetFiles.pop()))
                    D += data.test if isValidation else data.train

                if not isValidation: random.shuffle(D)

                X = format_inputs(D)

                # create output bins
                labels = np.array([defineCategory(ex.angle) for ex in D])

                if not isValidation:
                    for i in range(len(X)):
                        if random.random() < 0.4: # 40% of images are flipped
                            X[i] = cv2.flip(X[i], 1)
                            labels[i] = labels[i][::-1]

                for i in range(0, len(X), batch_size):
                    xs = X[i: i + batch_size]
                    ys = labels[i: i + batch_size]
                    yield (xs, ys)

    try:
        n_minibatches = int(math.ceil(float(n_training_examples) / batch_size))
        model.fit_generator(batch_generator(),
                            steps_per_epoch=n_minibatches,
                            epochs=epochs,
                            verbose=1)
        print "elapsed time:", time.time() - startTime

        n_minibatches = int(math.ceil(float(n_test_examples) / batch_size))
        loss, acc = model.evaluate_generator(batch_generator(True), steps=n_minibatches)
        print "validation loss:", loss, "| validation accuracy:", acc

    finally:
        model.save(model_path)
        print "\nsaved model to", model_path