def __init__(self,
                 city_name,
                 avoid_stopping,
                 memory_fraction=0.25,
                 image_cut=[115, 510]):

        #Agent.__init__(self)
        dir_path = os.path.dirname(__file__)
        # Dropout keep probabilities, one entry per network layer.
        self.dropout_vec = [1.0] * 8 + [0.7] * 2 + [0.5] * 2 + [0.5] * 1 + [0.5, 1.] * 6
        self._image_size = (88, 200, 3)
        self._avoid_stopping = avoid_stopping
        self._image_cut = image_cut
        tf.reset_default_graph()
        config_gpu = tf.ConfigProto(allow_soft_placement=True)
        # Use GPU 0 by default; select a different GPU via CUDA_VISIBLE_DEVICES.
        config_gpu.gpu_options.visible_device_list = '0'
        config_gpu.gpu_options.per_process_gpu_memory_fraction = memory_fraction
        self._sess = tf.Session(config=config_gpu)
        self._models_path = "/home/pankaj/Trainer_module/CARLAILtrainer/models/"
        self._sess.run(tf.global_variables_initializer())
        #self.load_model()
        with tf.device('/gpu:0'):
            saver = tf.train.import_meta_graph(self._models_path +
                                               'model.ckpt.meta')
        self._graph = tf.get_default_graph()
        self._input_images = self._graph.get_tensor_by_name('input_image:0')
        self._input_speed = self._graph.get_tensor_by_name('input_speed:0')
        self._dout = self._graph.get_tensor_by_name('dropout:0')
        self._follow_lane = self._graph.get_tensor_by_name(
            'Network/Branch_0/fc_8:0')
        self._left = self._graph.get_tensor_by_name('Network/Branch_1/fc_11:0')
        self._right = self._graph.get_tensor_by_name(
            'Network/Branch_2/fc_14:0')
        self._straight = self._graph.get_tensor_by_name(
            'Network/Branch_3/fc_17:0')
        self._speed = self._graph.get_tensor_by_name(
            'Network/Branch_4/fc_20:0')
        self._intent = self._graph.get_tensor_by_name(
            'Network/Branch_5/fc_23:0')
        with tf.device('/gpu:0'):
            saver.restore(self._sess, self._models_path + 'model.ckpt')
        self._curr_dir = 0
        self.count = 0
        self._enable_manual_control = False
        self._is_on_reverse = False
        #pygame.init()
        #self._display = pygame.display.set_mode(
        #        (WINDOW_WIDTH, WINDOW_HEIGHT),
        #                        pygame.HWSURFACE | pygame.DOUBLEBUF)
        self.command_follower = CommandFollower(city_name)
        self.traffic_light_infraction = False
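For reference, a minimal inference sketch showing how the placeholders and branch tensors restored above could be evaluated. The tensor attributes come from the __init__ above; the _compute_action name, the command-to-branch mapping (2 follow lane, 3 left, 4 right, 5 straight) and the (steer, throttle, brake) branch output layout are assumptions modeled on the standard CARLA conditional imitation learning agent, not part of the original code.

import numpy as np
import cv2

def _compute_action(self, rgb_image, speed, direction):
    # Hypothetical helper: crop, resize and normalize the camera image
    # to the network input size (88, 200, 3).
    image = rgb_image[self._image_cut[0]:self._image_cut[1], :, :]
    image = cv2.resize(image, (self._image_size[1], self._image_size[0]))
    image = image.astype(np.float32) / 255.0

    # Select the output branch from the high-level command
    # (assumed mapping: 2 follow lane, 3 left, 4 right, 5 straight).
    branches = {2: self._follow_lane, 3: self._left,
                4: self._right, 5: self._straight}
    branch = branches.get(int(direction), self._follow_lane)

    feed_dict = {
        self._input_images: image[np.newaxis, ...],
        self._input_speed: np.array([[speed]], dtype=np.float32),
        self._dout: [1.0] * len(self.dropout_vec),  # no dropout at inference
    }
    # Assumes each control branch outputs [steer, throttle, brake].
    steer, throttle, brake = self._sess.run(branch, feed_dict=feed_dict)[0]
    return steer, throttle, brake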
Example #2
def make_controlling_agent(args, town_name):
    """ 
    Make the controlling agent object depending on what was selected.
    Options:
    Forward Agent: Trivial agent that just accelerate forward.
    Human Agent: Agent controlled by a human driver, currently only by keyboard.
    Command Follower: A* planner followed by a PID controller (used as expert for data collection)
    """

    if args.controlling_agent == "ForwardAgent":
        return ForwardAgent()
    elif args.controlling_agent == "HumanAgent":
        return HumanAgent()
    elif args.controlling_agent == "CommandFollower":
        return CommandFollower(town_name)
    else:
        raise ValueError("Selected agent does not exist")
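A quick usage sketch for this factory function (the command-line flag names and defaults below are illustrative, not taken from the original script):

import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--controlling-agent', default='CommandFollower',
                        help='ForwardAgent, HumanAgent or CommandFollower')
    parser.add_argument('--town-name', default='Town01')
    args = parser.parse_args()

    # argparse maps --controlling-agent to args.controlling_agent
    agent = make_controlling_agent(args, args.town_name)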
Example #3
def make_controlling_agent(args, town_name):
    """ Make the controlling agent object depending on what was selected.
        Right now we have the following options:
        Forward Agent: A trivial agent that just accelerates forward.
        Human Agent: An agent controlled by a human driver, currently only by keyboard.
    """

    if args.controlling_agent == "ForwardAgent":
        return ForwardAgent()
    elif args.controlling_agent == "HumanAgent":
        # TDNextPR: Add parameters such as joysticks to the human agent.
        return HumanAgent()
    elif args.controlling_agent == "CommandFollower":
        return CommandFollower(town_name)
    elif args.controlling_agent == 'LaneFollower':
        return LaneFollower(town_name)
    else:
        raise ValueError("Selected agent does not exist")