def initialize_custom_reset(self, alternate_route):
    """Reset the environment, seeding the agent with a custom route.

    :param alternate_route: list: alternate route nodes for the car agent
    :return: index of the agent's initial state in the state view
    """
    # rebuild the car states with the agent forced onto the alternate route
    seeded_state = self.car_init_method(self.N,
                                        self.axis,
                                        car_id=self.agent,
                                        alternate_route=alternate_route)
    self.cars_object = Cars(init_state=seeded_state, axis=self.axis)

    if self.animate:
        # the animator must be rebuilt so it tracks the new cars object
        self.animator = Animator(fig=self.fig,
                                 ax=self.ax,
                                 cars_object=self.cars_object,
                                 lights_object=self.lights_object,
                                 num=self.num)

    # the state is the index of the single True flag in the fresh state view
    flags = self.refresh_stateview().determine_state()[0]
    return flags.index(True)
def __init__(self, n, graph, agent, dt, animate=False):
    """Initialize a learning environment for one car agent in the system.

    :param n: int: number of cars to simulate
    :param graph: OGraph: road-network graph object (supplies fig/ax)
    :param agent: int: the ID of the car (agent)
    :param dt: simulation time step -- presumably seconds; TODO confirm units
    :param animate: bool: if the environment is to be animated while learning
    """
    self.N = n
    self.num = None  # simulation-number tuple; populated later, not here
    self.graph = graph
    # the OGraph object carries its own matplotlib figure and axis
    self.fig, self.ax = self.graph.fig, self.graph.ax
    self.agent = agent
    self.dt = dt
    self.animate = animate
    self.animator = None  # created lazily once animation is actually needed
    self.axis = self.ax.axis()  # figure axis bounds
    self.route_times = []  # per-episode route times of the agent
    # initialization strategies for cars and lights;
    # alternate strategies are kept commented for reference
    self.car_init_method = sim.init_random_node_start_location
    # self.car_init_method = sim.init_culdesac_start_location
    self.light_init_method = sim.init_traffic_lights
    # self.car_init_method = convergent_learner.init_custom_agent
    # self.light_init_method = convergent_learner.init_custom_lights
    self.cars_object = Cars(self.car_init_method(self.N, self.graph), self.graph)
    self.lights_object = TrafficLights(
        self.light_init_method(self.graph, prescale=40), self.graph)
    self.high = 10  # NOTE(review): presumably the large reward value -- confirm
    self.low = 2  # NOTE(review): unused in the visible code -- confirm
    self.shortest_route_thresh = 5  # episodes required before shortest-route check
def __init__(self, n, fig, ax, agent, dt, animate=False):
    """Initialize a learning environment for a car using explicit fig/ax handles.

    :param n: int: number of cars to simulate
    :param fig: figure: from matplotlib
    :param ax: axis: from matplotlib
    :param agent: int: the ID of the car (agent)
    :param dt: simulation time step -- presumably seconds; TODO confirm units
    :param animate: bool: if the environment is to be animated while learning
    """
    self.N = n
    self.num = None  # simulation-number tuple; populated later, not here
    self.fig = fig
    self.ax = ax
    self.agent = agent
    self.dt = dt
    self.animate = animate
    self.animator = None  # created lazily once animation is actually needed
    self.axis = self.ax.axis()  # figure axis bounds
    self.route_times = []  # per-episode route times of the agent
    # this variant wires in the convergent-learner custom initializers;
    # the generic sim initializers are kept commented for reference
    # self.car_init_method = sim.init_culdesac_start_location
    # self.light_init_method = sim.init_traffic_lights
    self.car_init_method = convergent_learner.init_custom_agent
    self.light_init_method = convergent_learner.init_custom_lights
    self.cars_object = Cars(self.car_init_method(self.N, self.axis), self.axis)
    self.lights_object = TrafficLights(
        self.light_init_method(self.axis, prescale=40), self.axis)
    self.high = 10  # NOTE(review): presumably the large reward value -- confirm
    self.low = 2  # NOTE(review): unused in the visible code -- confirm
    self.shortest_route_thresh = 5  # episodes required before shortest-route check
def __init__(self, drones_positions, cars_inits): self.steps_left = TIME_LENGTH # création des voitures self.cars = [Cars(car_init) for car_init in cars_inits] # création des drones self.drones = [Drones(drone_z, 0) for drone_z in drones_positions] # la visibilité au niveau env est le cumul des visibiités des drones self.visibilities = [np.zeros((SIZE_X, SIZE_Y)) < 0] # pour se lier avec la librairie Acme self.spec = Environment_spec( number_of_drones=len(drones_positions), number_of_cars=len(cars_inits) )
def reset(self, num):
    """Reset the environment for a new episode.

    :param num: tuple: (int, int) -- this simulation number out of the total
    :return: int: index of the agent's initial state
    """
    # respawn every car for the new episode
    self.cars_object = Cars(init_state=self.car_init_method(self.N, self.axis),
                            graph=self.graph)

    # the initial state is the position of the single True flag in the view
    flags = self.refresh_stateview().determine_state()[0]
    initial_state = flags.index(True)

    if self.animate:
        # rebuild the animator so it tracks the freshly created cars object
        self.num = num
        self.animator = Animator(fig=self.fig,
                                 ax=self.ax,
                                 cars_object=self.cars_object,
                                 lights_object=self.lights_object,
                                 num=self.num)
    return initial_state
def addCustomer(customer):
    """Interactively rent cars to *customer* until they stop entering input.

    Repeatedly prompts for a car number and rental hours; available cars are
    marked unavailable and attached to the customer. An empty car number ends
    the loop.

    :param customer: object exposing add_customer_cars(car, hours) and
        get_customer_cars()
    :return: whatever customer.get_customer_cars() returns
    """
    car_list = Cars()
    car_list.get_available_cars()
    car_number = input("Enter number of car, which you want to take: ")
    hours = input("And enter hours for rent: ")
    # BUG FIX: the original compared strings with `is not ''` / `is ''`,
    # which tests object identity, not equality (and raises a SyntaxWarning
    # on CPython >= 3.8). Use ==/!= for string comparison.
    while car_number != '' and hours != '':
        if car_list.is_available(car_number):
            car = car_list.set_unavailable_cars(str(car_number))
            customer.add_customer_cars(car, hours)
            car_list.get_available_cars()
            # print(car_number + " " + json.dumps(car, indent=4))
        else:
            print("This cart is unavailable, please choose another car!")
        car_number = input(
            "If you want to get another car enter its number, or just press Enter: "
        )
        if car_number == '':
            break
        hours = input("And enter hours for rent: ")
    return customer.get_customer_cars()
# G = ox.load_graphml('data/sanfrancisco.graphml') # G = ox.project_graph(G) # fig, ax = ox.plot_graph(G, fig_height=12, fig_width=10, node_size=0, edge_linewidth=0.5) # ax.set_title('San Francisco, California') """Piedmont, California""" G = ox.load_graphml('piedmont.graphml') G = ox.project_graph(G) fig, ax = ox.plot_graph(G, node_size=0, edge_linewidth=0.5, show=False) ax.set_title('Piedmont, California') # grab the dimensions of the figure axis = ax.axis() """ initialize the car and light state objects """ N = 33 # cars # cars = Cars(sim.init_culdesac_start_location(N, axis), axis) cars = Cars(sim.init_random_node_start_location(N, axis), axis) lights = TrafficLights(sim.init_traffic_lights(axis, prescale=40), axis) """ for an example of learning using a single, convergent learner, initialize the sim using these cars and lights: """ # cars = Cars(cl.init_custom_agent(n=1, fig_axis=axis), axis=axis) # lights = TrafficLights(cl.init_custom_lights(fig_axis=axis, prescale=None), axis) # initialize the Animator animator = Animator(fig=fig, ax=ax, cars_object=cars, lights_object=lights, num=(1, 10), n=N) init = animator.reset animate = animator.animate
# Minimal entry point: construct a Cars object with 10 cars when run as a script.
from cars import Cars

if __name__ == "__main__":
    # the instance is discarded; presumably the constructor's side effects
    # are the point here -- TODO confirm against the Cars class
    Cars(10)
else: connection.close() elif command == '3': result = red_light.get_state() print(result) connection.sendall(str.encode(str(result))) else: result = 'Not authorised' connection.sendall(bytes(result, 'utf-8')) connection.close() if __name__ == "__main__": global red_light global user_id red_light = AccessPoint() user_id = Cars() while True: with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as SystemSideSocket: try: SystemSideSocket.bind((HOST, PORT)) except socket.error as e: print(e) print('System Socket is listenning..') SystemSideSocket.listen(1) while True: client, address = SystemSideSocket.accept() print('Connected to: ' + address[0] + ':' + str(address[1])) _thread.start_new_thread(con_handler, (client, )) SystemSideSocket.close()
# Turtle crossing game: the player moves with WASD/arrow keys while cars
# scroll across the screen; the level increases the cars' behavior.
from turtle import Screen
from player import Player
from scoreboard import Scoreboard
from cars import Cars
from time import sleep

# screen setup: manual refresh mode (tracer(0) + screen.update() in the loop)
screen = Screen()
screen.colormode(255)
screen.setup(width=600, height=600)
screen.listen()
screen.tracer(0)

# game objects
player = Player()
scoreboard = Scoreboard()
cars = Cars()

# key bindings: both WASD and arrow keys drive the player
screen.onkey(player.moveup, 'w')
screen.onkey(player.moveup, 'Up')
screen.onkey(player.movedown, 's')
screen.onkey(player.movedown, 'Down')
screen.onkey(player.moveright, 'Right')
screen.onkey(player.moveright, 'd')
screen.onkey(player.moveleft, 'a')
screen.onkey(player.moveleft, 'Left')

level = 1
game_on = True
while game_on:
    screen.update()
    scoreboard.update_scoreboard(level)
    cars.move_car(level)  # car movement scales with the current level
    if player.ycor() >= 285:  # NOTE(review): snippet truncated here -- body of this check is not visible
from turtle import Turtle, Screen
from cars import Cars, car_list
from racer import Racer
from finishline import RaceLine
from levels import Level

# a turtle crossing game. Get the turtle to the finish line without hitting a car. Move forward with spacebar

# NOTE(review): `turtle` (the module name) is never imported above -- this line
# raises NameError as written; likely meant screen.listen() after screen exists
turtle.listen()

# generate screen
screen = Screen()
screen.setup(height=600, width=600)
screen.tracer(0)  # manual refresh via screen.update()

# generate game objects
car = Cars()
racer_turtle = Racer()
finish_line = RaceLine(250)
start_line = RaceLine(-250)
level_start = Level()
# instructions = level_start.instructions()

# start game
game_on = True
while game_on:
    # NOTE(review): `time` is not imported in this snippet -- confirm
    time.sleep(level_start.time_set)
    screen.update()
    for car in car_list:
        car.move()
    # generate one car after previous reaches a given coordinate
    if car_list[-1].xcor() in range(200, 250):
        car = Cars()  # NOTE(review): snippet appears truncated after this line
# Interactive simulation setup: geocode a user-supplied place via OGraph,
# spawn cars and lights on the resulting network, and build the Animator.
# NOTE(review): sim, Cars, TrafficLights and Animator are imported elsewhere
# in this file -- confirm when assembling.
from osm_request import OGraph
from tqdm import tqdm
import sys

# ask the user for a geo-codable location
query = input(
    'Please input a geo-codable place, like "Harlem, NY" or "Kigali, Rwanda": '
)

# get OGraph object
graph = OGraph(query, save=True)

# initialize the car and light state objects
N = int(input('Number of cars to simulate: '))
# cars = Cars(sim.init_culdesac_start_location(N, graph), graph)
cars = Cars(sim.init_random_node_start_location(N, graph), graph)
lights = TrafficLights(sim.init_traffic_lights(graph, prescale=15), graph)

"""
for an example of learning using a single, convergent learner,
initialize the sim using these cars and lights:
"""
# cars = Cars(cl.init_custom_agent(n=1, fig_axis=axis), axis=axis)
# lights = TrafficLights(cl.init_custom_lights(fig_axis=axis, prescale=None), axis)

# time of simulation (in seconds)
duration = int(input('Duration of time to simulate (in seconds): '))
frames_per_second = 60
n_frames = duration * frames_per_second

# initialize the Animator
# NOTE(review): snippet truncated mid-call -- remaining arguments not visible
animator = Animator(fig=graph.fig,
                    ax=graph.ax,
                    cars_object=cars,
                    lights_object=lights,
def setUp(self):
    """Test fixture hook: build the Cars instance shared by the test methods."""
    print('setUp')  # trace fixture execution per test
    # NOTE(review): the third constructor argument's meaning (price? odometer?)
    # is not visible here -- confirm against the Cars class
    self.toyota = Cars('Toyota', 'Supra', 236000000000)
"""Demo: exercise an ElectricCar and a conventional Cars instance."""
from cars import Cars
from electric_car_m import ElectricCar

# electric car: print its summary, then read the battery state
tesla = ElectricCar('tesla', 'model s', 2017)
print(tesla.summary())
tesla.battery.read_battery()

# conventional car: print its summary, then the odometer reading
prius = Cars('toyoya', 'prius', 2000)
print(prius.summary())
prius.odometer_reading()
# Turtle game: the player climbs past 40 randomly placed cars; reaching the
# top raises the level and speeds every car up.
from turtle import Screen
from time import sleep
from player import Player
from cars import Cars
from random import randint
from score import Levels

# screen setup: manual refresh mode (tracer(0) + screen.update())
screen = Screen()
screen.setup(width=600, height=600)
screen.tracer(0)
screen.listen()

player1 = Player()

# spawn 40 cars, each at a random coordinate in [-240, 240]
cars = []
for _ in range(40):
    cars.append(Cars(randint(-240, 240)))

score_levels = Levels()
screen.onkey(player1.go_up, "Up")

game_on = True
while game_on:
    # player reached the top: restart position, bump level, speed up all cars
    if player1.ycor() > 300:
        player1.go_to_start()
        score_levels.level += 1
        score_levels.show_level()
        for i in cars:
            i.more_speed()  # NOTE(review): snippet appears truncated after this loop
# "Cross Road Game": a player turtle moves up while a Cars manager spawns
# and moves cars each tick.
from turtle import Screen
from playerturtle import PlayerTurtle
import time
from cars import Cars
from scoreboard import Scoreboard

# screen setup: manual refresh mode (tracer(0) + screen.update())
screen = Screen()
screen.title("Cross Road Game")
screen.setup(width=600, height=600)
screen.bgcolor("white")
screen.tracer(0)

# game objects
car = Cars()
player = PlayerTurtle()
scoreboard = Scoreboard()

screen.listen()
screen.onkey(player.go_up, "Up")

game_is_on = True
while game_is_on:
    time.sleep(0.1)  # ~10 frames per second
    screen.update()
    # move cars
    car.create_cars()
    car.move()
    # NOTE(review): no exit condition is visible in this span -- snippet
    # likely truncated (collision/level checks presumably follow)
"""Demo: build three Cars with a color and door count, honk one, and print
each car's attributes."""
from cars import Cars

# each car takes a color and a number of doors (as strings)
sporty = Cars('Red', '2')
boring = Cars('Brown', '4')
classy = Cars('Green', '4')

# only the great car gets to honk
classy.honk()

print('Information for cool car: color: {}, number of doors: {}\nInformation for lame car: color: {}, number of doors: {}\nInformation for great car: color: {}, number of doors: {}'.format(
    sporty.color, sporty.doorsnum,
    boring.color, boring.doorsnum,
    classy.color, classy.doorsnum))
# Simulation + recording script: load the Piedmont network, run a single
# convergent-learner agent, and set up FuncAnimation/FFMpegWriter output.
# NOTE(review): ox, cl, Cars, TrafficLights, Animator and animation are
# imported elsewhere in this file -- confirm when assembling.

"""Piedmont, California"""
G = ox.load_graphml('piedmont.graphml')
G = ox.project_graph(G)
fig, ax = ox.plot_graph(G, node_size=0, edge_linewidth=0.5)
ax.set_title('Piedmont, California')

# grab the dimensions of the figure
axis = ax.axis()

# initialize the car and light state objects
# cars = Cars(sim.init_culdesac_start_location(N, axis), axis)
# cars = Cars(sim.init_random_node_start_location(N, axis), axis)
# lights = TrafficLights(sim.init_traffic_lights(axis, prescale=40), axis)
cars = Cars(cl.init_custom_agent(n=1, fig_axis=axis), axis=axis)
lights = TrafficLights(cl.init_custom_lights(fig_axis=axis, prescale=None), axis)

# initialize the Animator and expose its hooks for FuncAnimation
animator = Animator(fig=fig, ax=ax, cars_object=cars, lights_object=lights,
                    num=(1, 10))
init = animator.reset
animate = animator.animate

# for creating HTML frame-movies
# ani = animation.FuncAnimation(fig, animate, init_func=init, frames=1200, interval=30, blit=True)
# ani.save('traffic.html', fps=300, extra_args=['-vcodec', 'libx264'])

# for creating mp4 movies
ani = animation.FuncAnimation(fig, animate, init_func=init, frames=20000)
# NOTE(review): snippet appears truncated -- the ani.save(...) call using
# this writer is not visible here
mywriter = animation.FFMpegWriter(fps=300)
class Env:
    """Reinforcement-learning environment wrapping the traffic simulation for
    a single car agent: reset spawns cars, step runs one full origin-to-
    destination episode and returns (state, reward, done, debug)."""

    def __init__(self, n, graph, agent, dt, animate=False):
        """ initializes an environment for a car in the system
        :param n: int: number of cars to simulate
        :param graph: OGraph: road-network graph object (supplies fig/ax)
        :param agent: int: the ID of the car (agent)
        :param dt: simulation time step -- presumably seconds; TODO confirm
        :param animate: bool: if the environment is to be animated while learning
        """
        self.N = n
        self.num = None  # simulation-number tuple, set by reset()
        self.graph = graph
        # the OGraph object carries its own matplotlib figure and axis
        self.fig, self.ax = self.graph.fig, self.graph.ax
        self.agent = agent
        self.dt = dt
        self.animate = animate
        self.animator = None  # created in reset() when animate is True
        self.axis = self.ax.axis()  # figure axis bounds
        self.route_times = []  # per-episode route time of the agent (see step())
        # initialization strategies; alternates kept commented for reference
        self.car_init_method = sim.init_random_node_start_location
        # self.car_init_method = sim.init_culdesac_start_location
        self.light_init_method = sim.init_traffic_lights
        # self.car_init_method = convergent_learner.init_custom_agent
        # self.light_init_method = convergent_learner.init_custom_lights
        self.cars_object = Cars(self.car_init_method(self.N, self.graph), self.graph)
        self.lights_object = TrafficLights(
            self.light_init_method(self.graph, prescale=40), self.graph)
        self.high = 10  # reward granted when the shortest route is found (step())
        self.low = 2  # NOTE(review): unused in the visible code -- confirm
        self.shortest_route_thresh = 5  # episodes before shortest-route check applies

    def reset(self, num):
        """ resets the environment
        :param num: tuple: int, int
        :return state: int -- index of the single True flag in the state view
        """
        # initialize cars every reset
        # NOTE(review): __init__ passes self.graph to car_init_method but this
        # passes self.axis -- confirm which signature the init method expects
        init_cars = self.car_init_method(self.N, self.axis)
        self.cars_object = Cars(init_state=init_cars, graph=self.graph)
        stateview = self.refresh_stateview()
        state = stateview.determine_state()[0]
        # the state index is the position of the single True flag
        state = state.index(True)
        if self.animate:
            # init animator so it tracks the freshly created cars object
            self.num = num
            self.animator = Animator(fig=self.fig,
                                     ax=self.ax,
                                     cars_object=self.cars_object,
                                     lights_object=self.lights_object,
                                     num=self.num)
        return state

    def refresh_stateview(self):
        """ this function prepares a fresh depiction of what state the car is in
        :return stateview: nav.StateView built from the current cars/lights state
        """
        stateview = nav.StateView(graph=self.graph,
                                  car_index=self.agent,
                                  cars=self.cars_object.state,
                                  lights=self.lights_object.state)
        return stateview

    def initialize_custom_reset(self, alternate_route):
        """ resets the environment with a custom route for the agent
        :param alternate_route: list: list of alternate route nodes for car agent
        :return state: int -- index of the agent's initial state
        """
        # initialize the car and light state objects, forcing the agent onto
        # the alternate route
        init_car_state = self.car_init_method(self.N,
                                              self.axis,
                                              car_id=self.agent,
                                              alternate_route=alternate_route)
        self.cars_object = Cars(init_state=init_car_state, axis=self.axis)
        if self.animate:
            # init animator (rebuilt so it tracks the new cars object)
            self.animator = Animator(fig=self.fig,
                                     ax=self.ax,
                                     cars_object=self.cars_object,
                                     lights_object=self.lights_object,
                                     num=self.num)
        stateview = self.refresh_stateview()
        state = stateview.determine_state()[0]
        state = state.index(True)  # index of the single True flag
        return state

    def step(self, action, num):
        """ This function runs a full simulation of a car from origin to destination
        (if action, then use the alternate route)
        :param action: int: 0 or 1
        :param num: tuple: the simulation number out of the total number of simulations
        :return new_state, reward, done, _: the end of the return is free to contain debugging info
        """
        debug_report = []
        if self.animate:
            self.animator.reset(self.num)
        stateview = self.refresh_stateview()
        state, new_route, new_xpath, new_ypath = stateview.determine_state()
        if action:
            # action 1: re-seed the agent onto the alternate route
            new_state = self.initialize_custom_reset(
                alternate_route=(new_route, new_xpath, new_ypath))
        else:
            new_state = state.index(True)
        # run the simulation until the agent reaches its destination
        arrived = False
        i = 0
        while not arrived:
            arrived = self.simulation_step(i)
            i += 1
        # record how long the agent's route took this episode
        route_time = self.cars_object.state.loc[self.agent]['route-time']
        self.route_times.append(route_time)
        # TODO: need new way of identifying shortest route time.
        if len(self.route_times) < self.shortest_route_thresh:
            # too few episodes recorded to judge shortest-route convergence
            shortest_route_found_reward = 0
            done = False
        elif np.isclose(0,
                        self.route_times[-1] - np.min(self.route_times),
                        atol=5 * self.dt).all():
            """
            If the route time achieved after the simulation is within 5 x dt second
            of the minimum time achieved. Define this environment condition as having
            found the shortest route (locally).
            """
            shortest_route_found_reward = self.high
            done = True
        else:
            shortest_route_found_reward = 0
            done = False
        if num[0] < 1:
            # first episode: no previous route time to compare against
            reward = 0
        else:
            # reward = improvement over the previous episode's route time
            # (plus the shortest-route bonus), clamped at zero
            time_delta = self.route_times[num[0] - 1] - self.route_times[
                num[0]] + shortest_route_found_reward
            if time_delta > 0:
                reward = time_delta
            else:
                reward = 0
        return new_state, reward, done, debug_report

    def simulation_step(self, i):
        """ make one step in the simulation
        :param i: simulation step
        :return arrived: bool -- True once the agent has reached end of route
        """
        frontview = nav.FrontView(self.cars_object.state.loc[self.agent],
                                  self.graph)
        end_of_route = frontview.end_of_route()
        if not end_of_route:
            if self.animate:
                # NOTE(review): presumably animator.animate advances lights and
                # cars itself, since the explicit updates below are skipped -- confirm
                self.animator.animate(i)
            else:
                self.lights_object.update(self.dt)
                self.cars_object.update(self.dt, self.lights_object.state)
            arrived = False
        else:
            arrived = True
        return arrived
from turtle import Screen, Turtle
from player import Player
from cars import Cars
from score import Score
import time

# Setting up the screen (manual refresh: tracer(0) + sc.update() in the loop)
sc = Screen()
sc.tracer(0)
sc.setup(width=600, height=600)

# NOTE(review): unused in the visible span -- presumably applied to car speed
# further down; confirm
difficulty_increase = 1.25

# Instantiating the Cars, Player, and Scoreboard
player = Player()
score = Score()
cars = Cars()

sc.listen()
sc.onkey(player.move_up, 'Up')

playing = True
while playing:
    time.sleep(0.1)  # ~10 frames per second
    sc.update()
    cars.move_cars()
    # Checking for collision
    for car in cars.cars:
        if player.distance(car) < 20:
            playing = False
            score.goto(0, 0)
            # NOTE(review): snippet truncated mid-call -- remaining write()
            # arguments are not visible
            score.write("YOU LOSE",
# Game loop fragment: spawns a new car every 6th tick, moves all cars, and
# ends the game on collision with the player.
# NOTE(review): `screen`, `Scoreboard`, `Player`, `Cars` and `time` are
# defined/imported earlier in this file, outside the visible span.
screen.tracer(0)  # manual refresh via screen.update()
screen.setup(width=600, height=600)

scoreboard = Scoreboard()
player = Player()
car_list = []  # all cars currently on screen
loop_count = 0

screen.listen()
screen.onkey(player.move, "Up")

is_game_on = True
while is_game_on:
    loop_count += 1
    # spawn a new car every 6th loop iteration, scaled to the current score
    if loop_count % 6 == 0:
        new_car = Cars()
        new_car.increase_speed(scoreboard.score)
        car_list.append(new_car)
    for car in car_list:
        car.move()
        if car.distance(player) < 25:
            scoreboard.game_over()
            is_game_on = False
    time.sleep(0.1)
    screen.update()
    if player.level_up():
        # NOTE(review): this rebinds the local `new_car`, not `car_list`;
        # presumably `car_list = []` (clear cars on level-up) was intended -- confirm
        new_car = []
    screen.update()