def required_model(v: int, n: int, delta: float, train_file: str, test_file: str):
    """
    Run the required model with the given parameters.
    :param v: Vocabulary choice
    :param n: ngram choice
    :param delta: Smoothing choice
    :param train_file: Path to training data
    :param test_file: Path to testing data
    :return: DataFrame of classification results
    """
    validate_params(v, n, delta, train_file, test_file)
    vocab_size = get_vocab_size(v)
    ngrams = process_train_data(v, n, delta, vocab_size, train_file)

    test_data = pd.read_csv(test_file, delimiter='\t',
                            names=[DF_COLUMN_ID, DF_COLUMN_NAME, DF_COLUMN_LANG, DF_COLUMN_TWEET])
    transform_to_vocab(test_data, v)

    print("Running model against provided testing data.")
    results = get_test_results(test_data, ngrams, vocab_size, n)
    generate_trace_file(v, n, delta, results)
    print("Final results generated")
    print(results)

    print("Evaluating classifier with parameters: [vocabulary = {}, ngram size = {}, delta = {}]".format(v, n, delta))
    evaluate_results(results, v, n, delta)
    return results
def lidstone(v: int, n: int, gamma: float, train_file: str, test_file: str):
    """
    Provides Lidstone-smoothed scores.

    In addition to initialization arguments from BaseNgramModel, also
    requires a number by which to increase the counts, gamma.
    :param v: Vocabulary choice
    :param n: ngram choice
    :param gamma: Smoothing choice
    :param train_file: Path to training data
    :param test_file: Path to testing data
    :return:
    """
    validate_params(v, n, gamma, train_file, test_file)

    # Process train data
    train_data = pd.read_csv(train_file, delimiter='\t',
                             names=[DF_COLUMN_ID, DF_COLUMN_NAME, DF_COLUMN_LANG, DF_COLUMN_TWEET])
    train_data.drop(labels=[DF_COLUMN_ID, DF_COLUMN_NAME], inplace=True, axis=1)
    transform_to_vocab(train_data, v)
    train_data[DF_COLUMN_TWEET] = train_data[DF_COLUMN_TWEET].map(
        lambda tweet: tokenize(tweet, LIDSTONE_TOKENIZE_SPAN))

    # Train one model per language
    models_by_lang = {}
    for language, tweets in train_data.groupby(DF_COLUMN_LANG)[DF_COLUMN_TWEET]:
        tweet_list = tweets.tolist()
        train_ngrams, padded_vocab = padded_everygram_pipeline(n, tweet_list)
        model = Lidstone(gamma=gamma, order=n)
        model.fit(train_ngrams, padded_vocab)
        models_by_lang[language] = model

    # Process test data
    test_data = pd.read_csv(test_file, delimiter='\t',
                            names=[DF_COLUMN_ID, DF_COLUMN_NAME, DF_COLUMN_LANG, DF_COLUMN_TWEET])
    transform_to_vocab(test_data, v)
    test_data[DF_COLUMN_TWEET] = test_data[DF_COLUMN_TWEET].map(
        lambda tweet: tokenize(tweet=tweet, span=n, extended_func=True))
    test_data[DF_COLUMN_TWEET] = test_data[DF_COLUMN_TWEET].map(
        lambda tweet_ngrams: [[modify_padding(ngram_char) for ngram_char in list(ngram)]
                              for ngram in tweet_ngrams])

    # Calculate scores
    test_data[DF_COLUMN_TWEET] = test_data[DF_COLUMN_TWEET].map(
        lambda tweet_ngrams: argmax(models_by_lang, tweet_ngrams))
    score_lang_df = pd.DataFrame(test_data[DF_COLUMN_TWEET].tolist(),
                                 columns=[DF_COLUMN_SCORE, DF_COLUMN_GUESS])

    # Finalize results
    results = prepare_result_df(test_data, score_lang_df)
    results = finalize_result_df(results)

    # Evaluation stats
    print("Evaluating Lidstone with parameters: [vocabulary = {}, ngram size = {}, delta = {}]".format(v, n, gamma))
    format_results(results)
def export():
    """
    Return a csv file with database entries.

    Combine query params into primary key pairs, make the db query,
    return a populated csv file and delete it afterwards.
    """
    user_ids_param = request.args.get('user_ids', '')
    attrs_param = request.args.get('attributes', '')
    if not validate_params(user_ids_param, attrs_param):
        raise InvalidUsage('Get params are wrong', status_code=400)

    pk_pairs = extract_user_id_attr_pairs(user_ids_param, attrs_param)
    entries = get_entries_from_database(pk_pairs)
    csv_filename, csv_file_location = create_entries_csv(entries)

    response = send_from_directory(csv_file_location, csv_filename)
    response.headers['Content-Type'] = 'text/csv'
    response.headers['Content-Disposition'] = \
        "attachment; filename=%s" % csv_filename

    @after_this_request
    def remove_file(response):
        """Delete the created file after the request is finished."""
        csv_path = os.path.join(csv_file_location, csv_filename)
        os.remove(csv_path)
        return response

    return response, 200
def get(self):
    if not validate_params(self, ('data', 'user')):
        return

    # Check we have the specified user in the database
    (res, err_dict) = yield tornado.gen.Task(self.db.users.find_one,
                                             {'name': self.get_argument('user')})
    error = err_dict.get('error', None)
    if error:
        self.set_status(500)
        self.write("Internal Server Error. %s" % error)
        self.finish()
        return

    user = res[0] if len(res) > 0 else None
    if not user:
        self.set_status(403)
        self.write("Forbidden.")
        self.finish()
        return

    # TWISTED WITHIN TORNADO
    def translate_callback(result):
        self.write(str(result))
        self.finish()

    def translate_errback(result):
        self.set_status(500)
        self.write(str(result))
        self.finish()

    # Connect to translator service using twisted :-)
    from txtranslator import TranslatorClient
    client = TranslatorClient(SVC_HOST, SVC_PORT)
    d = client.translate2(str(self.get_argument('data')))
    d.addCallback(translate_callback)
    d.addErrback(translate_errback)
def get(self):
    if not validate_params(self, ('data', 'user')):
        return

    def translation_callback(translator, output_str):
        if translator.error:
            self.set_status(500)
            self.write("Internal Server Error. %s" % translator.error)
        else:
            self.write(output_str)
        self.finish()

    def getuser_callback(user, error):
        if error:
            self.set_status(500)
            self.write("Internal Server Error. %s" % error)
            self.finish()
        elif not user:
            self.set_status(403)
            self.write("Forbidden.")
            self.finish()
        else:
            # Connect to translator service
            from tortranslator import TorTranslator
            trans = TorTranslator('localhost', 8010)
            import functools
            trans.translate(self.get_argument('data'),
                            functools.partial(translation_callback, trans))

    self.db.users.find_one({'name': self.get_argument('user')},
                           callback=getuser_callback)
def get(self):
    if not validate_params(self, ('user', 'data')):
        return

    # Check we have the specified user in the database
    (res, err_dict) = yield tornado.gen.Task(self.db.users.find_one,
                                             {'name': self.get_argument('user')})
    error = err_dict.get('error', None)
    if error:
        self.set_status(500)
        self.write("Internal Server Error. %s" % error)
        self.finish()
        return

    user = res[0] if len(res) > 0 else None
    if not user:
        self.set_status(403)
        self.write("Forbidden.")
        self.finish()
        return

    # Connect to translator service
    from tortranslator import TorTranslator
    translator = TorTranslator('localhost', 8010)
    output_str = yield tornado.gen.Task(translator.translate, self.get_argument('data'))
    if translator.error:
        self.set_status(500)
        self.write("Internal Server Error. %s" % translator.error)
        self.finish()
    else:
        self.write(output_str)
        self.finish()
def render_GET(self, request):
    error_str = validate_params(request, ('user', 'data'))
    if error_str:
        return error_str
    # self.handle_request(request)
    return NOT_DONE_YET
def render_GET(self, request):
    error_str = validate_params(request, ('user', 'data'))
    if error_str:
        return error_str

    mongo_defer = txmongo.MongoConnection()
    mongo_defer.addCallback(mongocxn_callback, request)
    mongo_defer.addErrback(mongocxn_errback, request)
    return NOT_DONE_YET
def render_GET(self, request):
    # Ensure URL params were passed
    error_str = validate_params(request, ('data',))
    if error_str:
        return error_str

    from txtranslator import TranslatorClient
    client = TranslatorClient(SVC_HOST, SVC_PORT)
    # Pass callback function only (not doing any error handling)
    client.translate1(request.args['data'][0], translator_callback, request)
    return NOT_DONE_YET
def get(self):
    if not validate_params(self, ('name',)):
        return

    def render_async():
        self.write("Hello world, %s (delayed)" % self.get_argument('name'))
        self.finish()

    delay = datetime.timedelta(days=0, seconds=2)
    # Can't pass args to the timeout callback for some reason.
    # Could have used instance vars and a method, but using a closure instead.
    tornado.ioloop.IOLoop.instance().add_timeout(delay, render_async)
def render_GET(self, request):
    error_str = validate_params(request, ('data',))
    if error_str:
        return error_str

    from txtranslator import TranslatorClient
    client = TranslatorClient(SVC_HOST, SVC_PORT)
    # Use a deferred to handle callback & errback
    d = client.translate2(request.args['data'][0])
    d.addCallback(translator_callback, request)
    d.addErrback(translator_errback, request)
    return NOT_DONE_YET
def render_GET(self, request):
    error_str = validate_params(request, ('data',))
    if error_str:
        return error_str

    from txtranslator import TranslatorClientFactory
    factory = TranslatorClientFactory()
    factory.input_str = request.args['data'][0]
    # Factory needs a callback (same as reactor.callLater in prev example)
    factory.set_callback(translator_callback, request)

    from twisted.internet import reactor
    reactor.connectTCP(SVC_HOST, SVC_PORT, factory)
    return NOT_DONE_YET
def get(self):
    if not validate_params(self, ('data',)):
        return

    def translation_callback(response):
        if t.error:
            self.set_status(500)
            self.write("Internal Server Error. %s" % t.error)
        else:
            self.write(response)
        self.finish()

    # Connect to translator service
    from tortranslator import TorTranslator
    t = TorTranslator('localhost', 8010)
    t.translate(self.get_argument('data'), translation_callback)
def get(self):
    if not validate_params(self, ('name',)):
        return

    def send_translation_data():
        stream.write(str(self.get_argument('name')))
        stream.read_until('.', translation_complete)

    def translation_complete(translated_data):
        self.write(translated_data)
        self.finish()

    # Connect to translator service.
    # Just like in Twisted I have to implement TranslationProtocol manually;
    # Tornado gives me an async iostream class, but nothing more.
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    stream = tornado.iostream.IOStream(s)
    stream.connect(("localhost", 8010), send_translation_data)
def intraday(event, context):
    logger.info('event : {event}'.format(event=event))
    path, query = validate_params(path=event.get('pathParameters'),
                                  query=event.get('queryStringParameters'))
    symbol = path.get('symbol')
    if not symbol:
        return failure(code=400, body='You should provide a symbol to your path parameters')

    logger.info('Getting stock intraday {symbol}'.format(symbol=symbol))
    try:
        result = Alphavantage().get_intraday(**path, **query)
        print(result)
        # if not result.get('Item'):
        #     return success(status_code=204, body=json.dumps(result.get('Item'), cls=DecimalEncoder))
    except Exception as e:
        return failure(body=e)
def get(self):
    if not validate_params(self, ('data',)):
        return

    # Check we have the specified user in the database
    self.db.users.find_one({'name': self.get_argument('user')},
                           callback=(yield tornado.gen.Callback("db_key")))

    # Connect to translator service
    from tortranslator import TorTranslator
    translator = TorTranslator('localhost', 8010)
    translator.translate(self.get_argument('data'),
                         callback=(yield tornado.gen.Callback('translator_key')))

    # NOTE: BOTH THE DB QUERY AND THE TRANSLATION HAVE ALREADY STARTED
    (db_res, db_err_dict) = yield tornado.gen.Wait("db_key")
    output_str = yield tornado.gen.Wait("translator_key")

    # Deal with db results
    db_error = db_err_dict.get('error', None)
    if db_error:
        self.set_status(500)
        self.write("Internal Server Error. %s" % db_error)
        self.finish()
        return

    user = db_res[0] if len(db_res) > 0 else None
    if not user:
        self.set_status(403)
        self.write("Forbidden.")
        self.finish()
        return

    # Deal with translator results
    if translator.error:
        self.set_status(500)
        self.write("Internal Server Error. %s" % translator.error)
        self.finish()
    else:
        self.write(output_str)
        self.finish()
def main(real_engine, setter=None, demo_mode=False):
    start_sumo("cfg/freeway.sumo.cfg", False)
    step = 0
    batch_num = 0
    veh_of_interest = "v.40"
    source_edges = ['source0', 'source1', 'source2', 'source3']
    # Vehicles removed from the simulation so far (referenced before the per-lane reset below)
    removed_vehicles = []
    edge_filter, vtype_filter = validate_params(edge_filter=PLAT_EDGES,
                                                vtype_filter=["vtypeauto"])
    pstate = NON

    while running(demo_mode, step, 4132):
        if demo_mode and step == 4132:
            start_sumo("cfg/freeway.sumo.cfg", False)
            step = 0

        flags_at_pois = {}

        if pstate == NON:
            lanes = lane_gen()
            add_vehicles(N_VEHICLES_GEN, batch_num, fromEdge=source_edges[0],
                         platoon_len=24, real_engine=False)
            batch_num = batch_num + 3
            ### Start from here
            add_vehicles(N_VEHICLES_GEN, batch_num, fromEdge=source_edges[1],
                         platoon_len=24, real_engine=False)
            batch_num = batch_num + 3
            add_vehicles(N_VEHICLES_GEN, batch_num, fromEdge=source_edges[2],
                         platoon_len=24, real_engine=False)
            batch_num = batch_num + 3
            add_vehicles(N_VEHICLES_GEN, batch_num, fromEdge=source_edges[3],
                         platoon_len=24, real_engine=False)
            batch_num = batch_num + 3
            traci.gui.setZoom("View #0", 4500)
            # f = open("/Users/mac/src/Simulations/mixedtraffic/gen_times", "w")
            # f.write("simulation step is {}".format(traci.simulation.getCurrentTime()))
            # f.close()
            # traci.gui.setZoom("View #1", 4500)
            topology = {}
            flags_at_pois = {}
            teleported_vehicles = []
            vstate = IDLE
            pstate = PLATOONING
            genStep = step
            print("Gen Step is : {}".format(genStep))
            print("pstate at gen is : {}".format(pstate))

        if pstate == PLATOONING and step == genStep + 1:
            veh_of_interest = traci.lane.getLastStepVehicleIDs('source0_3')[::-1][0]
            print("veh of interest is: {}".format(veh_of_interest))

        if pstate == PLATOONING:
            traci.simulationStep()

        if step > genStep + 10 and pstate == PLATOONING:
            print("veh of int is:{}".format(veh_of_interest))
            if veh_of_interest in traci.edge.getLastStepVehicleIDs("p12"):
                print("veh of interest at set location {}!!!!!!!!!!!".format(
                    traci.vehicle.getLaneID(veh_of_interest)))
                pstate = NON

            if step <= genStep + 14:
                set_lc_mode()
                print("LC Mode Set to FIX_LC")

            list_of_leaders = []
            for lane in lanes:
                if traci.lane.getLastStepVehicleIDs(lane) == []:
                    continue
                lane_vehicles = traci.lane.getLastStepVehicleIDs(lane)[::-1]

                # Remove vehicles that finished teleporting during the last step
                teleported_vehicles = traci.simulation.getEndingTeleportIDList()
                print("end teleported {}".format(teleported_vehicles))
                teleported_vehicles = [vehicle for vehicle in teleported_vehicles
                                       if vehicle not in removed_vehicles]
                print("Teleported {}".format(teleported_vehicles))
                removed_vehicles = []
                for vehicle in teleported_vehicles:
                    try:
                        traci.vehicle.remove(vehicle, REMOVE_PARKING)
                    except:
                        print("vehicle already removed")
                    else:
                        removed_vehicles.append(vehicle)
                        print("vehicle {} has been removed".format(vehicle))
                # teleported_vehicles = [vehicle for vehicle in end_teleport_vehicles
                #                        if vehicle in teleported_vehicles and vehicle not in removed_vehicles]

                if lane_vehicles != []:
                    planes = sorted_planes(lane_vehicles, lane, removed_vehicles)
                    if planes != []:
                        for plane in planes:
                            topology = plane.topo_contsructor()
                            topology = plane.pla_speed_spacing(topology, states)
                            # print("Topology at platSpeedSpacing is : {}".format(topology))
                            communicate(topology)
                            set_arrived_free(ARR_EDGES)
                        for plane in planes:
                            # print("states are {}".format(states))
                            if traci.vehicle.getRouteID(plane.plane_leader()).split("_") == '0':
                                continue
                            if plane.near_flag():
                                leader = plane.plane_leader()
                                print("Veh {} looking for flag".format(leader))
                                flag_data = look_for_flags(leader, pois, flags_at_pois, step)
                                if flag_data != None:
                                    if flag_data[0] == True and plane.safe_to_cl():
                                        print("Veh {} has found a flag, changing lanes now".format(leader))
                                        plane.move_to_next_best_lane(step)
                                        flags_at_pois[leader]['state'].append('completed')
                                        flags_at_pois[leader]['poitype'].append(flag_data[1])
                                    elif flag_data[0] == True and plane.safe_to_cl() == False:
                                        print(plane.safe_to_cl())
                                        # pdb.set_trace()
                                        obstruction = plane.plane_obstructed(states, obstructors)
                                        if obstruction == True:
                                            plane.obst_overtaken(obstructors, states)
                                            plane.remove_obstructors()
                                        # pdb.set_trace()

        traci.simulationStep()
        print("step is : {}".format(step))
        print("Current time is :{}".format(traci.simulation.getCurrentTime()))
        print("pstate is : {}".format(pstate))
        step += 1

    traci.close()
def main():
    sumo_binary = "sumo-gui" if args.gui else "sumo"
    utils.start_sumo(sumo_binary, args.configuration_file, False)
    edge_filter, vtype_filter = utils.validate_params(args.edge_filter, args.vtype_filter)
    step = 0
    platoons = []

    while utils.running(args.demo, step, args.max_step):
        traci.simulationStep()

        vehicles = utils.retrieve_vehicles(edge_filter)
        cacc_vehicles = utils.filter_cacc_vehicles(vehicles, vtype_filter)
        simulation_vehicles = traci.vehicle.getIDList()

        for vehicle in cacc_vehicles:
            if not platooning.in_platoon(platoons, vehicle):
                platoons.append(platooning.Platoon([vehicle],
                                                   desired_gap=args.desired_gap,
                                                   safe_gap=args.safe_gap))

        teleported_vehicles = traci.simulation.getEndingTeleportIDList()
        for vehicle in teleported_vehicles:
            for platoon in platoons:
                if vehicle in platoon:
                    platoon.remove_vehicle(platoon.index_of(vehicle))
                    break

        for platoon in platoons:
            # Remove platoons with vehicles that have left the simulation
            if not platoon.all_members_are_in(simulation_vehicles):
                platoons.pop(platoons.index(platoon))
                continue
            platoon.look_for_splits()
            if platoon.leader_wants_to_leave(edge_filter):
                platoon.leader_leave()

        merges, lane_changes = platooning.look_for_merges(
            platoons,
            max_distance=args.max_distance,
            max_platoon_length=args.platoon_length,
            edge_filter=edge_filter,
            max_relative_speed=args.relative_speed)

        new_platoons = []
        platoons_to_remove = set()
        for i in range(len(merges)):
            if merges[i] != -1:
                new_platoons.append(platooning.merge_platoons(platoons[i],
                                                              platoons[merges[i]],
                                                              lane_changes[i]))
                platoons_to_remove.add(platoons[i])
                platoons_to_remove.add(platoons[merges[i]])

        platoons.extend(new_platoons)
        platoons = [platoon for platoon in platoons if platoon not in platoons_to_remove]

        for platoon in platoons:
            platoon.update_desired_speed_and_lane()
            platoon.communicate()
            platoon.maneuver()

        step += 1

    traci.close()
def majority_vote_cl(v: int, n: int, delta: float, train_file: str, test_file: str,
                     min_token_freq: int = 1, max_token_freq: float = 1.0):
    """
    Entry point of program.
    :param max_token_freq: ignore terms that have a document frequency strictly higher than the given proportion.
    :param min_token_freq: ignore terms that have a document frequency strictly lower than the given threshold.
    :param v: Vocabulary choice
    :param n: ngram choice
    :param delta: Smoothing choice
    :param train_file: Path to training data
    :param test_file: Path to testing data
    :return: DataFrame of classification results
    """
    validate_params(v, n, delta, train_file, test_file)

    # Process data
    train_data = pd.read_csv(train_file, delimiter='\t',
                             names=[DF_COLUMN_ID, DF_COLUMN_NAME, DF_COLUMN_LANG, DF_COLUMN_TWEET])
    test_data = pd.read_csv(test_file, delimiter='\t',
                            names=[DF_COLUMN_ID, DF_COLUMN_NAME, DF_COLUMN_LANG, DF_COLUMN_TWEET])
    lang_mapping, inv_lang_mapping = encode_class_labels(train_data[DF_COLUMN_LANG])
    train_data[DF_COLUMN_LANG] = train_data[DF_COLUMN_LANG].map(lang_mapping)
    custom_transform_to_vocab(train_data, v)
    custom_transform_to_vocab(test_data, v)

    # Prepare features (ngrams and their tf-idf weights)
    tfidf = TfidfVectorizer(analyzer='char_wb', lowercase=False, ngram_range=(n, n),
                            min_df=min_token_freq, max_df=max_token_freq)
    features = tfidf.fit_transform(train_data[DF_COLUMN_TWEET]).toarray()
    labels = train_data[DF_COLUMN_LANG]

    # Define estimators
    svc = LinearSVC()
    svc_calibrated = CalibratedClassifierCV(svc)
    lr = LogisticRegression(multi_class='multinomial', max_iter=500)
    estimators = [('lr', lr), ('svc_calibrated', svc_calibrated)]

    # Train model
    voting_classifier = VotingClassifier(estimators=estimators, voting='soft', n_jobs=-1)
    voting_classifier.fit(features, labels)

    # Calculate scores
    features_test = tfidf.transform(test_data[DF_COLUMN_TWEET])
    guess = voting_classifier.predict(features_test)
    scores = voting_classifier.predict_proba(features_test)

    # Finalize results
    results = prepare_result_df(test_data)
    results[DF_COLUMN_SCORE] = scores
    results[DF_COLUMN_GUESS] = guess
    results[DF_COLUMN_GUESS] = results[DF_COLUMN_GUESS].map(inv_lang_mapping)
    results = finalize_result_df(results)
    generate_trace_file(v, n, delta, results)

    # Evaluation stats
    print("\nEvaluating Majority Vote classifier with parameters: "
          "[vocabulary = {}, ngram size = {}, delta = {}]".format(v, n, delta))
    evaluate_results(results, v, n, delta)
    return results
def main(real_engine, setter=None, demo_mode=False):
    global genStep
    # start_sumo("cfg/freeway.sumo.cfg", False)
    step = 0
    batch_num = 0
    source_edges = ['source0', 'source1', 'source2', 'source3']
    removed_vehs = []
    all_arrives = []
    all_arrived = []
    edge_filter, vtype_filter = validate_params(edge_filter=PLAT_EDGES,
                                                vtype_filter=["vtypeauto"])
    pstate = INSERT

    while running(demo_mode, step, 2200):
        if demo_mode and step == 2200:
            start_sumo("cfg/freeway.sumo.cfg", False)
            step = 0

        print("step is : {}".format(step))
        print("Current time is :{}".format(traci.simulation.getCurrentTime()))
        print("pstate is : {}".format(pstate))

        if pstate == INSERT:
            add_vehicles(N_VEHICLES_GEN, batch_num, list_of_leaders,
                         fromEdge=source_edges[0], platoon_len=24, real_engine=False)
            batch_num = batch_num + 3
            add_vehicles(N_VEHICLES_GEN, batch_num, list_of_leaders,
                         fromEdge=source_edges[1], platoon_len=24, real_engine=False)
            batch_num = batch_num + 3
            add_vehicles(N_VEHICLES_GEN, batch_num, list_of_leaders,
                         fromEdge=source_edges[2], platoon_len=24, real_engine=False)
            batch_num = batch_num + 3
            add_vehicles(N_VEHICLES_GEN, batch_num, list_of_leaders,
                         fromEdge=source_edges[3], platoon_len=24, real_engine=False)
            batch_num = batch_num + 3
            traci.gui.setZoom("View #0", 4500)
            all_arrived = arrived_vehz(all_arrives)
            all_arrives = all_arrived
            traci.simulationStep()
            pstate = PLATOONING
            genStep = step

        if pstate == INSERT2:
            inserted = add_vehiclez(N_VEHICLES_GEN, batch_num, list_of_leaders, priorityswitches,
                                    fromEdge=source_edges[0], platoon_len=24, real_engine=False)
            batch_num = batch_num + inserted
            inserted = add_vehiclez(N_VEHICLES_GEN, batch_num, list_of_leaders, priorityswitches,
                                    fromEdge=source_edges[1], platoon_len=24, real_engine=False)
            batch_num = batch_num + inserted
            inserted = add_vehiclez(N_VEHICLES_GEN, batch_num, list_of_leaders, priorityswitches,
                                    fromEdge=source_edges[2], platoon_len=24, real_engine=False)
            batch_num = batch_num + inserted
            inserted = add_vehiclez(N_VEHICLES_GEN, batch_num, list_of_leaders, priorityswitches,
                                    fromEdge=source_edges[3], platoon_len=24, real_engine=False)
            batch_num = batch_num + inserted
            traci.gui.setZoom("View #0", 4500)
            all_arrived = arrived_vehz(all_arrives)
            all_arrives = all_arrived
            traci.simulationStep()
            topology = {}
            pstate = PLATOONING
            genStep = step

        if pstate == PLATOONING and step >= genStep + 110:
            switches = []
            for lane in range(1, 4):
                upstreamcheck = upstream_check(batch_num, lane, all_arrived)
                downstreamcheck = downstream_check(batch_num, lane, all_arrived)
                switches.append(upstreamcheck)
                switches.append(downstreamcheck)
            priorityswitches = switching_logic(switches)
            if 'GREEN' in priorityswitches:
                pstate = INSERT2

        if step > genStep + 1 and pstate == PLATOONING:
            flag_planes = []
            lanes = lane_gen()
            for lane in lanes:
                if not traci.lane.getLastStepVehicleIDs(lane):
                    continue
                lane_vehicles = [veh for veh in traci.lane.getLastStepVehicleIDs(lane)[::-1]
                                 if veh not in removed_vehs]
                planes = sorted_planes(lane_vehicles, lane)
                for plane in planes:
                    if plane.near_flag():
                        flag_planes.append(plane)
                    teleported_vehicles = traci.simulation.getEndingTeleportIDList()
                    for vehicle in teleported_vehicles:
                        try:
                            traci.vehicle.remove(vehicle, REMOVE_PARKING)
                        except:
                            print(f"Veh {vehicle} has been removed already")
                        else:
                            print(f"Veh {vehicle} has been removed")
                            removed_vehs.append(vehicle)
                    topology = plane.topo_contsructor(removed_vehs)
                    topology = plane.pla_speed_spacing(topology)
                    communicate(topology)

            all_arrived = arrived_vehz(all_arrives)
            all_arrives = all_arrived

            for plane in flag_planes:
                flag_n_poi_index = plane.look_for_flags(pois, step)
                if flag_n_poi_index[0] == True:
                    plane.move_to_next_best_lane(step, flag_n_poi_index)
                    plane.set_arrived_free()

        traci.simulationStep()
        remove_parked(removed_vehs)
        teleported_vehicles = traci.simulation.getEndingTeleportIDList()
        for vehicle in teleported_vehicles:
            try:
                traci.vehicle.remove(vehicle, REMOVE_PARKING)
            except:
                print(f"Veh {vehicle} has been removed already")
            else:
                print(f"Veh {vehicle} has been removed")
                removed_vehs.append(vehicle)

        step += 1

    traci.close()
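None of the examples above include the validate_params helper they call, and its shape clearly differs between projects (some variants return an error string, some a boolean, some a tuple of parsed values). As a purely hypothetical sketch, not taken from any of these codebases, a Tornado-style variant that checks for required query arguments might look like this:

# Hypothetical sketch only; the real helpers used above differ per project.
# Verifies that each required query argument is present on a Tornado handler;
# on a missing argument it writes a 400 response and reports failure.
def validate_params(handler, required):
    missing = [name for name in required
               if handler.get_argument(name, default=None) is None]
    if missing:
        handler.set_status(400)
        handler.write("Missing parameter(s): %s" % ", ".join(missing))
        handler.finish()
        return False
    return True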