def doItAll(region): if Regions.isRegion(region): cmd = shlex.split( command %( Env.mtcwDir, "BatchRegionTiler.py", region) ) subprocess.Popen(cmd).wait() cmd = shlex.split( command %( Env.mtcwDir, "BatchRegionMerger.py", region) ) subprocess.Popen(cmd).wait() if len(Regions.getRegionFilterList(region)) == len(open(Env.mergedTileDir+region+"/mergeorder.txt").readlines()): cmd = shlex.split( command %( Env.mtcwDir, "BatchRegionOptimizer.py", region) ) subprocess.Popen(cmd).wait() cmd = shlex.split( command %( Env.mtcwDir, "AutoGemf.py", region) ) subprocess.Popen(cmd).wait() if os.path.isdir(Env.mergedTileDir+region): shutil.rmtree(Env.mergedTileDir+region) if os.path.isdir(Env.mergedTileDir+region+".opt"): shutil.rmtree(Env.mergedTileDir+region+".opt") cmd = shlex.split( command %( Env.mtcwDir, "GenerateMxRegionData.py", region) ) subprocess.Popen(cmd).wait() else: print len(Regions.getRegionFilterList(region)) print len(open(Env.mergedTileDir+region+"/mergeorder.txt").readlines()) print "an error occurred"
def generateRegion(region):  # first let's see if the gemf is there
    """Write the <region>.zxml chart-catalog archive for an existing gemf.

    Builds an RML/XML document listing every chart in the region (file,
    name, scale, depth units, zoom, plus the outline as element data),
    pretty-prints it with the external `tidy` tool, zips it to
    <gemfDir>/<region>.zxml and deletes the plain-XML intermediate.

    NOTE(review): assumes <gemfDir><region>.gemf already exists —
    os.path.getsize() below raises otherwise; confirm callers guarantee it.
    """
    print "generating data for " + region
    filter = Regions.getRegionFilterList(region)  # NOTE: shadows the builtin `filter`
    bo = BsbOutlines.BsbOutlines(Env.bsbDir, filter)
    xmlFname = region+".xml"
    xmlPath = Env.gemfDir+"/"+xmlFname
    zxmlPath = Env.gemfDir+"/"+region+".zxml"
    xml = XMLWriter(codecs.open(xmlPath, "w", "utf-8"))
    zxml = zipfile.ZipFile(zxmlPath, "w", zipfile.ZIP_DEFLATED)
    gemfFile = Env.gemfDir+region+".gemf"
    xml.start("rml", version='1.0')
    # The region element advertises the gemf download size in bytes.
    xml.start("region", description=Regions.getRegionDescription(region), bytes = str(os.path.getsize(gemfFile)))
    for kapfile in bo.getkeys():
        # One <chart> element per KAP file, outline coordinates as text data.
        xml.start("chart", file=kapfile, name=bo.getname(kapfile), scale=str(bo.getscale(kapfile)), depths=bo.getdepthunits(kapfile), zoom=str(bo.getzoom(kapfile)));
        xml.data(bo.getoutline(kapfile))
        xml.end("chart")
    xml.end("region")
    xml.end("rml")
    xml.close(1)
    # Pretty-print the XML in place with external tidy (-i indent, -m modify, -q quiet).
    cmd = "tidy -xml -imq " + xmlPath
    os.popen(cmd)
    # Zip the tidied XML and drop the uncompressed intermediate.
    zxml.write(xmlPath, xmlFname)
    os.remove(xmlPath)
    zxml.close()
def generateRegion(region):  # first let's see if the gemf is there
    """Write the <region>.zdat SQL-script archive describing the region.

    Emits MXMARINER DBVERSION:3 SQL into <gemfDir>/<region>.sql, zips it to
    <gemfDir>/<region>.zdat and removes the plain .sql intermediate.

    NOTE(review): relies on module-level globals not visible in this chunk —
    `custom` (flag selecting the custom-region SQL templates), the SQL
    template strings `strcustom0`/`strcustom1`/`str0`/`str1`/`str2`, and
    `epoch` (presumably an install/update timestamp — confirm).
    Original line breaks were lost; indentation below is a reconstruction.
    """
    print "generating data for " + region
    filter = Regions.getRegionFilterList(region)  # NOTE: shadows the builtin `filter`
    bo = BsbOutlines.BsbOutlines(Env.bsbDir, filter)
    # bsbScales = BsbScales(Env.bsbDir, filter)
    # sortList = bsbScales.getKapsSortedByScale()
    # sortList.reverse()
    # mergeorder = ""
    # for item in sortList:
    #     mergeorder += item + ":"
    # mergeorder.rstrip(":")
    sqlFname = region+".sql"
    sqlPath = Env.gemfDir+"/"+sqlFname
    zdatPath = Env.gemfDir+"/"+region+".zdat"
    sqlf = codecs.open(sqlPath, "w", "utf-8")
    zdat = zipfile.ZipFile(zdatPath, "w", zipfile.ZIP_DEFLATED)
    #sqlf = open(Env.gemfDir+"/"+region+".bin", "wb")
    # Version header consumed by the MX Mariner importer.
    wrt = u"--MXMARINER-DBVERSION:3\n"
    #zdat.writestr( sqlFname, wrt)
    sqlf.write( wrt )
    if (custom):
        # Custom regions require the gemf to exist so its size can be recorded.
        gemfFile = Env.gemfDir+region+".gemf"
        if not os.path.isfile(gemfFile):
            print "gemf not ready:" + region
            sys.exit()
        else:
            bytes = os.path.getsize(gemfFile)  # NOTE: shadows the builtin `bytes`
        wrt = strcustom0 %(region)
        sqlf.write( wrt )
        #zdat.writestr( sqlFname, wrt)
        #[name], [description], [image], [size], [installeddate]
        wrt = strcustom1 %(region, Regions.getRegionDescription(region), region.lower().replace("_", ""), bytes, epoch)
        sqlf.write( wrt )
        #zdat.writestr( sqlFname, wrt)
    else:
        #wrt = str0 %(epoch, mergeorder, region)
        wrt = str0 %(epoch, region)
        sqlf.write( wrt )
        #zdat.writestr( sqlFname, wrt)
        wrt = str1 %(region)
        sqlf.write( wrt )
        #zdat.writestr( sqlFname, wrt)
    # One chart row per KAP file in the region.
    for kapfile in bo.getkeys():
        wrt = str2 %(region, kapfile, bo.getname(kapfile), bo.getupdated(kapfile), bo.getscale(kapfile), \
            bo.getoutline(kapfile), bo.getdepthunits(kapfile), bo.getzoom(kapfile));
        sqlf.write( wrt )
        #zdat.writestr( sqlFname, wrt)
    # Zip the SQL script and drop the uncompressed intermediate.
    sqlf.close()
    zdat.write(sqlPath, sqlFname)
    os.remove(sqlPath)
    zdat.close()
def renderRegion(region): mFilter = Regions.getRegionFilterList(region) #print mFilter tileDir = Regions.getRegionUnMergedTileDir(region) bsbDir = Regions.getRegionBsbDir(region) if mFilter != None: createTiles(bsbDir, tileDir, region, mFilter) else: print region + " does not exist"
def mainmenu():
    """Top-level interactive menu loop for the analytics console.

    Repeats until the user picks option 8 (or types "exit").  Options 1-6
    dispatch straight to their report modules; option 7 adds an employee and
    pauses; anything unrecognized prints an error and pauses.
    """
    # Simple choices that just invoke a report and come straight back.
    dispatch = {
        "1": Profits_Top.top_ten,        # Most Profitable Products
        "2": Profits_Bottom.bottom_ten,  # Least Profitable Products
        "3": Regions.menu,               # Region Insights
        "4": Category.menu,              # Sub-Category Insights
        "5": States.menu,                # State Insights
        "6": Top_Customers.top_customers,  # Top Customers
    }
    while True:
        print("\nOffice Solutions Data Analytics"
              "\nPlease make a selection from the menu below:")
        print("\n\tMain Menu:"
              "\n\t1 - Most Profitable Products"
              "\n\t2 - Least Profitable Products"
              "\n\t3 - Region Insights"
              "\n\t4 - Sub-Category Insights"
              "\n\t5 - State Insights"
              "\n\t6 - Top Customers"
              "\n\t7 - Add New Employee"
              "\n\t8 - Exit")
        selected = input("Choose a menu #: ").lower().strip()
        if selected in dispatch:
            dispatch[selected]()
        elif selected == "7":
            # Adding an employee pauses afterwards so the result is visible.
            New_Employee.add_employee()
            pause()
        elif selected == "8" or selected == "exit":
            break
        else:
            print("'" + selected + "' is not a valid menu selection. "
                  "Please enter a numerical value from 1-8.\n")
            pause()
def doTile2(self, kapPath, log, regiondir):
    """Convert one KAP chart to a VRT (map2gdal) and tile it (gdal_tiler).

    kapPath   -- path to the .KAP chart file
    log       -- open file object; receives stdout of the subprocesses
    regiondir -- target directory for the region's tile sets

    Exits the process if map2gdal fails to produce the expected .vrt.
    """
    # NOAA charts may need a corrected header; override file is keyed by the
    # chart's base name (extension stripped).
    header_override = Env.mtcwDir+"header_overrides/NOAA/"+os.path.basename(kapPath)[0:-4]
    if Regions._isNOAARegion(self.region) and os.path.isfile(header_override):
        override = CreateHeaderOverride.makeHeader(kapPath)
        command = "python %smap2gdal.py -q --header-file %s %s" %(Env.tilersToolsDir, override, kapPath)
    else:
        command = "python %smap2gdal.py -q %s" %(Env.tilersToolsDir, kapPath)
    print command
    thisone = subprocess.Popen(shlex.split(command), stdout=log)
    thisone.wait()
    #kstz = KapScaleToZoom(kapPath)
    # map2gdal writes its VRT next to the KAP, same base name.
    vrtPath = kapPath[0:-4]+".vrt"
    #vrtPath = kapPath.rstrip(".KAP")+".vrt"
    print vrtPath, "\n"
    if os.path.isfile(vrtPath):
        command = "python %sgdal_tiler.py --overview-resampling=bilinear --base-resampling=bilinear -t " %(Env.tilersToolsDir) + regiondir + \
            " -c " + vrtPath + " -z " + str(getKapZoom(kapPath))
        # Skip charts whose .zxy tile dir already exists from a prior run.
        destdir = regiondir + "/" + os.path.basename(kapPath)[0:-4]+".zxy"
        #print destdir
        if not os.path.isdir(destdir):
            print command
            thisone = subprocess.Popen(shlex.split(command), stdout=log)
            thisone.wait()
        else:
            print "this chart has already been tiled"
    else:
        print "Something went wrong creating vrt from: " + kapPath
        sys.exit()
def FetchByRegionCode(code):
    """Fetch all the shop records that belong to the region specified by the
    region code.  This is a common use-case internally within Oxfam.

    Args:
        code: string, the region code

    Returns:
        A list of dicts containing all the shop records that belong to the
        specified region (area), or None when the region code is unknown or
        the query fails.
    """
    result = None
    # While we could just do this in one SQL statement, we're going to use
    # the Regions module so that the code is more robust.  This can be
    # changed if required when we look at optimization.
    region = Regions.FetchByCode(code)
    if region is not None:
        try:
            Log.info(('SHOPS-Fetch-RegionCode:', 'Trying to grab data using regionCode/Id '))
            # Parameterized query: the region id comes from Regions, never
            # interpolated into the SQL string.
            query = "SELECT * FROM shop WHERE region_id = %s;"
            db.cursor.execute(query, (region["id"], ))
            result = db.cursor.fetchall()
            Log.info(('SHOPS-Fetch-RegionCode:', 'Successfully grabbed data'))
        except Error as e:
            # Log the failure and the query; result stays None.
            Log.error(('SHOPS-Fetch-RegionCode', e))
            Log.info(('SHOPS-Fetch-RegionCode:Querry:', query))
            Log.info(('SHOPS-Fetch-RegionCode:', 'Failed to grab data'))
    return result
def optimizeRegion(region): optDir = Regions.getRegionMergedTileDir(region) if os.path.isdir(optDir): command = "python %stiles_opt.py %s" % (Env.tilersToolsDir, optDir) print command thisone = subprocess.Popen(shlex.split(command)) thisone.wait() else: print "Region tiles don't exist... run BatchRegionMerger.py first"
def one_frame(frame, one_number, count, h):
    """Process one video frame of the plate-recognition pipeline.

    frame      -- BGR image (numpy array) to detect/annotate
    one_number -- accumulated plate readings for the car currently tracked
    count      -- consecutive frames without a detection so far
    h          -- frame height in pixels, used to position overlay text

    Returns (found_really_number, frame, car_number, count, one_number,
    flag_new_car, zone).  Original line breaks were lost; indentation below
    is a reconstruction.
    """
    state, really_number, number, cords, zone = ModelDetect.detect_number(
        frame)
    found_really_number = False  # means that we found a number and it's real
    if state:
        # Outline every detected plate region in blue.
        for c in cords:
            pts = np.array(c, np.int32)
            pts = pts.reshape((-1, 1, 2))
            cv2.polylines(frame, [pts], True, Blue, 2)
        if really_number:
            one_number.extend(
                number)  # list of readings for the current single car
            found_really_number = True
            cv2.putText(frame, str(number), (20, h - 30),
                        cv2.FONT_HERSHEY_SIMPLEX, 1, Blue, 2)
            log.debug(" Found REALLY number %s" % str(number))
        else:
            # Low-confidence reading: annotate in a lighter color, don't keep it.
            cv2.putText(frame, str(number), (20, h - 30),
                        cv2.FONT_HERSHEY_SIMPLEX, 1, LightSkyBlue, 2)
            log.debug(" Found NOT REALLY number %s" % str(number))
        count = 0  # a detection resets the "no car seen" counter
    else:
        count += 1
    car_number = 'no'
    if count < CADRS_TO_FIND_NEW_CAR:
        # Still tracking the same car.
        log.debug(' ONE_NUMBER is %s' % str(one_number))
        if len(one_number) >= MIN_CADRS_TO_DETECT:
            flag_new_car = 'ENOUGH_FRAMES_FOR_RECOGNITION'
            car_number = WrongNumbers.choose_number(one_number)
            reg = Regions.which_regions(car_number)
            # Region name may be non-ASCII, so render it via PIL, not cv2.
            font = ImageFont.truetype(FONT, 32)
            img_pil = Image.fromarray(frame)
            draw = ImageDraw.Draw(img_pil)
            draw.text((20, h - 150), str(reg), font=font,
                      fill=Red)  # fill=(0, 0, 255, 0)
            frame = np.array(img_pil)
            cv2.putText(frame, str(car_number), (20, h - 80),
                        cv2.FONT_HERSHEY_SIMPLEX, 1, Red, 2)
        else:
            flag_new_car = 'NOT_ENOUGH_FRAMES_FOR_RECOGNITION'
    elif count == CADRS_TO_FIND_NEW_CAR and len(
            one_number) >= MIN_CADRS_TO_DETECT:
        # The car just left the scene: finalize its number and reset.
        car_number = WrongNumbers.choose_number(one_number)
        flag_new_car = 'ENDING_FIND_THIS_CAR'  # and begin looking for a new car
        one_number.clear()
    else:
        one_number.clear()
        flag_new_car = 'NO_CARS'
    return found_really_number, frame, car_number, count, one_number, flag_new_car, zone
def createGemf(region): directory = Regions.getRegionMergedTileDir(region) + ".opt" if not os.path.isdir(directory): print "Region not ready... run BatchRegionOptimizer first" sys.exit() if not os.path.isdir(directory + "/gemf"): os.mkdir(directory + "/gemf") if os.path.isdir(directory + "/merge"): shutil.move(directory + "/merge", directory + "/gemf") else: print "you need to merge files first!" sys.exit() logfile = directory + "/gemflog.txt" log = open(logfile, "wb") command = "python %s/generate_efficient_map_file.py %s" % (Env.tilersToolsDir, directory + "/gemf") print "creating gemf..." print command thisone = subprocess.Popen(shlex.split(command), stdout=log) thisone.wait() log.close() os.rename(directory + "/gemf/map_data.gemf", Env.gemfDir + region + ".gemf")
def mergeRegion(region): print "merging: " + region mergeTiles(Regions.getRegionMergedTileDir(region), Regions.getRegionBsbDir(region), \ Regions.getRegionUnMergedTileDir(region), Regions.getRegionFilterList(region))
import collections
import logging.config

import Regions

regions_numbers = Regions.load_regions()
all_regions = regions_numbers.keys()

logging.config.fileConfig('logging.ini', disable_existing_loggers=False)
log = logging.getLogger(__name__)


def check(num):
    """Validate a single recognized plate (works for cars and lorries).

    Returns True when the characters match either layout AND the region
    suffix is a known region; returns None otherwise.
    """
    chars = list(num)
    if len(chars) != 8 and len(chars) != 9:
        return None
    # Passenger-car layout: letter, 3 digits, 2 letters, region digits.
    car_digits = ''.join(map(str, chars[1:4]))               # must be digits
    region_part = ''.join(map(str, chars[6:]))               # must be digits
    car_letters = ''.join(map(str, chars[0:1] + chars[4:6])) # must be letters
    # Truck/lorry layout: 2 letters then 4 digits.
    truck_letters = ''.join(map(str, chars[0:2]))            # must be letters
    truck_digits = ''.join(map(str, chars[2:6]))             # must be digits
    plate_ok = (car_digits.isdigit() and car_letters.isalpha()
                and region_part.isdigit()) or (
                    truck_letters.isalpha() and truck_digits.isdigit())
    if plate_ok and region_part in all_regions:
        return True
    return None
# NOTE(review): this collapsed chunk begins with a truncated fragment (the
# tail of the SQL-variant generateRegion: final chart writes and zdat close)
# fused with the script's __main__ entry point (region argument dispatch:
# ALL_NOAA / ALL_NGA / a region name / UPDATE).  Original line breaks and
# indentation were lost in extraction, so the code is kept verbatim rather
# than risk a wrong reconstruction.
bo.getoutline(kapfile), bo.getdepthunits(kapfile), bo.getzoom(kapfile)); sqlf.write( wrt ) #zdat.writestr( sqlFname, wrt) sqlf.close() zdat.write(sqlPath, sqlFname) os.remove(sqlPath) zdat.close() # rootlen = len(target_dir) + 1 # zdat.write(sqlPath, compress_type) # zdat.write(fn, fn[rootlen:]) if __name__== "__main__": if not sys.argv.__len__() == 2: print "You must supply a region:" Regions.printRegionList() #print "Or you can also do: ALL_NGA, ALL_NOAA, or PRINTARRAY" sys.exit() else: arg = sys.argv[1] if arg == "ALL_NOAA": generateNOAA() elif arg == "ALL_NGA": print "not yet implemented" elif Regions.isRegion(arg): generateRegion(arg) elif arg== "UPDATE": generateUpdate(); else: print "invalid argument", arg
# NOTE(review): this collapsed chunk is the tail of a doItAll variant
# (optimizer/gemf/cleanup/metadata steps plus the mergeorder sanity-check
# error branch) fused with the script's __main__ guard, which dispatches a
# single region argument to doItAll.  Original line breaks and indentation
# were lost in extraction, so the code is kept verbatim rather than risk a
# wrong reconstruction.
subprocess.Popen(cmd).wait() if len(Regions.getRegionFilterList(region)) == len(open(Env.mergedTileDir+region+"/mergeorder.txt").readlines()): cmd = shlex.split( command %( Env.mtcwDir, "BatchRegionOptimizer.py", region) ) subprocess.Popen(cmd).wait() cmd = shlex.split( command %( Env.mtcwDir, "AutoGemf.py", region) ) subprocess.Popen(cmd).wait() if os.path.isdir(Env.mergedTileDir+region): shutil.rmtree(Env.mergedTileDir+region) if os.path.isdir(Env.mergedTileDir+region+".opt"): shutil.rmtree(Env.mergedTileDir+region+".opt") cmd = shlex.split( command %( Env.mtcwDir, "GenerateMxRegionData.py", region) ) subprocess.Popen(cmd).wait() else: print len(Regions.getRegionFilterList(region)) print len(open(Env.mergedTileDir+region+"/mergeorder.txt").readlines()) print "an error occurred" if __name__== "__main__": if not sys.argv.__len__() == 2: print "You must supply a region:" Regions.printRegionList() sys.exit() else: doItAll(sys.argv[1])
# NOTE(review): module initialization for the nomeroff-net plate-recognition
# pipeline — extends sys.path to the nomeroff-net checkout, imports its
# detectors, loads region data, and begins constructing a TextDetector config
# dict.  The chunk is truncated mid-dict-literal, so the code is kept
# verbatim rather than risk a wrong reconstruction of the missing tail.
import sys # specify the path to nomeroff-net NOMEROFF_NET_DIR = os.path.abspath('../nomeroff-net/') sys.path.append(NOMEROFF_NET_DIR) # add path to search modules from NomeroffNet import RectDetector, TextDetector, OptionsDetector, \ Detector, textPostprocessing import Regions import WrongNumbers logging.config.fileConfig('logging.ini', disable_existing_loggers=False) log = logging.getLogger(__name__) all_regions = Regions.load_regions() rectDetector = RectDetector() optionsDetector = OptionsDetector() optionsDetector.load("latest") # Initialize text detector. textDetector = TextDetector({ "eu_ua_2004_2015": { "for_regions": ["eu_ua_2015", "eu_ua_2004"], "model_path": "latest" }, "eu_ua_1995": { "for_regions": ["eu_ua_1995"], "model_path": "latest"
# NOTE(review): one very large simulation driver, main(...), split across
# several collapsed chunks.  Flow (as far as the visible code shows): read
# customer demand CSV (taxi or artificial), read vehicle fleet CSV, build a
# Regions.Area from prediction/forecast CSVs when relocate_method != "NULL",
# then step the simulation (move relocating / dropoff / pickup vehicles,
# admit new requests, assign via FCFS or optimization, relocate), and finally
# write traveler/vehicle result CSVs and compute summary metrics.
# NOTE(review): chunk boundaries fall mid-statement and even mid-comment
# (e.g. "# decrease curb" / "time, of vehicles ..." spans a boundary), so the
# original line breaks cannot be reconstructed safely; the code is kept
# verbatim.  Re-split this function from the original file before editing.
# NOTE(review): demand/vehicle CSV column meanings (id, request time, pickup
# x/y, dropoff x/y, group size; id, start x/y, capacity) are inferred from
# the row unpacking below — confirm against the input-file headers.
def main(assign_int, relocat_int, t_max, time_step, opt_method, relocate_method, veh_speed, i_date, taxi=False, xyt_string="NULL", visualize=False, false_forecast_f=None): # Comment FD: # this part assumes 'i_run' is given in iso format, e.g. 2016-04-01 # this is necessary to use the correct forecasts # Response MH: # Works for me if relocate_method != "NULL": (sim_year, sim_month, sim_day) = [int(x) for x in i_date.split("-")] week = [ "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday" ] date = datetime.datetime(sim_year, sim_month, sim_day) day_number = date.weekday() weekday = week[day_number] ################################################################################################## # Input Information - Customer Demand ################################################################################################## # read in information about all customers if taxi: # file_str = "../Inputs/Taxi_Demand_Day" + str(i_run) + "_Sample.csv" file_str = "../Inputs/NYC_Taxi/Request_Data/" + str( i_date) + "__manhattan_yellow_taxi_requests_10percent.csv" else: file_str = "../Inputs/Artificial/Demand_Requests.csv" demand_file = open(file_str, 'r') demand_reader = csv.reader(demand_file) customers = [] count = 0 for i_row in demand_reader: count += 1 if count > 1: person_id = int(i_row[0]) request_time = int(i_row[1]) pick_x = float(i_row[2]) pick_y = float(i_row[3]) drop_x = float(i_row[4]) drop_y = float(i_row[5]) grp_size = int(i_row[6]) customers.append( Person.make_person(person_id, pick_x, pick_y, request_time, drop_x, drop_y, grp_size)) ################################################################################################## # Input Information - AV Initial Positions ################################################################################################## # read in information about all AVs if taxi: file_str2 = "../Inputs/Artificial/Vehicles_Taxi.csv" else: file_str2 = "../Inputs/Artificial/Vehicles_Taxi.csv" 
veh_file = open(file_str2, 'r') vehicle_reader = csv.reader(veh_file) av_fleet = [] cnt = 0 for j_row in vehicle_reader: cnt += 1 if cnt > 1: vehicle_id = int(j_row[0]) start_x = float(j_row[1]) start_y = float(j_row[2]) capacity = int(j_row[3]) veh_status = "idle" av_fleet.append( Vehicle.make_vehicle(vehicle_id, start_x, start_y, capacity, veh_status)) ################################################################################################## # Input Information - Regions/subAreas ################################################################################################## # Dandl ############ # create a list with all sub_area objects # and/or create a list with all subArea-time periods # it seems like you might already be reading in the files in Regions, but still need a list of all sub_area objects ############ # Comment FD: the idea is that the main area class does all the work and returns dictionaries for # 1) demand forecast # 2) vehicle availability # with the subarea_key as key of the respective dictionary and the respective quantity as value # the respective destination centers can be called by # area.sub_areas[subarea_key].relocation_destination # # read information of area depending on # a) xyt_string # b) false_forecast_f [optional, if not given, the real forecast value will be read] # format of xyt_string: 2x_8y_5min # format of xy_string: 2x_8y if relocate_method != "NULL": xy_string = "_".join(xyt_string.split("_")[:2]) prediction_csv_file = "../Inputs/NYC_Taxi/Prediction_Data/prediction_areas_{0}.csv".format( xy_string) # region_csv_file = "prediction_areas_{0}.csv".format(xy_string) if false_forecast_f: region_csv_file = false_forecast_f else: region_csv_file = "../Inputs/NYC_Taxi/Prediction_Data/manhattan_trip_patterns_{0}_only_predictions.csv".format( xyt_string) relocation_destination_f = "../Inputs/NYC_Taxi/Prediction_Data/demand_center_points_{0}.csv".format( xy_string) area = Regions.Area(region_csv_file, prediction_csv_file, 
relocation_destination_f) # Comment MH: # This is gonna work great, I think! Thanks! if visualize: file_string = '../Results_Rev2/visualize.csv' csv_viz = open(file_string, 'w') viz_writer = csv.writer(csv_viz, lineterminator='\n', delimiter=',', quotechar='"', quoting=csv.QUOTE_NONNUMERIC) ################################################################################################## # Simulation ################################################################################################## # Initialize Vectors i_person = 0 # Begin simulation new_t_max = int(1.2 * t_max) for t in range(0, new_t_max, time_step): # For visualization purposes veh_info_list = [] if visualize and t % 15 == 0 and t < 7200: for j_veh in av_fleet: veh_info_list.append([ t, j_veh.status, int(j_veh.position_x), int(j_veh.position_y) ]) viz_writer.writerow(veh_info_list) # display current statuses of AVs and customers # if t % 900 == 0: # len_idle = len(list(j_veh for j_veh in av_fleet if j_veh.status == "idle")) # len_relocate = len(list(j_veh for j_veh in av_fleet if j_veh.status == "relocating")) # len_pick = len(list(j_veh for j_veh in av_fleet if j_veh.status == "enroute_pickup")) # len_drop = len(list(j_veh for j_veh in av_fleet if j_veh.status == "enroute_dropoff")) # print("hour: " + str(t/3600) + " idle:" + str(len_idle) + " relocate:" + str(len_relocate) # + " pick:" + str(len_pick) + " drop:" + str(len_drop)) # len_unassigned = len(list(i for i in customers if i.status == "unassigned")) # len_assigned = len(list(i for i in customers if i.status == "assigned")) # len_inVeh = len(list(i for i in customers if i.status == "inVeh")) # len_served = len(list(i for i in customers if i.status == "served")) # print("unassigned:" + str(len_unassigned) + " assigned:" + str(len_assigned) # + " inVeh:" + str(len_inVeh) + " served:" + str(len_served)) for j_av in av_fleet: ################################################################################################## # decrease curb 
time, of vehicles that just finished pickup or drop-off if j_av.curb_time_remain > 0: j_av.curb_time_remain -= time_step ################################################################################################## # move relocating AVs elif j_av.status == "relocating": sub_area = j_av.next_sub_area Vehicle.move_vehicle_manhat(t, j_av, Person.Person, sub_area) ################################################################################################## # move en_route drop-off AVs elif j_av.status == "enroute_dropoff": person_drop = j_av.next_drop Vehicle.move_vehicle_manhat(t, j_av, person_drop, Regions.SubArea) # if AV's status changes, then the AV must have dropped off customer, and traveler status needs to change if j_av.status != "enroute_dropoff": Person.update_person(t, person_drop, j_av) ################################################################################################## # move en_route pickup AVs elif j_av.status == "enroute_pickup": person_pick = j_av.next_pickup Vehicle.move_vehicle_manhat(t, j_av, person_pick, Regions.SubArea) # if AV's status changes, then the AV must have picked up customer, and customer status needs to change if j_av.status != "enroute_pickup": Person.update_person(t, person_pick, j_av) ################################################################################################## # check if there are new requests if i_person < len(customers): while customers[i_person].request_time <= t: i_request = customers[i_person] Person.update_person(t, i_request, Vehicle.Vehicle) i_person += 1 if i_person == len(customers): break ################################################################################################### # Assign AVs to customer requests, or subAreas ################################################################################################### # Get the number of idle AVs and unassigned customers count_avail_veh = len( list(j for j in av_fleet if j.status in ["idle", "enroute_dropoff", 
"relocating"] and j.next_pickup.person_id < 0)) count_unassigned = len( list(i for i in customers if i.status == "unassigned")) # Assign using FCFS methods if "FCFS" in opt_method: if t % assign_int == 0 and count_unassigned > 0 and count_avail_veh > 0: AA.assign_veh_fcfs(av_fleet, customers, opt_method, t) if t % relocat_int == 0 and count_avail_veh > 0 and relocate_method != "NULL": # Dandl # Call relocation/rebalancing algorithm # Comment FD: give reference to area object instead of sub_areas to relocation algorithm # -> this allows use of area.getVehicleAvailabilitiesPerArea() and # area.getDemandPredictionsPerArea() # forecast needs to know which weekday it is # old: veh_subarea_assgn = AA.relocate_veh(av_fleet, area, relocate_method, t, weekday) AA.relocate_veh(av_fleet, area, relocate_method, t, weekday) # Response MH: # It makes sense to input area object instead of sub_area # I changed the code such that the processing/updating of vehicles and travelers (and subareas) # are called within the assignment (and relocation) algorithm - this is why I commented out code below # # Dandl # # Need to process sub_Areas, and vehicles that are now relocating # # Comment FD: veh_subarea_assgn is list of (vehicle_obj, subArea_obj) tuples # for [j_vehicle, l_subarea] in veh_subarea_assgn: # temp_veh_status = "relocating" # Vehicle.update_vehicle(t, Person.Person, j_vehicle, l_subarea, temp_veh_status) ################################################################################################### # Assign using Optimization-based methods else: # Every X seconds assign customers in the waiting queue to an AV if t % assign_int == 0 and count_unassigned > 0 and count_avail_veh > 0: AA.assign_veh_opt(av_fleet, customers, opt_method, t) if t % relocat_int == 0 and count_avail_veh > 0 and relocate_method != "NULL": AA.relocate_veh(av_fleet, area, relocate_method, t, weekday) # see comments in 'Assign using FCFS' 
################################################################################################## # Simulation Over ################################################################################################## ################################################################################################## # Customer and AV Results ################################################################################################## # Quality of Service Metrics # remove edge effects, only count middle 80% <-- previously 60% start = round(0.03 * len(customers)) end = round(0.8 * len(customers)) metric__people = customers[start:end] # num_metric_people = len(metric__people) ###### Customer Results ############### file_string1 = '../Results_Rev2/taxi_trvlr_results'+ '_hold' + str(assign_int) + '_fleet' + str(len(customers)) \ + '_opt' + str(opt_method) +'.csv' csv_traveler = open(file_string1, 'w') traveler_writer = csv.writer(csv_traveler, lineterminator='\n', delimiter=',', quotechar='"', quoting=csv.QUOTE_NONNUMERIC) traveler_writer.writerow([ "person_id", "base_ivtt", "simulate_ivtt", "wait_assgn_time", "wait_pick_time", "vehicle", "old_veh" ]) #, "rideshare"]) for j_person in customers[start:end]: base_ivtt = j_person.in_veh_dist / veh_speed traveler_writer.writerow([ j_person.person_id, base_ivtt, j_person.travel_time, j_person.wait_assgn_time, j_person.wait_pick_time, j_person.vehicle_id, j_person.old_vehicles ]) ####### AV Results ############### file_string2 = '../Results_Rev2/taxi_veh_results'+ '_hold' + str(assign_int) + '_fleet' + str(len(customers)) \ + '_opt' + str(opt_method) +'.csv' csv_vehicle = open(file_string2, 'w') vehicle_writer = csv.writer(csv_vehicle, lineterminator='\n', delimiter=',', quotechar='"', quoting=csv.QUOTE_NONNUMERIC) vehicle_writer.writerow([ "vehicle_id", "distance", "pass_assgn", "pass_pick", "pass_drop", "pass_drop_list" ]) cum_distance = 0 for k_vehicle in av_fleet: cum_distance += k_vehicle.total_distance 
vehicle_writer.writerow([ k_vehicle.vehicle_id, k_vehicle.total_distance, k_vehicle.pass_assgn_count, k_vehicle.pass_pick_count, k_vehicle.pass_drop_count, k_vehicle.pass_dropped_list ]) if taxi: cum_distance = cum_distance / 1000.0 vehicle_writer.writerow(["cum_distance_km", cum_distance]) else: cum_distance = cum_distance / 5280.0 vehicle_writer.writerow(["cum_distance_ft", cum_distance]) ################################################################################################## # Calculate Performance Metrics for Single Simulation ################################################################################################## # Customer Metrics ############### # Incomplete Customers Metrics num_served = (list(p.status for p in customers)).count("served") num_in_veh = (list(p.status for p in customers)).count("inVeh") num_assgnd = (list(p.status for p in customers)).count("assigned") num_unassgnd = (list(p.status for p in customers)).count("unassigned") print("num_served", num_served) perc_reassigned = round( numpy.mean( list(p.reassigned for p in metric__people if p.status == "served")), 2) mean_wait_pick = int( numpy.mean( list(p.wait_pick_time for p in metric__people if p.status == "served"))) # sd_wait_pick = int(numpy.std(list(p.wait_pick_time for p in metric__people if p.status == "served"))) # mean_wait_assgn = int(numpy.mean(list(p.wait_assgn_time for p in metric__people if p.status == "served"))) # sd_wait_assgn = int(numpy.std(list(p.wait_assgn_time for p in metric__people if p.status == "served"))) # mean_trip_dist = round(numpy.mean(list(p.in_veh_dist for p in metric__people if p.status == "served"))/5280, 3) # sd_trip_dist = round(numpy.std(list(p.in_veh_dist for p in metric__people if p.status == "served"))/5280, 3) # AV Metrics ############### if taxi: tot_fleet_dist = int( sum(list(v.total_distance for v in av_fleet)) / 1000.0) # km mean_tot_veh_dist = round( numpy.mean(list(v.total_distance for v in av_fleet)) / 1000.0, 2) # km 
empty_fleet_dist = int( sum(list(v.empty_distance for v in av_fleet)) / 1000.0) # km perc_empty_dist = round(empty_fleet_dist / float(tot_fleet_dist), 3) fleet_hours = ((mean_tot_veh_dist * 1000.0) / veh_speed) / 3600.0 fleet_utilization = round(fleet_hours / (new_t_max / 3600.0), 2) else: tot_fleet_dist = int( sum(list(v.total_distance for v in av_fleet)) / 5280.0) # miles mean_tot_veh_dist = round( numpy.mean(list(v.total_distance for v in av_fleet)) / 5280.0, 2) # miles # sd_tot_veh_dist = round(numpy.std(list(v.total_distance for v in Vehicles))/5280.0,2) empty_fleet_dist = int( sum(list(v.empty_distance for v in av_fleet)) / 5280.0) # miles # mean_empty_veh_dist= round(numpy.mean(list(v.empty_distance for v in Vehicles))/5280.0,2) # sd_empty_veh_dist = round(numpy.std(list(v.empty_distance for v in Vehicles))/5280.0,2) # loaded_fleet_miles = int(sum(list(v.loaded_distance for v in Vehicles))/5280.0) # mean_loaded_veh_dist= round(numpy.mean(list(v.loaded_distance for v in Vehicles))/5280.0,2) # sd_loaded_veh_dist = round(numpy.std(list(v.loaded_distance for v in Vehicles))/5280.0,2) perc_empty_dist = round(empty_fleet_dist / float(tot_fleet_dist), 3) fleet_hours = ((mean_tot_veh_dist * 5280.0) / veh_speed) / 3600.0 fleet_utilization = round(fleet_hours / (new_t_max / 3600.0), 2) # Initialize Vector of Metrics # sim_results = [num_metric_People, perc_reassigned, # mean_ivtt, sd_ivtt, mean_wait_pick, sd_wait_pick, mean_wait_assgn, sd_wait_assgn, # mean_trip_dist, sd_trip_dist, # tot_fleet_dist, mean_tot_veh_dist, sd_tot_veh_dist, # empty_fleet_miles, perc_empty_miles, fleet_utilization, # mean_increase_RS_ivtt, sd_increase_RS_ivtt, # num_served, num_inVeh, num_assgnd, num_unassgnd] sim_results = [ mean_wait_pick, perc_empty_dist, perc_reassigned, fleet_utilization, num_served, num_in_veh, num_assgnd, num_unassgnd ] return (sim_results)
# NOTE(review): collapsed chunk containing three methods of a scale-indexing
# class (the class header — presumably BsbScales, per the __main__ below —
# is outside this view) plus a __main__ smoke test.  Original indentation
# was lost, so the code is kept verbatim rather than risk re-homing the
# methods at the wrong scope.
# NOTE(review): getKapsSortedByScale uses str.rstrip(".KAP") /
# str.rstrip(".kap"), which strips any trailing '.', 'K', 'A', 'P' (or
# lowercase) CHARACTERS, not the literal extension — e.g. "PACK.KAP"
# would be mangled to "C".  Likely a latent bug; confirm against real
# chart names before fixing (use [:-4] or os.path.splitext instead).
def printScales(self): keyList = self.scales.keys() keyList.sort() for key in keyList: print key def printProjections(self): keyList = self.projections.keys() for key in keyList: print key, self.projections[key].__len__() def getKapsSortedByScale(self, addExt = ""): lst = [] keyList = self.scales.keys() keyList.sort() keyList.reverse() for key in keyList: for scale in self.scales[key]: item = scale.split("/")[-1].rstrip(".KAP") item = item.rstrip(".kap") lst.append(item+addExt) return lst if __name__== "__main__": import Regions filter = Regions.getRegionFilterList("REGION_15") dir = "/home/will/zxyCharts/BSB_ROOT/NOAA_BSB_ROOT/BSB_ROOT" bs = BsbScales(dir, filter) for ea in bs.getKapsSortedByScale(".KAP"): print ea