def main(exports_root_path, main_run_index):

    ocr_files = os.listdir(ocr_lists_root_path)

    print '\nAvailable OCR text files:'
    i = 0
    for file in ocr_files:
        print '{0}: {1}'.format(i, file)
        i = i + 1

    ocr_file_index = ''
    while ocr_file_index == '':
        ocr_file_index = raw_input('\nWhich file would you like to run the analysis on? ')
    ocr_file_index = int(ocr_file_index)
    ocr_text_file = ocr_files[ocr_file_index]
    year = re.sub('\.txt', '', ocr_text_file)

    student_list_text_file_path = ocr_lists_root_path + ocr_text_file
    print 'Text file path: \'{0}\''.format(student_list_text_file_path)

    exports_root_path = exports_root_path + year + '/'
    print 'Export files to: \'{0}\''.format(exports_root_path)

    raw_input('\nContinue if the above paths are correct...')

    try:
        os.makedirs(exports_root_path)
        raw_input('Directory created: \'{0}\'\nContinue...'.format(exports_root_path))
    except OSError:
        if not os.path.isdir(exports_root_path):
            raise

    # run a new city state analysis with google api call?
    run_new = ''
    while run_new == '':
        run_new = raw_input('\nRe-run entire analysis? y or n? ')

    # run a new analysis
    if run_new == 'y':

        def run_new_analysis(main_run_index):

            raw_input('\nFile to be parsed: ' + student_list_text_file_path)

            global student_locations_us
            global student_locations_world
            global student_locations_bad_states
            global student_locations_bad_cities
            global student_locations_errors_other
            global student_locations_bad_counties
            global commencement_line_count
            global saved_google_city_state_county_responses

            (student_locations_us, student_locations_world, student_locations_bad_states,
             student_locations_bad_cities, student_locations_errors_other,
             student_locations_bad_counties, commencement_line_count) = get_student_locations(
                student_list_text_file_path, us_states, world_countries,
                state_county_names_ids, main_run_index, saved_google_city_state_county_responses)

            print 'Created student_locations_us [dict], student_locations_world [dict], student_locations_bad_states [dict], student_locations_bad_cities [dict], and student_locations_errors_other [list]'

            save_request = raw_input('\nSaving commencement_line_count: {0}... (enter \'n\' to escape) '.format(commencement_line_count))
            if save_request != 'n':
                commencement_line_count = str(commencement_line_count)
                path = exports_root_path + 'commencement_line_count.txt'
                with open(path, 'w') as save_file:
                    save_file.write(commencement_line_count)

            # student_locations_us
            print_request = raw_input('\nPrinting student_locations_us... (enter \'n\' to escape) ')
            if print_request != 'n':
                print json.dumps(student_locations_us, indent=4)
            save_request = raw_input('\nSaving student_locations_us... (enter \'n\' to escape) ')
            if save_request != 'n':
                path = exports_root_path + 'student_locations_us.json'
                json.dump(student_locations_us, open(path, 'w'), sort_keys=True)

            # student_locations_world
            print_request = raw_input('\nPrinting student_locations_world... (enter \'n\' to escape) ')
            if print_request != 'n':
                print json.dumps(student_locations_world, indent=4)
            save_request = raw_input('\nSaving student_locations_world... (enter \'n\' to escape) ')
            if save_request != 'n':
                path = exports_root_path + 'student_locations_world.json'
                json.dump(student_locations_world, open(path, 'w'), sort_keys=True)

            # student_locations_bad_states
            print_request = raw_input('\nPrinting student_locations_bad_states... (enter \'n\' to escape) ')
            if print_request != 'n':
                print json.dumps(student_locations_bad_states, indent=4)
            save_request = raw_input('\nSaving student_locations_bad_states... (enter \'n\' to escape) ')
            if save_request != 'n':
                path = exports_root_path + 'student_locations_bad_states.json'
                json.dump(student_locations_bad_states, open(path, 'w'), sort_keys=True)

            # student_locations_bad_cities
            print_request = raw_input('\nPrinting student_locations_bad_cities... (enter \'n\' to escape) ')
            if print_request != 'n':
                print json.dumps(student_locations_bad_cities, indent=4)
            save_request = raw_input('\nSaving student_locations_bad_cities... (enter \'n\' to escape) ')
            if save_request != 'n':
                path = exports_root_path + 'student_locations_bad_cities.json'
                json.dump(student_locations_bad_cities, open(path, 'w'), sort_keys=True)

            # student_locations_errors_other
            print_request = raw_input('\nPrinting student_locations_errors_other... (enter \'n\' to escape) ')
            if print_request != 'n':
                print json.dumps(student_locations_errors_other, indent=4)
            save_request = raw_input('\nSaving student_locations_errors_other... (enter \'n\' to escape) ')
            if save_request != 'n':
                path = exports_root_path + 'student_locations_errors_other.json'
                json.dump(student_locations_errors_other, open(path, 'w'), sort_keys=True)

            # student_locations_bad_counties
            print_request = raw_input('\nPrinting student_locations_bad_counties... (enter \'n\' to escape) ')
            if print_request != 'n':
                print json.dumps(student_locations_bad_counties, indent=4)
            save_request = raw_input('\nSaving student_locations_bad_counties... (enter \'n\' to escape) ')
            if save_request != 'n':
                path = exports_root_path + 'student_locations_bad_counties.json'
                json.dump(student_locations_bad_counties, open(path, 'w'), sort_keys=True)

        run_new_analysis(main_run_index)

    # already ran the analysis, use the files saved from a previous run
    else:

        raw_input('Loading dictionaries and lists from json files...')

        def load_data_from_files():

            path = exports_root_path + 'commencement_line_count.txt'
            with open(path) as file:
                for line in file:
                    if line != '':
                        global commencement_line_count
                        commencement_line_count = line

            path = exports_root_path + 'student_locations_us.json'
            with open(path) as json_file:
                global student_locations_us
                student_locations_us = json.load(json_file)

            path = exports_root_path + 'student_locations_world.json'
            with open(path) as json_file:
                global student_locations_world
                student_locations_world = json.load(json_file)

            path = exports_root_path + 'student_locations_bad_states.json'
            with open(path) as json_file:
                global student_locations_bad_states
                student_locations_bad_states = json.load(json_file)

            path = exports_root_path + 'student_locations_bad_cities.json'
            with open(path) as json_file:
                global student_locations_bad_cities
                student_locations_bad_cities = json.load(json_file)

            path = exports_root_path + 'student_locations_errors_other.json'
            with open(path) as json_file:
                global student_locations_errors_other
                student_locations_errors_other = json.load(json_file)

            path = exports_root_path + 'student_locations_bad_counties.json'
            with open(path) as json_file:
                global student_locations_bad_counties
                student_locations_bad_counties = json.load(json_file)

        load_data_from_files()

        def print_data_from_loaded_files():

            # commencement_line_count
            print_request = raw_input('\nPrint commencement_line_count, y or n? ')
            if print_request != 'n':
                print json.dumps(commencement_line_count, indent=4)

            # student_locations_us
            print_request = raw_input('\nPrint student_locations_us, y or n? ')
            if print_request != 'n':
                print json.dumps(student_locations_us, indent=4)

            # student_locations_world
            print_request = raw_input('\nPrint student_locations_world, y or n? ')
            if print_request != 'n':
                print json.dumps(student_locations_world, indent=4)

            # student_locations_bad_states
            print_request = raw_input('\nPrint student_locations_bad_states, y or n? ')
            if print_request != 'n':
                print json.dumps(student_locations_bad_states, indent=4)

            # student_locations_bad_cities
            print_request = raw_input('\nPrint student_locations_bad_cities, y or n? ')
            if print_request != 'n':
                print json.dumps(student_locations_bad_cities, indent=4)

            # student_locations_errors_other
            print_request = raw_input('\nPrint student_locations_errors_other, y or n? ')
            if print_request != 'n':
                print json.dumps(student_locations_errors_other, indent=4)

            # student_locations_bad_counties
            print_request = raw_input('\nPrint student_locations_bad_counties, y or n? ')
            if print_request != 'n':
                print json.dumps(student_locations_bad_counties, indent=4)

        print_data_from_loaded_files()

    run_stats(commencement_line_count, student_locations_us, student_locations_world,
              student_locations_bad_states, student_locations_bad_cities,
              student_locations_errors_other, student_locations_bad_counties)

    # add the new student locations and the county info to a dictionary for reference and fewer google api calls
    google_city_state_county_responses = register_google_city_state_county_responses(
        student_locations_us, saved_google_city_state_county_responses)

    print_request = raw_input('Printing google_city_state_county_responses... (enter \'n\' to escape) ')
    if print_request != 'n':
        print json.dumps(google_city_state_county_responses, indent=4)

    # count every saved city across all of the saved states
    google_saved_count = 0
    for state in google_city_state_county_responses:
        for city in google_city_state_county_responses[state]:
            google_saved_count = google_saved_count + 1
    raw_input('\nThe google_city_state_county_responses city count: {0}\n'.format(google_saved_count))

    # set the tsv file parameters
    tsv_file_name = 'students_by_county_' + year + '.tsv'
    tsv_export_path = html_files_path

    # get the dictionary to save to tsv and the associated stats
    student_county_ids_ratios, tsv_file_stats = prepare_tsv_dictionary(county_ids_ratios, student_locations_us)

    # save the tsv file and print out some stats
    export_tsv_ids_rates(student_county_ids_ratios, tsv_export_path, tsv_file_name, tsv_file_stats)
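# A minimal sketch of how main() above might be invoked; the 'exports/' root
# path and the run index of 0 are assumptions for illustration, not values
# taken from the original project.
if __name__ == '__main__':
    main('exports/', 0)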
print student_locations_us

print_request = raw_input('\nPrint student_locations_world? y or n? ')
if print_request == 'y':
    print student_locations_world

print_request = raw_input('\nPrint student_locations_bad_states? y or n? ')
if print_request == 'y':
    print student_locations_bad_states

print_request = raw_input('\nPrint student_locations_bad_cities? y or n? ')
if print_request == 'y':
    print student_locations_bad_cities

print_request = raw_input('\nPrint student_locations_errors_other? y or n? ')
if print_request == 'y':
    print student_locations_errors_other

print_request = raw_input('\nPrint student_locations_bad_counties? y or n? ')
if print_request == 'y':
    print student_locations_bad_counties

run_stats(commencement_line_count, student_locations_us, student_locations_world,
          student_locations_bad_states, student_locations_bad_cities,
          student_locations_errors_other, student_locations_bad_counties)

export_tsv_ids_rates(county_ids_ratios, student_locations_us, 'html/files/', 'students_by_county.tsv')
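# A minimal sketch of reading back the exported file, assuming it is a plain
# tab-separated file of county ids and rates as the export_tsv_ids_rates name
# suggests; the exact column layout is an assumption, not taken from the
# original project.
import csv

with open('html/files/students_by_county.tsv') as tsv_file:
    for row in csv.reader(tsv_file, delimiter='\t'):
        print row  # e.g. a county id followed by its student rate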
def stats():
    run_stats()
    return jsonify(OK("Sync stats complete"))
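# A minimal sketch of how the stats() view above might be exposed over HTTP
# with Flask (implied by jsonify); the '/stats' URL and the app object are
# assumptions for illustration, and OK() is assumed to be a project helper
# that wraps a message in a JSON-serializable payload.
from flask import Flask

app = Flask(__name__)
app.add_url_rule('/stats', view_func=stats)
# e.g. GET /stats runs the sync and returns the "Sync stats complete" payload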
def test_look_elevator(limit=None):
    """method that tests the look elevator"""
    result_dir = "look"

    # create directory if not existing
    dirs = os.path.join(BASE_DIR, result_dir)
    if not os.path.exists(dirs):
        os.makedirs(dirs)

    # create loggers
    person_logger_path = os.path.join(dirs, settings.LOG_DIR, settings.PERSON_LOG_FNAME)
    person_logger = logger.PersonLogger(person_logger_path, remove_old=True)

    # create building
    building = Building([
        'SB', 'B', 'G', '1', '2', '3', '4', '5', '6', '7',
        '8', '9', '10', '11', '12',
    ])

    # generate arrivals
    arr_gen = ArrivalGenerator(building=building, person_logger=person_logger)

    days = ["M", "Tu", "W", "Th", "F"]
    for day in days:
        # load saved arrivals or generate new arrivals
        arr_gen.arrival_times = []
        save_path = os.path.join(settings.ARRIVALS_DIR, "{}_arrivals.csv".format(day))
        if not os.path.exists(save_path):
            arr_gen.gen_from_classes(file_path=settings.ARRIVALS_DATA_SET_CSV, days=[day])
            arr_gen.save(save_path)
        else:
            arr_gen.load(save_path)

        # add first 100 floor arrivals to FEQ
        cnt = 0
        for time, person in arr_gen.arrival_times:
            cnt += 1
            if limit is not None and cnt > limit:
                break
            settings.FEQ.put_nowait((time, person, person.States.QUEUED))

        # create 6 elevators
        settings.ELEVATORS = []
        for _ in range(6):
            settings.ELEVATORS.append(elevators.LookElevator(None, building))

        while not settings.FEQ.empty():
            curr_time, obj, state = settings.FEQ.get_nowait()
            settings.CURR_TIME = curr_time
            obj.update_state(state)

        # commit changes to person_logger
        person_logger.conn.commit()

        print("Done with", day)

    sim_stats.run_stats(person_log_path=person_logger_path, stats_dir=os.path.join(dirs, "stats"))
    print("done simulating", result_dir)
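# A minimal usage sketch for the simulation test above; the limit of 100
# arrivals per day is an arbitrary example value, not a project default.
if __name__ == "__main__":
    test_look_elevator(limit=100)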