def create_estimation_dataset(G, path_list_collection, id_list, trip_times, master_config):
    """Write the choice-model estimation CSV: one row per (observation, alternative).

    G -- network graph passed through to path_trace / path_size
    path_list_collection -- one list of candidate paths per observation; by
        convention the chosen path is at index 0 (the 'chosen' column below
        is True exactly for alt 0)
    id_list -- trip id per observation, parallel to path_list_collection
    trip_times -- mapping trip id -> time, used to resolve time-dependent
        variable names
    master_config -- provides output_config, choice_set_config and the
        time_dependent_relation mapping
    """
    output_config = master_config.output_config
    choice_set_config = master_config.choice_set_config

    # Optional path-size correction column.
    path_size_alias = ['path_size'] if output_config['path_size'] else []

    # Compute path-size data before opening (and truncating) the output file,
    # so a failure here leaves any existing file intact.
    path_size_data = path_size(G, path_list_collection, choice_set_config)

    # 'with' guarantees the file is closed even if a trace below raises.
    with open(output_config['estimation_data'], 'w') as est_file:
        est_writer = csv.writer(est_file, lineterminator='\r')
        est_writer.writerow(['occ', 'alt', 'trip_id', 'chosen']
                            + output_config['aliases'] + path_size_alias)
        for occ_idx, path_list in enumerate(path_list_collection):
            for alt_idx, path in enumerate(path_list):
                values = []
                for i, variable in enumerate(output_config['variables']):
                    # Swap in the time-sliced variant of the variable when one
                    # is defined for this trip's time.
                    if variable in master_config['time_dependent_relation']:
                        key = get_time_dependent_variable(
                            variable,
                            trip_times[id_list[occ_idx]],
                            master_config['time_dependent_relation'])
                    else:
                        key = variable
                    values.append(str(path_trace(
                        G, path, key,
                        output_config['trace_funs'][i],
                        output_config['final_funs'][i],
                        output_config['weights'][i])))
                if output_config['path_size']:
                    values.append(path_size_data[occ_idx][alt_idx])
                # Alternative 0 is the chosen path by convention.
                est_writer.writerow([str(occ_idx), str(alt_idx),
                                     str(id_list[occ_idx]),
                                     str(alt_idx == 0)] + values)
def create_holdback_prediction_dataset(G, path_list_collection, id_list,
                                       trip_times, master_config,
                                       chosen_overlap):
    """Write the holdback-prediction CSV: like the estimation dataset, but the
    path-size correction is computed over each choice set WITHOUT the chosen
    path (index 0), and an extra 'overlap' column is appended.

    G -- network graph passed through to path_trace / path_size
    path_list_collection -- one list of candidate paths per observation;
        index 0 is the chosen path
    id_list -- trip id per observation, parallel to path_list_collection
    trip_times -- mapping trip id -> time for time-dependent variable lookup
    master_config -- provides output_config, choice_set_config and the
        time_dependent_relation mapping
    chosen_overlap -- chosen_overlap[occ][alt] written as the 'overlap' column
    """
    output_config = master_config.output_config
    choice_set_config = master_config.choice_set_config

    path_size_alias = ['path_size'] if output_config['path_size'] else []

    # Path size is computed on deep copies with the chosen path (index 0)
    # removed; the chosen alternative later gets a placeholder value of 0.
    sans_chosen = copy.deepcopy(path_list_collection)
    for path_list in sans_chosen:
        path_list.pop(0)
    path_size_data = path_size(G, sans_chosen, choice_set_config)

    # 'with' guarantees the file is closed even if a trace below raises.
    with open(output_config['estimation_data'], 'w') as est_file:
        est_writer = csv.writer(est_file, lineterminator='\r')
        est_writer.writerow(['occ', 'alt', 'trip_id', 'chosen']
                            + output_config['aliases'] + path_size_alias
                            + ['overlap'])
        for occ_idx, path_list in enumerate(path_list_collection):
            for alt_idx, path in enumerate(path_list):
                values = []
                for i, variable in enumerate(output_config['variables']):
                    # Swap in the time-sliced variant of the variable when one
                    # is defined for this trip's time.
                    if variable in master_config['time_dependent_relation']:
                        key = get_time_dependent_variable(
                            variable,
                            trip_times[id_list[occ_idx]],
                            master_config['time_dependent_relation'])
                    else:
                        key = variable
                    values.append(str(path_trace(
                        G, path, key,
                        output_config['trace_funs'][i],
                        output_config['final_funs'][i],
                        output_config['weights'][i])))
                if output_config['path_size']:
                    if alt_idx == 0:
                        # Chosen path was excluded from the path-size data.
                        values.append(0)
                    else:
                        # Shift by one: path_size_data lacks the chosen path.
                        values.append(path_size_data[occ_idx][alt_idx - 1])
                values.append(chosen_overlap[occ_idx][alt_idx])
                est_writer.writerow([str(occ_idx), str(alt_idx),
                                     str(id_list[occ_idx]),
                                     str(alt_idx == 0)] + values)
def trace_and_load(network,filtered_sets,master_config,matrix_list,source):
    """Trace attribute values for each candidate path, turn them into
    simulated mixed-logit loads, and accumulate those loads onto the network.

    network -- graph; may expose .orig_network (see loading step below)
    filtered_sets -- dict: target zone -> list of candidate paths from source
    master_config -- provides assign_config and choice_set_config
    matrix_list -- demand matrices indexed [source-1, target-1] (zones 1-based)
    source -- origin zone number (1-based)
    """
    config=master_config.assign_config
    this_network=network
    #trace
    print current_process().name, "- zone: ", source, "Tracing paths..."
    predict_collection={}
    for target in filtered_sets:
        predict_data=[]
        for path in filtered_sets[target]:
            trace_vals={}
            # One traced value per configured variable, keyed by its alias.
            for i in range(len(config['variables'])):
                var=config['variables'][i]
                trace_vals[config['aliases'][i]]=path_trace(this_network,path,var,config['trace_funs'][i],config['final_funs'][i],config['weights'][i])
            # Normalize any alias that has a configured divisor alias.
            for var in trace_vals:
                if var in config['divisors']:
                    trace_vals[var]=trace_vals[var]/trace_vals[config['divisors'][var]]
            predict_data.append(trace_vals)
        if config['path_size']:
            # Path-size correction for this target's whole choice set,
            # optionally log-transformed.
            PS=path_size(this_network,[filtered_sets[target]],master_config.choice_set_config).pop()
            for i in range(len(filtered_sets[target])):
                if config['path_size_log']:
                    PS[i]=log(PS[i])
                predict_data[i][config['path_size_alias']]=PS[i]
        predict_collection[target]=predict_data
    #apply logit probabilities
    print current_process().name, "- zone: ", source, "Applying logit probabilities..."
    to_load=[]
    for mat_idx in range(len(matrix_list)):
        to_load.append({})
        for target in predict_collection:
            # Demand between source and target for this matrix (1-based zones).
            num_pers=matrix_list[mat_idx][source-1,target-1]
            to_load[mat_idx][target]=simulate_mixed_logit(num_pers,predict_collection[target],config)
    #load onto network
    print current_process().name, "- zone: ", source, "Loading network..."
    for target in filtered_sets:
        for j in range(len(filtered_sets[target])):
            path=filtered_sets[target][j]
            if this_network.orig_network is None:
                # Plain network: path is a node sequence; add this path's load
                # to every edge along it, one counter per demand matrix.
                for i in range(len(path)-1):
                    for mat_idx in range(len(matrix_list)):
                        this_network[path[i]][path[i+1]][config['load_names'][mat_idx]]=this_network[path[i]][path[i+1]][config['load_names'][mat_idx]]+to_load[mat_idx][target][j]
            else:
                # Wrapped network: each path entry is an edge (u, v) pair on
                # the original network; the first and last entries are skipped
                # here -- presumably access/egress connectors, TODO confirm.
                for i in range(1,len(path)-1):
                    for mat_idx in range(len(matrix_list)):
                        this_network.orig_network[path[i][0]][path[i][1]][config['load_names'][mat_idx]]=this_network.orig_network[path[i][0]][path[i][1]][config['load_names'][mat_idx]]+to_load[mat_idx][target][j]
# NOTE(review): this is a byte-for-byte logical duplicate of the
# trace_and_load defined earlier in this file (apparently a reformatted
# copy); at import time this later definition overrides the earlier one.
# Consider deleting one of the two.
def trace_and_load(network, filtered_sets, master_config, matrix_list, source):
    """Trace attribute values for each candidate path, turn them into
    simulated mixed-logit loads, and accumulate those loads onto the network.

    network -- graph; may expose .orig_network (see loading step below)
    filtered_sets -- dict: target zone -> list of candidate paths from source
    master_config -- provides assign_config and choice_set_config
    matrix_list -- demand matrices indexed [source-1, target-1] (zones 1-based)
    source -- origin zone number (1-based)
    """
    config = master_config.assign_config
    this_network = network
    #trace
    print current_process().name, "- zone: ", source, "Tracing paths..."
    predict_collection = {}
    for target in filtered_sets:
        predict_data = []
        for path in filtered_sets[target]:
            trace_vals = {}
            # One traced value per configured variable, keyed by its alias.
            for i in range(len(config['variables'])):
                var = config['variables'][i]
                trace_vals[config['aliases'][i]] = path_trace(
                    this_network, path, var, config['trace_funs'][i],
                    config['final_funs'][i], config['weights'][i])
            # Normalize any alias that has a configured divisor alias.
            for var in trace_vals:
                if var in config['divisors']:
                    trace_vals[var] = trace_vals[var] / trace_vals[
                        config['divisors'][var]]
            predict_data.append(trace_vals)
        if config['path_size']:
            # Path-size correction for this target's whole choice set,
            # optionally log-transformed.
            PS = path_size(this_network, [filtered_sets[target]],
                           master_config.choice_set_config).pop()
            for i in range(len(filtered_sets[target])):
                if config['path_size_log']:
                    PS[i] = log(PS[i])
                predict_data[i][config['path_size_alias']] = PS[i]
        predict_collection[target] = predict_data
    #apply logit probabilities
    print current_process(
    ).name, "- zone: ", source, "Applying logit probabilities..."
    to_load = []
    for mat_idx in range(len(matrix_list)):
        to_load.append({})
        for target in predict_collection:
            # Demand between source and target for this matrix (1-based zones).
            num_pers = matrix_list[mat_idx][source - 1, target - 1]
            to_load[mat_idx][target] = simulate_mixed_logit(
                num_pers, predict_collection[target], config)
    #load onto network
    print current_process().name, "- zone: ", source, "Loading network..."
    for target in filtered_sets:
        for j in range(len(filtered_sets[target])):
            path = filtered_sets[target][j]
            if this_network.orig_network is None:
                # Plain network: path is a node sequence; add this path's load
                # to every edge along it, one counter per demand matrix.
                for i in range(len(path) - 1):
                    for mat_idx in range(len(matrix_list)):
                        this_network[path[i]][path[i + 1]][
                            config['load_names'][mat_idx]] = this_network[
                                path[i]][path[i + 1]][config['load_names'][
                                    mat_idx]] + to_load[mat_idx][target][j]
            else:
                # Wrapped network: each path entry is an edge (u, v) pair on
                # the original network; the first and last entries are skipped
                # here -- presumably access/egress connectors, TODO confirm.
                for i in range(1, len(path) - 1):
                    for mat_idx in range(len(matrix_list)):
                        this_network.orig_network[path[i][0]][path[i][1]][
                            config['load_names']
                            [mat_idx]] = this_network.orig_network[path[i][0]][
                                path[i][1]][config['load_names'][
                                    mat_idx]] + to_load[mat_idx][target][j]
def find_coef_bounding_box(G,source,target,choice_set_config,time_dependent_relation,trip_time):
    """For each configured variable, bisect (in log space) the coefficient
    range within which the shortest path between source and target actually
    changes, returning {variable_key: [low_coef, high_coef]}.

    The reference variable gets the fixed bound [1, 1]. Variables whose
    traced value never changes across the configured range are skipped.

    G -- network graph
    source, target -- endpoints for the generalized-cost shortest path
    choice_set_config -- provides variables, ranges, ref, weights, tolerance,
        median_compare / randomize_compare / randomize_after settings
    time_dependent_relation -- mapping used to time-slice variable names
    trip_time -- trip time used for that time slicing
    """
    final_bound={}
    config=choice_set_config
    verbose=False#config['verbose']
    for prelim_key in config['variables']:
        key=get_time_dependent_variable(prelim_key,trip_time,time_dependent_relation)
        # Reference variable is pinned to coefficient 1.
        if key==config['ref']:
            final_bound[key]=[1,1]
            continue
        # Cost coefficients used by the shortest-path search; always include
        # the reference variable at 1.
        vc={config['ref']:1}
        if key in config['median_compare']:
            for compare_key in final_bound:
                # NOTE(review): this inner test re-checks `key`, which the
                # enclosing `if` already established IS in median_compare, so
                # this branch can never execute -- looks like it was meant to
                # test `compare_key` instead. TODO confirm intent.
                if key not in config['median_compare']:
                    if config['log_prior']:
                        # Geometric midpoint of the previously found bound.
                        vc[compare_key]=exp( (log(final_bound[compare_key][0])+log(final_bound[compare_key][1]))/2)
                    else:
                        # Arithmetic midpoint of the previously found bound.
                        vc[compare_key]=(final_bound[compare_key][0]+final_bound[compare_key][1])/2
        link_randomizer=None
        if key in config['randomize_compare']:
            if not config['randomize_after']:
                raise Exception, "randomize_compare not allowed without randomize_after"
            link_randomizer=config['randomize_after_dev']
        # Fixed seed so every myfun() evaluation inside the bisection loop
        # sees the same randomized links (random.seed(the_seed) below).
        the_seed=random.randint(0,sys.maxint)
        if verbose:
            print vc
        cur_wgt=None
        if key in config['weights']:
            cur_wgt=config['weights'][key]
        # Traced total of `key` along the generalized-cost shortest path when
        # the coefficient of `key` is cur_coef.
        myfun=lambda cur_coef: path_trace( G, bidirectional_dijkstra(G,source,target,dict(vc,**{key:cur_coef}),config['weights'],link_randomizer)[1], key, 'sum', wgtvar=cur_wgt )
        # Bisection state: the *_low pair brackets where the traced value
        # first leaves val_min, the *_high pair where it last equals val_max.
        # All coefficients are handled in log space.
        coef_min_low = coef_min_high = log(config['ranges'][prelim_key][0])
        coef_max_low = coef_max_high = log(config['ranges'][prelim_key][1])
        val_min_low = val_min_high = myfun(exp(coef_min_low))
        val_max_low = val_max_high = myfun(exp(coef_max_low))
        if verbose:
            print key, "coef_min_low:", exp(coef_min_low)
            print key, "coef_max_low:", exp(coef_max_low)
            print key, "val_min_low:", val_min_low
            print key, "val_max_low:", val_max_low
        # If the traced value is identical at both range ends, the coefficient
        # has no effect on the path choice here -- skip this variable.
        if val_min_low == val_max_low:
            if verbose:
                print key, "no range... ignoring"
            continue
        if verbose:
            print key, "coef_min_low:", exp(coef_min_low)
            print key, "coef_max_low:", exp(coef_max_low)
            print key, "val_min_low:", val_min_low
            print key, "val_max_low:", val_max_low
        while True:
            # Re-seed so link randomization is identical across iterations.
            random.seed(the_seed)
            coef_mid_low = (coef_min_low+coef_max_low)/2
            coef_mid_high = (coef_min_high+coef_max_high)/2
            val_mid_low = myfun(exp(coef_mid_low))
            val_mid_high = myfun(exp(coef_mid_high))
            if verbose:
                print key, "coef_mid_low:", exp(coef_mid_low)
                print key, "coef_mid_high:", exp(coef_mid_high)
                print key, "val_mid_low:", val_mid_low
                print key, "val_mid_high:", val_mid_high
            # Narrow the low bracket toward the first coefficient at which the
            # traced value departs from its minimum-end value.
            if val_mid_low==val_min_low:
                coef_min_low=coef_mid_low
            else:
                coef_max_low=coef_mid_low
                val_max_low=val_mid_low
            # Narrow the high bracket toward the last coefficient at which the
            # traced value still equals its maximum-end value.
            if val_mid_high==val_max_high:
                coef_max_high=coef_mid_high
            else:
                coef_min_high=coef_mid_high
                val_min_high=val_mid_high
            if verbose:
                print key, "coef_low:", (exp(coef_min_low),exp(coef_max_low))
                print key, "val_low:", (val_min_low,val_max_low)
                print key, "coef_high:", (exp(coef_min_high),exp(coef_max_high))
                print key, "val_high:", (val_min_high,val_max_high)
            # Converged when the low bracket is narrower than the tolerance
            # (in log-coefficient units).
            if (coef_max_low-coef_min_low)<config['tolerance']:
                break
        # Record the bound only if it is non-degenerate.
        if coef_mid_low!=coef_mid_high:
            final_bound[key]=[exp(coef_mid_low),exp(coef_mid_high)]
    if verbose:
        print final_bound
    return final_bound