def batch_reports(project_dir, results_file,
                  additional_costs=None, join_data=None):

    # combine the segments and options (combinations) into one iterable
    SEGMENTS_DIR = os.path.join(project_dir, 'Segments')
    COMBOS_DIR = os.path.join(project_dir, 'Combinations')
    COMMON_DATA_DIR = os.path.join(project_dir, 'CommonData')
    ADMIN_DIR = os.path.join(project_dir, 'ProjectAdmin')
    BASELINE_DIR = os.path.join(project_dir, 'Baseline')

    # instantiate the true baseline flood report
    baseline_model = Model(BASELINE_DIR)
    pn_join_csv = os.path.join(COMMON_DATA_DIR, r'sphila_sheds_parcels_join.csv')
    parcel_node_join_df = pd.read_csv(pn_join_csv)
    parcel_shp_df = spatial.read_shapefile(sg.config.parcels_shapefile)
    baserpt = reporting.FloodReport(baseline_model, parcel_node_join_df)
    base_flood_vol = baserpt.flood_vol_mg

    paths = (SEGMENTS_DIR, COMBOS_DIR)

    # result file header
    # cols = 'MODEL,COST,FLOOD_VOL_MG,PARCEL_FLOOD_HRS,FLOOD_VOL_REDUCED_MG,PARCEL_FLOOD_HRS_REDUCED,PARCEL_HRS_REDUCED_DELTA_THRESH'
    # with open(results_file, 'a') as f:
    #     f.write(cols + '\n')

    for path, dirs, files in chain.from_iterable(os.walk(p) for p in paths):
        for f in files:
            if '.inp' in f:
                inp_path = os.path.join(path, f)
                alt = Model(inp_path)
                print('reporting on {}'.format(alt.name))

                # generate the reports
                frpt = reporting.FloodReport(alt, parcel_node_join_df)
                impact_rpt = reporting.ComparisonReport(baserpt, frpt,
                                                        additional_costs,
                                                        join_data)

                # write to the log
                # model_id = os.path.splitext(f)[0]
                # with open(results_file, 'a') as f:
                #     stats = (model_id, impact_rpt.cost_estimate,
                #              frpt.flood_vol_mg, frpt.parcel_hrs_flooded,
                #              baserpt.flood_vol_mg - frpt.flood_vol_mg,
                #              baserpt.parcel_hrs_flooded - frpt.parcel_hrs_flooded,
                #              impact_rpt.parcel_hours_reduced)
                #     f.write('{},{},{},{},{},{},{}\n'.format(*stats))

                report_dir = os.path.join(alt.inp.dir, 'Report_AllParcels')
                if not os.path.exists(report_dir):
                    os.mkdir(report_dir)

                # write the report files
                # impact_rpt.write(report_dir)
                # impact_rpt.generate_figures(report_dir, parcel_shp_df)
                serialize.encode_report(impact_rpt,
                                        os.path.join(report_dir, 'rpt.json'))
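# Usage sketch (hypothetical project path; assumes project_dir contains the
# 'Segments', 'Combinations', 'CommonData', and 'Baseline' subdirectories
# referenced above):
#
#   batch_reports(project_dir=r'P:\Models\SouthPhila',
#                 results_file=r'P:\Models\SouthPhila\results.csv')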
def run_swmm_engine(inp_folder):
    # NOTE: assumes combi_folder and logfile are defined at module scope
    try:
        wd = os.path.join(combi_folder, inp_folder)
        m = Model(inp_folder)
        # run.run_simple(inp_folder)
        if not m.rpt:
            # print('completed {} at {}'.format(m.inp.name, datetime.now()))
            with open(logfile, 'a') as f:
                f.write('{} -- {} started... '.format(
                    datetime.now().strftime("%y-%m-%d %H:%M"), m.inp.name))
                run.run_hot_start_sequence(m)
                f.write('completed at {}\n'.format(
                    datetime.now().strftime("%y-%m-%d %H:%M")))
        else:
            # print('{} -- RPT already exists: {}'.format(datetime.now(), m.rpt.filePath))
            with open(logfile, 'a') as f:
                f.write('RPT already exists: {}\n'.format(m.rpt.filePath))
    except Exception:
        with open(logfile, 'a') as f:
            f.write(' FAILED at {}\n'.format(
                datetime.now().strftime("%y-%m-%d %H:%M")))
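# A minimal sketch of the module-level names this runner assumes; the
# definitions below are illustrative assumptions, not part of the source:
#
#   combi_folder = r'P:\Models\Combinations'            # hypothetical path
#   logfile = os.path.join(combi_folder, 'logfile.txt')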
def run_swmm_engine(inp_folder):
    # NOTE: assumes wd and log_start_time are defined at module scope
    logfile = os.path.join(wd, 'log_' + log_start_time + '.txt')
    m = Model(inp_folder)
    if not m.rpt_is_valid():
        # if the rpt is not valid, i.e. not having current, usable data: run
        with open(logfile, 'a') as f:
            now = datetime.now().strftime("%y-%m-%d %H:%M")
            f.write('{}: started at {} '.format(m.inp.name, now))
            # print('running {}\n'.format(m.inp.name))
            run.run_hot_start_sequence(m.inp.path)
            now = datetime.now().strftime("%y-%m-%d %H:%M")
            f.write(', completed at {}\n'.format(now))
    else:
        with open(logfile, 'a') as f:
            f.write('{}: skipped (up-to-date)\n'.format(m.inp.name))
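# Usage sketch: this runner takes a single model folder, so a batch can be
# dispatched with multiprocessing (assumes `wd` and `log_start_time` exist
# at module scope; paths are hypothetical):
#
#   from multiprocessing import Pool
#   folders = [os.path.join(wd, d) for d in os.listdir(wd)
#              if os.path.isdir(os.path.join(wd, d))]
#   Pool(processes=4).map(run_swmm_engine, folders)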
def propagate_changes_from_baseline(baseline_dir, alternatives_dir, combi_dir,
                                    version_id='', comments=''):
    """
    If the baseline model has changes that need to be propagated to all models,
    iterate through each model and rebuild the INPs with the new baseline and
    existing build instructions. Update the build instructions to reflect the
    revision date of the baseline.
    """
    version_id += '_' + datetime.now().strftime("%y%m%d%H%M%S")

    # collect the directories of all models
    model_dirs = []
    for alt in os.listdir(alternatives_dir):
        # iterate through each implementation level of each alternative
        for imp_level in os.listdir(os.path.join(alternatives_dir, alt)):
            model_dirs.append(os.path.join(alternatives_dir, alt, imp_level))
    model_dirs += [os.path.join(combi_dir, x) for x in os.listdir(combi_dir)]

    baseline = Model(baseline_dir)
    baseinp = baseline.inp.filePath

    for model_dir in model_dirs:
        model = Model(model_dir)
        vc_directory = os.path.join(model_dir, 'vc')
        latest_bi = vc_utils.newest_file(vc_directory)

        # update build instructions metadata and build the new inp
        bi = inp.BuildInstructions(latest_bi)
        bi.metadata['Parent Models']['Baseline'] = {
            baseinp: vc_utils.modification_date(baseinp)
        }
        bi.metadata['Log'].update({version_id: comments})
        bi.save(vc_directory, version_id + '.txt')

        print('rebuilding {} with changes to baseline'.format(model.name))
        bi.build(baseline_dir, model.inp.filePath)  # overwrite old inp
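# Usage sketch (hypothetical paths):
#
#   propagate_changes_from_baseline(baseline_dir=r'P:\Models\Baseline',
#                                   alternatives_dir=r'P:\Models\Alternatives',
#                                   combi_dir=r'P:\Models\Combinations',
#                                   version_id='v2',
#                                   comments='updated baseline rainfall')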
def batch_cost_estimates(baseline_dir, segments_dir, options_dir, results_file,
                         supplemental_cost_data=None, create_proj_reports=True):
    """
    DEPRECATED

    Compute the cost estimate of each model/option in the segments and
    combinations directories. Results will be written to the results text file.
    """
    # combine the segments and options (combinations) into one iterable
    paths = (segments_dir, options_dir)
    baseline = Model(baseline_dir)

    for path, dirs, files in chain.from_iterable(os.walk(p) for p in paths):
        for f in files:
            if '.inp' in f:
                inp_path = os.path.join(path, f)
                alt = Model(inp_path)

                # calculate the cost
                costsdf = functions.estimate_cost_of_new_conduits(
                    baseline, alt, supplemental_cost_data)
                cost_estimate = costsdf.TotalCostEstimate.sum() / math.pow(10, 6)
                print('{}: ${}M'.format(alt.name, round(cost_estimate, 1)))

                model_id = os.path.splitext(f)[0]
                with open(results_file, 'a') as res:
                    res.write('{}, {}\n'.format(model_id, cost_estimate))

                if create_proj_reports:
                    # create an option-specific, per-segment costing csv file
                    report_dir = os.path.join(alt.inp.dir, REPORT_DIR_NAME)
                    fname = '{}_CostEstimate_{}.csv'.format(alt.name,
                                                            strftime("%y%m%d"))
                    cost_report_path = os.path.join(report_dir, fname)
                    if not os.path.exists(report_dir):
                        os.mkdir(report_dir)
                    costsdf.to_csv(cost_report_path)
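# Each line appended to results_file pairs a model ID with its cost estimate
# in millions of dollars, e.g. (values illustrative):
#
#   M01_R01, 12.3
#   M01_R02, 15.8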
def batch_post_process(options_dir, baseline_dir, log_dir, bbox=None,
                       overwrite=False):
    """
    DEPRECATED

    Batch process all models in a given directory, where each child directory
    contains a model (with .inp and .rpt companions). A bbox should be passed
    to control where the graphics are focused. Specify whether reporting
    content should be overwritten if found.
    """
    baseline = Model(baseline_dir)
    folders = os.listdir(options_dir)
    logfile = os.path.join(log_dir, 'logfile.txt')
    with open(logfile, 'a') as f:
        f.write('MODEL,NEW_SEWER_MILES,IMPROVED,ELIMINATED,WORSE,NEW\n')

    for folder in folders:
        # first check if there is already a Report directory and skip if required
        current_dir = os.path.join(options_dir, folder)
        report_dir = os.path.join(current_dir, REPORT_DIR_NAME)
        if not overwrite and os.path.exists(report_dir):
            print('skipping {}'.format(folder))
            continue
        else:
            # generate the report
            current_model = Model(current_dir)
            print('Generating report for {}'.format(current_model.inp.name))
            # reporting.generate_figures(baseline, current_model, bbox=bbox,
            #                            imgDir=report_dir, verbose=True)
            report = reporting.Report(baseline, current_model)
            report.write(report_dir)

            # keep a summary log
            with open(logfile, 'a') as f:
                # 'MODEL,NEW_SEWER_MILES,IMPROVED,ELIMINATED,WORSE,NEW'
                f.write('{},{},{},{},{},{}\n'.format(
                    current_model.inp.name,
                    report.sewer_miles_new,
                    report.parcels_flooding_improved,
                    report.parcels_eliminated_flooding,
                    report.parcels_worse_flooding,
                    report.parcels_new_flooding))
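# Usage sketch (hypothetical paths; `study_area_bbox` stands in for whatever
# bounding-box object the reporting figures expect -- its exact format is an
# assumption here):
#
#   batch_post_process(options_dir=r'P:\Models\Options',
#                      baseline_dir=r'P:\Models\Baseline',
#                      log_dir=r'P:\Models\Logs',
#                      bbox=study_area_bbox, overwrite=False)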
def run_hot_start_sequence(inp_path, swmm_eng=SWMM_ENGINE_PATH):
    model = Model(inp_path)
    rpt_path = os.path.splitext(inp_path)[0] + '.rpt'
    hotstart1 = os.path.join(model.inp.dir, model.inp.name + '_hot1.hsf')
    hotstart2 = os.path.join(model.inp.dir, model.inp.name + '_hot2.hsf')

    # if not os.path.exists(hotstart1) and not os.path.exists(hotstart2):
    # create new model inp with params to save hotstart1
    print('create new model inp with params to save hotstart1')
    s = pd.Series(['SAVE HOTSTART "{}"'.format(hotstart1)])
    hot1_df = pd.DataFrame(s, columns=['[FILES]'])
    model = replace_inp_section(model.inp.path, '[FILES]', hot1_df)
    model = replace_inp_section(model.inp.path, '[REPORT]', defs.REPORT_none)
    model = replace_inp_section(model.inp.path, '[OPTIONS]', defs.OPTIONS_no_rain)
    subprocess.call([swmm_eng, model.inp.path, rpt_path])

    # if os.path.exists(hotstart1) and not os.path.exists(hotstart2):
    # create new model inp with params to use hotstart1 and save hotstart2
    print('with params to use hotstart1 and save hotstart2')
    s = pd.Series([
        'USE HOTSTART "{}"'.format(hotstart1),
        'SAVE HOTSTART "{}"'.format(hotstart2)
    ])
    hot2_df = pd.DataFrame(s, columns=['[FILES]'])
    model = replace_inp_section(model.inp.path, '[FILES]', hot2_df)
    subprocess.call([swmm_eng, model.inp.path, rpt_path])

    # if os.path.exists(hotstart2):
    # create new model inp with params to use hotstart2 and not save anything
    print('params to use hotstart2 and not save anything')
    s = pd.Series(['USE HOTSTART "{}"'.format(hotstart2)])
    hot3_df = pd.DataFrame(s, columns=['[FILES]'])
    model = replace_inp_section(model.inp.path, '[FILES]', hot3_df)
    model = replace_inp_section(model.inp.path, '[REPORT]',
                                defs.REPORT_none)  # defs.REPORT_nodes_links
    model = replace_inp_section(model.inp.path, '[OPTIONS]', defs.OPTIONS_normal)
    subprocess.call([swmm_eng, model.inp.path, rpt_path])
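# The three subprocess calls above run SWMM three times against the same inp,
# rewriting its [FILES] section between passes. Illustrative fragments:
#
#   pass 1: SAVE HOTSTART "<model>_hot1.hsf"    (rain off, report off)
#   pass 2: USE HOTSTART "<model>_hot1.hsf"
#           SAVE HOTSTART "<model>_hot2.hsf"    ([OPTIONS] carried over)
#   pass 3: USE HOTSTART "<model>_hot2.hsf"     (normal options)
#
# i.e. two dry warm-up runs establish initial conditions, and the final pass
# runs the real simulation from the saved hot-start file.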
def create_combinations(baseline_dir, rsn_dir, combi_dir, version_id='',
                        comments=''):
    """
    Generate SWMM5 models of each logical combination of all implementation
    phases (IP) across all relief sewer networks (RSN).

    Inputs:
        baseline_dir -> path to directory containing the baseline SWMM5 model
        rsn_dir -> path to directory containing subdirectories for each RSN,
            each holding directories for each IP within the network
        combi_dir -> target directory in which child models will be created
        version_id -> identifier for a given version (optional)
        comments -> comments tracked within the build instructions log for
            each model scenario (optional)

    Calling create_combinations will update child models if parent models
    have been changed.
    """
    baseinp = Model(baseline_dir).inp.path
    version_id += '_' + datetime.now().strftime("%y%m%d%H%M%S")

    # create a list of directories pointing to each IP in each RSN
    RSN_dirs = [os.path.join(rsn_dir, rsn) for rsn in os.listdir(rsn_dir)]
    IP_dirs = [os.path.join(d, ip) for d in RSN_dirs for ip in os.listdir(d)]

    # list of lists of each IP within each RSN, including a 'None' phase.
    IPs = [[None] + os.listdir(d) for d in RSN_dirs]

    # identify all scenarios (cartesian product of sets of IPs between each RSN),
    # then isolate child scenarios with at least 2 parents (sets with one parent
    # are already modeled as IPs within the RSNs)
    all_scenarios = [[_f for _f in s if _f] for s in itertools.product(*IPs)]
    child_scenarios = [s for s in all_scenarios if len(s) > 1]

    # notify user of what was initially found
    str_IPs = '\n'.join([', '.join([_f for _f in i if _f]) for i in IPs])
    print('Found {} implementation phases among {} networks:\n{}\n'
          'This yields {} combined scenarios ({} total)'.format(
              len(IP_dirs), len(RSN_dirs), str_IPs,
              len(child_scenarios), len(all_scenarios) - 1))

    # ==========================================================================
    # UPDATE/CREATE THE PARENT MODEL BUILD INSTRUCTIONS
    # ==========================================================================
    for ip_dir in IP_dirs:
        ip_model = Model(ip_dir)
        vc_dir = os.path.join(ip_dir, 'vc')

        if not os.path.exists(vc_dir):
            print('creating new build instructions for {}'.format(ip_model.name))
            inp.create_inp_build_instructions(baseinp, ip_model.inp.path,
                                              vc_dir, version_id, comments)
        else:
            # check if the alternative model was changed since the last run of
            # this tool --> compare the modification date to the BI's
            # modification date metadata
            latest_bi = vc_utils.newest_file(vc_dir)
            if not vc_utils.bi_is_current(latest_bi):
                # revision date of the alt doesn't match the newest build
                # instructions for this 'imp_level', so we should refresh it
                print('updating build instructions for {}'.format(ip_model.name))
                inp.create_inp_build_instructions(baseinp, ip_model.inp.path,
                                                  vc_dir, version_id, comments)

    # ==========================================================================
    # UPDATE/CREATE THE CHILD MODELS AND CHILD BUILD INSTRUCTIONS
    # ==========================================================================
    for scen in child_scenarios:
        newcombi = '_'.join(sorted(scen))
        new_dir = os.path.join(combi_dir, newcombi)
        vc_dir = os.path.join(combi_dir, newcombi, 'vc')

        # parent model build instruction files
        # BUG (this breaks with model IDs with more than 1 char)
        parent_vc_dirs = [os.path.join(rsn_dir, f[0], f, 'vc') for f in scen]
        latest_parent_bis = [vc_utils.newest_file(d) for d in parent_vc_dirs]
        build_instrcts = [inp.BuildInstructions(bi) for bi in latest_parent_bis]

        if not os.path.exists(new_dir):
            os.mkdir(new_dir)
            newinppath = os.path.join(new_dir, newcombi + '.inp')

            print('creating new child model: {}'.format(newcombi))
            new_build_instructions = sum(build_instrcts)
            new_build_instructions.save(vc_dir, version_id + '.txt')
            new_build_instructions.build(baseline_dir, newinppath)
        else:
            # check if the alternative model was changed since the last run of
            # this tool --> compare the modification date to the BI's
            # modification date metadata
            latest_bi = vc_utils.newest_file(os.path.join(new_dir, 'vc'))
            if not vc_utils.bi_is_current(latest_bi):
                # revision date of the alt doesn't match the newest build
                # instructions for this 'imp_level', so we should refresh it
                print('updating child build instructions for {}'.format(newcombi))
                newinppath = os.path.join(new_dir, newcombi + '.inp')
                new_build_instructions = sum(build_instrcts)
                new_build_instructions.save(vc_dir, version_id + '.txt')
                new_build_instructions.build(baseline_dir, newinppath)
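# Usage sketch (hypothetical layout; per the BUG comment above, RSN directory
# names are assumed to be the first character of each IP name, e.g. IP 'M01'
# lives under RSN directory 'M'):
#
#   rsn_dir/
#       M/M01, M/M02
#       R/R01, R/R02
#
#   create_combinations(baseline_dir=r'P:\Models\Baseline',
#                       rsn_dir=r'P:\Models\RSNs',
#                       combi_dir=r'P:\Models\Combinations',
#                       version_id='v1', comments='initial combinations')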
def create_combinations(baseline_dir, alternatives_dir, combi_dir,
                        version_id='', comments=''):
    """
    Given a set of main alternatives, each split into models with varying
    levels of implementation, this function combines all implementation
    levels of all alternatives into all logical combinations.
    """
    basemodel = Model(baseline_dir)
    baseinp = basemodel.inp.filePath
    alt_directories = os.listdir(alternatives_dir)  # dirs holding alt models
    implementation_levels = []
    newmodels = []
    version_id += '_' + datetime.now().strftime("%y%m%d%H%M%S")

    for alt in alt_directories:
        # iterate through each implementation level of each alternative
        for imp_level in os.listdir(os.path.join(alternatives_dir, alt)):
            implementation_levels.append(os.path.join(alt, imp_level))

            # create or refresh the build instructions file for the alternatives
            alt_imp_level_dir = os.path.join(alternatives_dir, alt, imp_level)
            alt_imp_inp = Model(alt_imp_level_dir).inp.filePath
            vc_directory = os.path.join(alt_imp_level_dir, 'vc')

            if not os.path.exists(vc_directory):
                print('creating new build instructions for {}'.format(imp_level))
                inp.create_inp_build_instructions(baseinp, alt_imp_inp,
                                                  vc_directory, version_id,
                                                  comments)
            else:
                # check if the alternative model was changed since the last run
                # of this tool --> compare the modification date to the BI's
                # modification date metadata
                latest_bi = vc_utils.newest_file(vc_directory)
                if not vc_utils.bi_is_current(latest_bi):
                    # revision date of the alt doesn't match the newest build
                    # instructions for this 'imp_level', so we should refresh it
                    print('updating build instructions for {}'.format(imp_level))
                    inp.create_inp_build_instructions(baseinp, alt_imp_inp,
                                                      vc_directory, version_id,
                                                      comments)

    # create directories for new model combinations
    for L in range(1, len(implementation_levels) + 1):
        for subset in itertools.combinations(implementation_levels, L):
            # subset e.g. = ('A\A01', 'B\B02')
            # newcombi = '_'.join(subset)
            newcombi = '_'.join([os.path.split(s)[1] for s in subset])
            new_combi_dir = os.path.join(combi_dir, newcombi)
            vc_directory = os.path.join(new_combi_dir, 'vc')

            # create a list of the parent directories; use that to prevent
            # two or more from the same alternative
            alternative_dirs = [os.path.split(s)[0] for s in subset]

            if len(alternative_dirs) == len(set(alternative_dirs)) and len(subset) > 1:
                # the list length equaling the set length (hashable) confirms
                # that there are no duplicate alternatives in the subset
                parent_vc_dirs = [os.path.join(alternatives_dir, f, 'vc')
                                  for f in subset]
                latest_parent_bis = [vc_utils.newest_file(d)
                                     for d in parent_vc_dirs]
                build_instrcts = [inp.BuildInstructions(bi)
                                  for bi in latest_parent_bis]

                if not os.path.exists(new_combi_dir):
                    os.mkdir(new_combi_dir)
                    newinppath = os.path.join(new_combi_dir, newcombi + '.inp')

                    # collect build instructions from each alt's implementation
                    # level for this combination. Select those with the current
                    # version id, or the latest version.
                    print('creating new child model: {}'.format(newcombi))
                    new_build_instructions = sum(build_instrcts)
                    new_build_instructions.save(vc_directory, version_id + '.txt')
                    new_build_instructions.build(baseline_dir, newinppath)
                else:
                    # check if the alternative model was changed since the last
                    # run of this tool --> compare the modification date to the
                    # BI's modification date metadata
                    latest_bi = vc_utils.newest_file(
                        os.path.join(new_combi_dir, 'vc'))
                    if not vc_utils.bi_is_current(latest_bi):
                        # revision date of the alt doesn't match the newest
                        # build instructions for this 'imp_level', so we
                        # should refresh it
                        print('updating child build instructions for {}'.format(
                            newcombi))
                        newinppath = os.path.join(new_combi_dir,
                                                  newcombi + '.inp')
                        new_build_instructions = sum(build_instrcts)
                        new_build_instructions.save(vc_directory,
                                                    version_id + '.txt')
                        new_build_instructions.build(baseline_dir, newinppath)
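# Note on sum(build_instrcts) above: Python's sum() folds the list with +
# starting from 0, so this pattern assumes inp.BuildInstructions implements
# __add__ (and __radd__ to absorb the integer 0) to merge instruction sets:
#
#   merged = sum([bi_a, bi_b])   # equivalent to 0 + bi_a + bi_b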