def write_aggregate_read_struct(attribute_dic=None,
                                buildings_usage_classification="HAZUS"):
    """Write a test building CSV, aggregate it in place, and load it as Structures.

    Creates a temporary CSV of test building data, runs the aggregation
    step over it (no output file is given, so ``aggregate_building_db``
    rewrites ``file_name`` in place), then reads the aggregated file back
    through ``Structures.from_csv``.

    Args:
        attribute_dic: optional dict of building attributes handed to
            ``write_test_file``; if None that helper supplies defaults.
        buildings_usage_classification: usage scheme name forwarded to
            ``Structures.from_csv`` (default "HAZUS").

    Returns:
        (attribute_dic, sites) — the attribute dict actually written and
        the loaded ``Structures`` instance.
    """
    handle, file_name = tempfile.mkstemp(".csv", "test_aggregate_struct_")
    os.close(handle)
    file_name2delete, attribute_dic = write_test_file(file_name, attribute_dic)

    # Locate the EQRM root directory relative to this test file.
    this_dir, tail = path.split(__file__)
    if this_dir == "":
        this_dir = "."
    eqrm_dir = this_dir + sep + ".."

    # NOTE(review): the original also built an extended copy of
    # attribute_conversions (with "UFI" -> int) here, but never used it on
    # this code path — removed as dead code.

    # Aggregate in place: with no output file argument, file_name itself
    # is overwritten with the aggregated data.
    aggregate_building_db(file_name)

    # Build lookup table for building parameters.
    building_classification_tag = ""
    damage_extent_tag = ""
    default_input_dir = join(eqrm_dir, "resources", "data", "")
    sites = Structures.from_csv(
        file_name,
        building_classification_tag,
        damage_extent_tag,
        default_input_dir=default_input_dir,
        eqrm_dir=eqrm_dir,
        buildings_usage_classification=buildings_usage_classification,
    )

    os.remove(file_name2delete)
    return attribute_dic, sites
def write_aggregate_read_struct(attribute_dic=None,
                                buildings_usage_classification='HAZUS'):
    """Write a test building CSV, aggregate it in place, and load it as Structures.

    A temporary CSV of test building data is created, aggregated in place
    (``aggregate_building_db`` gets no output file, so it rewrites
    ``file_name``), and finally loaded via ``Structures.from_csv``.

    NOTE(review): this definition appears to duplicate an earlier
    ``write_aggregate_read_struct`` in this file — confirm whether one copy
    should be removed.

    Args:
        attribute_dic: optional attribute dict passed to ``write_test_file``;
            None lets that helper pick defaults.
        buildings_usage_classification: usage scheme forwarded to
            ``Structures.from_csv`` (default 'HAZUS').

    Returns:
        (attribute_dic, sites) — the attributes written and the loaded
        ``Structures`` object.
    """
    handle, file_name = tempfile.mkstemp('.csv', 'test_aggregate_struct_')
    os.close(handle)
    file_name2delete, attribute_dic = write_test_file(file_name, attribute_dic)

    # Work out the EQRM root directory relative to this test module.
    this_dir, tail = path.split(__file__)
    if this_dir == '':
        this_dir = '.'
    eqrm_dir = this_dir + sep + '..'

    # NOTE(review): a deep-copied attribute_conversions table extended with
    # 'UFI' -> int was built here but never used — dead code, removed.

    # In-place aggregation: file_name is overwritten.
    aggregate_building_db(file_name)

    # Build lookup table for building parameters.
    building_classification_tag = ''
    damage_extent_tag = ''
    default_input_dir = join(eqrm_dir, 'resources', 'data', '')
    sites = Structures.from_csv(
        file_name,
        building_classification_tag,
        damage_extent_tag,
        default_input_dir=default_input_dir,
        eqrm_dir=eqrm_dir,
        buildings_usage_classification=buildings_usage_classification)

    os.remove(file_name2delete)
    return attribute_dic, sites
def write_aggregate_read_csv(attribute_dic=None,
                             buildings_usage_classification="HAZUS"):
    """Write a test building CSV, aggregate it to a new file, and parse it.

    The aggregated output is read back with ``csv_to_arrays`` using the
    standard attribute-conversion table extended so that the "UFI" column
    is parsed as an int.  Both temporary files are removed afterwards.

    Args:
        attribute_dic: optional attribute dict given to ``write_test_file``;
            None lets that helper choose defaults.
        buildings_usage_classification: accepted for signature symmetry with
            ``write_aggregate_read_struct``; not used on this path.

    Returns:
        (attribute_dic, site) — the attributes written and the parsed
        column arrays from the aggregated CSV.
    """
    fd, csv_file = tempfile.mkstemp(".csv", "test_aggregate_csv_")
    os.close(fd)
    csv_file, attribute_dic = write_test_file(csv_file, attribute_dic)

    # Copy the standard conversion table and make "UFI" parse as an int.
    conversions = copy.deepcopy(attribute_conversions)
    conversions["UFI"] = int

    aggregated_file = csv_file + "out"
    aggregate_building_db(csv_file, aggregated_file)
    site = csv_to_arrays(aggregated_file, **conversions)

    os.remove(csv_file)
    os.remove(aggregated_file)
    return attribute_dic, site
def write_aggregate_read_csv(attribute_dic=None,
                             buildings_usage_classification='HAZUS'):
    """Round-trip test helper: write, aggregate, and re-parse a building CSV.

    NOTE(review): this definition appears to duplicate an earlier
    ``write_aggregate_read_csv`` in this file — confirm whether one copy
    should be removed.

    Args:
        attribute_dic: optional attributes forwarded to ``write_test_file``.
        buildings_usage_classification: unused here; kept for interface
            parity with the struct variant.

    Returns:
        (attribute_dic, site) tuple: attributes written, arrays parsed from
        the aggregated output file.
    """
    # Create a scratch CSV and fill it with test building data.
    temp_handle, source_csv = tempfile.mkstemp('.csv', 'test_aggregate_csv_')
    os.close(temp_handle)
    source_csv, attribute_dic = write_test_file(source_csv, attribute_dic)

    # Conversion table: the standard set plus 'UFI' coerced to int.
    extended = copy.deepcopy(attribute_conversions)
    extended['UFI'] = int

    # Aggregate into a second file, then parse that file into arrays.
    output_csv = source_csv + "out"
    aggregate_building_db(source_csv, output_csv)
    site = csv_to_arrays(output_csv, **extended)

    # Clean up both scratch files before returning.
    os.remove(source_csv)
    os.remove(output_csv)
    return attribute_dic, site