def testing(self): attributes = {"mo": array(["money", "soup"]), "SITE_CLASS": array(["E", "C"])} latitude = [10, 20] longitude = [1, 2] sites = Sites(latitude, longitude, **attributes) site_class2Vs30 = {"C": 30, "E": 40} sites.set_Vs30(site_class2Vs30) actual = array(latitude) self.assert_(allclose(sites.latitude, actual, 0.001)) actual = array(longitude) self.assert_(allclose(sites.longitude, actual, 0.001)) actual = array(["money", "soup"]) for (att, act) in map(None, sites.attributes["mo"], actual): self.assert_(att == act) actual = array([40, 30]) self.assert_(allclose(sites.attributes["Vs30"], actual, 0.001)) site_class2Vs30 = {"C": 30} try: sites.set_Vs30(site_class2Vs30) except KeyError: pass else: self.failUnless(False, "KeyError not raised")
def __init__(self, latitude, longitude, **attributes):
    """Construct a Bridges object.

    latitude    a vector (tuple, list, ...) of latitude values
    longitude   a vector (tuple, list, ...) of longitude values
    attributes  a dictionary of bridge attributes
    """

    Sites.__init__(self, latitude, longitude, **attributes)
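# Usage sketch for the constructor above (illustrative only: the attribute
# values are borrowed from the bridge test data later in this section, and
# real runs would normally load this data with a from_csv-style reader rather
# than building it by hand). Assumes Bridges and numpy's array are available
# in this namespace.

def _example_build_bridges():
    attributes = {'BID': array([2, 3]),
                  'STRUCTURE_CLASSIFICATION': array(['HWB17', 'HWB22'])}
    latitude = [-35.352085, -35.348677]
    longitude = [149.236994, 149.239383]
    return Bridges(latitude, longitude, **attributes)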
def __init__(self, latitude, longitude, vulnerability_set, **attributes):
    """Create an object holding all Structures data for user-defined
    vulnerability curves.
    """

    # inherit setup from Sites, then add the vulnerability set
    Sites.__init__(self, latitude, longitude, **attributes)
    self.vulnerability_set = vulnerability_set

    # Validate that the curves match this set
    self.validate_vulnerability_set()
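# Usage sketch for the constructor above. The enclosing class name is not
# shown in this section, so 'StructuresVulnerability' is a placeholder, and
# vulnerability_set is treated as an opaque object; the only behaviour relied
# on here is that validate_vulnerability_set() raises if the curves do not
# match the supplied set.

def _example_build_vulnerability_structures(vulnerability_set):
    attributes = {'SITE_CLASS': array(['E', 'C'])}
    latitude = [10, 20]
    longitude = [1, 2]
    # placeholder class name; substitute the real class that defines the
    # __init__ above
    return StructuresVulnerability(latitude, longitude, vulnerability_set,
                                   **attributes)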
def __init__(self, latitude, longitude, building_parameters, **attributes):
    """Create an object holding all Structures data.

    Inherits from Sites which handles lat, lon and attributes.
    Structures adds the 'building_parameters' attribute which must be
    handled specially.

    Compare this with the handling of 'extra' classification data in the
    Bridges class.

    TODO: make extra data here be handled in a similar way as in Bridges?
    """

    # inherit setup from Sites, add building parameters
    Sites.__init__(self, latitude, longitude, **attributes)
    self.building_parameters = building_parameters
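# Usage sketch for the Structures constructor above (illustrative only; the
# building_parameters key is an invented placeholder, since the real
# dictionary is normally built from building classification tables rather
# than by hand). Assumes Structures and numpy's array are available here.

def _example_build_structures():
    # kept separate from **attributes because Structures handles it specially
    building_parameters = {'structure_classification': array(['placeholder'])}
    attributes = {'SITE_CLASS': array(['C'])}
    return Structures([10.0], [1.0], building_parameters, **attributes)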
def test_closest_site(self):
    # Test data from GA website
    # http://www.ga.gov.au/earth-monitoring/geodesy/geodetic-techniques/distance-calculation-algorithms.html
    latitude = [-31, -31, -32, -33, -34, -35, -40, -50, -60, -70, -80]
    longitude = [150, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151]
    sites = Sites(latitude, longitude)

    # Point A from website
    point_lat = -30
    point_lon = 150

    closest_site = sites.closest_site(point_lat, point_lon)

    assert sites.latitude[closest_site] == latitude[0]
    assert sites.longitude[closest_site] == longitude[0]
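# The test above only requires that Sites.closest_site(lat, lon) return the
# index of the site nearest to the given point. A self-contained sketch using
# a haversine great-circle distance, which is sufficient for this test data;
# the production code may use a different geodesic algorithm (see the GA link
# in the test).

from math import radians, sin, cos, asin, sqrt

def closest_site_sketch(site_lats, site_lons, point_lat, point_lon):
    """Return the index of the (lat, lon) pair closest to the given point."""
    def haversine_km(lat1, lon1, lat2, lon2, radius_km=6371.0):
        dlat = radians(lat2 - lat1)
        dlon = radians(lon2 - lon1)
        a = (sin(dlat / 2.0) ** 2 +
             cos(radians(lat1)) * cos(radians(lat2)) * sin(dlon / 2.0) ** 2)
        return 2.0 * radius_km * asin(sqrt(a))

    distances = [haversine_km(lat, lon, point_lat, point_lon)
                 for (lat, lon) in zip(site_lats, site_lons)]
    return distances.index(min(distances))

# e.g. closest_site_sketch([-31, -31, -32], [150, 151, 151], -30, 150) == 0,
# matching the assertion in the test above.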
def create_site(): """ Create dummy site. Uses the same technique from test_sites (Test_Sites.test_read_from_file """ # create dummy CSV file - this is bridges data, but sites should handle anything lat = [-35.352085] lon = [149.236994] clsf = ['HWB17'] cat = ['BRIDGE'] skew = [0] span = [2] cls = ['E'] attribute_keys = ['BID', 'STRUCTURE_CLASSIFICATION'] dummy_csv_data = ['BID,LONGITUDE,LATITUDE,STRUCTURE_CLASSIFICATION,' 'STRUCTURE_CATEGORY,SKEW,SPAN,SITE_CLASS', '2,%.6f,%.6f,%s,%s,%s,%s,%s' % (lon[0], lat[0], clsf[0], cat[0], skew[0], span[0], cls[0])] (handle, filename) = tempfile.mkstemp('.csv', 'test_sites_') os.close(handle) f = open(filename, 'wb') f.write('\n'.join(dummy_csv_data)) f.close() # now read file - pass attribute_conversion as **kwargs data sites = Sites.from_csv(filename, BID=int, STRUCTURE_CLASSIFICATION=str) return sites, filename
def create_site(): """ Create dummy site. Uses the same technique from test_sites (Test_Sites.test_read_from_file """ # create dummy CSV file - this is bridges data, but sites should handle anything lat = [-35.352085] lon = [149.236994] clsf = ['HWB17'] cat = ['BRIDGE'] skew = [0] span = [2] cls = ['E'] attribute_keys = ['BID', 'STRUCTURE_CLASSIFICATION'] dummy_csv_data = [ 'BID,LONGITUDE,LATITUDE,STRUCTURE_CLASSIFICATION,' 'STRUCTURE_CATEGORY,SKEW,SPAN,SITE_CLASS', '2,%.6f,%.6f,%s,%s,%s,%s,%s' % (lon[0], lat[0], clsf[0], cat[0], skew[0], span[0], cls[0]) ] (handle, filename) = tempfile.mkstemp('.csv', 'test_sites_') os.close(handle) f = open(filename, 'wb') f.write('\n'.join(dummy_csv_data)) f.close() # now read file - pass attribute_conversion as **kwargs data sites = Sites.from_csv(filename, BID=int, STRUCTURE_CLASSIFICATION=str) return sites, filename
def test_read_from_file_Vs30(self):
    """Test reading Sites data from file taking into account Vs30 data.

    1. If VS30 is present and requested, it should be an attribute
    2. If VS30 is not present and requested, it should not be an attribute
    """

    raw_csv_no_vs30 = """BID,LATITUDE,LONGITUDE
2,-6.4125,110.837502
3,-6.4125,110.845833
4,-6.4125,110.854164"""

    raw_csv_vs30 = """BID,LATITUDE,LONGITUDE,VS30
2,-6.4125,110.837502,666
3,-6.4125,110.845833,560
4,-6.4125,110.854164,560"""

    (handle, filename_vs30) = tempfile.mkstemp('.csv', 'test_sites_')
    os.close(handle)
    f = open(filename_vs30, 'wb')
    f.write(raw_csv_vs30)
    f.close()

    (handle, filename_no_vs30) = tempfile.mkstemp('.csv', 'test_sites_')
    os.close(handle)
    f = open(filename_no_vs30, 'wb')
    f.write(raw_csv_no_vs30)
    f.close()

    expected_vs30 = [666, 560, 560]

    # Test 1.
    sites = Sites.from_csv(filename_vs30, VS30=float)
    self.failUnless('Vs30' in sites.attributes)
    self.failUnless(np.all(sites.attributes['Vs30'] == expected_vs30))

    # Test 2.
    sites = Sites.from_csv(filename_no_vs30, VS30=float)
    self.failUnless('Vs30' not in sites.attributes)

    # get rid of test data files
    os.remove(filename_vs30)
    os.remove(filename_no_vs30)
def testing_truncate_sites_for_test(self):
    attributes = {'mo': array(['money', 'soup']),
                  'SITE_CLASS': array(['E', 'C']),
                  'id': array([1, 2])}
    latitude = [10, 20]
    longitude = [1, 2]
    sites = Sites(latitude, longitude, **attributes)

    use_site_indexes = False
    site_indexes = array([2])
    new_sites = truncate_sites_for_test(use_site_indexes, sites,
                                        site_indexes)
    self.failUnless(allclose(array([1, 2]), new_sites.attributes['id']))

    use_site_indexes = True
    site_indexes = array([2])
    new_sites = truncate_sites_for_test(use_site_indexes, sites,
                                        site_indexes)
    self.failUnlessEqual(site_indexes, new_sites.attributes['id'])
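# The test above fixes the contract of truncate_sites_for_test: with
# use_site_indexes False the sites object comes back unchanged, and with it
# True only the sites at the given 1-based indexes are kept (index 2 selects
# the site whose 'id' is 2). A minimal sketch under the assumption that Sites
# supports array-style indexing; if it does not, the same selection would be
# applied to latitude, longitude and each attribute array individually.

def truncate_sites_for_test_sketch(use_site_indexes, sites, site_indexes):
    if not use_site_indexes:
        return sites
    # convert the 1-based user indexes to 0-based array indexes
    return sites[site_indexes - 1]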
def create_analysis_objects(self):
    # Parameters
    rupture_centroid_lat = asarray([-30])
    rupture_centroid_lon = asarray([150])
    length = asarray([1.0])
    azimuth = asarray([2.0])
    width = asarray([3.0])
    dip = asarray([4.0])
    depth = asarray([5.0])
    Mw = asarray([6.0])
    atten_models = asarray(['Allen',
                            'Toro_1997_midcontinent',
                            'Sadigh_97',
                            'Youngs_97_interface',
                            'Youngs_97_intraslab'])
    atten_model_weights = asarray([0.2, 0.2, 0.2, 0.2, 0.2])
    atten_periods = asarray([0, 1.0, 2.0])
    sites_lat = asarray([-31])
    sites_lon = asarray([150])

    # Event Set
    event_set = Event_Set.create(rupture_centroid_lat=rupture_centroid_lat,
                                 rupture_centroid_lon=rupture_centroid_lon,
                                 azimuth=azimuth,
                                 dip=dip,
                                 Mw=Mw,
                                 depth=depth,
                                 area=length * width,
                                 width=width,
                                 length=length)

    # Event Activity
    event_activity = Event_Activity(len(event_set))
    event_activity.set_scenario_event_activity()

    # Source Model
    source_model = Source_Model.create_scenario_source_model(len(event_set))
    source_model.set_attenuation(atten_models, atten_model_weights)
    source_model.set_ground_motion_calcs(atten_periods)

    event_set.scenario_setup()
    event_activity.ground_motion_model_logic_split(source_model, True)

    # Sites
    sites = Sites(sites_lat, sites_lon)

    # SA
    # Set up synthetic SA figures
    # Dimensions - spawn, gmm, rm, sites, events, period
    motion = zeros((1,
                    len(atten_models),
                    1,
                    len(sites),
                    len(event_set),
                    len(atten_periods)),
                   dtype=float)

    # Allen
    motion[:, 0, :, :, :, 0] = 0   # period 0
    motion[:, 0, :, :, :, 1] = 1   # period 1.0
    motion[:, 0, :, :, :, 2] = 2   # period 2.0

    # Toro_1997_midcontinent
    motion[:, 1, :, :, :, 0] = 3   # period 0
    motion[:, 1, :, :, :, 1] = 4   # period 1.0
    motion[:, 1, :, :, :, 2] = 5   # period 2.0

    # Sadigh_97
    motion[:, 2, :, :, :, 0] = 6   # period 0
    motion[:, 2, :, :, :, 1] = 7   # period 1.0
    motion[:, 2, :, :, :, 2] = 8   # period 2.0

    # Youngs_97_interface
    motion[:, 3, :, :, :, 0] = 9   # period 0
    motion[:, 3, :, :, :, 1] = 10  # period 1.0
    motion[:, 3, :, :, :, 2] = 11  # period 2.0

    # Youngs_97_intraslab
    motion[:, 4, :, :, :, 0] = 12  # period 0
    motion[:, 4, :, :, :, 1] = 13  # period 1.0
    motion[:, 4, :, :, :, 2] = 14  # period 2.0

    # A minimal set of eqrm_flags so create_parameter_data passes.
    # We only care about atten_models; everything else is a dummy value.
    eqrm_flags = {}
    eqrm_flags['run_type'] = 'hazard'
    eqrm_flags['is_scenario'] = True
    eqrm_flags['output_dir'] = self.dir
    eqrm_flags['input_dir'] = self.dir
    eqrm_flags['site_tag'] = 'different_to_function'
    eqrm_flags['return_periods'] = [0.0]
    eqrm_flags['use_amplification'] = False
    eqrm_flags['zone_source_tag'] = 'not_used'
    eqrm_flags['atten_periods'] = atten_periods
    eqrm_flags['atten_models'] = atten_models

    return (event_set, event_activity, source_model, sites, motion,
            eqrm_flags)
def test_read_from_file(self): """Test reading Sites data from a file.""" # create dummy CSV file - this is bridges data, but sites should handle anything lat = [-35.352085, -35.348677, -35.336884, -35.345209, -35.340859, -35.301472, -35.293012, -35.320122] lon = [149.236994, 149.239383, 149.241625, 149.205986, 149.163037, 149.141364, 149.126767, 149.063810] clsf = ['HWB17', 'HWB17', 'HWB17', 'HWB22', 'HWB3', 'HWB17', 'HWB10', 'HWB28'] cat = ['BRIDGE', 'BRIDGE', 'BRIDGE', 'BRIDGE', 'BRIDGE', 'BRIDGE', 'BRIDGE', 'BRIDGE'] skew = [0, 32, 20, 4, 0, 0, 12, 0] span = [2, 3, 6, 2, 1, 1, 3, 3] cls = ['E', 'F', 'G', 'D', 'E', 'F', 'G', 'C'] attribute_keys = ['BID', 'STRUCTURE_CLASSIFICATION'] dummy_csv_data = ['BID,LONGITUDE,LATITUDE,STRUCTURE_CLASSIFICATION,' 'STRUCTURE_CATEGORY,SKEW,SPAN,SITE_CLASS', '2,%.6f,%.6f,%s,%s,%s,%s,%s' % (lon[0], lat[0], clsf[0], cat[0], skew[0], span[0], cls[0]), '3,%.6f,%.6f,%s,%s,%s,%s,%s' % (lon[1], lat[1], clsf[1], cat[1], skew[1], span[1], cls[1]), '4,%.6f,%.6f,%s,%s,%s,%s,%s' % (lon[2], lat[2], clsf[2], cat[2], skew[2], span[2], cls[2]), '5,%.6f,%.6f,%s,%s,%s,%s,%s' % (lon[3], lat[3], clsf[3], cat[3], skew[3], span[3], cls[3]), '6,%.6f,%.6f,%s,%s,%s,%s,%s' % (lon[4], lat[4], clsf[4], cat[4], skew[4], span[4], cls[4]), '7,%.6f,%.6f,%s,%s,%s,%s,%s' % (lon[5], lat[5], clsf[5], cat[5], skew[5], span[5], cls[5]), '8,%.6f,%.6f,%s,%s,%s,%s,%s' % (lon[6], lat[6], clsf[6], cat[6], skew[6], span[6], cls[6]), '9,%.6f,%.6f,%s,%s,%s,%s,%s' % (lon[7], lat[7], clsf[7], cat[7], skew[7], span[7], cls[7])] (handle, filename) = tempfile.mkstemp('.csv', 'test_sites_') os.close(handle) f = open(filename, 'wb') f.write('\n'.join(dummy_csv_data)) f.close() # now read file - pass attribute_conversion as **kwargs data sites = Sites.from_csv(filename, BID=int, STRUCTURE_CLASSIFICATION=str) # make sure we have required attributes, and only those attributes self.failUnless(hasattr(sites, 'longitude')) self.failUnless(np.all(sites.longitude == lon)) self.failUnless(hasattr(sites, 'latitude')) self.failUnless(np.all(sites.latitude == lat)) self.failUnless(len(sites.attributes) == len(attribute_keys)) for key in sites.attributes: if key not in attribute_keys: self.fail("Found unexpected .attribute key '%s'" % key) # repeat above test, pass attributes a dict attr_dict = {'BID': int, 'STRUCTURE_CATEGORY': str, 'SKEW': float, 'SPAN': int, 'SITE_CLASS': str} attribute_keys = ['BID', 'STRUCTURE_CATEGORY', 'SKEW', 'SPAN', 'SITE_CLASS'] sites = Sites.from_csv(filename, **attr_dict) # make sure we have required attributes, and only those attributes self.failUnless(hasattr(sites, 'longitude')) self.failUnless(np.all(sites.longitude == lon)) self.failUnless(hasattr(sites, 'latitude')) self.failUnless(np.all(sites.latitude == lat)) self.failUnless(len(sites.attributes) == len(attribute_keys)) for key in sites.attributes: if key not in attribute_keys: self.fail("Found unexpected .attribute key '%s'" % key) # get rid of test data file os.remove(filename)
def test_read_from_file(self): """Test reading Sites data from a file.""" # create dummy CSV file - this is bridges data, but sites should handle anything lat = [-35.352085, -35.348677, -35.336884, -35.345209, -35.340859, -35.301472, -35.293012, -35.320122] lon = [149.236994, 149.239383, 149.241625, 149.205986, 149.163037, 149.141364, 149.126767, 149.063810] clsf = ["HWB17", "HWB17", "HWB17", "HWB22", "HWB3", "HWB17", "HWB10", "HWB28"] cat = ["BRIDGE", "BRIDGE", "BRIDGE", "BRIDGE", "BRIDGE", "BRIDGE", "BRIDGE", "BRIDGE"] skew = [0, 32, 20, 4, 0, 0, 12, 0] span = [2, 3, 6, 2, 1, 1, 3, 3] cls = ["E", "F", "G", "D", "E", "F", "G", "C"] attribute_keys = ["BID", "STRUCTURE_CLASSIFICATION"] dummy_csv_data = [ "BID,LONGITUDE,LATITUDE,STRUCTURE_CLASSIFICATION," "STRUCTURE_CATEGORY,SKEW,SPAN,SITE_CLASS", "2,%.6f,%.6f,%s,%s,%s,%s,%s" % (lon[0], lat[0], clsf[0], cat[0], skew[0], span[0], cls[0]), "3,%.6f,%.6f,%s,%s,%s,%s,%s" % (lon[1], lat[1], clsf[1], cat[1], skew[1], span[1], cls[1]), "4,%.6f,%.6f,%s,%s,%s,%s,%s" % (lon[2], lat[2], clsf[2], cat[2], skew[2], span[2], cls[2]), "5,%.6f,%.6f,%s,%s,%s,%s,%s" % (lon[3], lat[3], clsf[3], cat[3], skew[3], span[3], cls[3]), "6,%.6f,%.6f,%s,%s,%s,%s,%s" % (lon[4], lat[4], clsf[4], cat[4], skew[4], span[4], cls[4]), "7,%.6f,%.6f,%s,%s,%s,%s,%s" % (lon[5], lat[5], clsf[5], cat[5], skew[5], span[5], cls[5]), "8,%.6f,%.6f,%s,%s,%s,%s,%s" % (lon[6], lat[6], clsf[6], cat[6], skew[6], span[6], cls[6]), "9,%.6f,%.6f,%s,%s,%s,%s,%s" % (lon[7], lat[7], clsf[7], cat[7], skew[7], span[7], cls[7]), ] (handle, filename) = tempfile.mkstemp(".csv", "test_sites_") os.close(handle) f = open(filename, "wb") f.write("\n".join(dummy_csv_data)) f.close() # now read file - pass attribute_conversion as **kwargs data sites = Sites.from_csv(filename, BID=int, STRUCTURE_CLASSIFICATION=str) # make sure we have required attributes, and only those attributes self.failUnless(hasattr(sites, "longitude")) self.failUnless(np.all(sites.longitude == lon)) self.failUnless(hasattr(sites, "latitude")) self.failUnless(np.all(sites.latitude == lat)) self.failUnless(len(sites.attributes) == len(attribute_keys)) for key in sites.attributes: if key not in attribute_keys: self.fail("Found unexpected .attribute key '%s'" % key) # repeat above test, pass attributes a dict attr_dict = {"BID": int, "STRUCTURE_CATEGORY": str, "SKEW": float, "SPAN": int, "SITE_CLASS": str} attribute_keys = ["BID", "STRUCTURE_CATEGORY", "SKEW", "SPAN", "SITE_CLASS"] sites = Sites.from_csv(filename, **attr_dict) # make sure we have required attributes, and only those attributes self.failUnless(hasattr(sites, "longitude")) self.failUnless(np.all(sites.longitude == lon)) self.failUnless(hasattr(sites, "latitude")) self.failUnless(np.all(sites.latitude == lat)) self.failUnless(len(sites.attributes) == len(attribute_keys)) for key in sites.attributes: if key not in attribute_keys: self.fail("Found unexpected .attribute key '%s'" % key) # get rid of test data file os.remove(filename)