def __init__(self):
    """Record run provenance: code version, host name and MPI rank.

    Sets:
      self.version     - EQRM code version string from get_version()
      self.host        - network name of the machine (platform.uname()[1])
      self.node_number - this process's MPI rank, as a string
    """
    # This is a hack to get the version based on the eqrm_code dir.
    # get_version() appears to be cwd-sensitive, so temporarily step up to
    # the parent directory and restore the cwd afterwards.
    # NOTE(review): not exception-safe - if get_version() raises, the cwd
    # is left changed.  Consider try/finally.  TODO confirm get_version()
    # really needs the cwd changed.
    current_dir = os.getcwd()
    os.chdir('..')
    #os.chdir('eqrm_code')
    # date and modified are returned but intentionally unused here.
    self.version, date, modified = get_version()
    os.chdir(current_dir)
    self.host = platform.uname()[1]
    # Rank is stored as a string, presumably for filename construction.
    parallel = Parallel()
    self.node_number = str(parallel.rank)
def calc_params(eqrm_flags):
    """Derive sizing parameters for later calculations from the event set.

    Loads the event set described by eqrm_flags (serially) and returns a
    dict with keys:
      'num_events', 'num_gmm_max', 'num_spawn', 'num_rm', 'num_periods',
      'use_amplification', 'collapse_gmm'
    """
    serial = Parallel(is_parallel=False)
    # TODO: Do we really need to load this in its entirety?
    event_set, event_activity, source_model = create_event_set(eqrm_flags,
                                                               serial)
    return {
        'num_events': len(event_set),
        'num_gmm_max': source_model.get_max_num_atten_models(),
        'num_spawn': eqrm_flags.atten_spawn_bins,
        'num_rm': event_activity.recurrence_model_count(),
        'num_periods': len(eqrm_flags.atten_periods),
        'use_amplification': eqrm_flags.use_amplification,
        'collapse_gmm': eqrm_flags.atten_collapse_Sa_of_atten_models,
    }
""" Testing parallalism mpirun -machinefile ~/.machines.cyclone -c 4 python parallel_spike.py """ from eqrm_code.parallel import Parallel import eqrm_code.polygon import pypar sites_len = 10 sites = range(sites_len) para = Parallel() para.calc_lo_hi(sites_len) print "I am processor %d of %d on node %s. lo is %i, hi is %i" % ( para.rank, para.size, para.node, int(para.lo), int(para.hi), ) for i in range(para.lo, para.hi): print i # Now lets para.finalize()
def events_shaking_a_site(output_dir, site_tag, site_lat, site_lon, period,
                          soil_amp, file_format='binary'):
    """events_shaking_a_site

    Given disaggregated output data produce a csv file showing ground
    motion and event information for the given site and period.

    Parameters:
      output_dir  - path to directory where the simulation data has been
                    produced, and where the output file will be placed
      site_tag    - used to identify the appropriate data as input
      site_lat    - site latitude
      site_lon    - site longitude (the closest site as the crow flies
                    is used)
      period      - attenuation period (must be an exact match)
      soil_amp    - if True use soil results, else use bedrock results
      file_format - format of the stored motion data (default 'binary')

    Raises:
      Exception - if period is not one of the stored attenuation periods
      IOError   - if soil_amp is not a bool

    Output file (note the name uses the *closest* site's lat/lon):
      <output_dir>/
        if soil_amp:
          <site_tag>_soil_SA_events_ap[<period>]_lat[<lat>]_lon[<lon>].csv
        else:
          <site_tag>_bedrock_SA_events_ap[<period>]_lat[<lat>]_lon[<lon>].csv

    Columns:
      'ground_motion'        - ground motion value
      'ground_motion_model'  - ground motion model used
      'trace_start_lat'      - rupture trace start latitude
      'trace_start_lon'      - rupture trace start longitude
      'trace_end_lat'        - rupture trace end latitude
      'trace_end_lon'        - rupture trace end longitude
      'rupture_centroid_lat' - rupture centroid latitude
      'rupture_centroid_lon' - rupture centroid longitude
      'depth'                - rupture depth to centroid (km)
      'azimuth'              - rupture azimuth (degrees from true North)
      'dip'                  - rupture dip
      'Mw'                   - rupture moment magnitude
      'length'               - rupture length
      'width'                - rupture width
      'activity'             - event activity (probability that the event
                               will occur this year)
      'Rjb'                  - Joyner-Boore distance to rupture plane
      'Rrup'                 - Closest distance to rupture plane
      'site_lat'             - Closest site latitude
      'site_lon'             - Closest site longitude

    Returns the path of the file written.
    """
    # Ground motion
    motion, atten_periods = load_motion(output_dir, site_tag, soil_amp,
                                        file_format)
    if period not in atten_periods:
        raise Exception("Period %s not in atten_periods %s" %
                        (period, atten_periods))
    period_ind = where(period == atten_periods)[0][0]

    parallel = Parallel(is_parallel=False)

    # Event set objects
    (event_set, event_activity, source_model) = load_event_set(
        parallel, os.path.join(output_dir, '%s_event_set' % site_tag))

    # Site objects
    sites = load_sites(parallel,
                       os.path.join(output_dir, '%s_sites' % site_tag))
    closest_site_ind = sites.closest_site(site_lat, site_lon)
    closest_site_lat = sites[closest_site_ind].latitude[0]
    closest_site_lon = sites[closest_site_ind].longitude[0]

    # Get the motion that corresponds to this site, collapsing spawn, rm
    # and period.  Motion dimensions - spawn, gmm, rm, sites, events, period.
    motion_for_site = motion[0, :, 0, closest_site_ind, :, period_ind]

    # Reconstruct the rupture trace end coordinates from the centroid.
    # NOTE(review): passes rupture_centroid_x/-y as the x/y offsets -
    # presumably xy_to_ll projects centroid offsets back to the trace end;
    # confirm against xy_to_ll's definition.
    (event_set.trace_end_lat,
     event_set.trace_end_lon) = xy_to_ll(event_set.rupture_centroid_x,
                                         -event_set.rupture_centroid_y,
                                         event_set.rupture_centroid_lat,
                                         event_set.rupture_centroid_lon,
                                         event_set.azimuth)

    # Event activity dimensions - spawn, gmm, rm, events.
    # Collapse spawn and rm.
    event_activity = event_activity.event_activity[0, :, 0, :]

    # Distances from the closest site to each event.
    Rjb = sites.distances_from_event_set(event_set).distance('Joyner_Boore')
    Rjb_for_site = Rjb.swapaxes(0, 1)[:, closest_site_ind]
    Rrup = sites.distances_from_event_set(event_set).distance('Rupture')
    Rrup_for_site = Rrup.swapaxes(0, 1)[:, closest_site_ind]

    if soil_amp is True:
        motion_name = 'soil_SA'
    elif soil_amp is False:
        motion_name = 'bedrock_SA'
    else:
        raise IOError("soil_amp must be True or False")

    # Create file and write headers.  The file name embeds the closest
    # site's coordinates, not the requested ones.
    filename = '%s_%s_events_ap%s_lat%s_lon%s.csv' % (site_tag, motion_name,
                                                      period,
                                                      closest_site_lat,
                                                      closest_site_lon)
    csv_path = os.path.join(output_dir, filename)
    # 'wb': the Python 2 csv module requires binary mode.  try/finally
    # guarantees the handle is closed (the original leaked it).
    csv_file = open(csv_path, 'wb')
    try:
        handle = csv.writer(csv_file)
        handle.writerow(['ground_motion', 'ground_motion_model',
                         'trace_start_lat', 'trace_start_lon',
                         'trace_end_lat', 'trace_end_lon',
                         'rupture_centroid_lat', 'rupture_centroid_lon',
                         'depth', 'azimuth', 'dip', 'Mw', 'length', 'width',
                         'activity', 'Rjb', 'Rrup', 'site_lat', 'site_lon'])

        # One row per (event, ground motion model) pair.
        for i in range(motion_for_site.shape[1]):  # events
            trace_start_lat = event_set.trace_start_lat[i]
            trace_start_lon = event_set.trace_start_lon[i]
            trace_end_lat = event_set.trace_end_lat[i]
            trace_end_lon = event_set.trace_end_lon[i]
            rupture_centroid_lat = event_set.rupture_centroid_lat[i]
            rupture_centroid_lon = event_set.rupture_centroid_lon[i]
            depth = event_set.depth[i]
            azimuth = event_set.azimuth[i]
            dip = event_set.dip[i]
            mw = event_set.Mw[i]
            length = event_set.length[i]
            width = event_set.width[i]
            rjb = Rjb_for_site[i]
            rrup = Rrup_for_site[i]
            ground_motion = motion_for_site[:, i]
            activity = event_activity[:, i]
            event_source = source_model[int(event_set.source[i])]
            for gmm_ind, gmm in enumerate(event_source.atten_models):
                handle.writerow([ground_motion[gmm_ind], gmm,
                                 trace_start_lat, trace_start_lon,
                                 trace_end_lat, trace_end_lon,
                                 rupture_centroid_lat, rupture_centroid_lon,
                                 depth, azimuth, dip, mw, length, width,
                                 activity[gmm_ind], rjb, rrup,
                                 closest_site_lat, closest_site_lon])
    finally:
        csv_file.close()
    return csv_path
""" Testing parallalism mpirun -machinefile ~/.machines.cyclone -c 4 python parallel_spike.py """ from eqrm_code.parallel import Parallel import eqrm_code.polygon import pypar sites_len = 10 sites = range(sites_len) para = Parallel() para.calc_lo_hi(sites_len) print "I am processor %d of %d on node %s. lo is %i, hi is %i" % ( para.rank, para.size, para.node, int(para.lo), int(para.hi)) for i in range(para.lo, para.hi): print i # Now lets para.finalize()