def main(comp_000, comp_090, output_dir):
    """Compute Taghavi & Miranda (2005) storey IMs for a 000/090 component pair."""
    os.makedirs(output_dir, exist_ok=True)
    log_file = os.path.join(output_dir, "log")
    logging.basicConfig(
        format="%(asctime)s %(message)s", filename=log_file, level=logging.DEBUG
    )

    # load both horizontal components; dt is taken from the 000 component metadata
    waveforms = {}
    waveforms["000"], meta = read_ascii(comp_000, meta=True)
    waveforms["090"] = read_ascii(comp_090)
    dt = meta["dt"]

    results = {}
    for component in COMPONENT:
        results[component] = {}
        for a in ALPHA:
            for c in C:
                for period in T:
                    im_name = f"TM05_a{a}_c{c}_T{period}_storey"
                    logging.info(f"calculating {im_name}")
                    df = Taghavi_Miranda_2005(
                        waveforms[component], dt, period, a, c, storey=STORIES
                    )
                    for i in range(STORIES + 1):
                        results[component][f"{im_name}{i}_disp_peak"] = df.iloc[i].disp_peak
                        results[component][f"{im_name}{i}_slope_peak"] = df.iloc[i].slope_peak
                        results[component][f"{im_name}{i}_storey_shear_peak"] = df.iloc[i].storey_shear_peak
                        results[component][f"{im_name}{i}_total_accel_peak"] = df.iloc[i].total_accel_peak

    im_csv_fname = os.path.join(output_dir, "Taghavi_Miranda_2005.csv")
    df = pd.DataFrame.from_dict(results, orient="index")
    df.index.name = "component"
    geom = pd.Series(get_geom(df.loc["000"], df.loc["090"]), name="geom")
    # DataFrame.append was removed in pandas 2.0; concat keeps the same behaviour
    df = pd.concat([df, geom.to_frame().T])
    df.to_csv(im_csv_fname)
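# Usage sketch (an assumption, not from the original script): COMPONENT, ALPHA, C, T
# and STORIES are taken to be module-level constants, and main() could be driven by a
# small command-line entry point such as:
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(
        description="Taghavi & Miranda (2005) storey intensity measures"
    )
    parser.add_argument("comp_000", help="path to 000 component ASCII waveform")
    parser.add_argument("comp_090", help="path to 090 component ASCII waveform")
    parser.add_argument("output_dir", help="directory for log and CSV output")
    args = parser.parse_args()
    main(args.comp_000, args.comp_090, args.output_dir)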
def test_sed_im(component_name, expected_im):
    try:
        path = waveforms[components.index(component_name)]
    except ValueError:
        # missing test data
        print(f"Missing waveform data for {component_name}.")
        raise
    waveform, meta = read_ascii(path, meta=True)
    # waveform in g -> cm/s^2
    velocity = acc2vel(waveform * 980.665, dt=meta["dt"])
    times = np.arange(meta["nt"], dtype=np.float32)
    times *= meta["dt"]
    im = get_specific_energy_density_nd(velocity, times)
    assert np.isclose(im, expected_im, rtol=0.005)
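# For reference, specific energy density is conventionally defined as the integral of
# the squared velocity trace, SED = ∫ v(t)^2 dt. The sketch below is an assumption
# about what get_specific_energy_density_nd computes, shown only to make the test's
# unit handling (g -> cm/s^2 -> cm/s) explicit; it is not the library implementation.
def _sed_reference(velocity, times):
    # trapezoidal integration of v^2 over the record duration (cm^2/s)
    return np.trapz(velocity ** 2, times)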
for i, statname in enumerate(statnames):
    s0 = statname + GV[0]
    s1 = statname + GV[1]
    s2 = statname + GV[2]

    # previous readGP_2-based reads (these also supplied num_pts, dt, shift):
    # stat_data_0_S, num_pts, dt, shift = readGP_2(mainfolder, s0)
    # stat_data_1_S, _, _, _ = readGP_2(mainfolder, s1)
    # stat_data_2_S, _, _, _ = readGP_2(mainfolder, s2)
    #
    # stat_data_0_O, num_pts, dt, shift1 = readGP_2(mainfolder_o, s0)
    # stat_data_1_O, _, _, _ = readGP_2(mainfolder_o, s1)
    # stat_data_2_O, _, _, _ = readGP_2(mainfolder_o, s2)

    # with readGP_2 commented out, num_pts (and dt) must be defined before this loop
    stat_data_0_S = timeseries.read_ascii(mainfolder + s0)
    stat_data_0_S = np.multiply(signal.tukey(int(num_pts), 0.1), stat_data_0_S)
    stat_data_1_S = timeseries.read_ascii(mainfolder + s1)
    stat_data_1_S = np.multiply(signal.tukey(int(num_pts), 0.1), stat_data_1_S)
    stat_data_2_S = timeseries.read_ascii(mainfolder + s2)
    stat_data_2_S = np.multiply(signal.tukey(int(num_pts), 0.1), stat_data_2_S)

    stat_data_0_O = timeseries.read_ascii(mainfolder_o + s0)
    stat_data_0_O = np.multiply(signal.tukey(int(num_pts), 0.1), stat_data_0_O)
    stat_data_1_O = timeseries.read_ascii(mainfolder_o + s1)
    stat_data_1_O = np.multiply(signal.tukey(int(num_pts), 0.1), stat_data_1_O)
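    # The repeated read-then-taper statements above could be collapsed into a helper,
    # e.g. (a sketch, assuming timeseries.read_ascii returns a 1-D array of num_pts
    # samples; note that current SciPy exposes the window as scipy.signal.windows.tukey
    # rather than scipy.signal.tukey):
    #
    #     def read_tapered(folder, name, num_pts, alpha=0.1):
    #         """Read an ASCII waveform and apply a Tukey (cosine) taper."""
    #         return signal.windows.tukey(int(num_pts), alpha) * timeseries.read_ascii(folder + name)
    #
    #     stat_data_0_S = read_tapered(mainfolder, s0, num_pts)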
def plot_station(
    output,
    sources,
    labels,
    tmax,
    verbose,
    station,
):
    """Creates a waveform plot for a specific station."""
    if verbose:
        print("Plotting station: {}...".format(station))

    timeseries = []
    for source in sources:
        if not isinstance(source, str):
            # opened binary object
            timeline = (
                np.arange(source.nt, dtype=np.float32) * source.dt + source.start_sec
            )
            timeseries.append(np.vstack((source.vel(station).T, timeline)))
        else:
            # text directory
            meta = read_ascii(
                os.path.join(source, f"{station}{extensions[0]}"), meta=True
            )[1]
            vals = np.array(
                [
                    read_ascii(os.path.join(source, f"{station}{ext}"))
                    for ext in extensions
                ]
            )
            timeline = (
                np.arange(meta["nt"], dtype=np.float32) * meta["dt"] + meta["sec"]
            )
            timeseries.append(np.vstack((vals, timeline)))

    x_max = max([ts[-1, -1] for ts in timeseries])
    if tmax is not None:
        x_max = min(tmax, x_max)

    # get axis min/max
    all_y = np.concatenate([ts[:-1] for ts in timeseries], axis=1)
    y_min, y_max = np.min(all_y), np.max(all_y)
    y_diff = y_max - y_min
    pgvs = np.max(np.abs(all_y), axis=1)
    ppgvs = np.max(all_y, axis=1)
    npgvs = np.min(all_y, axis=1)
    scale_length = max(int(round(x_max / 25.0)) * 5, 5)

    # start plot
    f, axis = plt.subplots(1, 3, sharex=True, sharey=True, figsize=(20, 4), dpi=96)
    f.subplots_adjust(
        left=0.08, bottom=0.12, right=0.96, top=None, wspace=0.08, hspace=0
    )
    plt.suptitle(
        station,
        fontsize=20,
        x=0.02,
        y=0.5,
        horizontalalignment="left",
        verticalalignment="center",
    )
    plt.xlim([0, x_max])

    # subplots
    for i, s in enumerate(timeseries):
        for j in range(len(extensions)):
            ax = axis[j]
            ax.set_axis_off()
            ax.set_ylim([y_min - y_diff * 0.15, y_max])
            (line,) = ax.plot(
                s[len(extensions)],
                s[j] * min(y_max / ppgvs[j], y_min / npgvs[j]),
                color=colours[i % len(colours)],
                linewidth=1,
            )
            if j == 2:
                line.set_label(labels[i])
                ax.legend()
            if i == 1 and j == 0:
                # Add scale
                ax.plot(
                    [0, scale_length],
                    [y_min - y_diff * 0.1] * 2,
                    color="black",
                    linewidth=1,
                )
                ax.text(
                    0,
                    y_min - y_diff * 0.15,
                    "0",
                    size=12,
                    verticalalignment="top",
                    horizontalalignment="center",
                )
                ax.text(
                    scale_length,
                    y_min - y_diff * 0.15,
                    str(scale_length),
                    size=12,
                    verticalalignment="top",
                    horizontalalignment="center",
                )
                ax.text(
                    scale_length / 2.0,
                    y_min - y_diff * 0.225,
                    "sec",
                    size=12,
                    verticalalignment="top",
                    horizontalalignment="center",
                )
            if i == 0:
                # Add component label
                ax.set_title(extensions[j][1:], fontsize=18)
                ax.text(x_max, y_max, "{:.1f}".format(pgvs[j]), fontsize=14)

    plt.savefig(os.path.join(output, f"{station}.png"))
    plt.close()
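# Invocation sketch (hypothetical values; `extensions` and `colours` are module-level
# names assumed to be defined elsewhere, e.g. the three component suffixes and a colour
# cycle). Each entry of `sources` is either a text waveform directory or an opened
# binary object exposing .nt, .dt, .start_sec and .vel(station):
#
#     plot_station(
#         output="waveform_plots",            # hypothetical output directory
#         sources=["sim_dir/", "obs_dir/"],   # hypothetical text directories
#         labels=["sim", "obs"],
#         tmax=None,
#         verbose=True,
#         station="STAT01",                   # hypothetical station name
#     )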