def test_to_dataframe():
    """Check streams_to_dataframe column structure for a GeoNet event.

    Verifies station metadata round-trips and that both levels of the
    resulting MultiIndex columns contain exactly the expected names.
    """
    cwb_files, event = read_data_dir('geonet', 'nz2018p115908')
    st = read_data(cwb_files[0])[0]
    df1 = streams_to_dataframe([st, st], event=event)
    np.testing.assert_array_equal(df1.STATION.tolist(), ['WPWS', 'WPWS'])
    np.testing.assert_array_equal(
        df1.NAME.tolist(),
        ['Waipawa_District_Council', 'Waipawa_District_Council'])
    # Compare as sets: only membership matters, so the test is robust to
    # whatever ordering pandas gives the MultiIndex levels.
    cmp1 = set(['ELEVATION', 'EPICENTRAL_DISTANCE',
                'GREATER_OF_TWO_HORIZONTALS', 'H1', 'H2',
                'HYPOCENTRAL_DISTANCE', 'LAT', 'LON', 'NAME', 'NETID',
                'SOURCE', 'STATION', 'Z'])
    cmp2 = set(['', 'PGA', 'PGV', 'SA(0.3)', 'SA(1.0)', 'SA(3.0)'])
    header1 = set(df1.columns.levels[0])
    header2 = set(df1.columns.levels[1])
    assert header1 == cmp1
    assert header2 == cmp2
def _test_to_dataframe_superseded():
    """Superseded duplicate of test_to_dataframe.

    NOTE(review): this older copy compares column names positionally
    against ``target_levels`` (with the pre-rename HN1/HN2/HNZ channel
    names) and, because it shared the name ``test_to_dataframe``, it
    shadowed the newer set-based test defined earlier in the file so
    that version never ran. Renamed with a leading underscore so pytest
    no longer collects it; delete once the newer test is confirmed good.
    """
    cwb_files, event = read_data_dir('geonet', 'nz2018p115908')
    st = read_data(cwb_files[0])[0]
    df1 = streams_to_dataframe([st, st], event=event)
    np.testing.assert_array_equal(df1.STATION.tolist(), ['WPWS', 'WPWS'])
    np.testing.assert_array_equal(
        df1.NAME.tolist(),
        ['Waipawa_District_Council', 'Waipawa_District_Council'])
    # Flattened expectation: level-0 names first, then level-1 names,
    # each in the exact order the MultiIndex stores them.
    target_levels = ['ELEVATION', 'EPICENTRAL_DISTANCE',
                     'GREATER_OF_TWO_HORIZONTALS', 'HN1', 'HN2', 'HNZ',
                     'HYPOCENTRAL_DISTANCE', 'LAT', 'LON', 'NAME', 'NETID',
                     'SOURCE', 'STATION',
                     '', 'PGA', 'PGV', 'SA(0.3)', 'SA(1.0)', 'SA(3.0)']
    idx = 0
    for level in df1.columns.levels:
        for col in level:
            assert col == target_levels[idx]
            idx += 1
def save_shakemap_amps(processed, event, event_dir):
    """Write ShakeMap peak amplitudes to an Excel spreadsheet.

    Args:
        processed (StreamCollection):
            Processed waveforms.
        event (ScalarEvent):
            Event object.
        event_dir (str):
            Directory where peak amps should be written.

    Returns:
        str: Path to output amps spreadsheet, or None when no streams
        passed processing (nothing is written in that case).
    """
    ampfile_name = None
    if processed.n_passed:
        dataframe = streams_to_dataframe(processed, event=event)
        ampfile_name = os.path.join(event_dir, 'shakemap.xlsx')
        # pandas writes the sheet; openpyxl is then used only for
        # cell-level touch-ups (building rows via ws.append() proved
        # unreliable here, hence the to_excel() round-trip).
        dataframe.to_excel(ampfile_name)
        wb = load_workbook(ampfile_name)
        ws = wb.active
        # The index column emitted by to_excel() is not needed downstream.
        ws.delete_cols(1)
        # Prepend a REFERENCE row identifying the data source.
        ws.insert_rows(1)
        ws['A1'] = 'REFERENCE'
        ws['B1'] = dataframe['SOURCE'].iloc[0]
        wb.save(ampfile_name)
    return ampfile_name