s_db, csv_fn = s_cfg.combine_run_cfg(station_save_dir)

            # make xml file
            s_xml = archive.XMLMetadata()
            s_xml.read_config_file(survey_cfg)
            s_xml.supplement_info = s_xml.supplement_info.replace(
                "\\n", "\n\t\t\t")

            # location
            s_xml.survey.east = s_db.lon.median()
            s_xml.survey.west = s_db.lon.median()
            s_xml.survey.north = s_db.lat.median()
            s_xml.survey.south = s_db.lat.median()

            # get elevation from national map
            s_elev = archive.get_nm_elev(s_db.lat.median(), s_db.lon.median())
            s_xml.survey.elev_min = s_elev
            s_xml.survey.elev_max = s_elev

            # start and end time
            s_xml.survey.begin_date = s_db.start_date.min()
            s_xml.survey.end_date = s_db.stop_date.max()

            # add list of files
            s_xml.supplement_info += "\n\t\t\tFile List:\n\t\t\t" + "\n\t\t\t".join(
                asc_fn_list)

            # write station xml
            s_xml.write_xml_file(
                os.path.join(station_save_dir, "{0}_meta.xml".format(station)))
# Archive Z3D time blocks into a single HDF5 file: one group per schedule
# action, with station-level metadata stored as root attributes.
zc = archive.Z3DCollection()
fn_list = zc.get_time_blocks(z3d_path)

h5_fn = r"d:\Peacock\MTData\GabbsValley\gv01\h5_test.hdf5"

# start from a clean file so groups/attributes from a stale run don't linger
if os.path.exists(h5_fn):
    os.remove(h5_fn)

h5_obj = h5py.File(h5_fn, 'w')
for ii, fn_block in enumerate(fn_list, 1):
    # BUG FIX: was zc.merge_ts(fn_list[0]) -- every schedule group was built
    # from the first time block; merge this iteration's block instead.
    # NOTE(review): ts_db (the merged time series) is never written to the
    # file below -- confirm whether a dataset write is missing.
    ts_db, meta_arr = zc.merge_ts(fn_block)
    # station-level attributes (same values each pass, rewritten per block)
    # meta_arr is presumably a numpy structured array -- field access + .mean()
    h5_obj.attrs['datum'] = 'WGS84'
    h5_obj.attrs['latitude'] = meta_arr['lat'].mean()
    h5_obj.attrs['longitude'] = meta_arr['lon'].mean()
    h5_obj.attrs['elevation'] = archive.get_nm_elev(meta_arr['lat'].mean(),
                                                    meta_arr['lon'].mean())
    h5_obj.attrs['station'] = 'gv01'
    # per-channel attributes, keyed e.g. 'ex_sensor', 'ex_length', 'ex_azimuth'
    for m_arr in meta_arr:
        for c_attr, h_attr in zip(['ch_num', 'ch_length', 'ch_azm'],
                                  ['sensor', 'length', 'azimuth']):
            h5_obj.attrs['{0}_{1}'.format(m_arr['comp'].lower(),
                                          h_attr)] = m_arr[c_attr]

    # one group per schedule action, numbered from 01
    schedule = h5_obj.create_group('schedule_{0:02}'.format(ii))
    # time window and sample counts across the merged channels
    schedule.attrs['start_time'] = meta_arr['start'].max()
    schedule.attrs['stop_time'] = meta_arr['stop'].min()
    schedule.attrs['n_samples'] = meta_arr['n_samples'].min()
    schedule.attrs['n_channels'] = meta_arr.size
# BUG FIX: close the file so the HDF5 handle is released and data is flushed
h5_obj.close()
# Example #3
# 0
# ab_list = [chr(i) for i in range(ord('A'),ord('Z')+1)]
# char_dict = dict([(index, alpha) for index, alpha in enumerate(ab_list, 1)])

# =============================================================================
# Rename EDI files and rename station
# =============================================================================
# Collect every EDI file in edi_path and write a CSV of station name,
# latitude, longitude, and elevation queried from the national map service.
edi_list = [os.path.join(edi_path, fn)
            for fn in os.listdir(edi_path) if fn.endswith(".edi")]

lines = ["station,lat,lon,nm_elev"]
for edi_fn in edi_list:
    mt_obj = mt.MT(edi_fn)
    nm_elev = archive.get_nm_elev(mt_obj.lat, mt_obj.lon)
    row = "{0},{1:.5f},{2:.5f},{3:.2f}".format(
        mt_obj.station, mt_obj.lat, mt_obj.lon, nm_elev)
    lines.append(row)

loc_csv_fn = os.path.join(edi_path, "imush_station_locations_nm.csv")
with open(loc_csv_fn, "w") as fid:
    fid.write("\n".join(lines))

#    print(mt_obj.station)
#    new_station = '{0}{1:03}'.format(char_dict[int(mt_obj.station[0:2])],
#                                     int(mt_obj.station[2:]))
#    mt_obj.station = new_station
#    print(mt_obj.station)
#    mt_obj.write_mt_file(save_dir=sv_path)
# Archive Z3D time blocks into a single HDF5 file: one group per schedule
# action, with station-level metadata stored as root attributes.
zc = archive.Z3DCollection()
fn_list = zc.get_time_blocks(z3d_path)

h5_fn = r"d:\Peacock\MTData\GabbsValley\gv01\h5_test.hdf5"

# start from a clean file so groups/attributes from a stale run don't linger
if os.path.exists(h5_fn):
    os.remove(h5_fn)

h5_obj = h5py.File(h5_fn, "w")
for ii, fn_block in enumerate(fn_list, 1):
    # BUG FIX: was zc.merge_ts(fn_list[0]) -- every schedule group was built
    # from the first time block; merge this iteration's block instead.
    # NOTE(review): ts_db (the merged time series) is never written to the
    # file below -- confirm whether a dataset write is missing.
    ts_db, meta_arr = zc.merge_ts(fn_block)
    # station-level attributes (same values each pass, rewritten per block)
    # meta_arr is presumably a numpy structured array -- field access + .mean()
    h5_obj.attrs["datum"] = "WGS84"
    h5_obj.attrs["latitude"] = meta_arr["lat"].mean()
    h5_obj.attrs["longitude"] = meta_arr["lon"].mean()
    h5_obj.attrs["elevation"] = archive.get_nm_elev(meta_arr["lat"].mean(),
                                                    meta_arr["lon"].mean())
    h5_obj.attrs["station"] = "gv01"
    # per-channel attributes, keyed e.g. 'ex_sensor', 'ex_length', 'ex_azimuth'
    for m_arr in meta_arr:
        for c_attr, h_attr in zip(["ch_num", "ch_length", "ch_azm"],
                                  ["sensor", "length", "azimuth"]):
            h5_obj.attrs["{0}_{1}".format(m_arr["comp"].lower(),
                                          h_attr)] = m_arr[c_attr]

    # one group per schedule action, numbered from 01
    schedule = h5_obj.create_group("schedule_{0:02}".format(ii))
    # time window and sample counts across the merged channels
    schedule.attrs["start_time"] = meta_arr["start"].max()
    schedule.attrs["stop_time"] = meta_arr["stop"].min()
    schedule.attrs["n_samples"] = meta_arr["n_samples"].min()
    schedule.attrs["n_channels"] = meta_arr.size
# BUG FIX: close the file so the HDF5 handle is released and data is flushed
h5_obj.close()