def validate(metadata_file_location, config=None):
    """
    Check whether the run folder next to a metadata file holds valid data.

    The folder is considered valid when l=m=2 mode data
    (rMPsi4_Y_l2_m2.asc) is present in OutermostExtraction and in each of
    the Extrapolated_N2/N3/N4 subdirectories.

    :param metadata_file_location: path to the metadata file; the run
        directory is taken to be its parent folder
    :param config: unused; kept for interface compatibility
    :return: True if all four mode files exist, False otherwise
    """
    from os.path import isfile, abspath, join
    from os import pardir

    # The run directory is the parent folder of the metadata file.
    run_dir = abspath(join(metadata_file_location, pardir))

    # Use os.path.join instead of manual '/' concatenation (the original
    # produced double slashes) and all() instead of a chained `and`.
    required = ('OutermostExtraction', 'Extrapolated_N2',
                'Extrapolated_N3', 'Extrapolated_N4')
    return all(isfile(join(run_dir, sub, 'rMPsi4_Y_l2_m2.asc'))
               for sub in required)
def validate(metadata_file_location, config=None):
    """
    Return True when the run folder beside *metadata_file_location*
    contains l=m=2 mode data in every required extraction directory.

    :param metadata_file_location: path to the metadata file
    :param config: unused; kept for interface compatibility
    """
    from os.path import isfile as exist
    from os.path import abspath, join
    from os import pardir

    # Run directory: the parent folder of the metadata file, with a
    # trailing slash (matches how the mode-file paths are assembled).
    run_dir = abspath(join(metadata_file_location, pardir)) + '/'

    mode_file = '/rMPsi4_Y_l2_m2.asc'
    for subdir in ('OutermostExtraction', 'Extrapolated_N2',
                   'Extrapolated_N3', 'Extrapolated_N4'):
        # Missing data in any one directory invalidates the folder.
        if not exist(run_dir + '/' + subdir + mode_file):
            return False
    return True
def read_waveforms_from_saved_dir(
        file_path: str,
        event: obspy.core.event.Event = None
) -> _Union[obspy.core.event.Event, _Tuple[obspy.Stream, obspy.Stream]]:
    """
    Read the obspy waveform data stored in a folder.

    :param file_path: path to the folder holding waveforms_SP.mseed and
        waveforms_VBB.mseed
    :param event: if not None, the streams are attached to it as
        .waveforms_SP / .waveforms_VBB and the event is returned
    :return: the event when one is given, otherwise (SP stream, VBB stream)
    :raises FileNotFoundError: if the SP waveform file is missing
    """
    sp_file = pjoin(file_path, "waveforms_SP.mseed")
    vbb_file = pjoin(file_path, "waveforms_VBB.mseed")
    # Fail loudly when the data files are absent: the original fell
    # through to an UnboundLocalError on sp_data/vbb_data.
    if not exist(sp_file):
        raise FileNotFoundError(
            "No waveform data (waveforms_SP.mseed) found in " + file_path)
    sp_data = obspy.read(sp_file)
    vbb_data = obspy.read(vbb_file)
    if event is not None:
        event.waveforms_SP = sp_data
        event.waveforms_VBB = vbb_data
        return event
    return sp_data, vbb_data
def save(self, filename: str) -> None:
    """
    Save the HDU list to the given filename.

    The file is written only when *filename* does not already exist;
    otherwise a failure message is printed and nothing is written.

    :param filename: the target filename
    :type filename: str
    """
    # os.path has no attribute `exist`; the original call raised
    # AttributeError on every invocation. `exists` is the correct check.
    if not path.exists(filename):
        self.hdul.writeto(filename)
    else:
        print("Failed! File exists.")
def skip_if_not_fedora(message: str) -> None:
    """
    Skip the current pytest test unless the host OS is Fedora.

    :param message: skip reason forwarded to pytest.skip
    """
    from os.path import exists as exist

    def skip():
        pytest.skip(message)

    os_release_fp = '/etc/os-release'
    if not exist(os_release_fp):
        skip()

    with open(os_release_fp) as f:
        os_release_lines = f.readlines()
    # Guard against an empty file before indexing line 0 (the NAME=
    # entry is conventionally the first line of /etc/os-release).
    if not os_release_lines or 'Fedora' not in os_release_lines[0]:
        skip()
def collect_results_cpu(result_part, size, tmpdir=None):
    """
    Collect per-rank inference results on CPU via a shared temp directory.

    Each rank dumps its partial results into ``tmpdir`` (created by rank 0
    and broadcast to the other ranks when not given); rank 0 then loads
    every part, interleaves them back into dataset order, strips
    dataloader padding, and removes the temp directory.

    :param result_part: list of results produced by this rank
    :param size: total number of samples (used to strip padded samples)
    :param tmpdir: optional path for the exchange; must not already exist
    :return: ordered results on rank 0, None on every other rank
    :raises OSError: if ``tmpdir`` is given and already exists
    """
    rank, world_size = get_dist_info()
    # create a tmp dir if it is not specified
    if tmpdir is None:
        MAX_LEN = 512
        # 32 is whitespace; the broadcast buffer is space-padded so the
        # receiving ranks can rstrip() the decoded path.
        dir_tensor = torch.full((MAX_LEN, ),
                                32,
                                dtype=torch.uint8,
                                device='cuda')
        if rank == 0:
            mmcv.mkdir_or_exist('.dist_test')
            tmpdir = tempfile.mkdtemp(dir='.dist_test')
            tmpdir = torch.tensor(
                bytearray(tmpdir.encode()), dtype=torch.uint8, device='cuda')
            dir_tensor[:len(tmpdir)] = tmpdir
        dist.broadcast(dir_tensor, 0)
        tmpdir = dir_tensor.cpu().numpy().tobytes().decode().rstrip()
    else:
        # `osp.exist` is not a valid os.path attribute; use `exists`.
        # Also raise with a single message string — the original passed a
        # tuple of fragments to OSError.
        if osp.exists(tmpdir):
            raise OSError(f'The tmpdir {tmpdir} already exists.'
                          ' Since tmpdir will be deleted after testing,'
                          ' please make sure you specify an empty one.')
        mmcv.mkdir_or_exist(tmpdir)
    # dump the part result to the dir
    mmcv.dump(result_part, osp.join(tmpdir, f'part_{rank}.pkl'))
    dist.barrier()
    # collect all parts
    if rank != 0:
        return None
    # load results of all parts from tmp dir
    part_list = []
    for i in range(world_size):
        part_file = osp.join(tmpdir, f'part_{i}.pkl')
        part_result = mmcv.load(part_file)
        # When data is severely insufficient, an empty part_result
        # on a certain gpu could make the overall outputs empty.
        if part_result:
            part_list.append(part_result)
    # sort the results: ranks produced interleaved samples, so zipping
    # the parts restores dataset order
    ordered_results = []
    for res in zip(*part_list):
        ordered_results.extend(list(res))
    # the dataloader may pad some samples
    ordered_results = ordered_results[:size]
    # remove tmp dir
    shutil.rmtree(tmpdir)
    return ordered_results
def skip_if_not_fedora(message: str) -> None:
    """
    Skip the running pytest test when the host OS is not Fedora.

    :param message: skip reason forwarded to pytest.skip
    """
    import os
    from os.path import exists as exist

    def _skip():
        pytest.skip(message)

    release_path = '/etc/os-release'
    # No os-release file at all -> definitely not Fedora.
    if not exist(release_path):
        _skip()

    with open(release_path) as handle:
        first_line = handle.readlines()[0]

    # The distribution name appears on the first line of the file.
    if 'Fedora' not in first_line:
        _skip()
def mnt_remote_folder(host_ip: str, host_usr: str, remote_folder: str,
                      mnt_folder: str) -> None:
    """
    Mount a remote folder containing waveform data onto the local machine.

    :param host_ip: ip address of the server that contains waveform data
    :param host_usr: username on the host
    :param remote_folder: remote folder that contains waveform data
    :param mnt_folder: existing local mount point (must be pre-created
        with write permissions; the process exits otherwise)
    """
    if not exist(mnt_folder):
        print("Create {} with writing permissions using sudo in terminal".format(mnt_folder))
        exit(1)
    # mkdir_command = "mkdir {}".format(mnt_folder)
    # subprocess.call(mkdir_command, shell=True)
    try:
        # Pass an argument list with shell=False so that user-supplied
        # values cannot be interpreted by the shell (command injection).
        mount_command = ["sshfs",
                         "{}@{}:{}".format(host_usr, host_ip, remote_folder),
                         mnt_folder]
        subprocess.call(mount_command)
        print("Mounted to {}".format(mnt_folder))
    except Exception as e:
        # Best effort: undo a half-finished mount before re-raising.
        unmnt_remote_folder(mnt_folder)
        raise e
def __init__(self, elastic_law, main_dir=None):
    """
    Initialize with an elastic law and a base read/write directory.

    :param elastic_law: constitutive law; must be an ElasticLaw instance
        exposing `linearity_type` and `name`
    :param main_dir: base directory for results; defaults to the current
        working directory and must exist when given
    """
    # set elastic law
    assert isinstance(elastic_law, ElasticLaw)
    self._elastic_law = elastic_law

    # set write and read directory
    if main_dir is None:
        self._main_dir = os.getcwd()
    else:
        assert isinstance(main_dir, str)
        # os.path has no `exist` attribute; `exists` is the correct
        # predicate (the original raised AttributeError here).
        assert path.exists(main_dir)
        self._main_dir = main_dir

    assert hasattr(elastic_law, "linearity_type")
    assert hasattr(elastic_law, "name")

    # Results are grouped by linearity type and law name.
    self._results_dir = path.join(
        self._main_dir,
        f"results/{self._elastic_law.linearity_type}/{self._elastic_law.name}"
    )
def ftp_upload_file(remote_ip, remote_port, remote_username,
                    remote_password, remote_dir, local_file):
    """
    Upload a local file to an FTP(S) server and verify the transfer.

    Connects with ftputil using an FTPS session (encrypted data channel),
    creates the remote directory tree, uploads the file and confirms that
    the remote size matches the local size.

    :param remote_ip: FTP server address
    :param remote_port: FTP server port
    :param remote_username: login user
    :param remote_password: login password
    :param remote_dir: remote directory prefix the file is uploaded under
    :param local_file: path of the local file to upload
    :return: True when the upload succeeded and sizes match, else False
    """
    my_session_factory = ftputil.session.session_factory(
        base_class=ftplib.FTP,
        port=remote_port,
        encrypt_data_channel=True,
        debug_level=0)
    ret = False
    # NOTE(review): `Path` here appears to be a project-local wrapper, not
    # pathlib.Path — it exposes `exist()` and a `getsize()` returning an
    # (err, size) pair, which pathlib/os.path do not. Confirm its import.
    path = Path()
    if False == path.exist(local_file):
        logger.info("local_file:%s not exist", local_file)
        return False
    with ftputil.FTPHost(remote_ip, remote_username, remote_password,
                         session_factory=my_session_factory) as ftp:
        localfile = local_file
        logger.info('localfile:%s', localfile)
        # Remote target path = remote_dir prefix + local file path.
        remotefile = remote_dir + local_file
        logger.info('remotefile:%s', remotefile)
        remotedir = os.path.dirname(remotefile)
        logger.info('remotedir:%s', remotedir)
        # Ensure the remote directory tree exists before uploading.
        ftp.makedirs(remotedir)
        ftp.upload(localfile, remotefile)
        # getsize returns an (err, size) pair; err is falsy on failure.
        err, local_size = path.getsize(localfile)
        logger.info("err:%s,local_size:%s", err, local_size)
        if False == err:
            return False
        if False == ftp.path.exists(remotefile):
            return False
        # Verify the transfer by comparing local and remote byte counts.
        remote_size = ftp.path.getsize(remotefile)
        logger.info("remote_size:%s", remote_size)
        if local_size == remote_size:
            ret = True
    return ret
def get_daylong_arguments(argv=None):
    """
    Get Options from :class:`~optparse.OptionParser` objects.

    Calling options for the script `obs_download_data.py` that accompany
    this package.

    :param argv: argument list to parse (defaults to sys.argv)
    :return: parsed and validated arguments namespace
    """
    parser = ArgumentParser(
        usage="%(prog)s [options] <Station Database>",
        description="Script used " +
        "to download and pre-process up to four-component " +
        "(H1, H2, Z and P), day-long seismograms to use in " +
        "noise corrections of vertical component of OBS data. " +
        "Data are requested from the internet using the client " +
        "services framework for a given date range. The stations " +
        "are processed one by one and the data are stored to disk.")
    parser.add_argument(
        "indb",
        help="Station Database to process from.",
        type=str)

    # General Settings
    parser.add_argument(
        "--keys",
        action="store",
        type=str,
        dest="stkeys",
        default="",
        help="Specify a comma-separated list of station keys " +
        "for which to perform the analysis. These must be " +
        "contained within the station database. Partial keys " +
        "will be used to match against those in the dictionary. " +
        "For instance, providing IU will match with all stations " +
        "in the IU network. " +
        "[Default processes all stations in the database]")
    parser.add_argument(
        "-C", "--channels",
        action="store",
        type=str,
        dest="channels",
        default="",
        help="Specify a comma-separated list of channels for " +
        "which to perform the transfer function analysis. " +
        "Possible options are H (for horizontal channels) or P " +
        "(for pressure channel). Specifying H allows " +
        "for tilt correction. Specifying P allows for compliance " +
        "correction. [Default looks for both horizontal and " +
        "pressure and allows for both tilt AND compliance corrections]")
    parser.add_argument(
        "-O", "--overwrite",
        action="store_true",
        dest="ovr",
        default=False,
        help="Force the overwriting of pre-existing data. " +
        "[Default False]")

    # Server Settings
    ServerGroup = parser.add_argument_group(
        title="Server Settings",
        description="Settings associated with which "
        "datacenter to log into.")
    ServerGroup.add_argument(
        "-S", "--Server",
        action="store",
        type=str,
        dest="Server",
        default="IRIS",
        help="Specify the server to connect to. Options include: " +
        "BGR, ETH, GEONET, GFZ, INGV, IPGP, IRIS, KOERI, LMU, NCEDC, " +
        "NEIP, NERIES, ODC, ORFEUS, RESIF, SCEDC, USGS, USP. " +
        "[Default IRIS]")
    ServerGroup.add_argument(
        "-U", "--User-Auth",
        action="store",
        type=str,
        dest="UserAuth",
        default="",
        help="Enter your IRIS Authentification Username and Password " +
        "(--User-Auth='username:authpassword') to access and download " +
        "restricted data. [Default no user and password]")

    # NOTE: the original carried a disabled "Local Data Settings" argument
    # group inside a dead triple-quoted string; it has been removed as
    # commented-out code.

    # Constants Settings
    FreqGroup = parser.add_argument_group(
        title='Frequency Settings',
        description="Miscellaneous frequency settings")
    FreqGroup.add_argument(
        "--sampling-rate",
        action="store",
        type=float,
        dest="new_sampling_rate",
        default=5.,
        help="Specify new sampling rate (float, in Hz). [Default 5.]")
    FreqGroup.add_argument(
        "--pre-filt",
        action="store",
        type=str,
        dest="pre_filt",
        default=None,
        help="Specify four comma-separated corner frequencies " +
        "(float, in Hz) for deconvolution pre-filter. " +
        "[Default 0.001,0.005,45.,50.]")

    # Event Selection Criteria
    DaysGroup = parser.add_argument_group(
        title="Time Search Settings",
        description="Time settings associated with searching " +
        "for day-long seismograms")
    DaysGroup.add_argument(
        "--start",
        action="store",
        type=str,
        dest="startT",
        default="",
        help="Specify a UTCDateTime compatible string representing " +
        "the start day for the data search. This will override any " +
        "station start times. " +
        "[Default start date for each station in database]")
    DaysGroup.add_argument(
        "--end",
        action="store",
        type=str,
        dest="endT",
        default="",
        help="Specify a UTCDateTime compatible string representing " +
        "the start time for the event search. This will override any " +
        "station end times [Default end date for each station in database]")

    args = parser.parse_args(argv)

    # Check inputs
    if not exist(args.indb):
        parser.error("Input file " + args.indb + " does not exist")

    # create station key list
    if len(args.stkeys) > 0:
        args.stkeys = args.stkeys.split(',')

    # create channel list
    if len(args.channels) > 0:
        args.channels = args.channels.split(',')
    else:
        args.channels = ["H", "P"]
    for cha in args.channels:
        if cha not in ["H", "P"]:
            # parser.error takes a single message string; the original
            # passed the channel as a second positional arg (TypeError).
            parser.error("Error: Channel not recognized " + cha)

    # construct start time
    if len(args.startT) > 0:
        try:
            args.startT = UTCDateTime(args.startT)
        except Exception:
            parser.error(
                "Error: Cannot construct UTCDateTime from start time: " +
                args.startT)
    else:
        args.startT = None

    # construct end time
    if len(args.endT) > 0:
        try:
            args.endT = UTCDateTime(args.endT)
        except Exception:
            parser.error(
                "Error: Cannot construct UTCDateTime from end time: " +
                args.endT)
    else:
        args.endT = None

    # Parse User Authentification
    if not len(args.UserAuth) == 0:
        tt = args.UserAuth.split(':')
        if not len(tt) == 2:
            parser.error(
                "Error: Incorrect Username and Password Strings for " +
                "User Authentification")
        else:
            args.UserAuth = tt
    else:
        args.UserAuth = []

    if not isinstance(args.new_sampling_rate, float):
        raise Exception(
            "Error: Type of --sampling-rate is not a float")

    if args.pre_filt is None:
        args.pre_filt = [0.001, 0.005, 45., 50.]
    else:
        # The original wrapped the whole split list in one float() call,
        # raising TypeError for any user value; convert element-wise.
        args.pre_filt = [float(val) for val in args.pre_filt.split(',')]
    args.pre_filt = sorted(args.pre_filt)
    if (len(args.pre_filt)) != 4:
        raise Exception(
            "Error: --pre-filt should contain 4 comma-separated floats")

    return args
def get_correct_arguments(argv=None):
    """
    Get Options from :class:`~optparse.OptionParser` objects.

    Calling options for the script `obs_correct_event.py` that accompany
    this package.

    :param argv: argument list to parse (defaults to sys.argv)
    :return: parsed and validated arguments namespace
    """
    parser = ArgumentParser(
        usage="%(prog)s [options] <Station Database>",
        description="Script used "
        "to extract transfer functions between various " +
        "components, and use them to clean vertical " +
        "component of OBS data for selected events. The " +
        "noise data can be those obtained from the daily " +
        "spectra (i.e., from `obs_daily_spectra.py`) "
        "or those obtained from the averaged noise spectra " +
        "(i.e., from `obs_clean_spectra.py`). Flags are " +
        "available to specify the source of data to use as " +
        "well as the time range for given events. "
        "The stations are processed one by one and the " +
        "data are stored to disk.")
    parser.add_argument(
        "indb",
        help="Station Database to process from.",
        type=str)

    # General Settings
    parser.add_argument(
        "--keys",
        action="store",
        type=str,
        dest="stkeys",
        default="",
        help="Specify a comma separated list of station " +
        "keys for which to perform the analysis. These must be "
        "contained within the station database. Partial keys " +
        "will be used to match against those in the "
        "dictionary. For instance, providing IU will match with " +
        "all stations in the IU network. [Default processes "
        "all stations in the database]")
    parser.add_argument(
        "-O", "--overwrite",
        action="store_true",
        dest="ovr",
        default=False,
        help="Force the overwriting of pre-existing data. " +
        "[Default False]")

    # Event Selection Criteria
    DaysGroup = parser.add_argument_group(
        title="Time Search Settings",
        description="Time settings associated with " +
        "searching for specific event-related seismograms")
    DaysGroup.add_argument(
        "--start",
        action="store",
        type=str,
        dest="startT",
        default="",
        help="Specify a UTCDateTime compatible string " +
        "representing the start day for the event search. "
        "This will override any station start times. " +
        "[Default start date of each station in database]")
    DaysGroup.add_argument(
        "--end",
        action="store",
        type=str,
        dest="endT",
        default="",
        help="Specify a UTCDateTime compatible string " +
        "representing the start time for the event search. "
        "This will override any station end times. [Default " +
        "end date of each station in database]")

    # Constants Settings
    ConstGroup = parser.add_argument_group(
        title='Parameter Settings',
        description="Miscellaneous default " +
        "values and settings")
    ConstGroup.add_argument(
        "--skip-daily",
        action="store_true",
        dest="skip_daily",
        default=False,
        help="Skip daily spectral averages in application " +
        "of transfer functions. [Default False]")
    ConstGroup.add_argument(
        "--skip-clean",
        action="store_true",
        dest="skip_clean",
        default=False,
        help="Skip cleaned spectral averages in " +
        "application of transfer functions. " +
        "[Default False]")
    ConstGroup.add_argument(
        "--fmin",
        action="store",
        type=float,
        dest="fmin",
        default=1./150.,  # numeric default instead of a string literal
        help="Low frequency corner (in Hz) for " +
        "plotting the raw (un-corrected) seismograms. "
        "Filter is a 2nd order, zero phase butterworth " +
        "filter. [Default 1./150.]")
    ConstGroup.add_argument(
        "--fmax",
        action="store",
        type=float,
        dest="fmax",
        default=0.1,  # numeric default instead of a string literal
        help="High frequency corner (in Hz) for " +
        "plotting the raw (un-corrected) seismograms. "
        "Filter is a 2nd order, zero phase butterworth " +
        "filter. [Default 1./10.]")

    # Figure Settings
    FigureGroup = parser.add_argument_group(
        title='Figure Settings',
        description="Flags for plotting figures")
    FigureGroup.add_argument(
        "--figRaw",
        action="store_true",
        dest="fig_event_raw",
        default=False,
        help="Plot raw seismogram figure. " +
        "[Default does not plot figure]")
    FigureGroup.add_argument(
        "--figClean",
        action="store_true",
        dest="fig_plot_corrected",
        default=False,
        help="Plot cleaned vertical seismogram figure. " +
        "[Default does not plot figure]")
    FigureGroup.add_argument(
        "--save-fig",
        action="store_true",
        dest="saveplot",
        default=False,
        help="Set this option if you wish to save the figure(s). [Default " +
        "does not save figure]")
    FigureGroup.add_argument(
        "--format",
        action="store",
        type=str,
        dest="form",
        default="png",
        # Added the missing space: the original rendered "validmatplotlib".
        help="Specify format of figure. Can be any one of the valid " +
        "matplotlib formats: 'png', 'jpg', 'eps', 'pdf'. [Default 'png']")

    args = parser.parse_args(argv)

    # Check inputs
    if not exist(args.indb):
        parser.error("Input file " + args.indb + " does not exist")

    # create station key list
    if len(args.stkeys) > 0:
        args.stkeys = args.stkeys.split(',')

    # construct start time
    if len(args.startT) > 0:
        try:
            args.startT = UTCDateTime(args.startT)
        except Exception:
            parser.error(
                "Error: Cannot construct UTCDateTime from " +
                "start time: " + args.startT)
    else:
        args.startT = None

    # construct end time
    if len(args.endT) > 0:
        try:
            args.endT = UTCDateTime(args.endT)
        except Exception:
            parser.error(
                "Error: Cannot construct UTCDateTime from " +
                "end time: " + args.endT)
    else:
        args.endT = None

    # Skipping both noise sources would leave nothing to correct with.
    if args.skip_clean and args.skip_daily:
        parser.error(
            "Error: cannot skip both daily and clean averages")

    return args
def get_arguments_average(argv=None):
    """
    Parse command-line options for plotting average splitting results.

    Loads the available .pkl files in the specified Station Directory and
    builds the NullName / QualName / TypeName filename components from the
    selection flags.

    :param argv: argument list to parse (defaults to sys.argv)
    :return: parsed and validated arguments namespace
    """
    parser = ArgumentParser(
        usage="%(prog)s [arguments] <station database>",
        description="Script to plot the average splitting results for a " +
        "given station. Loads the available .pkl files in the specified " +
        "Station Directory.")

    # General Settings
    parser.add_argument(
        "indb",
        help="Station Database to process from.",
        type=str)
    parser.add_argument(
        "--keys",
        action="store",
        type=str,
        dest="stkeys",
        default="",
        help="Specify a comma separated list of station keys " +
        "for which to perform analysis. These must be " +
        "contained within the station database. Partial keys " +
        "will be used to match against those in the " +
        "dictionary. For instance, providing IU will match " +
        "with all stations in the IU network [Default " +
        "processes all stations in the database]")
    parser.add_argument(
        "-v", "-V", "--verbose",
        action="store_true",
        dest="verb",
        default=False,
        help="Specify to increase verbosity.")
    parser.add_argument(
        "--show-fig",
        action="store_true",
        dest="showfig",
        default=False,
        help="Specify show plots during processing - " +
        "they are still saved to disk. [Default only saves]")
    parser.add_argument(
        "-A", "--auto",
        action="store_true",
        dest="auto",
        default=False,
        help="Specify to use automatically processed split results. " +
        "[Default uses refined ('manual') split results]")

    # Null Settings
    NullGroup = parser.add_argument_group(
        title="Null Selection Settings",
        description="Settings "
        "associated with selecting which Null or Non-Null data is included")
    NullGroup.add_argument(
        "--nulls", "--Nulls",
        action="store_true",
        dest="nulls",
        default=False,
        help="Specify this flag to include Null Values in the average. " +
        "[Default Non-Nulls only]")
    NullGroup.add_argument(
        "--no-nons", "--No-Nons",
        action="store_false",
        dest="nons",
        default=True,
        help="Specify this flag to exclude Non-Nulls from the average " +
        "[Default False]")

    # Quality Settings
    QualGroup = parser.add_argument_group(
        title="Quality Selection Settings",
        description="Settings associated with selecting the qualities " +
        "to include in the selection.")
    QualGroup.add_argument(
        "--No-Good", "--no-good",
        action="store_false",
        dest="goods",
        default=True,
        help="Specify to exclude 'Good' measurements from the average. " +
        "[Default Good + Fair]")
    QualGroup.add_argument(
        "--No-Fair", "--no-fair",
        action="store_false",
        dest="fairs",
        default=True,
        help="Specify to exclude 'Fair' measurements from the average " +
        "[Default Good + Fair]")
    QualGroup.add_argument(
        "--Poor", "--poor",
        action="store_true",
        dest="poors",
        default=False,
        help="Specify to include 'Poor' measurements in the average " +
        "[Default No Poors]")

    # Split Type Settings
    SpTypGroup = parser.add_argument_group(
        title="Split Type Settings",
        description="Settings to Select "
        "which Split types are included in the selection.")
    SpTypGroup.add_argument(
        "--RC-Only", "--rc-only", "--RC-only",
        action="store_false",
        dest="SCinc",
        default=True,
        help="Specify to only include RC splits in the average. " +
        "[Default RC + SC]")
    SpTypGroup.add_argument(
        "--SC-Only", "--sc-only", "--SC-only",
        action="store_false",
        dest="RCinc",
        default=True,
        help="Specify to only include SC splits in the average. " +
        "[Default RC + SC]")

    args = parser.parse_args(argv)

    # Check inputs
    if not exist(args.indb):
        parser.error("Input file " + args.indb + " does not exist")

    # Check Nulls
    if not args.nons and not args.nulls:
        parser.error("One of Non-Nulls or Nulls must be included.")

    # Check Quality
    if not args.goods and not args.fairs and not args.poors:
        parser.error("At least one Quality must be included.")

    # Check Types (message typos fixed: "leat"/"Tyhpe")
    if not args.RCinc and not args.SCinc:
        parser.error("At least one Splitting Type must be included.")

    # Construct Null FileName Components
    NullName = ""
    if args.nons:
        NullName = "_Nons"
        if args.nulls:
            NullName = NullName + "-Nulls"
    else:
        if args.nulls:
            NullName = "_Nulls"
    args.NullName = NullName

    # Construct Quality FileName Components
    QualName = ""
    if args.goods:
        QualName = "_G"
        if args.fairs:
            QualName = QualName + "-F"
        if args.poors:
            QualName = QualName + "-P"
    else:
        if args.fairs:
            QualName = "_F"
            if args.poors:
                QualName = QualName + "-P"
        else:
            if args.poors:
                QualName = "_P"
    args.QualName = QualName

    # Construct Type FileName Components
    TypeName = ""
    if args.RCinc and args.SCinc:
        TypeName = "_RC-SC"
    elif args.RCinc and not args.SCinc:
        TypeName = "_RC"
    elif not args.RCinc and args.SCinc:
        TypeName = "_SC"
    args.TypeName = TypeName

    return args
def get_arguments_calc_manual(argv=None):
    """
    Get Options from :class:`~optparse.OptionParser` objects.

    This function is used for processing SKS data offline.

    :param argv: argument list to parse (defaults to sys.argv)
    :return: parsed and validated arguments namespace
    """
    parser = ArgumentParser(
        usage="%(prog)s [arguments] <station database>",
        description="Script to process "
        "and calculate the splitting parameters for a dataset " +
        "that has already been downloaded by split_calc_auto.py. ")

    # General Settings
    parser.add_argument(
        "indb",
        help="Station Database to process from.",
        type=str)
    parser.add_argument(
        "--keys",
        action="store",
        type=str,
        dest="stkeys",
        default="",
        help="Specify a comma separated list of station keys " +
        "for which to perform analysis. These must be " +
        "contained within the station database. Partial keys " +
        "will be used to match against those in the " +
        "dictionary. For instance, providing IU will match " +
        "with all stations in the IU network [Default " +
        "processes all stations in the database]")
    parser.add_argument(
        "-v", "-V", "--verbose",
        action="store_true",
        dest="verb",
        default=False,
        help="Specify to increase verbosity.")

    # Constants Settings
    ConstGroup = parser.add_argument_group(
        title='Parameter Settings',
        description="Miscellaneous default values and settings")
    ConstGroup.add_argument(
        "--window",
        action="store",
        type=float,
        dest="dts",
        default=120.,
        # Help typo fixed: the option stores `dts`, not `dst`.
        help="Specify time window length before and after the SKS "
        "arrival. The total window length is 2*dts (sec). [Default 120]")
    ConstGroup.add_argument(
        "--max-delay",
        action="store",
        type=float,
        dest="maxdt",
        default=4.,
        help="Specify the maximum delay time. [Default 4 s]")
    ConstGroup.add_argument(
        "--time-increment",
        action="store",
        type=float,
        dest="ddt",
        default=0.1,
        help="Specify the time increment. [Default 0.1 s]")
    ConstGroup.add_argument(
        "--angle-increment",
        action="store",
        type=float,
        dest="dphi",
        default=1.,
        help="Specify the angle increment. [Default 1 d]")
    ConstGroup.add_argument(
        "--transverse-SNR",
        action="store",
        type=float,
        dest="snrTlim",
        default=1.,
        help="Specify the minimum SNR Threshold for the Transverse " +
        "component to be considered Non-Null. [Default 1.]")

    # Event Selection Criteria
    EventGroup = parser.add_argument_group(
        title="Event Settings",
        description="Settings associated with " +
        "refining the events to include in matching station pairs")
    EventGroup.add_argument(
        "--start",
        action="store",
        type=str,
        dest="startT",
        default="",
        help="Specify a UTCDateTime compatible string representing the " +
        "start time for the event search. This will override any station " +
        "start times. [Default more recent start date for each station pair]")
    EventGroup.add_argument(
        "--end",
        action="store",
        type=str,
        dest="endT",
        default="",
        help="Specify a UTCDateTime compatible string representing the " +
        "end time for the event search. This will override any station " +
        "end times [Default older end date for each the pair of stations]")
    EventGroup.add_argument(
        "--reverse-order", "-R",
        action="store_true",
        dest="reverse",
        default=False,
        help="Reverse order of events. Default behaviour starts at oldest " +
        "event and works towards most recent. Specify reverse order and " +
        "instead the program will start with the most recent events and " +
        "work towards older")

    args = parser.parse_args(argv)

    # Check inputs
    if not exist(args.indb):
        parser.error("Input file " + args.indb + " does not exist")

    # create station key list
    if len(args.stkeys) > 0:
        args.stkeys = args.stkeys.split(',')

    # construct start time
    if len(args.startT) > 0:
        try:
            args.startT = UTCDateTime(args.startT)
        except Exception:
            parser.error("Cannot construct UTCDateTime from start time: " +
                         args.startT)
    else:
        args.startT = None

    # construct end time
    if len(args.endT) > 0:
        try:
            args.endT = UTCDateTime(args.endT)
        except Exception:
            parser.error("Cannot construct UTCDateTime from end time: " +
                         args.endT)
    else:
        args.endT = None

    return args
def get_arguments_calc_auto(argv=None):
    """
    Get arguments from :class:`~argparse.ArgumentParser` objects.

    This function is used for data processing on-the-fly (requires web
    connection).

    Parameters
    ----------
    argv : list of str, optional
        Command-line argument strings; ``None`` falls back to
        ``sys.argv[1:]``.

    Returns
    -------
    args : :class:`~argparse.Namespace`
        Parsed and validated arguments. Distance limits default per
        selected phase (SKS: 85-120, SKKS: 90-130, PKS: 130-150 deg).
    """
    parser = ArgumentParser(
        usage="%(prog)s [arguments] <station database>",
        description="Script wrapping "
        "together the python-based implementation of SplitLab by " +
        "Wustefeld and others. This version " +
        "requests data on the fly for a given date range. Data is " +
        "requested from the internet using " +
        "the client services framework or from data provided on a " +
        "local disk. The stations are processed " +
        "one by one with the SKS Splitting parameters measured " +
        "individually using both the " +
        "Rotation-Correlation (RC) and Silver & Chan (SC) methods.")
    parser.add_argument(
        "indb",
        help="Station Database to process from.",
        type=str)
    parser.add_argument(
        "--keys",
        action="store",
        type=str,
        dest="stkeys",
        default="",
        help="Specify a comma separated list of station keys for " +
        "which to perform the analysis. These must be " +
        "contained within the station database. Partial keys " +
        "will be used to match against those in the " +
        "dictionary. For instance, providing IU will match with " +
        "all stations in the IU network [Default processes " +
        "all stations in the database]")
    parser.add_argument(
        "-v", "-V", "--verbose",
        action="store_true",
        dest="verb",
        default=False,
        help="Specify to increase verbosity.")
    parser.add_argument(
        "-O", "--overwrite",
        action="store_true",
        dest="ovr",
        default=False,
        help="Force the overwriting of pre-existing Split results. " +
        "Default behaviour prompts for those that " +
        "already exist. Selecting overwrite and skip (ie, both flags) " +
        "negate each other, and both are set to " +
        "false (every repeat is prompted). [Default False]")
    parser.add_argument(
        "-K", "--skip-existing",
        action="store_true",
        dest="skip",
        default=False,
        help="Skip any event for which existing splitting results are " +
        "saved to disk. Default behaviour prompts for " +
        "each event. Selecting skip and overwrite (ie, both flags) " +
        "negate each other, and both are set to " +
        "False (every repeat is prompted). [Default False]")
    parser.add_argument(
        "-C", "--calc",
        action="store_true",
        dest="calc",
        default=False,
        help="Analyze data for shear-wave splitting. [Default saves data " +
        "to folders for subsequent analysis]")
    parser.add_argument(
        "-P", "--plot-diagnostic",
        action="store_true",
        dest="diagplot",
        default=False,
        help="Plot diagnostic window at end of process. [Default False]")
    parser.add_argument(
        "-R", "--recalc",
        action="store_true",
        dest="recalc",
        default=False,
        help="Re-calculate estimates and overwrite existing splitting " +
        "results without re-downloading data. [Default False]")

    # Server Settings
    ServerGroup = parser.add_argument_group(
        title="Server Settings",
        description="Settings associated with which " +
        "datacenter to log into.")
    ServerGroup.add_argument(
        "-S", "--Server",
        action="store",
        type=str,
        dest="Server",
        default="IRIS",
        help="Specify the server to connect to. Options include: " +
        "BGR, ETH, GEONET, GFZ, INGV, IPGP, IRIS, KOERI, LMU, NCEDC, " +
        "NEIP, NERIES, ODC, ORFEUS, RESIF, SCEDC, USGS, USP. [Default IRIS]")
    ServerGroup.add_argument(
        "-U", "--User-Auth",
        action="store",
        type=str,
        dest="UserAuth",
        default="",
        help="Enter your IRIS Authentification Username and Password " +
        "(--User-Auth='username:authpassword') to access and download " +
        "restricted data. [Default no user and password]")

    # Database Settings
    DataGroup = parser.add_argument_group(
        title="Local Data Settings",
        description="Settings associated with defining and using a " +
        "local data base of pre-downloaded day-long SAC files.")
    DataGroup.add_argument(
        "--local-data",
        action="store",
        type=str,
        dest="localdata",
        default=None,
        help="Specify a comma separated list of paths containing " +
        "day-long sac files of data already downloaded. " +
        "If data exists for a seismogram is already present on " +
        "disk, it is selected preferentially over downloading " +
        "the data using the Client interface")
    DataGroup.add_argument(
        "--dtype",
        action="store",
        type=str,
        dest="dtype",
        default='SAC',
        help="Specify the data archive file type, either SAC " +
        " or MSEED. Note the default behaviour is to search for " +
        "SAC files. Local archive files must have extensions of '.SAC' " +
        " or '.MSEED. These are case dependent, so specify the correct case" +
        "here.")
    DataGroup.add_argument(
        "--no-data-zero",
        action="store_true",
        dest="ndval",
        default=False,
        help="Specify to force missing data to be set as zero, rather " +
        "than default behaviour which sets to nan.")
    DataGroup.add_argument(
        "--no-local-net",
        action="store_false",
        dest="useNet",
        default=True,
        help="Specify to prevent using the Network code in the " +
        "search for local data (sometimes for CN stations " +
        "the dictionary name for a station may disagree with that " +
        "in the filename. [Default Network used]")

    # Constants Settings
    ConstGroup = parser.add_argument_group(
        title='Parameter Settings',
        description="Miscellaneous default values and settings")
    ConstGroup.add_argument(
        "--sampling-rate",
        action="store",
        type=float,
        dest="new_sampling_rate",
        default=10.,
        help="Specify new sampling rate in Hz. [Default 10.]")
    ConstGroup.add_argument(
        "--min-snr",
        action="store",
        type=float,
        dest="msnr",
        default=5.,
        help="Minimum SNR value calculated on the radial (Q) component " +
        "to proceed with analysis (dB). [Default 5.]")
    ConstGroup.add_argument(
        "--window",
        action="store",
        type=float,
        dest="dts",
        default=120.,
        help="Specify time window length before and after the SKS "
        "arrival. The total window length is 2*dst (sec). [Default 120]")
    ConstGroup.add_argument(
        "--max-delay",
        action="store",
        type=float,
        dest="maxdt",
        default=4.,
        help="Specify the maximum delay time in search (sec). " +
        "[Default 4]")
    ConstGroup.add_argument(
        "--dt-delay",
        action="store",
        type=float,
        dest="ddt",
        default=0.1,
        help="Specify the time delay increment in search (sec). " +
        "[Default 0.1]")
    ConstGroup.add_argument(
        "--dphi",
        action="store",
        type=float,
        dest="dphi",
        default=1.,
        help="Specify the fast angle increment in search (degree). " +
        "[Default 1.]")
    ConstGroup.add_argument(
        "--snrT",
        action="store",
        type=float,
        dest="snrTlim",
        default=1.,
        help="Specify the minimum SNR Threshold for the Transverse " +
        "component to be considered Non-Null. [Default 1.]")
    ConstGroup.add_argument(
        "--fmin",
        action="store",
        type=float,
        dest="fmin",
        default=0.02,
        help="Specify the minimum frequency corner for bandpass " +
        "filter (Hz). [Default 0.02]")
    ConstGroup.add_argument(
        "--fmax",
        action="store",
        type=float,
        dest="fmax",
        default=0.5,
        help="Specify the maximum frequency corner for bandpass " +
        "filter (Hz). [Default 0.5]")

    # Event Selection Criteria
    EventGroup = parser.add_argument_group(
        title="Event Settings",
        description="Settings associated with refining "
        "the events to include in matching station pairs")
    EventGroup.add_argument(
        "--start",
        action="store",
        type=str,
        dest="startT",
        default="",
        help="Specify a UTCDateTime compatible string representing " +
        "the start time for the event search. This will override any " +
        "station start times. [Default start date of each station]")
    EventGroup.add_argument(
        "--end",
        action="store",
        type=str,
        dest="endT",
        default="",
        help="Specify a UTCDateTime compatible string representing " +
        "the end time for the event search. This will override any " +
        "station end times [Default end date of each station]")
    EventGroup.add_argument(
        "--reverse",
        action="store_true",
        dest="reverse",
        default=False,
        help="Reverse order of events. Default behaviour starts at " +
        "oldest event and works towards most recent. " +
        "Specify reverse order and instead the program will start " +
        "with the most recent events and work towards older")
    EventGroup.add_argument(
        "--min-mag",
        action="store",
        type=float,
        dest="minmag",
        default=6.0,
        help="Specify the minimum magnitude of event for which to " +
        "search. [Default 6.0]")
    EventGroup.add_argument(
        "--max-mag",
        action="store",
        type=float,
        dest="maxmag",
        default=None,
        help="Specify the maximum magnitude of event for which to " +
        "search. [Default None, i.e. no limit]")

    # Geometry Settings
    GeomGroup = parser.add_argument_group(
        title="Geometry Settings",
        description="Settings associatd with the "
        "event-station geometries")
    # NOTE: default is None (not 85.) so that the per-phase defaults
    # assigned below (SKS: 85, SKKS: 90, PKS: 130) can take effect; a
    # truthy default made that code unreachable.
    GeomGroup.add_argument(
        "--min-dist",
        action="store",
        type=float,
        dest="mindist",
        default=None,
        help="Specify the minimum great circle distance (degrees) " +
        "between the station and event. [Default 85]")
    # Same reasoning as --min-dist: None lets the phase-dependent
    # defaults (SKS: 120, SKKS: 130, PKS: 150) apply.
    GeomGroup.add_argument(
        "--max-dist",
        action="store",
        type=float,
        dest="maxdist",
        default=None,
        help="Specify the maximum great circle distance (degrees) " +
        "between the station and event. [Default 120]")
    GeomGroup.add_argument(
        "--phase",
        action="store",
        type=str,
        dest="phase",
        default='SKS',
        help="Specify the phase name to use. Be careful with the distance. " +
        "setting. Options are 'SKS' or 'SKKS'. [Default 'SKS']")

    args = parser.parse_args(argv)

    # Check inputs
    if not exist(args.indb):
        parser.error("Input file " + args.indb + " does not exist")

    # create station key list
    if len(args.stkeys) > 0:
        args.stkeys = args.stkeys.split(',')

    # construct start time
    if len(args.startT) > 0:
        try:
            args.startT = UTCDateTime(args.startT)
        # narrow from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate
        except Exception:
            parser.error("Cannot construct UTCDateTime from start time: " +
                         args.startT)
    else:
        args.startT = None

    # construct end time
    if len(args.endT) > 0:
        try:
            args.endT = UTCDateTime(args.endT)
        except Exception:
            parser.error("Cannot construct UTCDateTime from end time: " +
                         args.endT)
    else:
        args.endT = None

    # Parse User Authentification
    if not len(args.UserAuth) == 0:
        tt = args.UserAuth.split(':')
        if not len(tt) == 2:
            parser.error(
                "Error: Incorrect Username and Password Strings for " +
                "User Authentification")
        else:
            args.UserAuth = tt
    else:
        args.UserAuth = []

    # Check existing file behaviour: the two flags negate each other
    if args.skip and args.ovr:
        args.skip = False
        args.ovr = False

    # Parse Local Data directories
    if args.localdata is not None:
        args.localdata = args.localdata.split(',')
    else:
        args.localdata = []

    # Check NoData Value
    if args.ndval:
        args.ndval = 0.0
    else:
        args.ndval = nan

    # Check selected phase
    if args.phase not in ['SKS', 'SKKS', 'PKS']:
        parser.error("Error: choose between 'SKS', 'SKKS' and 'PKS'.")

    # Check distances for all phases: apply phase-dependent defaults
    # when the user did not supply explicit limits
    if not args.mindist:
        if args.phase == 'SKS':
            args.mindist = 85.
        elif args.phase == 'SKKS':
            args.mindist = 90.
        elif args.phase == 'PKS':
            args.mindist = 130.
    if not args.maxdist:
        if args.phase == 'SKS':
            args.maxdist = 120.
        elif args.phase == 'SKKS':
            args.maxdist = 130.
        elif args.phase == 'PKS':
            args.maxdist = 150.
    if args.mindist < 85. or args.maxdist > 180.:
        parser.error("Distances should be between 85 and 180 deg. for " +
                     "teleseismic 'SKS', 'SKKS' and 'PKS' waves.")

    return args
def get_recalc_arguments(argv=None):
    """
    Get arguments from :class:`~argparse.ArgumentParser` objects.

    This function is used for data processing on-the-fly (requires web
    connection).

    Parameters
    ----------
    argv : list of str, optional
        Command-line argument strings; ``None`` falls back to
        ``sys.argv[1:]``.

    Returns
    -------
    args : :class:`~argparse.Namespace`
        Parsed and validated arguments. ``listphase`` is derived from
        ``phase``; ``align`` defaults to ``'ZRT'``; ``pre_filt`` is a
        sorted two-element list of floats when supplied.
    """
    parser = ArgumentParser(
        usage="%(prog)s [arguments] <station database>",
        description="Script used to re-calculate receiver functions " +
        "that already exist on disk, but using different " +
        "processing options. The stations are processed one " +
        "by one and the data are stored to disk. " +
        " \n" +
        "Note: The sampling rate cannot be changed to a new rate")

    # General Settings
    parser.add_argument(
        "indb",
        help="Station Database to process from.",
        type=str)
    parser.add_argument(
        "--keys",
        action="store",
        type=str,
        dest="stkeys",
        default="",
        help="Specify a comma separated list of station keys for " +
        "which to perform the analysis. These must be " +
        "contained within the station database. Partial keys will " +
        "be used to match against those in the dictionary. For " +
        "instance, providing IU will match with all stations in " +
        "the IU network [Default processes all stations in the database]")
    parser.add_argument(
        "-v", "-V", "--verbose",
        action="store_true",
        dest="verb",
        default=False,
        help="Specify to increase verbosity.")
    parser.add_argument(
        "-L", "--long-name",
        action="store_true",
        dest="lkey",
        default=False,
        help="Force folder names to use long-key form (NET.STN.CHN). " +
        "Default behaviour uses short key form (NET.STN) for the folder " +
        "names, regardless of the key type of the database."
    )

    # Constants Settings
    ConstGroup = parser.add_argument_group(
        title='Parameter Settings',
        description="Miscellaneous default values and settings")
    ConstGroup.add_argument(
        "--Z12",
        action="store_true",
        dest="Z12",
        default=False,
        help="Use Z12 data if available. [Default uses ZNE data]")
    ConstGroup.add_argument(
        "--phase",
        action="store",
        type=str,
        dest="phase",
        default='allP',
        help="Specify the phase name to use. Be careful with the distance. " +
        "setting. Options are 'P', 'PP', 'allP', 'S', 'SKS' or 'allS'. " +
        "[Default 'allP']")
    ConstGroup.add_argument(
        "--resample",
        action="store",
        type=float,
        dest="resample",
        default=None,
        help="Specify the new sampling-rate for the receiver functions. " +
        "Note the sampling rate of the original data (ZNE or Z12) stored " +
        "on disk is unchanged. [Default None]")
    ConstGroup.add_argument(
        "--align",
        action="store",
        type=str,
        dest="align",
        default=None,
        help="Specify component alignment key. Can be either " +
        "ZRT, LQT, or PVH. [Default ZRT]")
    ConstGroup.add_argument(
        "--vp",
        action="store",
        type=float,
        dest="vp",
        default=6.0,
        help="Specify near-surface Vp to use with --align=PVH (km/s). " +
        "[Default 6.0]")
    ConstGroup.add_argument(
        "--vs",
        action="store",
        type=float,
        dest="vs",
        default=3.5,
        help="Specify near-surface Vs to use with --align=PVH (km/s). " +
        "[Default 3.5]")
    ConstGroup.add_argument(
        "--dt-snr",
        action="store",
        type=float,
        dest="dt_snr",
        default=30.,
        help="Specify the window length over which to calculate " +
        "the SNR in sec. [Default 30.]")
    ConstGroup.add_argument(
        "--pre-filt",
        action="store",
        type=str,
        dest="pre_filt",
        default=None,
        help="Specify two floats with low and high frequency corners for " +
        "pre-filter (before deconvolution). [Default None]")
    ConstGroup.add_argument(
        "--fmin",
        action="store",
        type=float,
        dest="fmin",
        default=0.05,
        help="Specify the minimum frequency corner for SNR " +
        "filter (Hz). [Default 0.05]")
    ConstGroup.add_argument(
        "--fmax",
        action="store",
        type=float,
        dest="fmax",
        default=1.0,
        help="Specify the maximum frequency corner for SNR " +
        "filter (Hz). [Default 1.0]")

    # Deconvolution Settings
    DeconGroup = parser.add_argument_group(
        title='Deconvolution Settings',
        description="Parameters for deconvolution")
    DeconGroup.add_argument(
        "--method",
        action="store",
        dest="method",
        type=str,
        default="wiener",
        help="Specify the deconvolution method. Available methods " +
        "include 'wiener', 'water' and 'multitaper'. [Default 'wiener']")
    DeconGroup.add_argument(
        "--gfilt",
        action="store",
        dest="gfilt",
        type=float,
        default=None,
        help="Specify the Gaussian filter width in Hz. " +
        "[Default None]")
    DeconGroup.add_argument(
        "--wlevel",
        action="store",
        dest="wlevel",
        type=float,
        default=0.01,
        help="Specify the water level, used in the 'water' method. " +
        "[Default 0.01]")

    args = parser.parse_args(argv)

    # Check inputs
    if not exist(args.indb):
        parser.error("Input file " + args.indb + " does not exist")

    # create station key list
    if len(args.stkeys) > 0:
        args.stkeys = args.stkeys.split(',')

    if args.phase not in ['P', 'PP', 'allP', 'S', 'SKS', 'allS']:
        parser.error(
            "Error: choose between 'P', 'PP', 'allP', 'S', 'SKS' and 'allS'.")
    # expand the 'all*' shortcuts into explicit phase lists
    if args.phase == 'allP':
        args.listphase = ['P', 'PP']
    elif args.phase == 'allS':
        args.listphase = ['S', 'SKS']
    else:
        args.listphase = [args.phase]

    if args.align is None:
        args.align = 'ZRT'
    elif args.align not in ['ZRT', 'LQT', 'PVH']:
        parser.error(
            "Error: Incorrect alignment specifier. Should be " +
            "either 'ZRT', 'LQT', or 'PVH'.")

    if args.method not in ['wiener', 'water', 'multitaper']:
        parser.error(
            "Error: 'method' should be either 'wiener', 'water' or " +
            "'multitaper'")

    if args.pre_filt is not None:
        # report non-numeric input as a parser error rather than an
        # uncaught ValueError traceback
        try:
            args.pre_filt = [float(val) for val in args.pre_filt.split(',')]
        except ValueError:
            parser.error(
                "Error: --pre-filt should contain 2 " +
                "comma-separated floats")
        args.pre_filt = sorted(args.pre_filt)
        if len(args.pre_filt) != 2:
            parser.error(
                "Error: --pre-filt should contain 2 " +
                "comma-separated floats")

    return args
fg="black", font=("Arial Bold", 15))
# NOTE(review): the line above is the tail of a tk.Label(...) call whose
# opening (`lab3 = tk.Label(root, ...`) lies before this chunk.
lab3.pack(side="top")

# Thin horizontal rule under the title label
sep = tk.PanedWindow(root, orient="horizontal", width=200,
                     background="black")
sep.pack(pady=5)

# Label whose text follows CurClass (presumably a tk.StringVar holding
# the currently selected class — confirm against where CurClass is set)
lab2 = tk.Label(root, textvariable=CurClass, background="#FFFFFF",
                fg="black", font=("Arial Bold", 10))
lab2.pack(side="top")

# Load previously saved homework data before building the rest of the
# UI, but only if the file already exists
from os.path import exists as exist
if exist(hwFile):
    ReadHomework()

# Two more separators: a white one and a 1px grey rule
sep1 = tk.PanedWindow(root, orient="horizontal", width=200,
                      background="white")
sep1.pack(pady=5)
sep2 = tk.PanedWindow(root, orient="horizontal", width=600, height=1)
sep2.config(background="#A0A0A0")
sep2.pack(pady=0, padx=5)

# Label whose text follows amntofhw (presumably a tk.StringVar with the
# homework count — TODO confirm), left-aligned at the top
sayhw = tk.Label(root, textvariable=amntofhw, background='white',
                 fg='black', font=("Arial", 10))
sayhw.pack(side='top', anchor='w', padx='0', pady='0')
def get_cleanspec_arguments(argv=None):
    """
    Get arguments from :class:`~argparse.ArgumentParser` objects.

    Calling options for the script `obs_clean_spectra.py` that accompany
    this package.

    Parameters
    ----------
    argv : list of str, optional
        Command-line argument strings; ``None`` falls back to
        ``sys.argv[1:]``.

    Returns
    -------
    args : :class:`~argparse.Namespace`
        Parsed and validated arguments. ``pd`` is a sorted two-element
        list of frequency limits (defaults to ``[0.004, 2.0]``).
    """
    parser = ArgumentParser(
        usage="%(prog)s [options] <Station Database>",
        description="Script used "
        "to extract daily spectra calculated from " +
        "`obs_daily_spectra.py` and flag days for outlier " +
        "PSDs and calculate spectral averages of the " +
        "corresponding Fourier transforms over the entire " +
        "time period specified. The stations are processed " +
        "one by one and the data are stored to disk.")
    parser.add_argument(
        "indb",
        help="Station Database to process from.",
        type=str)

    # General Settings
    parser.add_argument(
        "--keys",
        action="store",
        type=str,
        dest="stkeys",
        default="",
        help="Specify a comma separated list of station " +
        "keys for which to perform the analysis. These must " +
        "be contained within the station database. Partial " +
        "keys will be used to match against those in the " +
        "dictionary. For instance, providing IU will match " +
        "with all stations in the IU network. " +
        "[Default processes all stations in the database]")
    parser.add_argument(
        "-O", "--overwrite",
        action="store_true",
        dest="ovr",
        default=False,
        help="Force the overwriting of pre-existing data. " +
        "[Default False]")

    # Event Selection Criteria
    DaysGroup = parser.add_argument_group(
        title="Time Search Settings",
        description="Time settings associated with " +
        "searching for day-long seismograms")
    DaysGroup.add_argument(
        "--start",
        action="store",
        type=str,
        dest="startT",
        default="",
        help="Specify a UTCDateTime compatible string " +
        "representing the start day for the data search. " +
        "This will override any station start times. " +
        "[Default start date of each station in database]")
    DaysGroup.add_argument(
        "--end",
        action="store",
        type=str,
        dest="endT",
        default="",
        help="Specify a UTCDateTime compatible string " +
        "representing the start time for the data search. " +
        "This will override any station end times. " +
        "[Default end date of each station in database]")

    # Constants Settings
    ConstGroup = parser.add_argument_group(
        title='Parameter Settings',
        description="Miscellaneous default values " +
        "and settings")
    ConstGroup.add_argument(
        "--freq-band",
        action="store",
        type=str,
        dest="pd",
        default=None,
        help="Specify comma-separated frequency limits " +
        "(float, in Hz) over which to calculate spectral " +
        "features used in flagging the days/windows. " +
        "[Default 0.004,2.0]")
    ConstGroup.add_argument(
        "--tolerance",
        action="store",
        type=float,
        dest="tol",
        default=1.5,
        help="Specify parameter for tolerance threshold. " +
        "If spectrum > std*tol, window is flagged as bad. " +
        "[Default 1.5]")
    ConstGroup.add_argument(
        "--alpha",
        action="store",
        type=float,
        dest="alpha",
        default=0.05,
        help="Confidence level for f-test, for iterative " +
        "flagging of windows. [Default 0.05, or 95 percent confidence]")

    # Constants Settings
    FigureGroup = parser.add_argument_group(
        title='Figure Settings',
        description="Flags for plotting figures")
    FigureGroup.add_argument(
        "--figQC",
        action="store_true",
        dest="fig_QC",
        default=False,
        help="Plot Quality-Control figure. " +
        "[Default does not plot figure]")
    FigureGroup.add_argument(
        "--debug",
        action="store_true",
        dest="debug",
        default=False,
        help="Plot intermediate steps for debugging. " +
        "[Default does not plot figure]")
    FigureGroup.add_argument(
        "--figAverage",
        action="store_true",
        dest="fig_average",
        default=False,
        help="Plot daily average figure. " +
        "[Default does not plot figure]")
    FigureGroup.add_argument(
        "--figCoh",
        action="store_true",
        dest="fig_coh_ph",
        default=False,
        help="Plot Coherence and Phase figure. " +
        "[Default does not plot figure]")
    FigureGroup.add_argument(
        "--figCross",
        action="store_true",
        dest="fig_av_cross",
        default=False,
        help="Plot cross-spectra figure. " +
        "[Default does not plot figure]")
    FigureGroup.add_argument(
        "--save-fig",
        action="store_true",
        dest="saveplot",
        default=False,
        help="Set this option if you wish to save the figure(s). [Default " +
        "does not save figure]")
    FigureGroup.add_argument(
        "--format",
        action="store",
        type=str,
        dest="form",
        default="png",
        help="Specify format of figure. Can be any one of the valid" +
        "matplotlib formats: 'png', 'jpg', 'eps', 'pdf'. [Default 'png']")

    args = parser.parse_args(argv)

    # Check inputs
    if not exist(args.indb):
        parser.error("Input file " + args.indb + " does not exist")

    # create station key list
    if len(args.stkeys) > 0:
        args.stkeys = args.stkeys.split(',')

    # construct start time
    if len(args.startT) > 0:
        try:
            args.startT = UTCDateTime(args.startT)
        # narrow from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate
        except Exception:
            parser.error(
                "Error: Cannot construct UTCDateTime from start time: " +
                args.startT)
    else:
        args.startT = None

    # construct end time
    if len(args.endT) > 0:
        try:
            args.endT = UTCDateTime(args.endT)
        except Exception:
            parser.error(
                "Error: Cannot construct UTCDateTime from end time: " +
                args.endT)
    else:
        args.endT = None

    if args.pd is None:
        args.pd = [0.004, 2.0]
    else:
        # report bad input via parser.error, consistent with every other
        # validation in this module (previously a raw raise(Exception))
        try:
            args.pd = [float(val) for val in args.pd.split(',')]
        except ValueError:
            parser.error(
                "Error: --freq-band should contain 2 " +
                "comma-separated floats")
        args.pd = sorted(args.pd)
        if len(args.pd) != 2:
            parser.error(
                "Error: --freq-band should contain 2 " +
                "comma-separated floats")

    return args
def get_bng_average_arguments(argv=None):
    """
    Get arguments from :class:`~argparse.ArgumentParser` objects.

    This function is used for data processing on-the-fly (requires web
    connection).

    Parameters
    ----------
    argv : list of str, optional
        Command-line argument strings; ``None`` falls back to
        ``sys.argv[1:]``.

    Returns
    -------
    args : :class:`~argparse.Namespace`
        Parsed and validated arguments. ``stkeys`` is split into a list
        when provided.
    """
    parser = ArgumentParser(
        usage="%(prog)s [arguments] <Station Database>",
        description="Program to average the orientations of the seismometer " +
        "in a station database.")
    parser.add_argument(
        "indb",
        help="Station Database to process from.",
        type=str)
    parser.add_argument(
        "-v", "--verbose",
        default=2,
        type=int,
        dest="verb",
        help="Enable Level of verbose output during processing. " +
        "(0) No Output; (1) Output Event Analysis counter; " +
        "(2) Counter and results. Default 2")
    parser.add_argument(
        "--load-location",
        default="BNG_RESULTS",
        type=str,
        dest="loadloc",
        help="Specify Load destination. Default is BNG_RESULTS " +
        "(and sub-directories based on Station Name).")
    parser.add_argument(
        "--plot",
        default=False,
        action="store_true",
        dest="showplot",
        help="Plot results at end (Default False)")
    parser.add_argument(
        "--save",
        action="store_true",
        dest="saveplot",
        default=False,
        help="Set this option if you wish to save the figure. [Default " +
        "does not save figure]")
    parser.add_argument(
        "--format",
        default="png",
        dest="fmt",
        type=str,
        help="Specify format of figure. Can be any one of the valid" +
        "matplotlib formats: 'png', 'jpg', 'eps', 'pdf'. [Default 'png']")

    # Station Selection Parameters
    stparm = parser.add_argument_group(
        title="Station Selection Parameters",
        description="Parameters to select a specific station.")
    stparm.add_argument(
        "--keys",
        dest="stkeys",
        type=str,
        default="",
        help="Specify list of Station Keys in the database to process.")

    # Select QC criteria
    qcparm = parser.add_argument_group(
        title="Quality control parameters",
        description="Quality control parameters on the estimates for " +
        "calculating the average.")
    qcparm.add_argument(
        "--cc",
        dest="cc",
        type=float,
        default=0.5,
        help="Threshold for cross-correlation betwen vertical and radial " +
        "components. [Default 0.5]")
    qcparm.add_argument(
        "--snr",
        dest="snr",
        type=float,
        default=5.,
        help="Threshold for signal-to-noise ratio on vertical component, " +
        "in dB. [Default 5.]")
    qcparm.add_argument(
        "--TR",
        dest="TR",
        type=float,
        default=0.5,
        help="Threshold for transverse to radial ratio (1 - T/R). " +
        "[Default 0.5]")
    qcparm.add_argument(
        "--RZ",
        dest="RZ",
        type=float,
        default=-1.,
        help="Threshold for radial to vertical ratio (1 - R/Z). " +
        "[Default -1.]")

    # Parse Arguments
    args = parser.parse_args(argv)

    # Check inputs
    if not exist(args.indb):
        parser.error("Input file " + args.indb + " does not exist")

    # create station key list
    if len(args.stkeys) > 0:
        args.stkeys = args.stkeys.split(',')

    return args
def get_calc_arguments(argv=None): """ Get Options from :class:`~optparse.OptionParser` objects. This function is used for data processing on-the-fly (requires web connection) """ parser = ArgumentParser( usage="%(prog)s [arguments] <station database>", description="Script used to download and pre-process " + "three-component ('Z', 'N', and 'E'), seismograms for individual " + "events and calculate teleseismic P-wave receiver functions" + "This version requests data on the fly for a given date " + "range. Data are requested from the internet using the " + "client services framework. The stations are processed one " + "by one and the data are stored to disk.") # General Settings parser.add_argument("indb", help="Station Database to process from.", type=str) parser.add_argument( "--keys", action="store", type=str, dest="stkeys", default="", help="Specify a comma separated list of station keys for " + "which to perform the analysis. These must be " + "contained within the station database. Partial keys will " + "be used to match against those in the dictionary. For " + "instance, providing IU will match with all stations in " + "the IU network [Default processes all stations in the database]") parser.add_argument("-v", "-V", "--verbose", action="store_true", dest="verb", default=False, help="Specify to increase verbosity.") parser.add_argument("-O", "--overwrite", action="store_true", dest="ovr", default=False, help="Force the overwriting of pre-existing data. " + "[Default False]") parser.add_argument( "-L", "--long-name", action="store_true", dest="lkey", default=False, help="Force folder names to use long-key form (NET.STN.CHN). 
" + "Default behaviour uses short key form (NET.STN) for the folder " + "names, regardless of the key type of the database.") # Server Settings ServerGroup = parser.add_argument_group( title="Server Settings", description="Settings associated with which " "datacenter to log into.") ServerGroup.add_argument( "-S", "--Server", action="store", type=str, dest="Server", default="IRIS", help="Specify the server to connect to. Options include: " + "BGR, ETH, GEONET, GFZ, INGV, IPGP, IRIS, KOERI, " + "LMU, NCEDC, NEIP, NERIES, ODC, ORFEUS, RESIF, SCEDC, USGS, USP. " + "[Default IRIS]") ServerGroup.add_argument( "-U", "--User-Auth", action="store", type=str, dest="UserAuth", default="", help="Enter your IRIS Authentification Username and Password " + "(--User-Auth='username:authpassword') to " + "access and download restricted data. " + "[Default no user and password]") # Database Settings DataGroup = parser.add_argument_group( title="Local Data Settings", description="Settings associated with defining " + "and using a local data base of pre-downloaded " + "day-long SAC files.") DataGroup.add_argument( "--local-data", action="store", type=str, dest="localdata", default=None, help="Specify a comma separated list of paths containing " + "day-long sac files of data already downloaded. " + "If data exists for a seismogram is already present on disk, " + "it is selected preferentially over downloading " + "the data using the Client interface") DataGroup.add_argument( "--no-data-zero", action="store_true", dest="ndval", default=False, help="Specify to force missing data to be set as zero, rather " + "than default behaviour which sets to nan.") DataGroup.add_argument( "--no-local-net", action="store_false", dest="useNet", default=True, help="Specify to prevent using the Network code in the " + "search for local data (sometimes for CN stations " + "the dictionary name for a station may disagree with that " + "in the filename. 
[Default Network used]") DataGroup.add_argument( "--save-Z12", action="store_true", dest="saveZ12", default=False, help="Specify to save Z12 (un-rotated) components. [Default " + "False]") # Event Selection Criteria EventGroup = parser.add_argument_group( title="Event Settings", description="Settings associated with refining " + "the events to include in matching event-station pairs") EventGroup.add_argument( "--start", action="store", type=str, dest="startT", default="", help="Specify a UTCDateTime compatible string representing " + "the start time for the event search. This will override any " + "station start times. [Default start date of station]") EventGroup.add_argument( "--end", action="store", type=str, dest="endT", default="", help="Specify a UTCDateTime compatible string representing " + "the end time for the event search. This will override any " + "station end times [Default end date of station]") EventGroup.add_argument( "--reverse", "-R", action="store_true", dest="reverse", default=False, help="Reverse order of events. Default behaviour starts at " + "oldest event and works towards most recent. Specify reverse " + "order and instead the program will start with the most recent " + "events and work towards older") EventGroup.add_argument( "--minmag", action="store", type=float, dest="minmag", default=6.0, help="Specify the minimum magnitude of event for which to search. " + "[Default 6.0]") EventGroup.add_argument( "--maxmag", action="store", type=float, dest="maxmag", default=9.0, help="Specify the maximum magnitude of event for which to search. " + "[Default None, i.e. no limit]") # Geometry Settings PhaseGroup = parser.add_argument_group( title="Geometry Settings", description="Settings associatd with the " "event-station geometries for the specified phase") PhaseGroup.add_argument( "--phase", action="store", type=str, dest="phase", default='P', help="Specify the phase name to use. Be careful with the distance. " + "setting. Options are 'P' or 'PP'. 
[Default 'P']") PhaseGroup.add_argument( "--mindist", action="store", type=float, dest="mindist", default=None, help="Specify the minimum great circle distance (degrees) between " + "the station and event. [Default depends on phase]") PhaseGroup.add_argument( "--maxdist", action="store", type=float, dest="maxdist", default=None, help="Specify the maximum great circle distance (degrees) between " + "the station and event. [Default depends on phase]") # Constants Settings ConstGroup = parser.add_argument_group( title='Parameter Settings', description="Miscellaneous default values and settings") ConstGroup.add_argument( "--sampling-rate", action="store", type=float, dest="new_sampling_rate", default=10., help="Specify new sampling rate in Hz. [Default 10.]") ConstGroup.add_argument( "--dts", action="store", type=float, dest="dts", default=150., help="Specify the window length in sec (symmetric about arrival " + "time). [Default 150.]") ConstGroup.add_argument( "--align", action="store", type=str, dest="align", default=None, help="Specify component alignment key. Can be either " + "ZRT, LQT, or PVH. [Default ZRT]") ConstGroup.add_argument( "--vp", action="store", type=float, dest="vp", default=6.0, help="Specify near-surface Vp to use with --align=PVH (km/s). " + "[Default 6.0]") ConstGroup.add_argument( "--vs", action="store", type=float, dest="vs", default=3.5, help="Specify near-surface Vs to use with --align=PVH (km/s). " + "[Default 3.5]") ConstGroup.add_argument( "--dt-snr", action="store", type=float, dest="dt_snr", default=30., help="Specify the window length over which to calculate " + "the SNR in sec. [Default 30.]") ConstGroup.add_argument( "--pre-filt", action="store", type=str, dest="pre_filt", default=None, help="Specify two floats with low and high frequency corners for " + "pre-filter (before deconvolution). 
[Default None]") ConstGroup.add_argument( "--fmin", action="store", type=float, dest="fmin", default=0.05, help="Specify the minimum frequency corner for SNR and CC " + "filter (Hz). [Default 0.05]") ConstGroup.add_argument( "--fmax", action="store", type=float, dest="fmax", default=1.0, help="Specify the maximum frequency corner for SNR and CC " + "filter (Hz). [Default 1.0]") # Constants Settings DeconGroup = parser.add_argument_group( title='Deconvolution Settings', description="Parameters for deconvolution") DeconGroup.add_argument( "--method", action="store", dest="method", type=str, default="wiener", help="Specify the deconvolution method. Available methods " + "include 'wiener', 'water' and 'multitaper'. [Default 'wiener']") DeconGroup.add_argument("--gfilt", action="store", dest="gfilt", type=float, default=None, help="Specify the Gaussian filter width in Hz. " + "[Default None]") DeconGroup.add_argument( "--wlevel", action="store", dest="wlevel", type=float, default=0.01, help="Specify the water level, used in the 'water' method. 
" + "[Default 0.01]") args = parser.parse_args(argv) # Check inputs if not exist(args.indb): parser.error("Input file " + args.indb + " does not exist") # create station key list if len(args.stkeys) > 0: args.stkeys = args.stkeys.split(',') # construct start time if len(args.startT) > 0: try: args.startT = UTCDateTime(args.startT) except: parser.error("Cannot construct UTCDateTime from start time: " + args.startT) else: args.startT = None # construct end time if len(args.endT) > 0: try: args.endT = UTCDateTime(args.endT) except: parser.error("Cannot construct UTCDateTime from end time: " + args.endT) else: args.endT = None # Parse User Authentification if not len(args.UserAuth) == 0: tt = args.UserAuth.split(':') if not len(tt) == 2: parser.error("Error: Incorrect Username and Password " + "Strings for User Authentification") else: args.UserAuth = tt else: args.UserAuth = [] # Parse Local Data directories if args.localdata is not None: args.localdata = args.localdata.split(',') else: args.localdata = [] # Check NoData Value if args.ndval: args.ndval = 0.0 else: args.ndval = nan # Check distances for selected phase if args.phase not in ['P', 'PP', 'S', 'SKS']: parser.error("Error: choose between 'P', 'PP', 'S' and 'SKS'.") if args.phase == 'P': if not args.mindist: args.mindist = 30. if not args.maxdist: args.maxdist = 100. if args.mindist < 30. or args.maxdist > 100.: parser.error("Distances should be between 30 and 100 deg. for " + "teleseismic 'P' waves.") elif args.phase == 'PP': if not args.mindist: args.mindist = 100. if not args.maxdist: args.maxdist = 180. if args.mindist < 100. or args.maxdist > 180.: parser.error("Distances should be between 100 and 180 deg. for " + "teleseismic 'PP' waves.") elif args.phase == 'S': if not args.mindist: args.mindist = 55. if not args.maxdist: args.maxdist = 85. if args.mindist < 55. or args.maxdist > 85.: parser.error("Distances should be between 55 and 85 deg. 
for " + "teleseismic 'S' waves.") elif args.phase == 'SKS': if not args.mindist: args.mindist = 85. if not args.maxdist: args.maxdist = 115. if args.mindist < 85. or args.maxdist > 115.: parser.error("Distances should be between 85 and 115 deg. for " + "teleseismic 'SKS' waves.") if args.pre_filt is not None: args.pre_filt = [float(val) for val in args.pre_filt.split(',')] args.pre_filt = sorted(args.pre_filt) if (len(args.pre_filt)) != 2: parser.error("Error: --pre-filt should contain 2 " + "comma-separated floats") # Check alignment arguments if args.align is None: args.align = 'ZRT' elif args.align not in ['ZRT', 'LQT', 'PVH']: parser.error("Error: Incorrect alignment specifier. Should be " + "either 'ZRT', 'LQT', or 'PVH'.") if args.dt_snr > args.dts: args.dt_snr = args.dts - 10. print("SNR window > data window. Defaulting to data " + "window minus 10 sec.") if args.method not in ['wiener', 'water', 'multitaper']: parser.error("Error: 'method' should be either 'wiener', 'water' or " + "'multitaper'") return args
def test_export_firmware_extended_fedora29(C):
    """
    Check whether the firmware file is exported correctly and has a sane size.

    Apparently, the auto-remounting side effect of the v0.46 change is
    disturbing the export process. Unmounting the UV just before the export
    gives the device a 20/20 success rate.
    Test case for issue https://github.com/Nitrokey/nitrokey-app/issues/399
    """
    skip_if_device_version_lower_than({'S': 43})
    skip_if_not_fedora('Tested on Fedora only. To check on other distros.')

    from time import sleep
    import os
    from os.path import exists as exist
    import re

    try:
        import pyudev as pu
        import pexpect
    # Narrowed from a bare `except:`: only a missing dependency should skip;
    # a bare except would also swallow KeyboardInterrupt and real import bugs.
    except ImportError:
        pytest.skip('Skipping due to missing required packages: pyudev and pexpect.')

    # Locate the Nitrokey's block-device partition via udev.
    ctx = pu.Context()
    devices = ctx.list_devices(subsystem='block', ID_VENDOR='Nitrokey')
    device = None
    for d in devices:
        if d.device_type == 'partition':
            device = '/dev/{}'.format(d.sys_name)
            break
    assert device, 'Device could not be found'

    # Remount once so the mount point can be parsed from udisksctl's output.
    pexpect.run(f'udisksctl unmount -b {device}')
    sleep(1)
    _res = pexpect.run(f'udisksctl mount -b {device}').decode()
    # Raw string fixes the invalid escape sequence warning for `\.`
    # (the pattern value itself is unchanged).
    firmware_abs_path = re.findall(r'at (/.*)\.', _res)
    assert firmware_abs_path, 'Cannot get mount point'
    firmware_abs_path = firmware_abs_path[0]
    print('path: {}, device: {}'.format(firmware_abs_path, device))
    firmware_abs_path = firmware_abs_path + '/firmware.bin'

    # Start from a clean state: no firmware file left on the device.
    checks = 0
    checks_add = 0
    if exist(firmware_abs_path):
        os.remove(firmware_abs_path)
    assert not exist(firmware_abs_path)

    ATTEMPTS = 20
    for _ in range(ATTEMPTS):
        # if umount is disabled, success rate is 3/10, enabled: 10/10
        pexpect.run(f'udisksctl unmount -b {device}')
        assert C.NK_export_firmware(DefaultPasswords.ADMIN) == DeviceErrorCode.STATUS_OK
        pexpect.run(f'udisksctl mount -b {device}')
        sleep(1)
        firmware_file_exist = exist(firmware_abs_path)
        if firmware_file_exist:
            checks += 1
            getsize = os.path.getsize(firmware_abs_path)
            print('Firmware file exist, size: {}'.format(getsize))
            # Exported image should be at least 100 KiB to count as complete.
            checks_add += 1 if getsize >= 100 * 1024 else 0
            # checks_add += 1 if os.path.getsize(firmware_abs_path) == 256*1024 else 0
            os.remove(firmware_abs_path)
        assert not exist(firmware_abs_path)

    print('CHECK {} ; CHECK ADDITIONAL {}'.format(checks, checks_add))
    assert checks == ATTEMPTS
    assert checks_add == checks
tr_full = st_obs.select(channel=f"xx{comps[i]}")[0].copy() if fmin is not None and fmax is not None: _PreProcess.filter_tr(tr_full, fmin=fmin, fmax=fmax, zerophase=zerophase) o_time = tr_full.stats.starttime tr = tr_full.slice( starttime=o_time + obs_tts[i] - t_pres[i], endtime=o_time + obs_tts[i] + t_posts[i], ) st_obs_w += tr st_obs_full += tr_full """ """ # check if folder exists: if not exist(save_path): makedirs(save_path) # check if folder is empty if not lsdir(save_path): subprocess.call(f"scp {bin_path} .", shell=True, cwd=save_path) bm_file_path = "/home/nienke/Documents/Research/Data/MTI/MT_vs_STR/bm_models/TAYAK.bm" Create_Vmod.create_dat_file( src_depth=depth, focal_mech=focal_mech0, M0=None, epi_in_km=epi_in_km, baz=baz, dt=dt, save_path=save_path, bm_file_path=bm_file_path, )
def get_bng_calc_arguments(argv=None):
    """
    Get Options from :class:`~optparse.OptionParser` objects.

    This function is used for data processing on-the-fly
    (requires web connection).

    :param argv: argument list to parse instead of ``sys.argv``
        (useful for testing)
    :return: parsed and post-processed ``argparse.Namespace``
    """
    parser = ArgumentParser(
        usage="%(prog)s [arguments] <Station Database>",
        description="Program to compute the orientation of the components " +
        "of a station based on those in a station database.")
    parser.add_argument(
        "indb", help="Station Database to process from.", type=str)
    parser.add_argument(
        "-v", "--verbose", default=2, type=int, dest="verb",
        help="Enable Level of verbose output during processing. " +
        "(0) No Output; (1) Output Event Analysis counter; " +
        "(2) Counter and results. Default 2")
    parser.add_argument(
        "-O", "--overwrite", default=False, action="store_true", dest="ovr",
        help="Overwrite existing data on disk. [Default False]")
    parser.add_argument(
        "--save-location", default="BNG_RESULTS", type=str, dest="saveloc",
        help="Specify Save destination. Default is BNG_RESULTS " +
        "(and sub-directories based on Station Name).")
    parser.add_argument(
        "--no-save-progress", default=True, action="store_false",
        dest="constsave", help="Do not save progress during processing.")

    # Use local data directory
    Dtparm = parser.add_argument_group(
        title="Local Data Settings",
        description="Settings associated with defining and using a " +
        "local data base of pre-downloaded day-long SAC files.")
    Dtparm.add_argument(
        "--local-data", action="store", type=str, dest="localdata",
        default="",
        help="Specify a comma separated list of paths containing " +
        "day-long sac files of data already downloaded. If data exists " +
        "for a seismogram is already present on disk, it is selected " +
        "preferentially over downloading the data using the Client interface")
    Dtparm.add_argument(
        "--no-data-zero", action="store_true", dest="ndval",
        default=False,
        help="Specify to force missing data to be set as zero, rather " +
        "than default behaviour. [Default sets to nan]")
    Dtparm.add_argument(
        "--no-local-net", action="store_false", dest="useNet", default=True,
        help="Specify to prevent using the Network code in the search " +
        "for local data (sometimes for CN stations the dictionary name " +
        "for a station may disagree with that in the filename. " +
        "[Default Network used]")

    # Server Settings
    Svparm = parser.add_argument_group(
        title="Server Settings",
        description="Settings associated with which datacenter to log into.")
    Svparm.add_argument(
        "--catalogue-source", action="store", type=str, dest="cat_client",
        default="IRIS",
        help="Specify the server to connect to for the event catalogue. " +
        "Options include: BGR, ETH, GEONET, GFZ, INGV, IPGP, IRIS, KOERI, " +
        "LMU, NCEDC, NEIP, NERIES, ODC, ORFEUS, RESIF, SCEDC, USGS, USP. " +
        "[Default IRIS]")
    Svparm.add_argument(
        "--waveform-source", action="store", type=str, dest="wf_client",
        default="IRIS",
        help="Specify the server to connect to for the waveform data. " +
        "Options include: BGR, ETH, GEONET, GFZ, INGV, IPGP, IRIS, KOERI, " +
        "LMU, NCEDC, NEIP, NERIES, ODC, ORFEUS, RESIF, SCEDC, USGS, USP. " +
        "[Default IRIS]")
    Svparm.add_argument(
        "-U", "--User-Auth", action="store", type=str, dest="UserAuth",
        default="",
        help="Enter your Authentification Username and Password for the " +
        "waveform server (--User-Auth='username:authpassword') to access " +
        "and download restricted data. [Default no user and password]")

    # Station Selection Parameters
    stparm = parser.add_argument_group(
        title="Station Selection Parameters",
        description="Parameters to select a specific station.")
    stparm.add_argument(
        "--keys", dest="stkeys", type=str, default="",
        help="Specify list of Station Keys in the database to process.")
    stparm.add_argument(
        "-c", "--coord-system", dest="nameconv", type=int, default=2,
        help="Coordinate system specification of instrument. " +
        "(0) Attempt Autodetect between 1 and 2; (1) HZ, HN, HE; " +
        "(2) Left Handed: HZ, H2 90 CW H1; (3) Right Handed: HZ, H2 90 CCW " +
        "H1. [Default 2]")

    # -- Timing
    Tmparm = parser.add_argument_group(
        title="Timing Parameters",
        description="Parameters associated with event timing and window " +
        "length.")
    Tmparm.add_argument(
        "--start", dest="startT", type=str, default="",
        help="Enter Start date for event catalogue search. Note, more " +
        "recent of this value or station start date will be used.")
    Tmparm.add_argument(
        "--end", dest="endT", type=str, default="",
        help="Enter End date for event catalogue search. Note, less " +
        "recent of this or the station end date will be used.")
    Tmparm.add_argument(
        "--window", dest="wlen", type=float, default=15.,
        help="Enter length of time window following P arrival time in " +
        "seconds. [Default 15.]")
    Tmparm.add_argument(
        "--times", dest="tt", type=str, default=None,
        help="Enter window start and end times relative to predicted P " +
        "arrival time in seconds. Negative values imply start of window " +
        "before P wave arrival. [Default -2., 5.]")

    # EQ Specifications
    Eqparm = parser.add_argument_group(
        title="Earthquake Selection Criteria",
        description="Parameters associated with selecing the subset of " +
        "earthquakes to use in calculations.")
    Eqparm.add_argument(
        "--min-mag", dest="minmag", type=float, default=5.5,
        help="Specify the minimum magnitude of Earthquakes to use in " +
        "the catalogue search. [Default 5.5]")
    Eqparm.add_argument(
        "--max-mag", dest="maxmag", type=float, default=9.,
        help="Specify the maximum magnitude of Earthquakes to use in " +
        "the catalogue search. [Default 9.]")
    Eqparm.add_argument(
        "--min-dist", dest="mindist", type=float, default=5.,
        help="Specify the minimum earthquake distance (in degrees). " +
        "[Default 5.]")
    Eqparm.add_argument(
        "--max-dist", dest="maxdist", type=float, default=175.,
        help="Specify the maximum earthquake distance (in degrees). " +
        "[Default 175.]")
    Eqparm.add_argument(
        "--max-dep", dest="maxdep", type=float, default=1000.,
        help="Specify maximum Earthquake Depth (km). [Default no limit]")
    Eqparm.add_argument(
        "--discard-catalogue", dest="savecat", default=True,
        action="store_false",
        help="Specify to discard the eq catalogue after processing.")

    # Processing Specifications
    Procparm = parser.add_argument_group(
        title="Processing Parameters",
        description="Parameters associated with BNG processing.")
    Procparm.add_argument(
        "--new-sampling-rate", dest="new_sr", type=float, default=None,
        help="Specify new sampling rate in Hz. [Default no resampling]")
    Procparm.add_argument(
        "--dphi", dest="dphi", type=float, default=0.1,
        help="Specify angle interval for search, in degrees. [Default 0.1]")
    Procparm.add_argument(
        "--bp", dest="bp", type=str, default=None,
        help="Specify corner frequencies in Hz as a list of two floats. " +
        "[Default 0.7,5.0]")
    Procparm.add_argument(
        "--plot", dest="showplot", default=False, action="store_true",
        help="Show processing step including raw and rotated waveforms. " +
        "[Default doesn't show plot]")

    # Parse Arguments
    args = parser.parse_args(argv)

    # Check inputs
    if not exist(args.indb):
        parser.error("Input file " + args.indb + " does not exist")

    # create station key list
    if len(args.stkeys) > 0:
        args.stkeys = args.stkeys.split(',')

    # construct start time
    if len(args.startT) > 0:
        try:
            args.startT = UTCDateTime(args.startT)
        # Narrowed from a bare `except:` so that e.g. KeyboardInterrupt
        # is not swallowed; UTCDateTime failures still report cleanly.
        except Exception:
            parser.error(
                "Cannot construct UTCDateTime from start time: " +
                args.startT)
    else:
        args.startT = None

    # construct end time
    if len(args.endT) > 0:
        try:
            args.endT = UTCDateTime(args.endT)
        except Exception:
            parser.error(
                "Cannot construct UTCDateTime from end time: " + args.endT)
    else:
        args.endT = None

    # Parse User Authentification
    if not len(args.UserAuth) == 0:
        tt = args.UserAuth.split(':')
        if not len(tt) == 2:
            parser.error(
                "Error: Incorrect Username and Password Strings " +
                "for User Authentification")
        else:
            args.UserAuth = tt
    else:
        args.UserAuth = []

    # Parse Local Data directories
    if len(args.localdata) > 0:
        args.localdata = args.localdata.split(',')
    else:
        args.localdata = []

    # Check NoData Value: zero-fill if requested, otherwise use nan.
    if args.ndval:
        args.ndval = 0.0
    else:
        args.ndval = nan

    # Band-pass corners, sorted low-to-high.
    if args.bp is not None:
        args.bp = [float(val) for val in args.bp.split(',')]
        args.bp = sorted(args.bp)
        if (len(args.bp)) != 2:
            parser.error(
                "Error: --bp should contain 2 " +
                "comma-separated floats")

    # Window times relative to predicted P arrival, sorted.
    if args.tt is not None:
        args.tt = [float(val) for val in args.tt.split(',')]
        args.tt = sorted(args.tt)
        if (len(args.tt)) != 2:
            parser.error(
                "Error: --times should contain 2 " +
                "comma-separated floats")
    else:
        args.tt = [-2., 5.]

    return args
def get_dailyspec_arguments(argv=None):
    """
    Get Options from :class:`~optparse.OptionParser` objects.

    Calling options for the script `obs_daily_spectra.py` that accompany this
    package.

    :param argv: argument list to parse instead of ``sys.argv``
        (useful for testing)
    :return: parsed and post-processed ``argparse.Namespace``
    """
    parser = ArgumentParser(
        usage="%(prog)s [options] <Station Database>",
        description="Script used "
        "to extract shorter windows from the day-long " +
        "seismograms, calculate the power-spectral " +
        "properties, flag windows for outlier PSDs and " +
        "calculate daily averages of the corresponding " +
        "Fourier transforms. The stations are processed " +
        "one by one and the data are stored to disk. The " +
        "program will look for data saved in the previous " +
        "steps and use all available components.")
    parser.add_argument(
        "indb", help="Station Database to process from.", type=str)

    # General Settings
    parser.add_argument(
        "--keys", action="store", type=str, dest="stkeys", default="",
        help="Specify a comma separated list of station keys " +
        "for which to perform the analysis. These must be " +
        "contained within the station database. Partial keys " +
        "will be used to match against those in the " +
        "dictionary. For instance, providing IU will match " +
        "with all stations in the IU network. " +
        "[Default processes all stations in the database]")
    parser.add_argument(
        "-O", "--overwrite", action="store_true", dest="ovr",
        default=False,
        help="Force the overwriting of pre-existing data. " +
        "[Default False]")

    # Event Selection Criteria
    DaysGroup = parser.add_argument_group(
        title="Time Search Settings",
        description="Time settings associated with " +
        "searching for day-long seismograms")
    DaysGroup.add_argument(
        "--start", action="store", type=str, dest="startT", default="",
        help="Specify a UTCDateTime compatible string " +
        "representing the start day for the data search. " +
        "This will override any station start times. " +
        "[Default start date of each station in database]")
    DaysGroup.add_argument(
        "--end", action="store", type=str, dest="endT", default="",
        help="Specify a UTCDateTime compatible string " +
        "representing the start time for the data search. " +
        "This will override any station end times. " +
        "[Default end date of each station n database]")

    # Constants Settings
    ConstGroup = parser.add_argument_group(
        title='Parameter Settings',
        description="Miscellaneous default values " +
        "and settings")
    ConstGroup.add_argument(
        "--window", action="store", type=float, dest="window",
        default=7200.,
        help="Specify window length in seconds. " +
        "Default value is highly recommended. "
        "Program may not be stable for large deviations " +
        "from default value. [Default 7200. (or 2 hours)]")
    # NOTE: literal percent signs in argparse help strings must be escaped
    # as %% -- argparse %-formats help text, and a single % followed by ')'
    # or ' ' raised ValueError whenever --help was rendered.
    ConstGroup.add_argument(
        "--overlap", action="store", type=float, dest="overlap",
        default=0.3,
        help="Specify fraction of overlap between windows. " +
        "[Default 0.3 (or 30%%)]")
    ConstGroup.add_argument(
        "--minwin", action="store", type=int, dest="minwin",
        default=10,
        help="Specify minimum number of 'good' windows " +
        "in any given day to continue with analysis. " +
        "[Default 10]")
    ConstGroup.add_argument(
        "--freq-band", action="store", type=str, dest="pd",
        default=None,
        help="Specify comma-separated frequency limits " +
        "(float, in Hz) over which to calculate spectral " +
        "features used in flagging the bad windows. " +
        "[Default 0.004,2.0]")
    ConstGroup.add_argument(
        "--tolerance", action="store", type=float, dest="tol",
        default=2.0,
        help="Specify parameter for tolerance threshold. " +
        "If spectrum > std*tol, window is flagged as bad. " +
        "[Default 2.0]")
    ConstGroup.add_argument(
        "--alpha", action="store", type=float, dest="alpha",
        default=0.05,
        help="Specify confidence level for f-test, " +
        "for iterative flagging of windows. " +
        "[Default 0.05, or 95%% confidence]")
    ConstGroup.add_argument(
        "--raw", action="store_true", dest="raw",
        default=False,
        help="Raw spectra will be used in calculating " +
        "spectral features for flagging. " +
        "[Default uses smoothed spectra]")
    ConstGroup.add_argument(
        "--no-rotation", action="store_false", dest="calc_rotation",
        default=True,
        help="Do not rotate horizontal components " +
        "to tilt direction. [Default calculates rotation]")

    # Constants Settings
    FigureGroup = parser.add_argument_group(
        title='Figure Settings',
        description="Flags for plotting figures")
    FigureGroup.add_argument(
        "--figQC", action="store_true", dest="fig_QC", default=False,
        help="Plot Quality-Control figure. " +
        "[Default does not plot figure]")
    FigureGroup.add_argument(
        "--debug", action="store_true", dest="debug", default=False,
        help="Plot intermediate steps for debugging. " +
        "[Default does not plot figure]")
    FigureGroup.add_argument(
        "--figAverage", action="store_true", dest="fig_average",
        default=False,
        help="Plot daily average figure. " +
        "[Default does not plot figure]")
    FigureGroup.add_argument(
        "--figCoh", action="store_true", dest="fig_coh_ph",
        default=False,
        help="Plot Coherence and Phase figure. " +
        "[Default does not plot figure]")
    FigureGroup.add_argument(
        "--save-fig", action="store_true", dest="saveplot",
        default=False,
        help="Set this option if you wish to save the figure(s). [Default " +
        "does not save figure]")
    FigureGroup.add_argument(
        "--format", action="store", type=str, dest="form",
        default="png",
        help="Specify format of figure. Can be any one of the valid" +
        "matplotlib formats: 'png', 'jpg', 'eps', 'pdf'. [Default 'png']")

    args = parser.parse_args(argv)

    # Check inputs
    if not exist(args.indb):
        parser.error("Input file " + args.indb + " does not exist")

    # create station key list
    if len(args.stkeys) > 0:
        args.stkeys = args.stkeys.split(',')

    # Smoothed spectra are the default; --raw disables smoothing.
    if args.raw:
        args.smooth = False
    else:
        args.smooth = True

    # construct start time
    if len(args.startT) > 0:
        try:
            args.startT = UTCDateTime(args.startT)
        # Narrowed from a bare `except:` so e.g. KeyboardInterrupt
        # is not swallowed.
        except Exception:
            parser.error(
                "Error: Cannot construct UTCDateTime from start time: " +
                args.startT)
    else:
        args.startT = None

    # construct end time
    if len(args.endT) > 0:
        try:
            args.endT = UTCDateTime(args.endT)
        except Exception:
            parser.error(
                "Error: Cannot construct UTCDateTime from end time: " +
                args.endT)
    else:
        args.endT = None

    # Check input frequency band
    if args.pd is None:
        args.pd = [0.004, 2.0]
    else:
        # BUG FIX: the original called float() on the whole split list
        # (`[float(args.pd.split(','))]`), which raised TypeError for any
        # user-supplied --freq-band value. Convert each element instead.
        args.pd = [float(val) for val in args.pd.split(',')]
        args.pd = sorted(args.pd)
        if (len(args.pd)) != 2:
            raise Exception(
                "Error: --freq-band should contain 2 " +
                "comma-separated floats")

    return args
def get_transfer_arguments(argv=None):
    """
    Get Options from :class:`~optparse.OptionParser` objects.

    Calling options for the script `obs_transfer functions.py` that accompany
    this package.

    :param argv: argument list to parse instead of ``sys.argv``
        (useful for testing)
    :return: parsed and post-processed ``argparse.Namespace``
    """
    parser = ArgumentParser(
        usage="%(prog)s [options] <Station Database>",
        description="Script used "
        "to calculate transfer functions between various " +
        "components, to be used in cleaning vertical " +
        "component of OBS data. The noise data can be " +
        "those obtained from the daily spectra (i.e., " +
        "from `obs_daily_spectra.py`) or those obtained " +
        "from the averaged noise spectra (i.e., from " +
        "`obs_clean_spectra.py`). Flags are available " +
        "to specify the source of data to use as well as " +
        "the time range over which to calculate the " +
        "transfer functions. The stations are processed " +
        "one by one and the data are stored to disk.")
    parser.add_argument(
        "indb", help="Station Database to process from.", type=str)

    # General Settings
    parser.add_argument(
        "--keys", action="store", type=str, dest="stkeys", default="",
        help="Specify a comma separated list of station " +
        "keys for which to perform the analysis. These must " +
        "be contained within the station database. Partial " +
        "keys will be used to match against those in the " +
        "dictionary. For instance, providing IU will match " +
        "with all stations in the IU network. " +
        "[Default processes all stations in the database]")
    parser.add_argument(
        "-O", "--overwrite", action="store_true", dest="ovr",
        default=False,
        help="Force the overwriting of pre-existing data. " +
        "[Default False]")

    # Event Selection Criteria
    DaysGroup = parser.add_argument_group(
        title="Time Search Settings",
        description="Time settings associated with searching "
        "for day-long seismograms")
    DaysGroup.add_argument(
        "--start", action="store", type=str, dest="startT", default="",
        help="Specify a UTCDateTime compatible string " +
        "representing the start day for the data search. "
        "This will override any station start times. " +
        "[Default start date of each station in database]")
    DaysGroup.add_argument(
        "--end", action="store", type=str, dest="endT", default="",
        help="Specify a UTCDateTime compatible string " +
        "representing the start time for the data search. "
        "This will override any station end times. " +
        "[Default end date of each station in database]")

    # Constants Settings
    ConstGroup = parser.add_argument_group(
        title='Parameter Settings',
        description="Miscellaneous default values " +
        "and settings")
    ConstGroup.add_argument(
        "--skip-daily", action="store_true", dest="skip_daily",
        default=False,
        help="Skip daily spectral averages in construction " +
        "of transfer functions. [Default False]")
    ConstGroup.add_argument(
        "--skip-clean", action="store_true", dest="skip_clean",
        default=False,
        help="Skip cleaned spectral averages in " +
        "construction of transfer functions. Defaults " +
        "to True if data cannot be found in default " +
        "directory. [Default False]")

    # Constants Settings
    FigureGroup = parser.add_argument_group(
        title='Figure Settings',
        description="Flags for plotting figures")
    FigureGroup.add_argument(
        "--figTF", action="store_true", dest="fig_TF",
        default=False,
        help="Plot transfer function figure. " +
        "[Default does not plot figure]")
    FigureGroup.add_argument(
        "--save-fig", action="store_true", dest="saveplot",
        default=False,
        help="Set this option if you wish to save the figure(s). [Default " +
        "does not save figure]")
    FigureGroup.add_argument(
        "--format", action="store", type=str, dest="form",
        default="png",
        help="Specify format of figure. Can be any one of the valid" +
        "matplotlib formats: 'png', 'jpg', 'eps', 'pdf'. [Default 'png']")

    args = parser.parse_args(argv)

    # Check inputs
    if not exist(args.indb):
        parser.error("Input file " + args.indb + " does not exist")

    # create station key list
    if len(args.stkeys) > 0:
        args.stkeys = args.stkeys.split(',')

    # construct start time
    if len(args.startT) > 0:
        try:
            args.startT = UTCDateTime(args.startT)
        # Narrowed from a bare `except:` so e.g. KeyboardInterrupt
        # is not swallowed.
        except Exception:
            parser.error(
                "Error: Cannot construct UTCDateTime from " +
                "start time: " + args.startT)
    else:
        args.startT = None

    # construct end time
    if len(args.endT) > 0:
        try:
            args.endT = UTCDateTime(args.endT)
        except Exception:
            parser.error(
                "Error: Cannot construct UTCDateTime from " +
                "end time: " + args.endT)
    else:
        args.endT = None

    # At least one of the two spectral-average sources must be used.
    if args.skip_clean and args.skip_daily:
        parser.error(
            "Error: cannot skip both daily and clean averages")

    return args
def deleteFile(file: str) -> None:
    """Remove *file* from disk if it exists; do nothing otherwise."""
    # BUG FIX: os.path provides `exists`, not `exist` -- the original
    # `os_path.exist(file)` raised AttributeError on every call.
    if os_path.exists(file):
        os.remove(file)
def get_dl_average_arguments(argv=None):
    """
    Build and parse the command-line options for averaging seismometer
    orientations over a station database.

    :param argv: argument list to parse instead of ``sys.argv``
        (useful for testing)
    :return: parsed and post-processed ``argparse.Namespace``
    """
    parser = ArgumentParser(
        usage="%(prog)s [arguments] <Station Database>",
        description="Program to average the orientations of the seismometer " +
        "in a station database.")

    parser.add_argument(
        "indb", type=str,
        help="Station Database to process from.")
    parser.add_argument(
        "-v", "--verbose", dest="verb", type=int, default=2,
        help="Enable Level of verbose output during processing. " +
        "(0) No Output; (1) Output Event Analysis counter; " +
        "(2) Counter and results. Default 2")
    parser.add_argument(
        "--load-location", dest="loadloc", type=str, default="DL_RESULTS",
        help="Specify Load destination. [Default is DL_RESULTS " +
        "(and sub-directories based on Station Name)]")
    parser.add_argument(
        "--plot", dest="showplot", action="store_true", default=False,
        help="Plot results at end [Default False]")
    parser.add_argument(
        "--save", dest="saveplot", action="store_true", default=False,
        help="Set this option if you wish to save the figure. [Default " +
        "does not save figure]")
    parser.add_argument(
        "--format", dest="fmt", type=str, default="png",
        help="Specify format of figure. Can be any one of the valid" +
        "matplotlib formats: 'png', 'jpg', 'eps', 'pdf'. [Default 'png']")
    parser.add_argument(
        "--cc", dest="cc", type=float, default=0.8,
        help="Cross-correlation threshold for final estimate. [Default 0.8]")

    # Station selection options live in their own group.
    station_group = parser.add_argument_group(
        title="Station Selection Parameters",
        description="Parameters to select a specific station.")
    station_group.add_argument(
        "--keys", dest="stkeys", type=str, default="",
        help="Specify list of Station Keys in the database to process.")

    args = parser.parse_args(argv)

    # The station database file must exist on disk.
    if not exist(args.indb):
        parser.error("Input file " + args.indb + " does not exist")

    # A non-empty --keys string becomes a list of station keys.
    if args.stkeys:
        args.stkeys = args.stkeys.split(',')

    return args
def get_event_arguments(argv=None):
    """
    Parse command-line arguments for the `obs_download_event.py` script.

    Builds the :class:`argparse.ArgumentParser`, parses ``argv`` (or
    ``sys.argv[1:]`` when None) and post-processes the raw values:
    station keys and channels are split into lists, start/end times are
    converted to ``UTCDateTime``, the user authentication string is split
    into ``[username, password]``, and the deconvolution pre-filter is
    validated and sorted.

    :param argv: optional list of argument strings to parse
    :return: :class:`argparse.Namespace` with normalized options
    :raises SystemExit: via ``parser.error`` on an invalid database path,
        channel, date string or authentication string
    :raises Exception: when ``--pre-filt`` does not contain exactly four
        comma-separated floats
    """
    parser = ArgumentParser(
        usage="%(prog)s [options] <Station Database>",
        description="Script used " +
        "to download and pre-process four-component " +
        "(H1, H2, Z and P), two-hour-long seismograms for " +
        "individual events on which to apply the de-noising " +
        "algorithms. Data are requested from the internet using " +
        "the client services framework for a given date range. " +
        "The stations are processed one by one and the data are " +
        "stored to disk.")
    parser.add_argument(
        "indb",
        help="Station Database to process from.",
        type=str)

    # General Settings
    parser.add_argument(
        "--keys",
        action="store",
        type=str,
        dest="stkeys",
        default="",
        help="Specify a comma separated list of station keys " +
        "for which to perform the analysis. These must be " +
        "contained within the station database. Partial keys " +
        "will be used to match against those in the " +
        "dictionary. For instance, providing IU will match with " +
        "all stations in the IU network [Default processes " +
        "all stations in the database]")
    parser.add_argument(
        "-C", "--channels",
        action="store",
        type=str,
        dest="channels",
        default="",
        help="Specify a comma-separated list of channels for " +
        "which to perform the transfer function analysis. " +
        "Possible options are H (for horizontal channels) or P " +
        "(for pressure channel). Specifying H allows " +
        "for tilt correction. Specifying P allows for compliance " +
        "correction. [Default looks for both horizontal and " +
        "pressure and allows for both tilt AND compliance corrections]")
    parser.add_argument(
        "-O", "--overwrite",
        action="store_true",
        dest="ovr",
        default=False,
        help="Force the overwriting of pre-existing data. " +
        "[Default False]")

    # Server Settings
    ServerGroup = parser.add_argument_group(
        title="Server Settings",
        description="Settings associated with which "
        "datacenter to log into.")
    ServerGroup.add_argument(
        "-S", "--Server",
        action="store",
        type=str,
        dest="Server",
        default="IRIS",
        help="Specify the server to connect to. Options include: BGR, " +
        "ETH, GEONET, GFZ, INGV, IPGP, IRIS, KOERI, LMU, NCEDC, NEIP, " +
        "NERIES, ODC, ORFEUS, RESIF, SCEDC, USGS, USP. [Default IRIS]")
    ServerGroup.add_argument(
        "-U", "--User-Auth",
        action="store",
        type=str,
        dest="UserAuth",
        default="",
        help="Enter your IRIS Authentification Username and Password " +
        "(--User-Auth='username:authpassword') to access and download " +
        "restricted data. [Default no user and password]")

    # Constants Settings
    FreqGroup = parser.add_argument_group(
        title='Frequency Settings',
        description="Miscellaneous frequency settings")
    FreqGroup.add_argument(
        "--sampling-rate",
        action="store",
        type=float,
        dest="new_sampling_rate",
        default=5.,
        help="Specify new sampling rate (float, in Hz). " +
        "[Default 5.]")
    FreqGroup.add_argument(
        "--units",
        action="store",
        type=str,
        dest="units",
        default="DISP",
        help="Choose the output seismogram units. Options are: " +
        "'DISP', 'VEL', 'ACC'. [Default 'DISP']")
    FreqGroup.add_argument(
        "--pre-filt",
        action="store",
        type=str,
        dest="pre_filt",
        default=None,
        help="Specify four comma-separated corner " +
        "frequencies (float, in Hz) for deconvolution " +
        "pre-filter. [Default 0.001,0.005,45.,50.]")

    # Event Selection Criteria
    EventGroup = parser.add_argument_group(
        title="Event Settings",
        description="Settings associated with refining " +
        "the events to include in matching station " +
        "pairs")
    EventGroup.add_argument(
        "--start",
        action="store",
        type=str,
        dest="startT",
        default="",
        help="Specify a UTCDateTime compatible string " +
        "representing the start time for the event " +
        "search. This will override any station start " +
        "times. [Default start date of each station in " +
        "database]")
    EventGroup.add_argument(
        "--end",
        action="store",
        type=str,
        dest="endT",
        default="",
        # fixed copy-paste in the help text: this is the END time
        help="Specify a UTCDateTime compatible string " +
        "representing the end time for the event " +
        "search. This will override any station end times " +
        "[Default end date of each station in database]")
    EventGroup.add_argument(
        "--reverse-order", "-R",
        action="store_true",
        dest="reverse",
        default=False,
        help="Reverse order of events. Default behaviour " +
        "starts at oldest event and works towards most " +
        "recent. Specify reverse order and instead the " +
        "program will start with the most recent events " +
        "and work towards older")
    EventGroup.add_argument(
        "--min-mag",
        action="store",
        type=float,
        dest="minmag",
        default=5.5,
        help="Specify the minimum magnitude of event " +
        "for which to search. [Default 5.5]")
    EventGroup.add_argument(
        "--max-mag",
        action="store",
        type=float,
        dest="maxmag",
        default=None,
        help="Specify the maximum magnitude of event " +
        "for which to search. " +
        "[Default None, i.e. no limit]")

    # Geometry Settings
    GeomGroup = parser.add_argument_group(
        title="Geometry Settings",
        description="Settings associatd with the " +
        "event-station geometries")
    GeomGroup.add_argument(
        "--min-dist",
        action="store",
        type=float,
        dest="mindist",
        default=30.,
        help="Specify the minimum great circle distance " +
        "(degrees) between the station and event. " +
        "[Default 30]")
    GeomGroup.add_argument(
        "--max-dist",
        action="store",
        type=float,
        dest="maxdist",
        default=120.,
        help="Specify the maximum great circle distance " +
        "(degrees) between the station and event. " +
        "[Default 120]")

    args = parser.parse_args(argv)

    # The station database must exist on disk before any processing starts
    if not exist(args.indb):
        parser.error("Input file " + args.indb + " does not exist")

    # Convert the comma-separated key string into a list of keys
    if len(args.stkeys) > 0:
        args.stkeys = args.stkeys.split(',')

    # Validate the channel selection; default is both H and P
    if len(args.channels) > 0:
        args.channels = args.channels.split(',')
    else:
        args.channels = ['H', 'P']
    for cha in args.channels:
        if cha not in ['H', 'P']:
            parser.error("Error: Channel not recognized " + str(cha))

    # Construct start time (None means "use station start date")
    if len(args.startT) > 0:
        try:
            args.startT = UTCDateTime(args.startT)
        except Exception:
            parser.error(
                "Error: Cannot construct UTCDateTime from start time: " +
                args.startT)
    else:
        args.startT = None

    # Construct end time (None means "use station end date")
    if len(args.endT) > 0:
        try:
            args.endT = UTCDateTime(args.endT)
        except Exception:
            parser.error(
                "Error: Cannot construct UTCDateTime from end time: " +
                args.endT)
    else:
        args.endT = None

    # Parse User Authentification into [username, password]
    if not len(args.UserAuth) == 0:
        tt = args.UserAuth.split(':')
        if not len(tt) == 2:
            parser.error(
                "Error: Incorrect Username and Password Strings for User " +
                "Authentification")
        else:
            args.UserAuth = tt
    else:
        args.UserAuth = []

    # Pre-filter corners: fall back to the documented default, otherwise
    # parse, sort and validate the four comma-separated floats
    if args.pre_filt is None:
        args.pre_filt = [0.001, 0.005, 45., 50.]
    else:
        args.pre_filt = [float(val) for val in args.pre_filt.split(',')]
        args.pre_filt = sorted(args.pre_filt)
        if (len(args.pre_filt)) != 4:
            raise Exception("Error: --pre-filt should contain 4 " +
                            "comma-separated floats")

    return args
def test_export_firmware_extended_fedora29(C):
    """
    Check, whether the firmware file is exported correctly, and in correct
    size.

    Apparently, the auto-remounting side effect of the v0.46 change is
    disturbing the export process. Unmounting the UV just before the export
    gives the device 20/20 success rate.
    Test case for issue https://github.com/Nitrokey/nitrokey-app/issues/399
    """
    skip_if_device_version_lower_than({'S': 43})
    skip_if_not_fedora('Tested on Fedora only. To check on other distros.')

    from time import sleep
    import os
    from os.path import exists as exist
    import re
    try:
        # optional test dependencies; skip (not fail) when absent
        import pyudev as pu
        import pexpect
    except ImportError:
        pytest.skip(
            'Skipping due to missing required packages: pyudev and pexpect.')

    # Locate the Nitrokey's block-device partition via udev
    ctx = pu.Context()
    devices = ctx.list_devices(subsystem='block', ID_VENDOR='Nitrokey')
    device = None
    for d in devices:
        if d.device_type == 'partition':
            device = '/dev/{}'.format(d.sys_name)
            break
    assert device, 'Device could not be found'

    # Remount the volume and extract its mount point from udisksctl output
    pexpect.run(f'udisksctl unmount -b {device}')
    sleep(1)
    _res = pexpect.run(f'udisksctl mount -b {device}').decode()
    # raw string: '\.' in a non-raw literal is an invalid escape sequence
    firmware_abs_path = re.findall(r'at (/.*)\.', _res)
    assert firmware_abs_path, 'Cannot get mount point'
    firmware_abs_path = firmware_abs_path[0]
    print('path: {}, device: {}'.format(firmware_abs_path, device))
    firmware_abs_path = firmware_abs_path + '/firmware.bin'

    # Start from a clean state: no stale firmware file on the volume
    checks = 0
    checks_add = 0
    if exist(firmware_abs_path):
        os.remove(firmware_abs_path)
    assert not exist(firmware_abs_path)

    ATTEMPTS = 20
    for i in range(ATTEMPTS):
        # if umount is disabled, success rate is 3/10, enabled: 10/10
        pexpect.run(f'udisksctl unmount -b {device}')
        assert C.NK_export_firmware(
            DefaultPasswords.ADMIN) == DeviceErrorCode.STATUS_OK
        pexpect.run(f'udisksctl mount -b {device}')
        sleep(1)
        firmware_file_exist = exist(firmware_abs_path)
        if firmware_file_exist:
            checks += 1
            getsize = os.path.getsize(firmware_abs_path)
            print('Firmware file exist, size: {}'.format(getsize))
            # count only exports of a plausible size (>= 100 KiB)
            checks_add += 1 if getsize >= 100 * 1024 else 0
            # checks_add += 1 if os.path.getsize(firmware_abs_path) == 256*1024 else 0
            os.remove(firmware_abs_path)
        assert not exist(firmware_abs_path)

    print('CHECK {} ; CHECK ADDITIONAL {}'.format(checks, checks_add))
    assert checks == ATTEMPTS
    assert checks_add == checks
def test_export_firmware_extended_macos(C):
    """
    Check, whether the firmware file is exported correctly, and in correct size.
    Apparently, the auto-remounting side effect of the v0.46 change, is disturbing the export process.
    Unmounting the UV just before the export gives the device 20/20 success rate.
    Test case for issue https://github.com/Nitrokey/nitrokey-app/issues/399

    NOTE(review): an identical function with the same name is defined again
    later in this file; the later definition shadows this one at import time
    — confirm which copy is intended to remain.
    """
    skip_if_device_version_lower_than({'S': 43})
    skip_if_not_macos('macOS specific test, due to the mount path and command.')
    # Local imports: only needed when the test actually runs on macOS
    import pexpect
    from time import sleep
    import os
    from os.path import exists as exist
    import plistlib
    # Query the macOS USB device tree as an XML plist and parse it
    usb_devices = pexpect.run('system_profiler -xml SPUSBDataType')
    assert b'Nitrokey' in usb_devices, 'No Nitrokey devices connected'
    usb_devices_parsed = plistlib.loads(usb_devices)
    assert isinstance(usb_devices_parsed, list), 'usb_devices_parsed has unexpected type'
    # Try to get all USB devices
    try:
        devices = usb_devices_parsed[0]['_items'][0]['_items']
    except KeyError:
        devices = None
    assert devices is not None, 'could not list USB devices'
    # Scan for the Nitrokey entry; no break, so the LAST match wins
    device_item = None
    for item in devices:
        if '_items' in item:
            # Fix for macOS 10.13.6, Python 3.6.2
            item = item['_items'][0]
        if 'manufacturer' in item and item['manufacturer'] == 'Nitrokey':
            device_item = item
    # Try to get first volume of USB device
    try:
        volume = device_item['Media'][0]['volumes'][0]
    except (KeyError, TypeError):
        volume = None
    assert volume is not None, 'could not determine volume'
    assert 'bsd_name' in volume, 'could not get BSD style device name'
    device = '/dev/' + volume['bsd_name']
    # Mount the device and build the expected firmware export path
    pexpect.run(f'diskutil mount {device}')
    sleep(3)
    assert 'mount_point' in volume, 'could not get mount point'
    firmware_abs_path = volume['mount_point'] + '/firmware.bin'
    checks = 0
    print('path: {}, device: {}'.format(firmware_abs_path, device))
    checks_add = 0
    # Start from a clean state: no stale firmware file on the volume
    if exist(firmware_abs_path):
        os.remove(firmware_abs_path)
    assert not exist(firmware_abs_path)
    ATTEMPTS = 20
    for i in range(ATTEMPTS):
        # if umount is disabled, success rate is 3/10, enabled: 10/10
        pexpect.run(f'diskutil unmount {device}')
        assert C.NK_export_firmware(DefaultPasswords.ADMIN) == DeviceErrorCode.STATUS_OK
        pexpect.run(f'diskutil mount {device}')
        sleep(1)
        firmware_file_exist = exist(firmware_abs_path)
        if firmware_file_exist:
            checks += 1
            getsize = os.path.getsize(firmware_abs_path)
            print('Firmware file exist, size: {}'.format(getsize))
            # count only exports of a plausible size (>= 100 KiB)
            checks_add += 1 if getsize >= 100 * 1024 else 0
            # checks_add += 1 if os.path.getsize(firmware_abs_path) == 256*1024 else 0
            os.remove(firmware_abs_path)
        assert not exist(firmware_abs_path)
    print('CHECK {} ; CHECK ADDITIONAL {}'.format(checks, checks_add))
    assert checks == ATTEMPTS
    assert checks_add == checks
def test_export_firmware_extended_macos(C):
    """
    Check, whether the firmware file is exported correctly, and in correct size.
    Apparently, the auto-remounting side effect of the v0.46 change, is disturbing the export process.
    Unmounting the UV just before the export gives the device 20/20 success rate.
    Test case for issue https://github.com/Nitrokey/nitrokey-app/issues/399

    NOTE(review): this is a byte-for-byte duplicate of an earlier function
    with the same name defined above in this file; this later definition
    shadows the earlier one at import time — confirm whether one copy should
    be removed.
    """
    skip_if_device_version_lower_than({'S': 43})
    skip_if_not_macos(
        'macOS specific test, due to the mount path and command.')
    # Local imports: only needed when the test actually runs on macOS
    import pexpect
    from time import sleep
    import os
    from os.path import exists as exist
    import plistlib
    # Query the macOS USB device tree as an XML plist and parse it
    usb_devices = pexpect.run('system_profiler -xml SPUSBDataType')
    assert b'Nitrokey' in usb_devices, 'No Nitrokey devices connected'
    usb_devices_parsed = plistlib.loads(usb_devices)
    assert isinstance(usb_devices_parsed, list), 'usb_devices_parsed has unexpected type'
    # Try to get all USB devices
    try:
        devices = usb_devices_parsed[0]['_items'][0]['_items']
    except KeyError:
        devices = None
    assert devices is not None, 'could not list USB devices'
    # Scan for the Nitrokey entry; no break, so the LAST match wins
    device_item = None
    for item in devices:
        if '_items' in item:
            # Fix for macOS 10.13.6, Python 3.6.2
            item = item['_items'][0]
        if 'manufacturer' in item and item['manufacturer'] == 'Nitrokey':
            device_item = item
    # Try to get first volume of USB device
    try:
        volume = device_item['Media'][0]['volumes'][0]
    except (KeyError, TypeError):
        volume = None
    assert volume is not None, 'could not determine volume'
    assert 'bsd_name' in volume, 'could not get BSD style device name'
    device = '/dev/' + volume['bsd_name']
    # Mount the device and build the expected firmware export path
    pexpect.run(f'diskutil mount {device}')
    sleep(3)
    assert 'mount_point' in volume, 'could not get mount point'
    firmware_abs_path = volume['mount_point'] + '/firmware.bin'
    checks = 0
    print('path: {}, device: {}'.format(firmware_abs_path, device))
    checks_add = 0
    # Start from a clean state: no stale firmware file on the volume
    if exist(firmware_abs_path):
        os.remove(firmware_abs_path)
    assert not exist(firmware_abs_path)
    ATTEMPTS = 20
    for i in range(ATTEMPTS):
        # if umount is disabled, success rate is 3/10, enabled: 10/10
        pexpect.run(f'diskutil unmount {device}')
        assert C.NK_export_firmware(
            DefaultPasswords.ADMIN) == DeviceErrorCode.STATUS_OK
        pexpect.run(f'diskutil mount {device}')
        sleep(1)
        firmware_file_exist = exist(firmware_abs_path)
        if firmware_file_exist:
            checks += 1
            getsize = os.path.getsize(firmware_abs_path)
            print('Firmware file exist, size: {}'.format(getsize))
            # count only exports of a plausible size (>= 100 KiB)
            checks_add += 1 if getsize >= 100 * 1024 else 0
            # checks_add += 1 if os.path.getsize(firmware_abs_path) == 256*1024 else 0
            os.remove(firmware_abs_path)
        assert not exist(firmware_abs_path)
    print('CHECK {} ; CHECK ADDITIONAL {}'.format(checks, checks_add))
    assert checks == ATTEMPTS
    assert checks_add == checks
def get_harmonics_arguments(argv=None):
    """
    Parse command-line arguments for the harmonic-decomposition script.

    Builds an :class:`argparse.ArgumentParser`, parses ``argv`` (or
    ``sys.argv[1:]`` when None) and post-processes the raw values:
    station keys are split into a list, start/end times are converted to
    ``UTCDateTime``, the phase name is expanded to a list of phases, the
    bandpass corners are validated, and the azimuth/'find-azim' options are
    reconciled.

    :param argv: optional list of argument strings to parse
    :return: :class:`argparse.Namespace` with normalized options
    :raises SystemExit: via ``parser.error`` on an invalid database path,
        date string, phase name, or malformed --bp/--trange lists
    """
    parser = ArgumentParser(
        usage="%(prog)s [arguments] <station database>",
        description="Script used to process receiver function data " +
        "for harmonic decomposition.")

    # General Settings
    parser.add_argument(
        "indb",
        help="Station Database to process from.",
        type=str)
    parser.add_argument(
        "--keys",
        action="store",
        type=str,
        dest="stkeys",
        default="",
        help="Specify a comma separated list of station keys for " +
        "which to perform the analysis. These must be " +
        "contained within the station database. Partial keys will " +
        "be used to match against those in the dictionary. For " +
        "instance, providing IU will match with all stations in " +
        "the IU network [Default processes all stations in the database]")
    parser.add_argument(
        "-v", "-V", "--verbose",
        action="store_true",
        dest="verb",
        default=False,
        help="Specify to increase verbosity.")
    parser.add_argument(
        "-O", "--overwrite",
        action="store_true",
        dest="ovr",
        default=False,
        help="Force the overwriting of pre-existing data. " +
        "[Default False]")
    parser.add_argument(
        "-L", "--long-name",
        action="store_true",
        dest="lkey",
        default=False,
        help="Force folder names to use long-key form (NET.STN.CHN). " +
        "Default behaviour uses short key form (NET.STN) for the folder " +
        "names, regardless of the key type of the database.")

    # Event Selection Criteria
    TimeGroup = parser.add_argument_group(
        title="Time Settings",
        description="Settings associated with refining " +
        "the times to include in searching for receiver function data")
    TimeGroup.add_argument(
        "--start",
        action="store",
        type=str,
        dest="startT",
        default="",
        help="Specify a UTCDateTime compatible string representing " +
        "the start time for the search. This will override any " +
        "station start times. [Default start date of station]")
    TimeGroup.add_argument(
        "--end",
        action="store",
        type=str,
        dest="endT",
        default="",
        help="Specify a UTCDateTime compatible string representing " +
        "the end time for the search. This will override any " +
        "station end times [Default end date of station]")

    PreGroup = parser.add_argument_group(
        title='Pre-processing Settings',
        description="Options for pre-processing of receiver function " +
        "data prior to harmonic decomposition")
    PreGroup.add_argument(
        "--bp",
        action="store",
        type=str,
        dest="bp",
        default=None,
        help="Specify the corner frequencies for the bandpass filter. " +
        "[Default 0.05,0.5]")
    PreGroup.add_argument(
        "--bin",
        action="store",
        dest="nbin",
        type=int,
        default=None,
        help="Specify integer number of back-azimuth bins to consider " +
        "(typically 36 or 72). [Default does not bin data]")
    PreGroup.add_argument(
        "--snr",
        action="store",
        type=float,
        dest="snr",
        default=-9999.,
        help="Specify the SNR threshold for extracting receiver functions. " +
        "[Default None]")
    PreGroup.add_argument(
        "--snrh",
        action="store",
        type=float,
        dest="snrh",
        default=-9999,
        help="Specify the horizontal component SNR threshold for " +
        "extracting receiver functions. [Default None]")
    PreGroup.add_argument(
        "--cc",
        action="store",
        type=float,
        dest="cc",
        default=-1.,
        help="Specify the CC threshold for extracting receiver functions. " +
        "[Default None]")
    PreGroup.add_argument(
        "--no-outlier",
        action="store_true",
        dest="no_outl",
        default=False,
        help="Set this option to delete outliers based on the MAD " +
        "on the variance. [Default False]")
    PreGroup.add_argument(
        "--phase",
        action="store",
        type=str,
        dest="phase",
        default='allP',
        help="Specify the phase name to plot. " +
        "Options are 'P', 'PP', 'allP', 'S', 'SKS' or 'allS'. " +
        "[Default 'allP']")

    HarmonicGroup = parser.add_argument_group(
        title='Settings for harmonic decomposition',
        description="Specify parameters for the decomposition, e.g. " +
        "a fixed azimuth, depth range for finding the optimal azimuth, etc.")
    HarmonicGroup.add_argument(
        "--azim",
        action="store",
        type=float,
        dest="azim",
        default=None,
        help="Specify the azimuth angle along with to perform the " +
        "decomposition. [Default 0.]")
    HarmonicGroup.add_argument(
        "--find-azim",
        action="store_true",
        dest="find_azim",
        default=False,
        help="Set this option to calculate the optimal azimuth. [Default " +
        "uses the '--azim' value]")
    HarmonicGroup.add_argument(
        "--trange",
        action="store",
        type=str,
        dest="trange",
        default=None,
        help="Specify a list of two floats with minimum and maximum" +
        "bounds on time range for finding the optimal azimuth (sec). " +
        "[Default [0., 10.] when '--find-azim' is set]")
    HarmonicGroup.add_argument(
        "--save",
        action="store_true",
        dest="save",
        default=False,
        help="Set this option to save the Harmonics object " +
        "to a pickled file. [Default does not save object]")

    PlotGroup = parser.add_argument_group(
        title='Settings for plotting results',
        description="Specify parameters for plotting the back-azimuth " +
        "harmonics.")
    PlotGroup.add_argument(
        "--plot",
        action="store_true",
        dest="plot",
        default=False,
        help="Set this option to produce a plot of the back-azimuth harmonics")
    PlotGroup.add_argument(
        "--ymax",
        action="store",
        type=float,
        dest="ymax",
        default=30.,
        help="Specify the maximum y axis value for the plot in units of the" +
        "dependent variable (e.g., sec). [Default 30.]")
    PlotGroup.add_argument(
        "--scale",
        action="store",
        type=float,
        dest="scale",
        default=30.,
        # fixed help text: the actual default is 30., not 10.
        help="Specify the scaling value that multiplies the amplitude " +
        "of the harmonic components. [Default 30.]")
    PlotGroup.add_argument(
        "--save-plot",
        action="store_true",
        dest="save_plot",
        default=False,
        help="Set this option to save the plot [Default doesn't save]")
    PlotGroup.add_argument(
        "--title",
        action="store",
        type=str,
        dest="title",
        default="",
        help="Specify plot title [Default has no title]")
    PlotGroup.add_argument(
        "--format",
        action="store",
        type=str,
        dest="form",
        default="png",
        help="Specify format of figure. Can be any one of the valid" +
        "matplotlib formats: 'png', 'jpg', 'eps', 'pdf'. [Default 'png']")

    args = parser.parse_args(argv)

    # The station database must exist on disk before any processing starts
    if not exist(args.indb):
        parser.error("Input file " + args.indb + " does not exist")

    # Convert the comma-separated key string into a list of keys
    if len(args.stkeys) > 0:
        args.stkeys = args.stkeys.split(',')

    # Construct start time (None means "use station start date")
    if len(args.startT) > 0:
        try:
            args.startT = UTCDateTime(args.startT)
        except Exception:
            parser.error("Cannot construct UTCDateTime from start time: " +
                         args.startT)
    else:
        args.startT = None

    # Construct end time (None means "use station end date")
    if len(args.endT) > 0:
        try:
            args.endT = UTCDateTime(args.endT)
        except Exception:
            parser.error("Cannot construct UTCDateTime from end time: " +
                         args.endT)
    else:
        args.endT = None

    # Expand the phase keyword into the explicit list of phases to process
    if args.phase not in ['P', 'PP', 'allP', 'S', 'SKS', 'allS']:
        parser.error(
            "Error: choose between 'P', 'PP', 'allP', 'S', 'SKS' and 'allS'.")
    if args.phase == 'allP':
        args.listphase = ['P', 'PP']
    elif args.phase == 'allS':
        args.listphase = ['S', 'SKS']
    else:
        args.listphase = [args.phase]

    # Bandpass corners: fall back to the documented default, otherwise
    # parse, sort and validate the two comma-separated floats
    if args.bp is None:
        args.bp = [0.05, 0.5]
    else:
        args.bp = [float(val) for val in args.bp.split(',')]
        args.bp = sorted(args.bp)
        if (len(args.bp)) != 2:
            parser.error("Error: --bp should contain 2 " +
                         "comma-separated floats")

    # '--azim' and '--find-azim' are mutually exclusive; a fixed azimuth
    # wins, and with neither set the azimuth defaults to 0.
    if args.azim is not None and args.find_azim:
        print("Warning: Setting both '--azim' and '--find-azim' is " +
              "conflictual. Ignoring '--find-azim'")
        args.find_azim = False
    elif args.azim is None and not args.find_azim:
        args.azim = 0.

    # Time range for the optimal-azimuth search (only used with --find-azim)
    if args.find_azim:
        if args.trange is None:
            args.trange = [0., 10.]
        else:
            args.trange = [float(val) for val in args.trange.split(',')]
            args.trange = sorted(args.trange)
            if (len(args.trange)) != 2:
                parser.error("Error: --trange should contain 2 " +
                             "comma-separated floats")

    return args
def _transfer_config(self, config):
    """
    Populate ``self.train`` / ``self.test`` dictionaries from a config.

    Reads dataset paths, loader options and normalization constants from
    the 'data' section of ``config`` (an object exposing
    ``get(section, key)`` that returns None for missing keys — TODO
    confirm against the caller) and fills both the train and test
    dictionaries with defaults where the config is silent.

    :param config: configuration object with a ``get(section, key)`` method
    :raises RuntimeError: when 'data_dir' is missing or when the resolved
        'list_dir'/'data_dir' paths do not exist on disk
    """
    self.train = {}
    self.test = {}

    # Resolve dataset locations; 'root', when given, prefixes both paths.
    root = config.get('data', 'root')
    # BUGFIX: 'data_dir' was read from nowhere (NameError at runtime);
    # it must come from the config like the other path options.
    data_dir = config.get('data', 'data_dir')
    list_dir = config.get('data', 'list_dir')
    if root is not None:
        if data_dir is not None:
            data_dir = osp.join(root, data_dir)
        if list_dir is not None:
            list_dir = osp.join(root, list_dir)
    if data_dir is None:
        raise RuntimeError('The path of dataset are required, \
                you need to check the config file \
                and add \'data_dir\'!')
    # BUGFIX: os.path has no 'exist' attribute; the function is 'exists'.
    if not osp.exists(list_dir):
        raise RuntimeError('Invalid path for list_dir!')
    if not osp.exists(data_dir):
        raise RuntimeError('Invalid path for data_dir!')

    # Loader worker count and label layout, with defaults
    num_workers = config.get('data', 'num_workers')
    if num_workers is None:
        num_workers = 4
    label_info = config.get('data', 'label_info')
    if label_info is None:
        label_info = ['fname', 'label']

    # Train and test share the same dataset location and loader settings
    self.train['data_dir'] = data_dir
    self.train['list_dir'] = list_dir
    self.train['num_workers'] = num_workers
    self.train['label_info'] = label_info
    self.test['data_dir'] = data_dir
    self.test['list_dir'] = list_dir
    self.test['num_workers'] = num_workers
    self.test['label_info'] = label_info

    # Augmentation: per-split keys first, then the shared 'augmentation'
    # key, then a hard-coded pipeline as the last resort.
    self.train['augmentation'] = config.get('data', 'train_augmentation')
    self.test['augmentation'] = config.get('data', 'test_augmentation')
    # BUGFIX: the original tested undefined names 'train_aug'/'self.test_aug';
    # the values just stored in the dicts are what must be checked.
    if self.train['augmentation'] is None:
        self.train['augmentation'] = config.get('data', 'augmentation')
        if self.train['augmentation'] is None:
            self.train['augmentation'] = 'resize, totensor, normlize'
    if self.test['augmentation'] is None:
        self.test['augmentation'] = 'resize, totensor, normlize'

    # Normalization constants and target image size, with defaults
    self.norm_mean = config.get('data', 'norm_mean')
    self.norm_var = config.get('data', 'norm_var')
    self.re_mean = config.get('data', 're_mean')
    self.height = config.get('data', 'height')
    self.width = config.get('data', 'width')
    if self.norm_mean is None:
        self.norm_mean = [0.485, 0.456, 0.406]
    if self.norm_var is None:
        self.norm_var = [0.229, 0.224, 0.225]
    if self.re_mean is None:
        self.re_mean = [0.0, 0.0, 0.0]
    if self.height is None:
        self.height = 256
    if self.width is None:
        self.width = 256

    # Loader flags: pin_memory defaults to True for both splits.
    # BUGFIX: when pin_memory WAS configured, the original never copied it
    # into the train/test dicts; propagate the configured value too.
    self.pin_memory = config.get('data', 'pin_memory')
    if self.pin_memory is None:
        self.train['pin_memory'] = True
        self.test['pin_memory'] = True
    else:
        self.train['pin_memory'] = self.pin_memory
        self.test['pin_memory'] = self.pin_memory

    # Shuffle train by default, never shuffle test by default
    self.train['shuffle'] = config.get('data', 'train_shuffle')
    self.test['shuffle'] = config.get('data', 'test_shuffle')
    if self.train['shuffle'] is None:
        self.train['shuffle'] = True
    if self.test['shuffle'] is None:
        self.test['shuffle'] = False

    # Drop the last incomplete batch for train by default, keep it for test
    self.train['drop_last'] = config.get('data', 'train_drop_last')
    self.test['drop_last'] = config.get('data', 'test_drop_last')
    if self.train['drop_last'] is None:
        self.train['drop_last'] = True
    if self.test['drop_last'] is None:
        self.test['drop_last'] = False

    # Sampler/transformer are filled in later by other code
    self.train['sampler'] = None
    self.test['sampler'] = None
    self.train['transformer'] = None
    self.test['transformer'] = None
parser.add_argument("input_file", help=helptext) return parser.parse_args() if __name__ == "__main__": Parallel = True if Parallel: print("Your inversion will be run in parallel") # input_file = "/home/nienke/Documents/Research/SS_MTI/Input/TAYAK_BKE_tstar_update.toml" args = define_arguments() print(f"Inversion based on input file: {args.input_file}") event_input = toml.load(args.input_file, _dict=dict) save_folder = pjoin("/home/nienke/Data_2020/Test_2021/", args.input_file.split("/")[-1].strip(".toml")) if not exist(save_folder): makedirs(save_folder) # save_folder = "/home/nienke/Documents/Research/Data/MTI/Inversion/Result_2/Test/" path = "/home/nienke/Data_2020/catalog" # path = "/home/nienke/Documents/Research/Data/MTI/catalog" path_to_inventory = pjoin(path, "inventory.xml") path_to_catalog = pjoin(path, "catalog.xml") """ Read the inventory and catalog file (the once that contain info about the marsquakes) """ inv = None # SS_MTI.DataGetter.read_inv(inv_path=path_to_inventory) # Inventory file cat = SS_MTI.DataGetter.read_cat(cat_path=path_to_catalog) # Catalog file """ Get the data into a list of obspy.Event objects """ events = SS_MTI.DataGetter.read_events_from_cat( event_params=event_input, cat=cat,