Example #1
def get_picks_from_pdf(feature, phase, pick_set, height=0.5, distance=100):
    i = feature.phase.index(phase)
    peaks, properties = find_peaks(feature.pdf[-1, :, i],
                                   height=height,
                                   distance=distance)

    for p in peaks:
        if p:
            pick_time = UTCDateTime(feature.starttime) + p * feature.delta
            feature.pick_time.append(pick_time.isoformat())
            feature.pick_phase.append(feature.phase[i])
            feature.pick_set.append(pick_set)
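
A minimal standalone sketch of the same index-to-time conversion, using a synthetic probability curve in place of feature.pdf (the start time, sampling interval and peak position below are illustrative, not taken from real data):

import numpy as np
from scipy.signal import find_peaks
from obspy import UTCDateTime

starttime = UTCDateTime("2020-01-01T00:00:00")   # illustrative trace start
delta = 0.01                                     # sampling interval [s]
pdf = np.zeros(3000)
pdf[1200] = 0.9                                  # synthetic phase probability peak

# Same find_peaks settings as above: keep peaks above 0.5,
# at least 100 samples apart.
peaks, _ = find_peaks(pdf, height=0.5, distance=100)

# Convert sample indices to absolute pick times.
pick_times = [starttime + p * delta for p in peaks]
print([t.isoformat() for t in pick_times])       # e.g. ['2020-01-01T00:00:12']
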
Example #2
def get_comments(paz, net, sta, cha):
    sens = cha.response.instrument_sensitivity
    input_unit = sens.input_units.upper()
    out = []
    now = UTCDateTime()
    out.append("* " + "*" * 50)
    out.append(f"* NETWORK     : {net.code}")
    out.append("* STATION     : %s" % sta.code)
    out.append("* LOCATION    : %s" % cha.location_code)
    out.append("* CHANNEL     : %s" % cha.code)
    out.append("* CREATED     : %s" % now.isoformat())
    out.append("* START       : %s" % cha.start_date.isoformat())
    out.append("* END         : %s" % cha.end_date.isoformat())
    out.append("* DESCRIPTION : %s" % sta.site.name)
    out.append("* LATITUDE    : %s" % (cha.latitude or sta.latitude))
    out.append("* LONGITUDE   : %s" % (cha.longitude or sta.longitude))
    out.append("* ELEVATION   : %s" % (cha.elevation or sta.elevation))
    out.append("* DEPTH       : %s" % cha.depth)
    # DIP in SACPZ served by the IRIS SACPZ web service is
    # systematically different from the StationXML entries.
    # It is defined as an incidence angle (as done in SAC for
    # sensor orientation) rather than an actual dip.
    # Add '(SEED)' to clarify that we adhere to the SEED convention.
    out.append("* DIP (SEED)  : %s" % cha.dip)
    out.append("* AZIMUTH     : %s" % cha.azimuth)
    out.append("* SAMPLE RATE : %s" % cha.sample_rate)
    if input_unit in ['PA', 'PASCALS']:
        out.append(f"* INPUT UNIT  : {input_unit}")
    else:
        out.append("* INPUT UNIT  : M")
    out.append("* OUTPUT UNIT : %s" % sens.output_units)
    out.append("* INSTTYPE    : %s" % paz.description)
    #  out.append("* INSTTYPE    : %s" % cha.sensor.type)
    out.append(f"* INSTGAIN    : {paz.stage_gain:.6e} ({sens.input_units})")
    out.append(f"* SENSITIVITY : {sens.value:.6e} ({sens.input_units})")
    out.append("* A0          : {:.6e} ({})".format(paz.normalization_factor,
                                                    sens.input_units))
    out.append("* " + "*" * 50)
    return out
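
A hedged usage sketch for the function above: the returned list is typically joined into the comment header of a SACPZ file. The StationXML file name below is a placeholder; any inventory with full response metadata (and closed channel epochs, since cha.end_date is used directly) should work:

from obspy import read_inventory

inv = read_inventory("stations.xml")   # placeholder file name
net = inv[0]
sta = net[0]
cha = sta[0]
paz = cha.response.get_paz()           # poles-and-zeros response stage

header = "\n".join(get_comments(paz, net, sta, cha))
print(header)
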
Example #3
def main(argv=None):
    parser = ArgumentParser(prog='obspy-scan',
                            description=__doc__.strip(),
                            formatter_class=RawDescriptionHelpFormatter)
    parser.add_argument('-V',
                        '--version',
                        action='version',
                        version='%(prog)s ' + __version__)
    parser.add_argument('-f',
                        '--format',
                        choices=ENTRY_POINTS['waveform'],
                        help='Optional, the file format.\n' +
                        ' '.join(__doc__.split('\n')[-4:]))
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Optional. Verbose output.')
    parser.add_argument('-n',
                        '--non-recursive',
                        action='store_false',
                        dest='recursive',
                        help='Optional. Do not descend into directories.')
    parser.add_argument('-i',
                        '--ignore-links',
                        action='store_true',
                        help='Optional. Do not follow symbolic links.')
    parser.add_argument('--start-time',
                        default=None,
                        type=UTCDateTime,
                        help='Optional, a UTCDateTime compatible string. ' +
                        'Only visualize data after this time and set ' +
                        'the time axis accordingly.')
    parser.add_argument('--end-time',
                        default=None,
                        type=UTCDateTime,
                        help='Optional, a UTCDateTime compatible string. ' +
                        'Only visualize data before this time and set ' +
                        'the time axis accordingly.')
    parser.add_argument('--id',
                        action='append',
                        help='Optional, a SEED channel identifier '
                        "(e.g. 'GR.FUR..HHZ'). You may provide this " +
                        'option multiple times. Only these ' +
                        'channels will be plotted.')
    parser.add_argument('-t',
                        '--event-time',
                        default=None,
                        type=UTCDateTime,
                        action='append',
                        help='Optional, a UTCDateTime compatible string ' +
                        "(e.g. '2010-01-01T12:00:00'). You may provide " +
                        'this option multiple times. These times get ' +
                        'marked by vertical lines in the plot. ' +
                        'Useful e.g. to mark event origin times.')
    parser.add_argument('-w',
                        '--write',
                        default=None,
                        help='Optional, npz file for writing data '
                        'after scanning waveform files')
    parser.add_argument('-l',
                        '--load',
                        default=None,
                        help='Optional, npz file for loading data '
                        'before scanning waveform files')
    parser.add_argument('--no-x',
                        action='store_true',
                        help='Optional, Do not plot crosses.')
    parser.add_argument('--no-gaps',
                        action='store_true',
                        help='Optional, Do not plot gaps.')
    parser.add_argument('-o',
                        '--output',
                        default=None,
                        help='Save plot to image file (e.g. out.pdf, ' +
                        'out.png) instead of opening a window.')
    parser.add_argument('--print-gaps',
                        action='store_true',
                        help='Optional, prints a list of gaps at the end.')
    parser.add_argument('paths',
                        nargs='*',
                        help='Files or directories to scan.')

    # Deprecated arguments
    action = _DeprecatedArgumentAction('--endtime', '--end-time')
    parser.add_argument('--endtime',
                        type=UTCDateTime,
                        action=action,
                        help=SUPPRESS)

    action = _DeprecatedArgumentAction('--event-times', '--event-time')
    parser.add_argument('--event-times', action=action, help=SUPPRESS)

    action = _DeprecatedArgumentAction('--ids', '--id')
    parser.add_argument('--ids', action=action, help=SUPPRESS)

    action = _DeprecatedArgumentAction('--nox',
                                       '--no-x',
                                       real_action='store_true')
    parser.add_argument('--nox',
                        dest='no_x',
                        nargs=0,
                        action=action,
                        help=SUPPRESS)

    action = _DeprecatedArgumentAction('--nogaps',
                                       '--no-gaps',
                                       real_action='store_true')
    parser.add_argument('--nogaps',
                        dest='no_gaps',
                        nargs=0,
                        action=action,
                        help=SUPPRESS)

    action = _DeprecatedArgumentAction('--starttime', '--start-time')
    parser.add_argument('--starttime',
                        type=UTCDateTime,
                        action=action,
                        help=SUPPRESS)

    args = parser.parse_args(argv)

    # Print help and exit if no arguments are given
    if len(args.paths) == 0 and args.load is None:
        parser.error('No paths specified.')

    # Use recursively parsing function?
    if args.recursive:
        parse_func = recursive_parse
    else:
        parse_func = parse_file_to_dict

    if args.output is not None:
        import matplotlib
        matplotlib.use("agg")
    global date2num
    from matplotlib.dates import date2num, num2date
    from matplotlib.patches import Rectangle
    from matplotlib.collections import PatchCollection
    import matplotlib.pyplot as plt

    fig = plt.figure()
    ax = fig.add_subplot(111)

    # Plot vertical lines if option 'event_times' was specified
    if args.event_time:
        times = map(date2num, args.event_time)
        for time in times:
            ax.axvline(time, color='k')
    # Deprecated version (don't plot twice)
    if args.event_times and not args.event_time:
        times = args.event_times.split(',')
        times = map(UTCDateTime, times)
        times = map(date2num, times)
        for time in times:
            ax.axvline(time, color='k')

    if args.start_time:
        args.start_time = date2num(args.start_time)
    elif args.starttime:
        # Deprecated version
        args.start_time = date2num(args.starttime)
    if args.end_time:
        args.end_time = date2num(args.end_time)
    elif args.endtime:
        # Deprecated version
        args.end_time = date2num(args.endtime)

    # Generate dictionary containing nested lists of start and end times per
    # station
    data = {}
    samp_int = {}
    counter = 1
    if args.load:
        load_npz(args.load, data, samp_int)
    for path in args.paths:
        counter = parse_func(data, samp_int, path, counter, args.format,
                             args.verbose, args.ignore_links)
    if not data:
        print("No waveform data found.")
        return
    if args.write:
        write_npz(args.write, data, samp_int)

    # Loop through this dictionary
    ids = list(data.keys())
    # Handle deprecated argument
    if args.ids and not args.id:
        args.id = args.ids.split(',')
    # restrict plotting of results to given ids
    if args.id:
        ids = [x for x in ids if x in args.id]
    ids = sorted(ids)[::-1]
    labels = [""] * len(ids)
    print('\n')
    for _i, _id in enumerate(ids):
        labels[_i] = ids[_i]
        data[_id].sort()
        startend = np.array(data[_id])
        if len(startend) == 0:
            continue
        # restrict plotting of results to given start/endtime
        if args.start_time:
            startend = startend[startend[:, 1] > args.start_time]
        if len(startend) == 0:
            continue
        if args.end_time:
            startend = startend[startend[:, 0] < args.end_time]
        if len(startend) == 0:
            continue
        timerange = startend[:, 1].max() - startend[:, 0].min()
        if timerange == 0.0:
            warnings.warn('Zero sample long data for _id=%s, skipping' % _id)
            continue

        startend_compressed = compressStartend(startend, 1000)

        offset = np.ones(len(startend)) * _i  # generate list of y values
        ax.xaxis_date()
        if not args.no_x:
            ax.plot_date(startend[:, 0], offset, 'x', linewidth=2)
        ax.hlines(offset[:len(startend_compressed)],
                  startend_compressed[:, 0],
                  startend_compressed[:, 1],
                  'b',
                  linewidth=2,
                  zorder=3)
        # find the gaps
        diffs = startend[1:, 0] - startend[:-1, 1]  # current.start - last.end
        gapsum = diffs[diffs > 0].sum()
        perc = (timerange - gapsum) / timerange
        labels[_i] = labels[_i] + "\n%.1f%%" % (perc * 100)
        gap_indices = diffs > 1.8 * np.array(samp_int[_id][:-1])
        gap_indices = np.concatenate((gap_indices, [False]))
        if any(gap_indices):
            # don't handle last end time as start of gap
            gaps_start = startend[gap_indices, 1]
            gaps_end = startend[np.roll(gap_indices, 1), 0]
            if not args.no_gaps and any(gap_indices):
                rects = [
                    Rectangle((start_, offset[0] - 0.4), end_ - start_, 0.8)
                    for start_, end_ in zip(gaps_start, gaps_end)
                ]
                ax.add_collection(PatchCollection(rects, color="r"))
            if args.print_gaps:
                for start_, end_ in zip(gaps_start, gaps_end):
                    start_, end_ = num2date((start_, end_))
                    start_ = UTCDateTime(start_.isoformat())
                    end_ = UTCDateTime(end_.isoformat())
                    print("%s %s %s %.3f" % (_id, start_, end_, end_ - start_))

    # Pretty format the plot
    ax.set_ylim(0 - 0.5, _i + 0.5)
    ax.set_yticks(np.arange(_i + 1))
    ax.set_yticklabels(labels, family="monospace", ha="right")
    # set x-axis limits according to given start/endtime
    if args.start_time:
        ax.set_xlim(left=args.start_time, auto=None)
    if args.end_time:
        ax.set_xlim(right=args.end_time, auto=None)
    fig.autofmt_xdate()  # rotate date
    plt.subplots_adjust(left=0.2)
    if args.output is None:
        plt.show()
    else:
        fig.set_dpi(72)
        height = len(ids) * 0.5
        height = max(4, height)
        fig.set_figheight(height)
        # tight_layout() only available from matplotlib >= 1.1
        try:
            plt.tight_layout()
            days = ax.get_xlim()
            days = days[1] - days[0]
            width = max(6, days / 30.)
            width = min(width, height * 4)
            fig.set_figwidth(width)
            plt.subplots_adjust(top=1, bottom=0, left=0, right=1)
            plt.tight_layout()
        except Exception:
            pass
        fig.savefig(args.output)
    sys.stdout.write('\n')
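
Since main() forwards argv to parse_args(), the scanner can also be driven from Python rather than the shell; a small sketch (the data directory and output file name are placeholders):

# Equivalent to:
#   obspy-scan --print-gaps --start-time 2010-01-01T00:00:00 -o scan.png data/
main(['--print-gaps',
      '--start-time', '2010-01-01T00:00:00',
      '--output', 'scan.png',
      'data/'])
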
Example #4
    def analyze_parsed_data(self,
                            print_gaps=False,
                            starttime=None,
                            endtime=None,
                            seed_ids=None):
        """
        Prepare information for plotting.

        Information is stored in a dictionary as ``scanner._info``,
        containing only the data matching the given parameters.

        :type print_gaps: bool
        :param print_gaps: Whether to print information on all encountered
            gaps and overlaps.
        :type starttime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param starttime: Fixed start time to use for the plot and the data
            percentage calculation.
        :type endtime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param endtime: Fixed end time to use for the plot and the data
            percentage calculation.
        :type seed_ids: list of str
        :param seed_ids: Restrict the analysis to a specific set of SEED IDs
            (e.g. ``seed_ids=["GR.FUR..BHZ", "GR.WET..BHZ"]``); if left
            ``None``, all SEED IDs encountered in the data are used.
        """
        data = self.data
        samp_int = self.samp_int
        if starttime is not None:
            starttime = starttime.matplotlib_date
        if endtime is not None:
            endtime = endtime.matplotlib_date
        # either use ids specified by user or use ids based on what data we
        # have parsed
        ids = seed_ids or list(data.keys())
        ids = sorted(ids)[::-1]
        if self.verbose:
            print('\n')
        self._info = {}
        for _i, _id in enumerate(ids):
            info = {
                "gaps": [],
                "overlaps": [],
                "data_starts": [],
                "data_startends_compressed": [],
                "percentage": None
            }
            self._info[_id] = info
            gap_info = info["gaps"]
            overlap_info = info["overlaps"]
            # sort data list and sampling rate list
            if _id in data:
                startend = np.array(data[_id])
                _samp_int = np.array(samp_int[_id])
                indices = np.lexsort((startend[:, 1], startend[:, 0]))
                startend = startend[indices]
                _samp_int = _samp_int[indices]
            else:
                startend = np.array([])
                _samp_int = np.array([])
            if len(startend) == 0:
                if not (starttime and endtime):
                    continue
                gap_info.append((starttime, endtime))
                if print_gaps:
                    print("%s %s %s %.3f" %
                          (_id, starttime, endtime, endtime - starttime))
                continue
            # restrict plotting of results to given start/end time
            if starttime or endtime:
                indices = np.ones(len(startend), dtype=np.bool_)
                if starttime:
                    indices &= startend[:, 1] > starttime
                if endtime:
                    indices &= startend[:, 0] < endtime
                startend = startend[indices]
                _samp_int = _samp_int[indices]
            if len(startend) == 0:
                # if both start and endtime are given, add it to gap info
                if starttime and endtime:
                    gap_info.append((starttime, endtime))
                continue
            data_start = startend[:, 0].min()
            data_end = startend[:, 1].max()
            timerange_start = starttime or data_start
            timerange_end = endtime or data_end
            timerange = timerange_end - timerange_start
            if timerange == 0.0:
                msg = 'Zero sample long data for _id=%s, skipping' % _id
                warnings.warn(msg)
                continue

            startend_compressed = compress_start_end(startend.copy(),
                                                     1000,
                                                     merge_overlaps=True)

            info["data_starts"] = startend[:, 0]
            info["data_startends_compressed"] = startend_compressed

            # find the gaps
            # current.start - last.end
            diffs = startend[1:, 0] - startend[:-1, 1]
            gapsum = diffs[diffs > 0].sum()
            # if start- and/or endtime is specified, add missing data at
            # start/end to gap sum
            has_gap = False
            gap_at_start = (starttime and data_start > starttime
                            and data_start - starttime)
            gap_at_end = (endtime and endtime > data_end
                          and endtime - data_end)
            if gap_at_start:
                gapsum += gap_at_start
                has_gap = True
            if gap_at_end:
                gapsum += gap_at_end
                has_gap = True
            info["percentage"] = (timerange - gapsum) / timerange * 100
            # define a gap as over 0.8 delta after expected sample time
            gap_indices = diffs > 0.8 * _samp_int[:-1]
            gap_indices = np.append(gap_indices, False)
            # define an overlap as over 0.8 delta before expected sample time
            overlap_indices = diffs < -0.8 * _samp_int[:-1]
            overlap_indices = np.append(overlap_indices, False)
            has_gap |= any(gap_indices)
            has_gap |= any(overlap_indices)
            if has_gap:
                # don't handle last end time as start of gap
                gaps_start = startend[gap_indices, 1]
                gaps_end = startend[np.roll(gap_indices, 1), 0]
                overlaps_end = startend[overlap_indices, 1]
                overlaps_start = startend[np.roll(overlap_indices, 1), 0]
                # but now, manually add start/end for gaps at start/end of user
                # specified start/end times
                if gap_at_start:
                    gaps_start = np.append(gaps_start, starttime)
                    gaps_end = np.append(gaps_end, data_start)
                if gap_at_end:
                    gaps_start = np.append(gaps_start, data_end)
                    gaps_end = np.append(gaps_end, endtime)

                _starts = np.concatenate((gaps_start, overlaps_end))
                _ends = np.concatenate((gaps_end, overlaps_start))
                sort_order = np.argsort(_starts)
                _starts = _starts[sort_order]
                _ends = _ends[sort_order]
                for start_, end_ in zip(_starts, _ends):
                    if print_gaps:
                        start__, end__ = num2date((start_, end_))
                        start__ = UTCDateTime(start__.isoformat())
                        end__ = UTCDateTime(end__.isoformat())
                        print("{} {} {} {:.3f}".format(_id, start__, end__,
                                                       end__ - start__))
                    if start_ < end_:
                        gap_info.append((start_, end_))
                    else:
                        overlap_info.append((start_, end_))
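
The gap/overlap classification above boils down to a few array operations; a standalone sketch with synthetic start/end times (values are illustrative and in the same units as startend):

import numpy as np

# Three traces as [start, end] pairs, sorted by start time.
startend = np.array([[0.0, 10.0],
                     [12.0, 20.0],    # starts 2 units after the previous end
                     [19.0, 30.0]])   # starts 1 unit before the previous end
samp_int = np.array([1.0, 1.0, 1.0])  # nominal sample spacing per trace

diffs = startend[1:, 0] - startend[:-1, 1]   # current.start - previous.end
gap_indices = np.append(diffs > 0.8 * samp_int[:-1], False)
overlap_indices = np.append(diffs < -0.8 * samp_int[:-1], False)

print(gap_indices)      # [ True False False]
print(overlap_indices)  # [False  True False]
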
Example #5
def main(option_list=None):
    parser = OptionParser(__doc__.strip())
    parser.add_option("-f",
                      "--format",
                      default=None,
                      type="string",
                      dest="format",
                      help="Optional, the file format.\n" +
                      " ".join(__doc__.split('\n')[-4:]))
    parser.add_option("-v",
                      "--verbose",
                      default=False,
                      action="store_true",
                      dest="verbose",
                      help="Optional. Verbose output.")
    parser.add_option("-n",
                      "--non-recursive",
                      default=True,
                      action="store_false",
                      dest="recursive",
                      help="Optional. Do not descend into directories.")
    parser.add_option("-i",
                      "--ignore-links",
                      default=False,
                      action="store_true",
                      dest="ignore_links",
                      help="Optional. Do not follow symbolic links.")
    parser.add_option("--starttime",
                      default=None,
                      type="string",
                      dest="starttime",
                      help="Optional, a UTCDateTime compatible string. " +
                      "Only visualize data after this time and set " +
                      "the time axis accordingly.")
    parser.add_option("--endtime",
                      default=None,
                      type="string",
                      dest="endtime",
                      help="Optional, a UTCDateTime compatible string. " +
                      "Only visualize data before this time and set " +
                      "the time axis accordingly.")
    parser.add_option("--ids",
                      default=None,
                      type="string",
                      dest="ids",
                      help="Optional, a list of SEED channel identifiers " +
                      "separated by commas " +
                      "(e.g. 'GR.FUR..HHZ,BW.MANZ..EHN'). Only these " +
                      "channels will be plotted.")
    parser.add_option("-t",
                      "--event-times",
                      default=None,
                      type="string",
                      dest="event_times",
                      help="Optional, a list of UTCDateTime compatible " +
                      "strings separated by commas " +
                      "(e.g. '2010-01-01T12:00:00,2010-01-01T13:00:00'). " +
                      "These get marked by vertical lines in the plot. " +
                      "Useful e.g. to mark event origin times.")
    parser.add_option("-w",
                      "--write",
                      default=None,
                      type="string",
                      dest="write",
                      help="Optional, npz file for writing data "
                      "after scanning waveform files")
    parser.add_option("-l",
                      "--load",
                      default=None,
                      type="string",
                      dest="load",
                      help="Optional, npz file for loading data "
                      "before scanning waveform files")
    parser.add_option("--nox",
                      default=False,
                      action="store_true",
                      dest="nox",
                      help="Optional, Do not plot crosses.")
    parser.add_option("--nogaps",
                      default=False,
                      action="store_true",
                      dest="nogaps",
                      help="Optional, Do not plot gaps.")
    parser.add_option("-o",
                      "--output",
                      default=None,
                      type="string",
                      dest="output",
                      help="Save plot to image file (e.g. out.pdf, " +
                      "out.png) instead of opening a window.")
    parser.add_option("--print-gaps",
                      default=False,
                      action="store_true",
                      dest="print_gaps",
                      help="Optional, prints a list of gaps at the end.")
    (options, largs) = parser.parse_args(option_list)

    # Print help and exit if no arguments are given
    if len(largs) == 0 and options.load is None:
        parser.print_help()
        sys.exit(1)

    # Use recursively parsing function?
    if options.recursive:
        parse_func = recursive_parse
    else:
        parse_func = parse_file_to_dict

    if options.output is not None:
        import matplotlib
        matplotlib.use("agg")
    global date2num
    from matplotlib.dates import date2num, num2date
    from matplotlib.patches import Rectangle
    from matplotlib.collections import PatchCollection
    import matplotlib.pyplot as plt

    fig = plt.figure()
    ax = fig.add_subplot(111)

    # Plot vertical lines if option 'event_times' was specified
    if options.event_times:
        times = options.event_times.split(',')
        times = map(UTCDateTime, times)
        times = map(date2num, times)
        for time in times:
            ax.axvline(time, color='k')

    if options.starttime:
        options.starttime = UTCDateTime(options.starttime)
        options.starttime = date2num(options.starttime)
    if options.endtime:
        options.endtime = UTCDateTime(options.endtime)
        options.endtime = date2num(options.endtime)

    # Generate dictionary containing nested lists of start and end times per
    # station
    data = {}
    samp_int = {}
    counter = 1
    if options.load:
        load_npz(options.load, data, samp_int)
    for path in largs:
        counter = parse_func(data, samp_int, path, counter, options.format,
                             options.verbose, options.ignore_links)
    if not data:
        print("No waveform data found.")
        return
    if options.write:
        write_npz(options.write, data, samp_int)

    # Loop through this dictionary
    ids = data.keys()
    # restrict plotting of results to given ids
    if options.ids:
        options.ids = options.ids.split(',')
        ids = filter(lambda x: x in options.ids, ids)
    ids = sorted(ids)[::-1]
    labels = [""] * len(ids)
    print()
    for _i, _id in enumerate(ids):
        labels[_i] = ids[_i]
        data[_id].sort()
        startend = np.array(data[_id])
        if len(startend) == 0:
            continue
        # restrict plotting of results to given start/endtime
        if options.starttime:
            startend = startend[startend[:, 1] > options.starttime]
        if len(startend) == 0:
            continue
        if options.endtime:
            startend = startend[startend[:, 0] < options.endtime]
        if len(startend) == 0:
            continue
        timerange = startend[:, 1].max() - startend[:, 0].min()
        if timerange == 0.0:
            warnings.warn('Zero sample long data for _id=%s, skipping' % _id)
            continue

        startend_compressed = compressStartend(startend, 1000)

        offset = np.ones(len(startend)) * _i  # generate list of y values
        ax.xaxis_date()
        if not options.nox:
            ax.plot_date(startend[:, 0], offset, 'x', linewidth=2)
        ax.hlines(offset[:len(startend_compressed)],
                  startend_compressed[:, 0],
                  startend_compressed[:, 1],
                  'b',
                  linewidth=2,
                  zorder=3)
        # find the gaps
        diffs = startend[1:, 0] - startend[:-1, 1]  # current.start - last.end
        gapsum = diffs[diffs > 0].sum()
        perc = (timerange - gapsum) / timerange
        labels[_i] = labels[_i] + "\n%.1f%%" % (perc * 100)
        gap_indices = diffs > 1.8 * np.array(samp_int[_id][:-1])
        gap_indices = np.concatenate((gap_indices, [False]))
        if any(gap_indices):
            # don't handle last end time as start of gap
            gaps_start = startend[gap_indices, 1]
            gaps_end = startend[np.roll(gap_indices, 1), 0]
            if not options.nogaps and any(gap_indices):
                rects = [
                    Rectangle((start_, offset[0] - 0.4), end_ - start_, 0.8)
                    for start_, end_ in zip(gaps_start, gaps_end)
                ]
                ax.add_collection(PatchCollection(rects, color="r"))
            if options.print_gaps:
                for start_, end_ in zip(gaps_start, gaps_end):
                    start_, end_ = num2date((start_, end_))
                    start_ = UTCDateTime(start_.isoformat())
                    end_ = UTCDateTime(end_.isoformat())
                    print("%s %s %s %.3f" %
                          (_id, start_, end_, end_ - start_))

    # Pretty format the plot
    ax.set_ylim(0 - 0.5, _i + 0.5)
    ax.set_yticks(np.arange(_i + 1))
    ax.set_yticklabels(labels, family="monospace", ha="right")
    # set x-axis limits according to given start/endtime
    if options.starttime:
        ax.set_xlim(left=options.starttime, auto=None)
    if options.endtime:
        ax.set_xlim(right=options.endtime, auto=None)
    fig.autofmt_xdate()  # rotate date
    plt.subplots_adjust(left=0.2)
    if options.output is None:
        plt.show()
    else:
        fig.set_dpi(72)
        height = len(ids) * 0.5
        height = max(4, height)
        fig.set_figheight(height)
        # tight_layout() only available from matplotlib >= 1.1
        try:
            plt.tight_layout()
            days = ax.get_xlim()
            days = days[1] - days[0]
            width = max(6, days / 30.)
            width = min(width, height * 4)
            fig.set_figwidth(width)
            plt.subplots_adjust(top=1, bottom=0, left=0, right=1)
            plt.tight_layout()
        except Exception:
            pass
        fig.savefig(options.output)
    sys.stdout.write('\n')
Example #6
def main(argv=None):
    parser = ArgumentParser(prog='obspy-scan',
                            description=__doc__.strip(),
                            formatter_class=RawDescriptionHelpFormatter)
    parser.add_argument('-V',
                        '--version',
                        action='version',
                        version='%(prog)s ' + __version__)
    parser.add_argument('-f',
                        '--format',
                        choices=ENTRY_POINTS['waveform'],
                        help='Optional, the file format.\n' +
                        ' '.join(__doc__.split('\n')[-4:]))
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Optional. Verbose output.')
    parser.add_argument('-q',
                        '--quiet',
                        action='store_true',
                        help='Optional. Be quiet. Overridden by the '
                        '--verbose flag.')
    parser.add_argument('-n',
                        '--non-recursive',
                        action='store_false',
                        dest='recursive',
                        help='Optional. Do not descend into directories.')
    parser.add_argument('-i',
                        '--ignore-links',
                        action='store_true',
                        help='Optional. Do not follow symbolic links.')
    parser.add_argument('--start-time',
                        default=None,
                        type=UTCDateTime,
                        help='Optional, a UTCDateTime compatible string. ' +
                        'Only visualize data after this time and set ' +
                        'the time axis accordingly.')
    parser.add_argument('--end-time',
                        default=None,
                        type=UTCDateTime,
                        help='Optional, a UTCDateTime compatible string. ' +
                        'Only visualize data before this time and set ' +
                        'the time axis accordingly.')
    parser.add_argument('--id',
                        action='append',
                        help='Optional, a SEED channel identifier '
                        "(e.g. 'GR.FUR..HHZ'). You may provide this " +
                        'option multiple times. Only these ' +
                        'channels will be plotted.')
    parser.add_argument('-t',
                        '--event-time',
                        default=None,
                        type=UTCDateTime,
                        action='append',
                        help='Optional, a UTCDateTime compatible string ' +
                        "(e.g. '2010-01-01T12:00:00'). You may provide " +
                        'this option multiple times. These times get ' +
                        'marked by vertical lines in the plot. ' +
                        'Useful e.g. to mark event origin times.')
    parser.add_argument('-w',
                        '--write',
                        default=None,
                        help='Optional, npz file for writing data '
                        'after scanning waveform files')
    parser.add_argument('-l',
                        '--load',
                        default=None,
                        help='Optional, npz file for loading data '
                        'before scanning waveform files')
    parser.add_argument('--no-x',
                        action='store_true',
                        help='Optional, Do not plot crosses.')
    parser.add_argument('--no-gaps',
                        action='store_true',
                        help='Optional, Do not plot gaps.')
    parser.add_argument('-o',
                        '--output',
                        default=None,
                        help='Save plot to image file (e.g. out.pdf, ' +
                        'out.png) instead of opening a window.')
    parser.add_argument('--print-gaps',
                        action='store_true',
                        help='Optional, prints a list of gaps at the end.')
    parser.add_argument('paths',
                        nargs='*',
                        help='Files or directories to scan.')

    args = parser.parse_args(argv)

    if args.output is not None:
        MatplotlibBackend.switch_backend("AGG", sloppy=False)

    # Print help and exit if no arguments are given
    if len(args.paths) == 0 and args.load is None:
        parser.error('No paths specified.')

    # Use recursively parsing function?
    if args.recursive:
        parse_func = recursive_parse
    else:
        parse_func = parse_file_to_dict

    from matplotlib.dates import date2num, num2date
    from matplotlib.ticker import FuncFormatter
    from matplotlib.patches import Rectangle
    from matplotlib.collections import PatchCollection
    import matplotlib.pyplot as plt

    fig = plt.figure()
    ax = fig.add_subplot(111)

    # Plot vertical lines if option 'event_time' was specified
    if args.event_time:
        times = [date2num(t.datetime) for t in args.event_time]
        for time in times:
            ax.axvline(time, color='k')

    if args.start_time:
        args.start_time = date2num(args.start_time.datetime)
    if args.end_time:
        args.end_time = date2num(args.end_time.datetime)

    # Generate dictionary containing nested lists of start and end times per
    # station
    data = {}
    samp_int = {}
    counter = 1
    if args.load:
        load_npz(args.load, data, samp_int)
    for path in args.paths:
        counter = parse_func(data,
                             samp_int,
                             path,
                             counter,
                             args.format,
                             verbose=args.verbose,
                             quiet=args.quiet,
                             ignore_links=args.ignore_links)
    if not data:
        if args.verbose or not args.quiet:
            print("No waveform data found.")
        return
    if args.write:
        write_npz(args.write, data, samp_int)

    # either use ids specified by user or use ids based on what data we have
    # parsed
    ids = args.id or list(data.keys())
    ids = sorted(ids)[::-1]
    labels = [""] * len(ids)
    if args.verbose or not args.quiet:
        print('\n')
    for _i, _id in enumerate(ids):
        labels[_i] = ids[_i]
        # sort data list and sampling rate list
        if _id in data:
            startend = np.array(data[_id])
            _samp_int = np.array(samp_int[_id])
            indices = np.lexsort((startend[:, 1], startend[:, 0]))
            startend = startend[indices]
            _samp_int = _samp_int[indices]
        else:
            startend = np.array([])
            _samp_int = np.array([])
        if len(startend) == 0:
            if not (args.start_time and args.end_time):
                continue
            if not args.no_gaps:
                rects = [
                    Rectangle((args.start_time, _i - 0.4),
                              args.end_time - args.start_time, 0.8)
                ]
                ax.add_collection(PatchCollection(rects, color="r"))
            if args.print_gaps and (args.verbose or not args.quiet):
                print("%s %s %s %.3f" % (_id, args.start_time, args.end_time,
                                         args.end_time - args.start_time))
            continue
        # restrict plotting of results to given start/end time
        if args.start_time:
            indices = startend[:, 1] > args.start_time
            startend = startend[indices]
            _samp_int = _samp_int[indices]
        if len(startend) == 0:
            continue
        if args.end_time:
            indices = startend[:, 0] < args.end_time
            startend = startend[indices]
            _samp_int = _samp_int[indices]
        if len(startend) == 0:
            continue
        data_start = startend[:, 0].min()
        data_end = startend[:, 1].max()
        timerange_start = args.start_time or data_start
        timerange_end = args.end_time or data_end
        timerange = timerange_end - timerange_start
        if timerange == 0.0:
            warnings.warn('Zero sample long data for _id=%s, skipping' % _id)
            continue

        startend_compressed = compress_start_end(startend, 1000)

        offset = np.ones(len(startend)) * _i  # generate list of y values
        if not args.no_x:
            ax.plot(startend[:, 0], offset, 'x', linewidth=2)
        ax.hlines(offset[:len(startend_compressed)],
                  startend_compressed[:, 0],
                  startend_compressed[:, 1],
                  'b',
                  linewidth=2,
                  zorder=3)
        # find the gaps
        diffs = startend[1:, 0] - startend[:-1, 1]  # current.start - last.end
        gapsum = diffs[diffs > 0].sum()
        # if start- and/or endtime is specified, add missing data at start/end
        # to gap sum
        has_gap = False
        gap_at_start = (args.start_time and data_start > args.start_time
                        and data_start - args.start_time)
        gap_at_end = (args.end_time and args.end_time > data_end
                      and args.end_time - data_end)
        if args.start_time and gap_at_start:
            gapsum += gap_at_start
            has_gap = True
        if args.end_time and gap_at_end:
            gapsum += gap_at_end
            has_gap = True
        perc = (timerange - gapsum) / timerange
        labels[_i] = labels[_i] + "\n%.1f%%" % (perc * 100)
        gap_indices = diffs > 1.8 * _samp_int[:-1]
        gap_indices = np.append(gap_indices, False)
        has_gap |= any(gap_indices)
        if has_gap:
            # don't handle last end time as start of gap
            gaps_start = startend[gap_indices, 1]
            gaps_end = startend[np.roll(gap_indices, 1), 0]
            if args.start_time and gap_at_start:
                gaps_start = np.append(gaps_start, args.start_time)
                gaps_end = np.append(gaps_end, data_start)
            if args.end_time and gap_at_end:
                gaps_start = np.append(gaps_start, data_end)
                gaps_end = np.append(gaps_end, args.end_time)
            if not args.no_gaps:
                rects = [
                    Rectangle((start_, offset[0] - 0.4), end_ - start_, 0.8)
                    for start_, end_ in zip(gaps_start, gaps_end)
                ]
                ax.add_collection(PatchCollection(rects, color="r"))
            if args.print_gaps:
                for start_, end_ in zip(gaps_start, gaps_end):
                    start_, end_ = num2date((start_, end_))
                    start_ = UTCDateTime(start_.isoformat())
                    end_ = UTCDateTime(end_.isoformat())
                    if args.verbose or not args.quiet:
                        print("%s %s %s %.3f" %
                              (_id, start_, end_, end_ - start_))

    # Pretty format the plot
    ax.set_ylim(0 - 0.5, len(ids) - 0.5)
    ax.set_yticks(np.arange(len(ids)))
    ax.set_yticklabels(labels, family="monospace", ha="right")
    fig.autofmt_xdate()  # rotate date
    ax.xaxis_date()
    # set custom formatters to always show date in first tick
    formatter = ObsPyAutoDateFormatter(ax.xaxis.get_major_locator())
    formatter.scaled[1 / 24.] = \
        FuncFormatter(decimal_seconds_format_date_first_tick)
    formatter.scaled.pop(1 / (24. * 60.))
    ax.xaxis.set_major_formatter(formatter)
    plt.subplots_adjust(left=0.2)
    # set x-axis limits according to given start/end time
    if args.start_time and args.end_time:
        ax.set_xlim(left=args.start_time, right=args.end_time)
    elif args.start_time:
        ax.set_xlim(left=args.start_time, auto=None)
    elif args.end_time:
        ax.set_xlim(right=args.end_time, auto=None)
    else:
        left, right = ax.xaxis.get_data_interval()
        x_axis_range = right - left
        ax.set_xlim(left - 0.05 * x_axis_range, right + 0.05 * x_axis_range)
    if args.output is None:
        plt.show()
    else:
        fig.set_dpi(72)
        height = len(ids) * 0.5
        height = max(4, height)
        fig.set_figheight(height)
        plt.tight_layout()

        if not args.start_time or not args.end_time:
            days = ax.get_xlim()
            days = days[1] - days[0]
        else:
            days = args.end_time - args.start_time

        width = max(6, days / 30.)
        width = min(width, height * 4)
        fig.set_figwidth(width)
        plt.subplots_adjust(top=1, bottom=0, left=0, right=1)
        plt.tight_layout()

        fig.savefig(args.output)
    if args.verbose and not args.quiet:
        sys.stdout.write('\n')
Example #7
def get_gps_from_mermaid_environment(mmd_name, content):
    gps = list()

    # Mermaid environment can be empty
    if content is None:
        return gps

    # get gps information in the mermaid environment
    gps_tag_list = content.split("</ENVIRONMENT>")[0].split("<GPSINFO")[1:]
    for gps_tag in gps_tag_list:
        fixdate = re.findall(r" DATE=(\d+-\d+-\d+T\d+:\d+:\d+)", gps_tag)
        if len(fixdate) > 0:
            fixdate = fixdate[0]
            fixdate = UTCDateTime(fixdate)
        else:
            fixdate = None

        latitude = re.findall(r" LAT=([+,-])(\d{2})(\d+\.\d+)", gps_tag)
        if len(latitude) > 0:
            latitude = latitude[0]
            if latitude[0] == "+":
                sign = 1
            elif latitude[0] == "-":
                sign = -1
            latitude = sign*(float(latitude[1]) + float(latitude[2])/60.)
        else:
            latitude = None

        longitude = re.findall(r" LON=([+,-])(\d{3})(\d+\.\d+)", gps_tag)
        if len(longitude) > 0:
            longitude = longitude[0]
            if longitude[0] == "+":
                sign = 1
            elif longitude[0] == "-":
                sign = -1
            longitude = sign*(float(longitude[1]) + float(longitude[2])/60.)
        else:
            longitude = None

        clockdrift = re.findall("<DRIFT( [^>]+) />", gps_tag)
        if len(clockdrift) > 0:
            clockdrift = clockdrift[0]
            _df = 0
            catch = re.findall(r" USEC=(-?\d+)", clockdrift)
            if catch:
                _df += 10 ** (-6) * float(catch[0])
            catch = re.findall(r" SEC=(-?\d+)", clockdrift)
            if catch:
                _df += float(catch[0])
            catch = re.findall(r" MIN=(-?\d+)", clockdrift)
            if catch:
                _df += 60 * float(catch[0])
            catch = re.findall(r" HOUR=(-?\d+)", clockdrift)
            if catch:
                _df += 60 * 60 * float(catch[0])
            catch = re.findall(r" DAY=(-?\d+)", clockdrift)
            if catch:
                _df += 24 * 60 * 60 * float(catch[0])
            catch = re.findall(r" MONTH=(-?\d+)", clockdrift)
            if catch:
                # An approximation of 30 days per month is sufficient;
                # this is only used to check whether the drift looks wrong.
                _df += 30 * 24 * 60 * 60 * float(catch[0])
            catch = re.findall(r" YEAR=(-?\d+)", clockdrift)
            if catch:
                _df += 365 * 24 * 60 * 60 * float(catch[0])
            clockdrift = _df
        else:
            clockdrift = None

        clockfreq = re.findall(r"<CLOCK Hz=(-?\d+)", gps_tag)
        if len(clockfreq) > 0:
            clockfreq = int(clockfreq[0])
        else:
            clockfreq = None

        # Check if there is an error in clock synchronization
        if clockfreq is not None and clockfreq <= 0:
            err_msg = "WARNING: Error with clock synchronization in file \"" \
                      + mmd_name + "\" at " + fixdate.isoformat() \
                      + ", clockfreq = " + str(clockfreq) + "Hz"
            print(err_msg)

        # Add date to the list
        if fixdate is not None and latitude is not None and longitude is not None \
                and clockdrift is not None and clockfreq is not None:
            gps.append(GPS(fixdate, latitude, longitude, clockdrift, clockfreq, "mer"))
        else:
            raise ValueError

    return gps
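
A hedged illustration of the coordinate conversion used above: the GPSINFO latitude/longitude fields are degrees followed by decimal minutes, converted to decimal degrees as sign * (degrees + minutes / 60). The tag below is made up for demonstration, not real MERMAID output:

import re

gps_tag = ' DATE=2018-06-01T12:00:00 LAT=-3345.1200 LON=+15130.4800'

lat_match = re.findall(r" LAT=([+-])(\d{2})(\d+\.\d+)", gps_tag)[0]
lon_match = re.findall(r" LON=([+-])(\d{3})(\d+\.\d+)", gps_tag)[0]

lat_sign = 1 if lat_match[0] == "+" else -1
lon_sign = 1 if lon_match[0] == "+" else -1
lat = lat_sign * (float(lat_match[1]) + float(lat_match[2]) / 60.)   # -33.752
lon = lon_sign * (float(lon_match[1]) + float(lon_match[2]) / 60.)   # 151.508
print(lat, lon)
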
Example #8
def main(option_list=None):
    parser = OptionParser(__doc__.strip())
    parser.add_option("-f", "--format", default=None,
                      type="string", dest="format",
                      help="Optional, the file format.\n" +
                      " ".join(__doc__.split('\n')[-4:]))
    parser.add_option("-v", "--verbose", default=False,
                      action="store_true", dest="verbose",
                      help="Optional. Verbose output.")
    parser.add_option("-n", "--non-recursive", default=True,
                      action="store_false", dest="recursive",
                      help="Optional. Do not descend into directories.")
    parser.add_option("-i", "--ignore-links", default=False,
                      action="store_true", dest="ignore_links",
                      help="Optional. Do not follow symbolic links.")
    parser.add_option("--starttime", default=None,
                      type="string", dest="starttime",
                      help="Optional, a UTCDateTime compatible string. " +
                      "Only visualize data after this time and set " +
                      "the time axis accordingly.")
    parser.add_option("--endtime", default=None,
                      type="string", dest="endtime",
                      help="Optional, a UTCDateTime compatible string. " +
                      "Only visualize data before this time and set " +
                      "the time axis accordingly.")
    parser.add_option("--ids", default=None,
                      type="string", dest="ids",
                      help="Optional, a list of SEED channel identifiers " +
                      "separated by commas " +
                      "(e.g. 'GR.FUR..HHZ,BW.MANZ..EHN'). Only these " +
                      "channels will be plotted.")
    parser.add_option("-t", "--event-times", default=None,
                      type="string", dest="event_times",
                      help="Optional, a list of UTCDateTime compatible " +
                      "strings separated by commas " +
                      "(e.g. '2010-01-01T12:00:00,2010-01-01T13:00:00'). " +
                      "These get marked by vertical lines in the plot. " +
                      "Useful e.g. to mark event origin times.")
    parser.add_option("-w", "--write", default=None,
                      type="string", dest="write",
                      help="Optional, npz file for writing data "
                      "after scanning waveform files")
    parser.add_option("-l", "--load", default=None,
                      type="string", dest="load",
                      help="Optional, npz file for loading data "
                      "before scanning waveform files")
    parser.add_option("--nox", default=False,
                      action="store_true", dest="nox",
                      help="Optional, Do not plot crosses.")
    parser.add_option("--nogaps", default=False,
                      action="store_true", dest="nogaps",
                      help="Optional, Do not plot gaps.")
    parser.add_option("-o", "--output", default=None,
                      type="string", dest="output",
                      help="Save plot to image file (e.g. out.pdf, " +
                      "out.png) instead of opening a window.")
    parser.add_option("--print-gaps", default=False,
                      action="store_true", dest="print_gaps",
                      help="Optional, prints a list of gaps at the end.")
    (options, largs) = parser.parse_args(option_list)

    # Print help and exit if no arguments are given
    if len(largs) == 0 and options.load is None:
        parser.print_help()
        sys.exit(1)

    # Use recursively parsing function?
    if options.recursive:
        parse_func = recursive_parse
    else:
        parse_func = parse_file_to_dict

    if options.output is not None:
        import matplotlib
        matplotlib.use("agg")
    global date2num
    from matplotlib.dates import date2num, num2date
    from matplotlib.patches import Rectangle
    from matplotlib.collections import PatchCollection
    import matplotlib.pyplot as plt

    fig = plt.figure()
    ax = fig.add_subplot(111)

    # Plot vertical lines if option 'event_times' was specified
    if options.event_times:
        times = options.event_times.split(',')
        times = map(UTCDateTime, times)
        times = map(date2num, times)
        for time in times:
            ax.axvline(time, color='k')

    if options.starttime:
        options.starttime = UTCDateTime(options.starttime)
        options.starttime = date2num(options.starttime)
    if options.endtime:
        options.endtime = UTCDateTime(options.endtime)
        options.endtime = date2num(options.endtime)

    # Generate dictionary containing nested lists of start and end times per
    # station
    data = {}
    samp_int = {}
    counter = 1
    if options.load:
        load_npz(options.load, data, samp_int)
    for path in largs:
        counter = parse_func(data, samp_int, path, counter, options.format,
                             options.verbose, options.ignore_links)
    if not data:
        print("No waveform data found.")
        return
    if options.write:
        write_npz(options.write, data, samp_int)

    # Loop through this dictionary
    ids = data.keys()
    # restrict plotting of results to given ids
    if options.ids:
        options.ids = options.ids.split(',')
        ids = filter(lambda x: x in options.ids, ids)
    ids = sorted(ids)[::-1]
    labels = [""] * len(ids)
    print()
    for _i, _id in enumerate(ids):
        labels[_i] = ids[_i]
        data[_id].sort()
        startend = np.array(data[_id])
        if len(startend) == 0:
            continue
        # restrict plotting of results to given start/endtime
        if options.starttime:
            startend = startend[startend[:, 1] > options.starttime]
        if len(startend) == 0:
            continue
        if options.endtime:
            startend = startend[startend[:, 0] < options.endtime]
        if len(startend) == 0:
            continue
        if _id not in samp_int:
            warnings.warn('Problem with _id=%s, skipping' % _id)
            continue

        startend_compressed = compressStartend(startend, 1000)

        offset = np.ones(len(startend)) * _i  # generate list of y values
        ax.xaxis_date()
        if not options.nox:
            ax.plot_date(startend[:, 0], offset, 'x', linewidth=2)
        ax.hlines(offset[:len(startend_compressed)], startend_compressed[:, 0],
                  startend_compressed[:, 1], 'b', linewidth=2, zorder=3)
        # find the gaps
        diffs = startend[1:, 0] - startend[:-1, 1]  # current.start - last.end
        gapsum = diffs[diffs > 0].sum()
        timerange = startend[:, 1].max() - startend[:, 0].min()
        perc = (timerange - gapsum) / timerange
        labels[_i] = labels[_i] + "\n%.1f%%" % (perc * 100)
        gap_indices = diffs > 1.8 * samp_int[_id]
        gap_indices = np.concatenate((gap_indices, [False]))
        if any(gap_indices):
            # don't handle last end time as start of gap
            gaps_start = startend[gap_indices, 1]
            gaps_end = startend[np.roll(gap_indices, 1), 0]
            if not options.nogaps and any(gap_indices):
                rects = [Rectangle((start_, offset[0] - 0.4),
                                   end_ - start_, 0.8)
                         for start_, end_ in zip(gaps_start, gaps_end)]
                ax.add_collection(PatchCollection(rects, color="r"))
            if options.print_gaps:
                for start_, end_ in zip(gaps_start, gaps_end):
                    start_, end_ = num2date((start_, end_))
                    start_ = UTCDateTime(start_.isoformat())
                    end_ = UTCDateTime(end_.isoformat())
                    print("%s %s %s %.3f" %
                          (_id, start_, end_, end_ - start_))

    # Pretty format the plot
    ax.set_ylim(0 - 0.5, _i + 0.5)
    ax.set_yticks(np.arange(_i + 1))
    ax.set_yticklabels(labels, family="monospace", ha="right")
    # set x-axis limits according to given start/endtime
    if options.starttime:
        ax.set_xlim(left=options.starttime, auto=None)
    if options.endtime:
        ax.set_xlim(right=options.endtime, auto=None)
    fig.autofmt_xdate()  # rotate date
    plt.subplots_adjust(left=0.2)
    if options.output is None:
        plt.show()
    else:
        fig.set_dpi(72)
        height = len(ids) * 0.5
        height = max(4, height)
        fig.set_figheight(height)
        # tight_layout() only available from matplotlib >= 1.1
        try:
            plt.tight_layout()
            days = ax.get_xlim()
            days = days[1] - days[0]
            width = max(6, days / 30.)
            width = min(width, height * 4)
            fig.set_figwidth(width)
            plt.subplots_adjust(top=1, bottom=0, left=0, right=1)
            plt.tight_layout()
        except Exception:
            pass
        fig.savefig(options.output)
    sys.stdout.write('\n')
Example #9
def main(argv=None):
    parser = ArgumentParser(prog='obspy-scan', description=__doc__.strip(),
                            formatter_class=RawDescriptionHelpFormatter)
    parser.add_argument('-V', '--version', action='version',
                        version='%(prog)s ' + __version__)
    parser.add_argument('-f', '--format', choices=ENTRY_POINTS['waveform'],
                        help='Optional, the file format.\n' +
                             ' '.join(__doc__.split('\n')[-4:]))
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Optional. Verbose output.')
    parser.add_argument('-q', '--quiet', action='store_true',
                        help='Optional. Be quiet. Overridden by the --verbose '
                             'flag.')
    parser.add_argument('-n', '--non-recursive',
                        action='store_false', dest='recursive',
                        help='Optional. Do not descend into directories.')
    parser.add_argument('-i', '--ignore-links', action='store_true',
                        help='Optional. Do not follow symbolic links.')
    parser.add_argument('--start-time', default=None, type=UTCDateTime,
                        help='Optional, a UTCDateTime compatible string. ' +
                             'Only visualize data after this time and set ' +
                             'the time axis accordingly.')
    parser.add_argument('--end-time', default=None, type=UTCDateTime,
                        help='Optional, a UTCDateTime compatible string. ' +
                             'Only visualize data before this time and set ' +
                             'the time axis accordingly.')
    parser.add_argument('--id', action='append',
                        help='Optional, a SEED channel identifier '
                             "(e.g. 'GR.FUR..HHZ'). You may provide this " +
                             'option multiple times. Only these ' +
                             'channels will be plotted.')
    parser.add_argument('-t', '--event-time', default=None, type=UTCDateTime,
                        action='append',
                        help='Optional, a UTCDateTime compatible string ' +
                             "(e.g. '2010-01-01T12:00:00'). You may provide " +
                             'this option multiple times. These times get ' +
                             'marked by vertical lines in the plot. ' +
                             'Useful e.g. to mark event origin times.')
    parser.add_argument('-w', '--write', default=None,
                        help='Optional, npz file for writing data '
                             'after scanning waveform files')
    parser.add_argument('-l', '--load', default=None,
                        help='Optional, npz file for loading data '
                             'before scanning waveform files')
    parser.add_argument('--no-x', action='store_true',
                        help='Optional, Do not plot crosses.')
    parser.add_argument('--no-gaps', action='store_true',
                        help='Optional, Do not plot gaps.')
    parser.add_argument('-o', '--output', default=None,
                        help='Save plot to image file (e.g. out.pdf, ' +
                             'out.png) instead of opening a window.')
    parser.add_argument('--print-gaps', action='store_true',
                        help='Optional, prints a list of gaps at the end.')
    parser.add_argument('paths', nargs='*',
                        help='Files or directories to scan.')

    args = parser.parse_args(argv)

    if args.output is not None:
        MatplotlibBackend.switch_backend("AGG", sloppy=False)

    # Print help and exit if no arguments are given
    if len(args.paths) == 0 and args.load is None:
        parser.error('No paths specified.')

    # Use recursively parsing function?
    if args.recursive:
        parse_func = recursive_parse
    else:
        parse_func = parse_file_to_dict

    from matplotlib.dates import date2num, num2date
    from matplotlib.ticker import FuncFormatter
    from matplotlib.patches import Rectangle
    from matplotlib.collections import PatchCollection
    import matplotlib.pyplot as plt

    fig = plt.figure()
    ax = fig.add_subplot(111)

    # Plot vertical lines if option 'event_time' was specified
    if args.event_time:
        times = [date2num(t.datetime) for t in args.event_time]
        for time in times:
            ax.axvline(time, color='k')

    if args.start_time:
        args.start_time = date2num(args.start_time.datetime)
    if args.end_time:
        args.end_time = date2num(args.end_time.datetime)

    # Generate dictionary containing nested lists of start and end times per
    # station
    data = {}
    samp_int = {}
    counter = 1
    if args.load:
        load_npz(args.load, data, samp_int)
    for path in args.paths:
        counter = parse_func(data, samp_int, path, counter, args.format,
                             verbose=args.verbose, quiet=args.quiet,
                             ignore_links=args.ignore_links)
    if not data:
        if args.verbose or not args.quiet:
            print("No waveform data found.")
        return
    if args.write:
        write_npz(args.write, data, samp_int)

    # either use ids specified by user or use ids based on what data we have
    # parsed
    ids = args.id or list(data.keys())
    ids = sorted(ids)[::-1]
    labels = [""] * len(ids)
    if args.verbose or not args.quiet:
        print('\n')
    for _i, _id in enumerate(ids):
        labels[_i] = ids[_i]
        # sort data list and sampling rate list
        if _id in data:
            startend = np.array(data[_id])
            _samp_int = np.array(samp_int[_id])
            indices = np.lexsort((startend[:, 1], startend[:, 0]))
            startend = startend[indices]
            _samp_int = _samp_int[indices]
        else:
            startend = np.array([])
            _samp_int = np.array([])
        if len(startend) == 0:
            if not (args.start_time and args.end_time):
                continue
            if not args.no_gaps:
                rects = [Rectangle((args.start_time, _i - 0.4),
                                   args.end_time - args.start_time, 0.8)]
                ax.add_collection(PatchCollection(rects, color="r"))
            if args.print_gaps and (args.verbose or not args.quiet):
                print("%s %s %s %.3f" % (
                    _id, args.start_time, args.end_time,
                    args.end_time - args.start_time))
            continue
        # restrict plotting of results to given start/end time
        if args.start_time:
            indices = startend[:, 1] > args.start_time
            startend = startend[indices]
            _samp_int = _samp_int[indices]
        if len(startend) == 0:
            continue
        if args.end_time:
            indices = startend[:, 0] < args.end_time
            startend = startend[indices]
            _samp_int = _samp_int[indices]
        if len(startend) == 0:
            continue
        data_start = startend[:, 0].min()
        data_end = startend[:, 1].max()
        timerange_start = args.start_time or data_start
        timerange_end = args.end_time or data_end
        timerange = timerange_end - timerange_start
        if timerange == 0.0:
            warnings.warn('Zero sample long data for _id=%s, skipping' % _id)
            continue

        startend_compressed = compress_start_end(startend, 1000)

        offset = np.ones(len(startend)) * _i  # generate list of y values
        if not args.no_x:
            ax.plot(startend[:, 0], offset, 'x', linewidth=2)
        ax.hlines(offset[:len(startend_compressed)], startend_compressed[:, 0],
                  startend_compressed[:, 1], 'b', linewidth=2, zorder=3)
        # find the gaps
        diffs = startend[1:, 0] - startend[:-1, 1]  # current.start - last.end
        gapsum = diffs[diffs > 0].sum()
        # if start- and/or endtime is specified, add missing data at start/end
        # to gap sum
        has_gap = False
        gap_at_start = (
            args.start_time and
            data_start > args.start_time and
            data_start - args.start_time)
        gap_at_end = (
            args.end_time and
            args.end_time > data_end and
            args.end_time - data_end)
        if args.start_time and gap_at_start:
            gapsum += gap_at_start
            has_gap = True
        if args.end_time and gap_at_end:
            gapsum += gap_at_end
            has_gap = True
        perc = (timerange - gapsum) / timerange
        labels[_i] = labels[_i] + "\n%.1f%%" % (perc * 100)
        gap_indices = diffs > 1.8 * _samp_int[:-1]
        gap_indices = np.append(gap_indices, False)
        has_gap |= any(gap_indices)
        if has_gap:
            # don't handle last end time as start of gap
            gaps_start = startend[gap_indices, 1]
            gaps_end = startend[np.roll(gap_indices, 1), 0]
            if args.start_time and gap_at_start:
                gaps_start = np.append(gaps_start, args.start_time)
                gaps_end = np.append(gaps_end, data_start)
            if args.end_time and gap_at_end:
                gaps_start = np.append(gaps_start, data_end)
                gaps_end = np.append(gaps_end, args.end_time)
            if not args.no_gaps:
                rects = [Rectangle((start_, offset[0] - 0.4), end_ - start_,
                                   0.8)
                         for start_, end_ in zip(gaps_start, gaps_end)]
                ax.add_collection(PatchCollection(rects, color="r"))
            if args.print_gaps:
                for start_, end_ in zip(gaps_start, gaps_end):
                    start_, end_ = num2date((start_, end_))
                    start_ = UTCDateTime(start_.isoformat())
                    end_ = UTCDateTime(end_.isoformat())
                    if args.verbose or not args.quiet:
                        print("%s %s %s %.3f" % (_id, start_, end_,
                                                 end_ - start_))

    # Pretty format the plot
    ax.set_ylim(0 - 0.5, len(ids) - 0.5)
    ax.set_yticks(np.arange(len(ids)))
    ax.set_yticklabels(labels, family="monospace", ha="right")
    fig.autofmt_xdate()  # rotate date
    ax.xaxis_date()
    # set custom formatters to always show date in first tick
    formatter = ObsPyAutoDateFormatter(ax.xaxis.get_major_locator())
    formatter.scaled[1 / 24.] = \
        FuncFormatter(decimal_seconds_format_date_first_tick)
    formatter.scaled.pop(1/(24.*60.))
    ax.xaxis.set_major_formatter(formatter)
    plt.subplots_adjust(left=0.2)
    # set x-axis limits according to given start/end time
    if args.start_time and args.end_time:
        ax.set_xlim(left=args.start_time, right=args.end_time)
    elif args.start_time:
        ax.set_xlim(left=args.start_time, auto=None)
    elif args.end_time:
        ax.set_xlim(right=args.end_time, auto=None)
    else:
        left, right = ax.xaxis.get_data_interval()
        x_axis_range = right - left
        ax.set_xlim(left - 0.05 * x_axis_range, right + 0.05 * x_axis_range)
    if args.output is None:
        plt.show()
    else:
        fig.set_dpi(72)
        height = len(ids) * 0.5
        height = max(4, height)
        fig.set_figheight(height)
        plt.tight_layout()

        if not args.start_time or not args.end_time:
            days = ax.get_xlim()
            days = days[1] - days[0]
        else:
            days = args.end_time - args.start_time

        width = max(6, days / 30.)
        width = min(width, height * 4)
        fig.set_figwidth(width)
        plt.subplots_adjust(top=1, bottom=0, left=0, right=1)
        plt.tight_layout()

        fig.savefig(args.output)
    if args.verbose and not args.quiet:
        sys.stdout.write('\n')
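
Because main() takes an argv list, the scanner above can also be driven from Python; a hedged sketch where the data path and output image name are placeholders:

# Hypothetical invocation of the command-line entry point defined above.
main(['--start-time', '2020-01-01T00:00:00',
      '--end-time', '2020-01-02T00:00:00',
      '--print-gaps',
      '-o', 'scan.png',          # placeholder output image
      '/data/waveforms'])        # placeholder data directory
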
Exemple #10
0
    def analyze_parsed_data(self, print_gaps=False, starttime=None,
                            endtime=None, seed_ids=None):
        """
        Prepare information for plotting.

        Information is stored in a dictionary as ``scanner._info``, only
        containing these data matching the given parameters.

        :type print_gaps: bool
        :param print_gaps: Whether to print information on all encountered gaps
            and overlaps.
        :type starttime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param starttime: Fixed start time to use for the plot and the data
            percentage calculation.
        :type endtime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param endtime: Fixed end time to use for the plot and the data
            percentage calculation.
        :type seed_ids: list of str
        :param seed_ids: Whether to consider only a specific set of SEED IDs
            (e.g. ``seed_ids=["GR.FUR..BHZ", "GR.WET..BHZ"]``) or just all SEED
            IDs encountered in data (if left ``None``).
        """
        data = self.data
        samp_int = self.samp_int
        if starttime is not None:
            starttime = starttime.matplotlib_date
        if endtime is not None:
            endtime = endtime.matplotlib_date
        # either use ids specified by user or use ids based on what data we
        # have parsed
        ids = seed_ids or list(data.keys())
        ids = sorted(ids)[::-1]
        if self.verbose:
            print('\n')
        self._info = {}
        for _i, _id in enumerate(ids):
            info = {"gaps": [], "overlaps": [], "data_starts": [],
                    "data_startends_compressed": [], "percentage": None}
            self._info[_id] = info
            gap_info = info["gaps"]
            overlap_info = info["overlaps"]
            # sort data list and sampling rate list
            if _id in data:
                startend = np.array(data[_id])
                _samp_int = np.array(samp_int[_id])
                indices = np.lexsort((startend[:, 1], startend[:, 0]))
                startend = startend[indices]
                _samp_int = _samp_int[indices]
            else:
                startend = np.array([])
                _samp_int = np.array([])
            if len(startend) == 0:
                if not (starttime and endtime):
                    continue
                gap_info.append((starttime, endtime))
                if print_gaps:
                    print("%s %s %s %.3f" % (
                        _id, starttime, endtime, endtime - starttime))
                continue
            # restrict plotting of results to given start/end time
            if starttime or endtime:
                indices = np.ones(len(startend), dtype=np.bool_)
                if starttime:
                    indices &= startend[:, 1] > starttime
                if endtime:
                    indices &= startend[:, 0] < endtime
                startend = startend[indices]
                _samp_int = _samp_int[indices]
            if len(startend) == 0:
                # if both start and endtime are given, add it to gap info
                if starttime and endtime:
                    gap_info.append((starttime, endtime))
                continue
            data_start = startend[:, 0].min()
            data_end = startend[:, 1].max()
            timerange_start = starttime or data_start
            timerange_end = endtime or data_end
            timerange = timerange_end - timerange_start
            if timerange == 0.0:
                msg = 'Zero sample long data for _id=%s, skipping' % _id
                warnings.warn(msg)
                continue

            startend_compressed = compress_start_end(startend.copy(), 1000,
                                                     merge_overlaps=True)

            info["data_starts"] = startend[:, 0]
            info["data_startends_compressed"] = startend_compressed

            # find the gaps
            # current.start - last.end
            diffs = startend[1:, 0] - startend[:-1, 1]
            gapsum = diffs[diffs > 0].sum()
            # if start- and/or endtime is specified, add missing data at
            # start/end to gap sum
            has_gap = False
            gap_at_start = (
                starttime and
                data_start > starttime and
                data_start - starttime)
            gap_at_end = (
                endtime and
                endtime > data_end and
                endtime - data_end)
            if gap_at_start:
                gapsum += gap_at_start
                has_gap = True
            if gap_at_end:
                gapsum += gap_at_end
                has_gap = True
            info["percentage"] = (timerange - gapsum) / timerange * 100
            # define a gap as over 0.8 delta after expected sample time
            gap_indices = diffs > 0.8 * _samp_int[:-1]
            gap_indices = np.append(gap_indices, False)
            # define an overlap as over 0.8 delta before expected sample time
            overlap_indices = diffs < -0.8 * _samp_int[:-1]
            overlap_indices = np.append(overlap_indices, False)
            has_gap |= any(gap_indices)
            has_gap |= any(overlap_indices)
            if has_gap:
                # don't handle last end time as start of gap
                gaps_start = startend[gap_indices, 1]
                gaps_end = startend[np.roll(gap_indices, 1), 0]
                overlaps_end = startend[overlap_indices, 1]
                overlaps_start = startend[np.roll(overlap_indices, 1), 0]
                # but now, manually add start/end for gaps at start/end of user
                # specified start/end times
                if gap_at_start:
                    gaps_start = np.append(gaps_start, starttime)
                    gaps_end = np.append(gaps_end, data_start)
                if gap_at_end:
                    gaps_start = np.append(gaps_start, data_end)
                    gaps_end = np.append(gaps_end, endtime)

                _starts = np.concatenate((gaps_start, overlaps_end))
                _ends = np.concatenate((gaps_end, overlaps_start))
                sort_order = np.argsort(_starts)
                _starts = _starts[sort_order]
                _ends = _ends[sort_order]
                for start_, end_ in zip(_starts, _ends):
                    if print_gaps:
                        start__, end__ = num2date((start_, end_))
                        start__ = UTCDateTime(start__.isoformat())
                        end__ = UTCDateTime(end__.isoformat())
                        print("{} {} {} {:.3f}".format(
                            _id, start__, end__, end__ - start__))
                    if start_ < end_:
                        gap_info.append((start_, end_))
                    else:
                        overlap_info.append((start_, end_))
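
The method above reads self.data/self.samp_int and fills scanner._info; a hedged usage sketch, assuming it lives on an ObsPy Scanner-like class that exposes a parse() method (the path and SEED ID are placeholders):

scanner = Scanner()
scanner.parse('/data/waveforms')            # placeholder path with waveform files
scanner.analyze_parsed_data(
    print_gaps=True,
    starttime=UTCDateTime('2020-01-01T00:00:00'),
    endtime=UTCDateTime('2020-01-02T00:00:00'),
    seed_ids=['GR.FUR..BHZ'])
for seed_id, info in scanner._info.items():
    print(seed_id, info['percentage'], len(info['gaps']), len(info['overlaps']))
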
Exemple #11
0
def format_utcdatetime(o: UTCDateTime) -> str:
    return o.isoformat()
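
A quick usage note for the helper above; the exact string depends on the ObsPy version's isoformat() output:

t = UTCDateTime(2021, 1, 1, 12, 30, 0)
print(format_utcdatetime(t))   # e.g. '2021-01-01T12:30:00'
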
Exemple #12
0
def main(argv=None):
    parser = ArgumentParser(prog='obspy-scan', description=__doc__.strip(),
                            formatter_class=RawDescriptionHelpFormatter)
    parser.add_argument('-V', '--version', action='version',
                        version='%(prog)s ' + __version__)
    parser.add_argument('-f', '--format', choices=ENTRY_POINTS['waveform'],
                        help='Optional, the file format.\n' +
                             ' '.join(__doc__.split('\n')[-4:]))
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Optional. Verbose output.')
    parser.add_argument('-n', '--non-recursive',
                        action='store_false', dest='recursive',
                        help='Optional. Do not descend into directories.')
    parser.add_argument('-i', '--ignore-links', action='store_true',
                        help='Optional. Do not follow symbolic links.')
    parser.add_argument('--start-time', default=None, type=UTCDateTime,
                        help='Optional, a UTCDateTime compatible string. ' +
                             'Only visualize data after this time and set ' +
                             'the time axis accordingly.')
    parser.add_argument('--end-time', default=None, type=UTCDateTime,
                        help='Optional, a UTCDateTime compatible string. ' +
                             'Only visualize data before this time and set ' +
                             'the time axis accordingly.')
    parser.add_argument('--id', action='append',
                        help='Optional, a SEED channel identifier '
                             "(e.g. 'GR.FUR..HHZ'). You may provide this " +
                             'option multiple times. Only these ' +
                             'channels will be plotted.')
    parser.add_argument('-t', '--event-time', default=None, type=UTCDateTime,
                        action='append',
                        help='Optional, a UTCDateTime compatible string ' +
                             "(e.g. '2010-01-01T12:00:00'). You may provide " +
                             'this option multiple times. These times get ' +
                             'marked by vertical lines in the plot. ' +
                             'Useful e.g. to mark event origin times.')
    parser.add_argument('-w', '--write', default=None,
                        help='Optional, npz file for writing data '
                             'after scanning waveform files')
    parser.add_argument('-l', '--load', default=None,
                        help='Optional, npz file for loading data '
                             'before scanning waveform files')
    parser.add_argument('--no-x', action='store_true',
                        help='Optional, Do not plot crosses.')
    parser.add_argument('--no-gaps', action='store_true',
                        help='Optional, Do not plot gaps.')
    parser.add_argument('-o', '--output', default=None,
                        help='Save plot to image file (e.g. out.pdf, ' +
                             'out.png) instead of opening a window.')
    parser.add_argument('--print-gaps', action='store_true',
                        help='Optional, prints a list of gaps at the end.')
    parser.add_argument('paths', nargs='*',
                        help='Files or directories to scan.')

    # Deprecated arguments
    action = _DeprecatedArgumentAction('--endtime', '--end-time')
    parser.add_argument('--endtime', type=UTCDateTime,
                        action=action, help=SUPPRESS)

    action = _DeprecatedArgumentAction('--event-times', '--event-time')
    parser.add_argument('--event-times', action=action, help=SUPPRESS)

    action = _DeprecatedArgumentAction('--ids', '--id')
    parser.add_argument('--ids', action=action, help=SUPPRESS)

    action = _DeprecatedArgumentAction('--nox', '--no-x',
                                       real_action='store_true')
    parser.add_argument('--nox', dest='no_x', nargs=0,
                        action=action, help=SUPPRESS)

    action = _DeprecatedArgumentAction('--nogaps', '--no-gaps',
                                       real_action='store_true')
    parser.add_argument('--nogaps', dest='no_gaps', nargs=0,
                        action=action, help=SUPPRESS)

    action = _DeprecatedArgumentAction('--starttime', '--start-time')
    parser.add_argument('--starttime', type=UTCDateTime,
                        action=action, help=SUPPRESS)

    args = parser.parse_args(argv)

    # Print help and exit if no arguments are given
    if len(args.paths) == 0 and args.load is None:
        parser.error('No paths specified.')

    # Use recursively parsing function?
    if args.recursive:
        parse_func = recursive_parse
    else:
        parse_func = parse_file_to_dict

    if args.output is not None:
        import matplotlib
        matplotlib.use("agg")
    global date2num
    from matplotlib.dates import date2num, num2date
    from matplotlib.patches import Rectangle
    from matplotlib.collections import PatchCollection
    import matplotlib.pyplot as plt

    fig = plt.figure()
    ax = fig.add_subplot(111)

    # Plot vertical lines if option 'event_times' was specified
    if args.event_time:
        times = map(date2num, args.event_time)
        for time in times:
            ax.axvline(time, color='k')
    # Deprecated version (don't plot twice)
    if args.event_times and not args.event_time:
        times = args.event_times.split(',')
        times = map(UTCDateTime, times)
        times = map(date2num, times)
        for time in times:
            ax.axvline(time, color='k')

    if args.start_time:
        args.start_time = date2num(args.start_time)
    elif args.starttime:
        # Deprecated version
        args.start_time = date2num(args.starttime)
    if args.end_time:
        args.end_time = date2num(args.end_time)
    elif args.endtime:
        # Deprecated version
        args.end_time = date2num(args.endtime)

    # Generate dictionary containing nested lists of start and end times per
    # station
    data = {}
    samp_int = {}
    counter = 1
    if args.load:
        load_npz(args.load, data, samp_int)
    for path in args.paths:
        counter = parse_func(data, samp_int, path, counter, args.format,
                             args.verbose, args.ignore_links)
    if not data:
        print("No waveform data found.")
        return
    if args.write:
        write_npz(args.write, data, samp_int)

    # Loop through this dictionary
    ids = list(data.keys())
    # Handle deprecated argument
    if args.ids and not args.id:
        args.id = args.ids.split(',')
    # restrict plotting of results to given ids
    if args.id:
        ids = [x for x in ids if x in args.id]
    ids = sorted(ids)[::-1]
    labels = [""] * len(ids)
    print('\n')
    for _i, _id in enumerate(ids):
        labels[_i] = ids[_i]
        data[_id].sort()
        startend = np.array(data[_id])
        if len(startend) == 0:
            continue
        # restrict plotting of results to given start/end time
        if args.start_time:
            startend = startend[startend[:, 1] > args.start_time]
        if len(startend) == 0:
            continue
        if args.end_time:
            startend = startend[startend[:, 0] < args.end_time]
        if len(startend) == 0:
            continue
        timerange = startend[:, 1].max() - startend[:, 0].min()
        if timerange == 0.0:
            warnings.warn('Zero sample long data for _id=%s, skipping' % _id)
            continue

        startend_compressed = compressStartend(startend, 1000)

        offset = np.ones(len(startend)) * _i  # generate list of y values
        ax.xaxis_date()
        if not args.no_x:
            ax.plot_date(startend[:, 0], offset, 'x', linewidth=2)
        ax.hlines(offset[:len(startend_compressed)], startend_compressed[:, 0],
                  startend_compressed[:, 1], 'b', linewidth=2, zorder=3)
        # find the gaps
        diffs = startend[1:, 0] - startend[:-1, 1]  # current.start - last.end
        gapsum = diffs[diffs > 0].sum()
        perc = (timerange - gapsum) / timerange
        labels[_i] = labels[_i] + "\n%.1f%%" % (perc * 100)
        gap_indices = diffs > 1.8 * np.array(samp_int[_id][:-1])
        gap_indices = np.concatenate((gap_indices, [False]))
        if any(gap_indices):
            # don't handle last end time as start of gap
            gaps_start = startend[gap_indices, 1]
            gaps_end = startend[np.roll(gap_indices, 1), 0]
            if not args.no_gaps and any(gap_indices):
                rects = [Rectangle((start_, offset[0] - 0.4),
                                   end_ - start_, 0.8)
                         for start_, end_ in zip(gaps_start, gaps_end)]
                ax.add_collection(PatchCollection(rects, color="r"))
            if args.print_gaps:
                for start_, end_ in zip(gaps_start, gaps_end):
                    start_, end_ = num2date((start_, end_))
                    start_ = UTCDateTime(start_.isoformat())
                    end_ = UTCDateTime(end_.isoformat())
                    print("%s %s %s %.3f" % (_id, start_, end_, end_ - start_))

    # Pretty format the plot
    ax.set_ylim(0 - 0.5, _i + 0.5)
    ax.set_yticks(np.arange(_i + 1))
    ax.set_yticklabels(labels, family="monospace", ha="right")
    # set x-axis limits according to given start/end time
    if args.start_time:
        ax.set_xlim(left=args.start_time, auto=None)
    if args.end_time:
        ax.set_xlim(right=args.end_time, auto=None)
    fig.autofmt_xdate()  # rotate date
    plt.subplots_adjust(left=0.2)
    if args.output is None:
        plt.show()
    else:
        fig.set_dpi(72)
        height = len(ids) * 0.5
        height = max(4, height)
        fig.set_figheight(height)
        # tight_layout() only available from matplotlib >= 1.1
        try:
            plt.tight_layout()
            days = ax.get_xlim()
            days = days[1] - days[0]
            width = max(6, days / 30.)
            width = min(width, height * 4)
            fig.set_figwidth(width)
            plt.subplots_adjust(top=1, bottom=0, left=0, right=1)
            plt.tight_layout()
        except Exception:
            pass
        fig.savefig(args.output)
    sys.stdout.write('\n')
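
The --write/--load options above cache scan results as npz files so repeated plots can skip re-reading the waveforms; a minimal sketch of that round trip with the helpers used above (the cache file name is a placeholder):

data, samp_int = {}, {}
# ... fill data/samp_int via recursive_parse / parse_file_to_dict as above ...
write_npz('scan_cache.npz', data, samp_int)

# in a later run, reload the cached results instead of re-scanning
data, samp_int = {}, {}
load_npz('scan_cache.npz', data, samp_int)
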
def fdsnws2geomag():
    '''Convert fdsnws query to geomagnetic data file'''
    parser = argparse.ArgumentParser(
        description='Query the FDSN webservice and convert to the '
                    'geomagnetic data standards')
    parser.add_argument('--url',
                        default=DEFAULT_FDNWS,
                        help='FDSN-WS URL (default: %s)' % DEFAULT_FDNWS)
    parser.add_argument('--format',
                        choices=['internet', 'iaga2002', 'imfv122'],
                        default='iaga2002',
                        help="Output format (default: iaga2002)")
    parser.add_argument('--output',
                        default=sys.stdout,
                        help='Output file (default: stdout).')
    # query specific parameters
    parser.add_argument('--date',
                        default=DEFAULT_DATE,
                        help='Date of the request (default: %s)' %
                        DEFAULT_DATE)
    parser.add_argument('--network',
                        default=DEFAULT_NETWORK,
                        help='Network code (default: %s)' % DEFAULT_NETWORK)
    parser.add_argument('--station', required=True, help='Station code')
    parser.add_argument(
        '--location',
        nargs='+',
        default=DEFAULT_LOCATIONS,
        help=
        'Data type + source (data type = R - raw, D - definitive, source = 0,1,2,3..., default: %s)'
        % DEFAULT_LOCATIONS)
    parser.add_argument('--channel',
                        nargs='+',
                        default=DEFAULT_CHANNELS,
                        help='FDSN compliant channel query (default: %s)' %
                        ",".join(DEFAULT_CHANNELS))
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Verbosity')
    args = parser.parse_args()

    # Set the logging level
    logging.basicConfig(
        format='%(asctime)s.%(msecs)03d %(levelname)s '
               '%(module)s %(funcName)s: %(message)s',
        datefmt="%Y-%m-%d %H:%M:%S",
        level=logging.INFO if args.verbose else logging.WARNING)

    # Convert date to starttime and endtime
    reftime = UTCDateTime(args.date)
    starttime = UTCDateTime(
        reftime.datetime.replace(hour=0, minute=0, second=0, microsecond=0))
    endtime = UTCDateTime(
        reftime.datetime.replace(hour=23,
                                 minute=59,
                                 second=59,
                                 microsecond=999999))

    # Create a handler client
    logging.info("Connecting to %s", args.url)
    client = Client(args.url)
    logging.info("Requesting data for %s.%s.%s.%s from %s to %s", args.network,
                 args.station, ",".join(args.location), ",".join(args.channel),
                 starttime.isoformat(), endtime.isoformat())
    stream = Stream(
        client.get_waveforms(args.network, args.station,
                             ",".join(args.location), ",".join(args.channel),
                             starttime, endtime))
    logging.info("Found stream: %s", str(stream.__str__(extended=True)))
    # Load optional inventory information
    inventory = client.get_stations(network=args.network, station=args.station)

    # Handle if no data was found
    if not stream:
        logging.warning("No data found")
        return 1

    # Before sending the raw data for writing, we need to trim the response
    # from the FDSNWS query to our actual request time.  We also merge by
    # location.
    logging.info("Writing information to %s", str(args.output))
    # Correct the endtime with delta of the first trace
    endtime = UTCDateTime(reftime.datetime) + 86400 - stream[0].stats.delta
    stream.merge_by_location().trim(starttime,
                                    endtime).write(args.output,
                                                   format=args.format,
                                                   inventory=inventory)
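
Both FDSN helpers derive a whole-day window from the single --date argument and trim the final sample off the request; a standalone sketch of that conversion, assuming a hypothetical 1 Hz stream:

from obspy import UTCDateTime

reftime = UTCDateTime('2020-06-15')   # e.g. value passed via --date
starttime = UTCDateTime(reftime.datetime.replace(
    hour=0, minute=0, second=0, microsecond=0))
delta = 1.0                           # hypothetical 1 Hz sampling interval
endtime = UTCDateTime(reftime.datetime) + 86400 - delta
print(starttime, endtime)             # midnight up to the last sample of the day
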
def fdsnws2directory():
    '''
    Much like fdsnws2geomag, but purely designed to get the data from the FDSN-WS
    and add it according to the structure found on geomagnetic daqs servers.

    These structures vary depending on the source but can be customized by input argument.

    Filenames cannot be customized since they follow a strict naming convention.

    The convention can be found in the pygeomag/data/formats directory.
    '''
    parser = argparse.ArgumentParser(
        description='Query the FDSN webservice and convert to the '
                    'geomagnetic data standards')
    parser.add_argument('--url',
                        default=DEFAULT_FDNWS,
                        help='FDSN-WS URL (default: %s)' % DEFAULT_FDNWS)
    parser.add_argument('--format',
                        choices=['iaga2002', 'imfv122'],
                        default='iaga2002',
                        help="Output format (default: iaga2002)")
    parser.add_argument(
        '--directory',
        default=DEFAULT_DIRECTORY,
        help=
        'Output directory with optional datetime parameter as accepted by python datetime (default: %s).'
        % DEFAULT_DIRECTORY)
    # query specific parameters
    parser.add_argument('--date',
                        default=DEFAULT_DATE,
                        help='Date of the request (default: %s)' %
                        DEFAULT_DATE)
    parser.add_argument('--network',
                        default=DEFAULT_NETWORK,
                        help='Network code (default: %s)' % DEFAULT_NETWORK)
    parser.add_argument('--station',
                        default='*',
                        help='Station code (default: *)')
    parser.add_argument(
        '--location',
        nargs='+',
        default=DEFAULT_LOCATIONS,
        help=
        'Data type + source (data type = R - raw, D - definitive, source = 0,1,2,3..., default: %s)'
        % DEFAULT_LOCATIONS)
    parser.add_argument('--channel',
                        nargs='+',
                        default=DEFAULT_CHANNELS,
                        help='FDSN compliant channel query (default: %s)' %
                        ",".join(DEFAULT_CHANNELS))
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Verbosity')
    args = parser.parse_args()

    # Set the logging level
    logging.basicConfig(
        format='%(asctime)s.%(msecs)03d %(levelname)s '
               '%(module)s %(funcName)s: %(message)s',
        datefmt="%Y-%m-%d %H:%M:%S",
        level=logging.INFO if args.verbose else logging.WARNING)

    # Convert date to starttime and endtime
    reftime = UTCDateTime(args.date)
    starttime = UTCDateTime(
        reftime.datetime.replace(hour=0, minute=0, second=0, microsecond=0))
    endtime = UTCDateTime(
        reftime.datetime.replace(hour=23,
                                 minute=59,
                                 second=59,
                                 microsecond=999999))

    # Create a handler client
    logging.info("Connecting to %s", args.url)
    client = Client(args.url)
    logging.info("Requesting data for %s.%s.%s.%s from %s to %s", args.network,
                 args.station, ",".join(args.location), ",".join(args.channel),
                 starttime.isoformat(), endtime.isoformat())
    stream = Stream(
        client.get_waveforms(args.network, args.station,
                             ",".join(args.location), ",".join(args.channel),
                             starttime, endtime))
    logging.info("Found stream: %s", str(stream.__str__(extended=True)))
    # Load optional inventory information
    inventory = client.get_stations(network=args.network, station=args.station)

    # Handle if no data was found
    if not stream:
        logging.warning("No data found")
        return 1

    # Before sending the raw data for writing, we need to trim the response
    # from the FDSNWS query to our actual request time.  We also merge by
    # location.
    # Correct the endtime with delta of the first trace
    endtime = UTCDateTime(reftime.datetime) + 86400 - stream[0].stats.delta
    stream = stream.merge_by_location().trim(starttime, endtime)

    # Loop through the stream and generate the unique set of station codes.
    # We know the network code is constant and it's a single sampling rate
    # request.
    stations = set([trace.stats.station for trace in stream])

    # Convert the directory format string to a full path
    directory = starttime.strftime(args.directory)
    logging.info("Creating directory %s if it does not exist", directory)
    pathlib.Path(directory).mkdir(parents=True, exist_ok=True)

    for station in stations:
        # Extract the station I need
        extract = stream.select(station=station)
        # Generate its filename (depends on the format)
        if args.format in ['iaga2002']:
            filename = pygeomag.data.formats.iaga2002.get_filename(
                extract[0].stats)
        elif args.format in ['imfv122']:
            filename = pygeomag.data.formats.imfv122.get_filename(
                extract[0].stats)
        else:
            raise ValueError(
                "Unable to generate filename for unhandled format %s" %
                args.format)
        filename = os.path.join(directory, filename)
        logging.info("Writing magnetic data to %s", filename)
        extract.write(filename, format=args.format, inventory=inventory)
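
The --directory option above may embed strftime placeholders that are expanded from the request start time; a small illustrative sketch in which the base path and pattern are hypothetical:

import pathlib
from obspy import UTCDateTime

starttime = UTCDateTime('2020-06-15')
directory = starttime.strftime('/data/geomag/%Y/%m/%d')   # hypothetical pattern
pathlib.Path(directory).mkdir(parents=True, exist_ok=True)
print(directory)   # /data/geomag/2020/06/15
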
        py = []
        for az in azimuths:
            xy = reckon(ev['lat'], ev['lon'], s_radius, az)
            px.append(xy[0])
            py.append(xy[1])

        x, y = m(px, py)
        m.plot(x, y, '-', c='darkorange', lw=1.5, label='S-Phase')

        # add legend
        plt.legend(loc=2, fontsize=14, numpoints=1)

    # add label
    sample_time = UTCDateTime(ev['datetime']) + mt
    xpos, ypos = m(125.5, -27)
    plt.text(xpos, ypos, sample_time.isoformat()[0:-7], ha='left', va='bottom', fontsize=16, bbox=props)
    
    
    outfile = 'png/mapped_velocity_%03d.png' % (i + 1)  # zero-pad the frame number
    print(outfile)
    plt.savefig(outfile, format='png', dpi=300, bbox_inches='tight')
    plt.clf()
"""
###############################################################################
# make animation
###############################################################################
import matplotlib.image as mgimg