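    # NOTE (assumed context): this method belongs to a fetcher class whose
    # constructor sets self.metadata, self.rawdir, self.email, and the other
    # attributes used below; module-level imports of logging, numpy as np,
    # datetime.timedelta, and the helpers get_records, get_stations_dataframe,
    # read_data, and StreamCollection (plus the TIMEFMT and MAX_STATIONS
    # constants) are expected elsewhere in the file.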
    def retrieveData(self, event_dict):
        """Retrieve data from CESMD, turn into StreamCollection.

        Args:
            event_dict (dict):
                Best dictionary matching the input event, with fields as
                described in the return of getMatchingEvents().

        Returns:
            StreamCollection: StreamCollection object, or an empty list if
                no records could be retrieved, or None if
                self.stream_collection is False.
        """

        if self.metadata is None:
            raise Exception("Must call getMatchingEvents() first.")

        # get matching event in metadata
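        # (events are matched by origin-time string; "count" in the metadata
        # is assumed to be the station count reported for the query, used
        # below to decide whether requests must be split into chunks)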
        has_event = False
        nstations = 0
        for event in self.metadata["results"]["events"]:
            if event["time"] == event_dict["time"].strftime(TIMEFMT):
                has_event = True
                nstations = self.metadata["count"]
                break

        if not has_event:
            raise Exception("Could not find matching event.")

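        # request a window of eq_dt seconds (assumed to be set in the
        # constructor) centered on the event origin time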
        starttime = self.time - timedelta(seconds=self.eq_dt // 2)
        endtime = self.time + timedelta(seconds=self.eq_dt // 2)

        if nstations < MAX_STATIONS:
            try:
                (_, datafiles) = get_records(
                    self.rawdir,
                    self.email,
                    unpack=True,
                    event_latitude=self.lat,
                    event_longitude=self.lon,
                    event_radius=self.radius,
                    process_level=self.process_type,
                    group_by="event",
                    max_station_dist=self.station_radius,
                    station_type=self.station_type,
                    startdate=starttime,
                    enddate=endtime,
                )
            except Exception as ex:
                eqfmt = "M%.1f %s"
                eqdesc = eqfmt % (
                    self.magnitude,
                    self.time.strftime("%Y-%m-%d %H:%M:%S"),
                )
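                # a 404 in the exception text is assumed to mean the web
                # service simply found no records for this query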
                if "404" in str(ex):
                    logging.info(f"Could not find data records for {eqdesc}")
                else:
                    logging.info(f"Unplanned exception getting records for {eqdesc}")
                return []
        else:
            # web service has a maximum number of stations you're allowed to
            # fetch (note that this may not be the same as the number of files)
            # so we're splitting up the stations by distance and downloading
            # them in chunks.

            # the stations are grouped a little oddly in the results of
            # the metadata - there are a number of "event" entries, all
            # with the same ID, and they each contain some collection
            # of stations. We want all of those stations, so we need to
            # iterate over the "events" and each station within them.
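            # e.g. (hypothetical numbers): 450 stations with MAX_STATIONS
            # of 200 gives nchunks = ceil(450 / 200) = 3, so np.array_split()
            # returns three chunks of 150 stations, each under the limit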
            dataframe = get_stations_dataframe(self.metadata)
            distances = sorted(dataframe["epidist"].to_numpy())
            nchunks = int(np.ceil(len(distances) / MAX_STATIONS))
            distance_chunks = np.array_split(distances, nchunks)
            datafiles = []
            for chunk in distance_chunks:
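                # chunks partition the sorted distances, so the first and
                # last entries of each chunk bound its distance band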
                mindist = chunk[0]
                maxdist = chunk[-1]
                try:
                    (_, tfiles) = get_records(
                        self.rawdir,
                        self.email,
                        unpack=True,
                        event_latitude=self.lat,
                        event_longitude=self.lon,
                        event_radius=self.radius,
                        process_level=self.process_type,
                        group_by="event",
                        min_station_dist=mindist,
                        max_station_dist=maxdist,
                        station_type=self.station_type,
                        startdate=starttime,
                        enddate=endtime,
                    )
                except Exception as ex:
                    eqfmt = "M%.1f %s"
                    eqdesc = eqfmt % (
                        self.magnitude,
                        self.time.strftime("%Y-%m-%d %H:%M:%S"),
                    )
                    if "404" in str(ex):
                        fmt = (
                            "Could not find data records for %s "
                            "between %.1f km and %.1f km"
                        )
                        logging.info(fmt % (eqdesc, mindist, maxdist))
                    else:
                        logging.warning(
                            f"Unplanned exception getting records for {eqdesc}"
                        )
                    continue
                datafiles += tfiles

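        # self.stream_collection is assumed to be a boolean constructor
        # option; when it is False, the downloaded files are left in
        # self.rawdir and no StreamCollection is built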
        if self.stream_collection:
            streams = []
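            # read each downloaded file into traces, skipping (with a log
            # entry) any file the reader cannot parse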
            for dfile in datafiles:
                logging.info(f"Reading CESMD file {dfile}...")
                try:
                    streams += read_data(dfile)
                except Exception as ex:
                    logging.info(f'Could not read {dfile}: error "{str(ex)}"')

            stream_collection = StreamCollection(
                streams=streams, drop_non_free=self.drop_non_free
            )
            return stream_collection
        else:
            return None