Code example #1
# numpy is required by the assertions below; the gmprocess import paths are
# assumed from the package layout and may differ between gmprocess versions.
import numpy as np

from gmprocess.io.nsmn.core import is_nsmn, read_nsmn
from gmprocess.io.test_utils import read_data_dir


def test_nsmn():
    datafiles, origin = read_data_dir('nsmn', 'us20009ynd')

    # make sure format checker works
    assert is_nsmn(datafiles[0])

    raw_streams = []
    for dfile in datafiles:
        raw_streams += read_nsmn(dfile)

    peaks = {
        '0921': (13.200332, 12.163827, 9.840572),
        '4304': (1.218825, 1.207812, 0.645862),
        '5405': (1.023915, 1.107856, 0.385138)
    }

    coords = {
        '0921': (37.87470, 27.59223),
        '4304': (38.99478, 29.40040),
        '5405': (40.79609, 30.73520)
    }

    for stream in raw_streams:
        cmp_value = peaks[stream[0].stats.station]
        pga1 = np.abs(stream[0].max())
        pga2 = np.abs(stream[1].max())
        pga3 = np.abs(stream[2].max())
        tpl = (pga1, pga2, pga3)
        np.testing.assert_almost_equal(cmp_value, tpl)
        cmp_coords = coords[stream[0].stats.station]
        tpl = (stream[0].stats['coordinates']['latitude'],
               stream[0].stats['coordinates']['longitude'])
        np.testing.assert_almost_equal(cmp_coords, tpl)
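
For reference, a minimal sketch of reading a single NSMN file directly, assuming read_nsmn returns a list of ObsPy-style Stream objects whose traces carry stats.station and stats['coordinates'] (the same attributes the test above relies on); the file path is a hypothetical placeholder:

import numpy as np

from gmprocess.io.nsmn.core import read_nsmn  # assumed path, as above

# '/data/nsmn/0921.txt' is a hypothetical placeholder path.
for stream in read_nsmn('/data/nsmn/0921.txt'):
    trace = stream[0]
    print(trace.stats.station,
          trace.stats['coordinates']['latitude'],
          trace.stats['coordinates']['longitude'],
          np.abs(trace.max()))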
Code example #2
    def retrieveData(self, event_dict):
        """Retrieve data from NSMN, turn into StreamCollection.

        Args:
            event (dict):
                Best dictionary matching input event, fields as above
                in return of getMatchingEvents().

        Returns:
            StreamCollection: StreamCollection object.
        """
        rawdir = self.rawdir
        if self.rawdir is None:
            rawdir = tempfile.mkdtemp()
        else:
            if not os.path.isdir(rawdir):
                os.makedirs(rawdir)

        # SEARCH_URL is a module-level constant (the NSMN search page);
        # only its host is needed to resolve relative links below.
        urlparts = urlparse(SEARCH_URL)
        req = requests.get(event_dict['url'])
        data = req.text
        soup = BeautifulSoup(data, features="lxml")
        # The second 'tableType_01' table on the event page lists the stations.
        table = soup.find_all('table', 'tableType_01')[1]
        datafiles = []
        for row in table.find_all('tr'):
            # rows that carry a class attribute are header rows; skip them
            if 'class' in row.attrs:
                continue
            col = row.find_all('td', 'coltype01')[0]
            href = col.contents[0].attrs['href']
            station_id = col.contents[0].contents[0]
            station_url = urljoin('http://' + urlparts.netloc, href)
            req2 = requests.get(station_url)
            data2 = req2.text
            soup2 = BeautifulSoup(data2, features="lxml")
            center = soup2.find_all('center')[0]
            anchor = center.find_all('a')[0]
            href2 = anchor.attrs['href']
            data_url = urljoin('http://' + urlparts.netloc, href2)
            req3 = requests.get(data_url)
            data = req3.text
            localfile = os.path.join(rawdir, '%s.txt' % station_id)
            logging.info('Downloading Turkish data file %s...' % station_id)
            with open(localfile, 'wt') as f:
                f.write(data)
            datafiles.append(localfile)

        streams = []
        for dfile in datafiles:
            logging.info('Reading datafile %s...' % dfile)
            streams += read_nsmn(dfile)

        if self.rawdir is None:
            # remove the temporary directory only if we created it ourselves
            shutil.rmtree(rawdir)

        stream_collection = StreamCollection(streams=streams,
                                             drop_non_free=self.drop_non_free)
        return stream_collection
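
A hypothetical call site for the method above. The class name TurkeyFetcher appears in the debug logging of the variant in the next example, but the constructor signature shown here is inferred from the self.rawdir and self.drop_non_free attribute accesses, not confirmed against the real API:

# Constructor arguments are inferred from the attribute accesses above;
# the real __init__ signature may differ.
fetcher = TurkeyFetcher(rawdir=None, drop_non_free=True)

# event_dict normally comes from getMatchingEvents(); retrieveData()
# only reads its 'url' field. event_page_url is a placeholder for a
# real NSMN event page URL.
event_dict = {'url': event_page_url}
collection = fetcher.retrieveData(event_dict)
print(collection)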
Code example #3
    def retrieveData(self, event_dict):
        """Retrieve data from NSMN, turn into StreamCollection.

        Args:
            event (dict):
                Best dictionary matching input event, fields as above
                in return of getMatchingEvents().

        Returns:
            StreamCollection: StreamCollection object.
        """
        rawdir = self.rawdir
        if self.rawdir is None:
            rawdir = tempfile.mkdtemp()
        else:
            if not os.path.isdir(rawdir):
                os.makedirs(rawdir)

        urlparts = urlparse(SEARCH_URL)
        req = requests.get(event_dict["url"])

        logging.debug("TurkeyFetcher event url: %s", str(event_dict["url"]))
        logging.debug("TurkeyFetcher event response code: %s", req.status_code)

        data = req.text
        soup = BeautifulSoup(data, features="lxml")
        table = soup.find_all("table", "tableType_01")[1]
        datafiles = []
        for row in table.find_all("tr"):
            if "class" in row.attrs:
                continue
            col = row.find_all("td", "coltype01")[0]
            href = col.contents[0].attrs["href"]
            station_id = col.contents[0].contents[0]
            station_url = urljoin("http://" + urlparts.netloc, href)
            req2 = requests.get(station_url)
            logging.debug("TurkeyFetcher station url: %s", str(station_url))
            logging.debug("TurkeyFetcher station response code: %s",
                          req2.status_code)
            data2 = req2.text
            soup2 = BeautifulSoup(data2, features="lxml")
            center = soup2.find_all("center")[0]
            anchor = center.find_all("a")[0]
            href2 = anchor.attrs["href"]
            data_url = urljoin("http://" + urlparts.netloc, href2)
            req3 = requests.get(data_url)
            logging.debug("TurkeyFetcher data url: %s", str(data_url))
            logging.debug("TurkeyFetcher data response code: %s",
                          req3.status_code)
            data = req3.text
            localfile = os.path.join(rawdir, f"{station_id}.txt")
            logging.info(f"Downloading Turkish data file {station_id}...")
            with open(localfile, "wt") as f:
                f.write(data)
            datafiles.append(localfile)

        streams = []
        for dfile in datafiles:
            logging.info(f"Reading datafile {dfile}...")
            streams += read_nsmn(dfile)

        if self.rawdir is None:
            shutil.rmtree(rawdir)

        stream_collection = StreamCollection(streams=streams,
                                             drop_non_free=self.drop_non_free)
        return stream_collection
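
The scraping pattern used above, isolated: a short sketch of pulling station links out of a "tableType_01" table with BeautifulSoup. The HTML snippet is a made-up stand-in for the NSMN event page, kept just structured enough to exercise the same calls:

from urllib.parse import urljoin

from bs4 import BeautifulSoup

# Made-up HTML mimicking the structure the fetcher expects: header rows
# carry a class attribute, data rows hold one 'coltype01' cell with a link.
html = """
<table class="tableType_01">
  <tr class="header"><td>Station</td></tr>
  <tr><td class="coltype01"><a href="/station/0921">0921</a></td></tr>
</table>
"""
soup = BeautifulSoup(html, features="lxml")
table = soup.find_all("table", "tableType_01")[0]
for row in table.find_all("tr"):
    if "class" in row.attrs:  # skip header rows, as in retrieveData()
        continue
    col = row.find_all("td", "coltype01")[0]
    anchor = col.contents[0]
    # 'http://example.invalid' stands in for the real NSMN host.
    print(anchor.contents[0], urljoin("http://example.invalid", anchor.attrs["href"]))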