Example #1
0
def test_station5():
    """Verify that loading the same XML inputs twice changes nothing.

    Loads the Calexico station and DYFI files once, then again with every
    file repeated, and checks that the resulting instrumented station
    dictionaries are identical.
    """
    here = os.path.dirname(os.path.abspath(__file__))
    input_dir = os.path.abspath(
        os.path.join(here, 'station_data', 'Calexico', 'input'))

    station_xml = os.path.join(input_dir, 'stationlist_dat.xml')
    dyfi_xml = os.path.join(input_dir, 'ciim3_dat.xml')

    # Baseline: each input file loaded exactly once.
    stations1 = StationList.loadFromXML([station_xml, dyfi_xml], ":memory:")

    # Load each file twice to exercise the repeated-entry handling.
    stations2 = StationList.loadFromXML(
        [station_xml, station_xml, dyfi_xml, dyfi_xml], ":memory:")

    df1 = stations1.getStationDictionary(instrumented=True)
    df2 = stations2.getStationDictionary(instrumented=True)

    compare_dataframes(df1, df2)
Example #2
0
def test_replace_dyfi():
    """Test that replace_dyfi() swaps old DYFI data for new DYFI data.

    The merged list must keep every non-DYFI macroseismic observation
    from the original file and contain exactly as many DYFI rows as the
    replacement file provides.
    """
    # NOTE(review): datadir is assumed to be a module-level path — not
    # visible in this block; confirm against the module preamble.
    stationfile = os.path.join(datadir, 'nepal_dat.xml')
    dyfifile = os.path.join(datadir, 'nepal_dyfi_dat.xml')

    original_stations = StationList.loadFromFiles([stationfile])
    dyfi_stations = StationList.loadFromFiles([dyfifile])

    # Number of DYFI stations in the replacement file.
    dcursor = dyfi_stations.cursor
    dcursor.execute('SELECT count(*) from station')
    ndyfi1 = dcursor.fetchone()[0]

    original_cursor = original_stations.cursor

    # All non-instrumented (macroseismic) stations in the original file.
    original_cursor.execute(
        'SELECT count(*) from station WHERE instrumented=0')
    noriginal_mmi = original_cursor.fetchone()[0]

    # The original file's own DYFI stations (to be replaced).
    original_cursor.execute(
        'SELECT count(*) from station WHERE instrumented=0 and network="DYFI"')
    noriginal_dyfi = original_cursor.fetchone()[0]

    # Non-DYFI macroseismic observations that must survive the replacement.
    noriginal_observed = noriginal_mmi - noriginal_dyfi

    stations = replace_dyfi(stationfile, dyfifile)

    scursor = stations.cursor
    scursor.execute('SELECT count(*) from station where '
                    'instrumented=0 and network != "DYFI"')
    nobserved = scursor.fetchone()[0]
    assert nobserved == noriginal_observed

    scursor.execute(
        'SELECT count(*) from station where instrumented=0 and network="DYFI"')
    ndyfi = scursor.fetchone()[0]
    assert ndyfi == ndyfi1
Example #3
0
def test_station3():
    """Exercise the GeoJSON round trip: XML -> GeoJSON -> StationList.

    Both the instrumented and non-instrumented dictionaries must survive
    the trip through a JSON file unchanged.
    """
    homedir = os.path.dirname(os.path.abspath(__file__))

    event = 'wenchuan'

    datadir = os.path.abspath(os.path.join(homedir, 'station_data'))
    datadir = os.path.abspath(os.path.join(datadir, event, 'input'))

    inputfile = os.path.join(datadir, 'stationlist.xml')
    xmlfiles = [inputfile]

    stations = StationList.loadFromFiles(xmlfiles, ":memory:")

    myjson = stations.getGeoJson()

    # Write the GeoJSON to a temp file and load it back; remove the file
    # even if the write or the reload fails, so failed runs don't leak
    # temp files (the original unlinked only on the success path).
    ofd = tempfile.NamedTemporaryFile(suffix='.json', delete=False)
    jsonfile = ofd.name
    try:
        ofd.write(json.dumps(myjson).encode())
        ofd.close()
        stations2 = StationList.loadFromFiles([jsonfile])
    finally:
        os.unlink(jsonfile)

    df1, _ = stations.getStationDictionary(instrumented=True)
    df2, _ = stations2.getStationDictionary(instrumented=True)
    compare_dataframes(df1, df2)

    df1, _ = stations.getStationDictionary(instrumented=False)
    df2, _ = stations2.getStationDictionary(instrumented=False)
    compare_dataframes(df1, df2)
Example #4
0
def test_station5():
    """Loading each input file twice must give the same result as once.

    Exercises the duplicate-entry handling in StationList.loadFromXML()
    using the Calexico station and DYFI data.
    """
    here = os.path.dirname(os.path.abspath(__file__))
    input_dir = os.path.abspath(
        os.path.join(here, 'station_data', 'Calexico', 'input'))

    seismic_xml = os.path.join(input_dir, 'stationlist_dat.xml')
    dyfi_xml = os.path.join(input_dir, 'ciim3_dat.xml')

    # Each file loaded once (baseline).
    stations1 = StationList.loadFromXML([seismic_xml, dyfi_xml], ":memory:")

    # Every file repeated, to exercise the repeated-entry code path.
    stations2 = StationList.loadFromXML(
        [seismic_xml, seismic_xml, dyfi_xml, dyfi_xml], ":memory:")

    df1, _ = stations1.getStationDictionary(instrumented=True)
    df2, _ = stations2.getStationDictionary(instrumented=True)
    compare_dataframes(df1, df2)
Example #5
0
def test_station2():
    """Regression-test StationList on the wenchuan data.

    If SAVE is true, the current dataframes are pickled as the new
    baseline; otherwise they are compared against the stored baseline,
    and the SQL dump/restore round trip is exercised as well.
    """

    #
    # Test the wenchuan data
    #
    homedir = os.path.dirname(os.path.abspath(__file__))

    event = 'wenchuan'

    datadir = os.path.abspath(os.path.join(homedir, 'station_data'))
    datadir = os.path.abspath(os.path.join(datadir, event, 'input'))

    inputfile = os.path.join(datadir, 'stationlist.xml')
    xmlfiles = [inputfile]

    stations = StationList.loadFromXML(xmlfiles, ":memory:")

    # Instrumented and macroseismic (non-instrumented) station data.
    df1, _ = stations.getStationDictionary(instrumented=True)
    df2, _ = stations.getStationDictionary(instrumented=False)

    ppath = os.path.abspath(os.path.join(datadir, '..', 'database',
                                         'test3.pickle'))
    if SAVE:
        # Regenerate the pickled baseline from the current results.
        ldf = [df1, df2]
        with open(ppath, 'wb') as f:
            pickle.dump(ldf, f, pickle.HIGHEST_PROTOCOL)
    else:
        with open(ppath, 'rb') as f:
            ldf = pickle.load(f)

        saved_df1 = ldf[0]
        saved_df2 = ldf[1]

        compare_dataframes(saved_df1, df1)
        compare_dataframes(saved_df2, df2)

        #
        # Dump the database to SQL and then restore it to a new
        # StationList object. Compare dataframes.
        #
        sql = stations.dumpToSQL()

        stations2 = StationList.loadFromSQL(sql)

        df1, _ = stations2.getStationDictionary(instrumented=True)
        df2, _ = stations2.getStationDictionary(instrumented=False)

        compare_dataframes(saved_df1, df1)
        compare_dataframes(saved_df2, df2)
Example #6
0
def test_station2():
    """Regression-test StationList on the wenchuan data (pre-tuple API).

    If SAVE is true, the current dataframes are pickled as the new
    baseline; otherwise they are compared against the stored baseline,
    and the SQL dump/restore round trip is exercised as well.
    """

    #
    # Test the wenchuan data
    #
    homedir = os.path.dirname(os.path.abspath(__file__))

    event = 'wenchuan'

    datadir = os.path.abspath(os.path.join(homedir, 'station_data'))
    datadir = os.path.abspath(os.path.join(datadir, event, 'input'))

    inputfile = os.path.join(datadir, 'stationlist.xml')
    xmlfiles = [inputfile]

    stations = StationList.loadFromXML(xmlfiles, ":memory:")

    # Instrumented and macroseismic (non-instrumented) station data.
    df1 = stations.getStationDictionary(instrumented=True)
    df2 = stations.getStationDictionary(instrumented=False)

    ppath = os.path.abspath(os.path.join(datadir, '..', 'database',
                                         'test3.pickle'))
    if SAVE:
        # Regenerate the pickled baseline from the current results.
        ldf = [df1, df2]
        with open(ppath, 'wb') as f:
            pickle.dump(ldf, f, pickle.HIGHEST_PROTOCOL)
    else:
        with open(ppath, 'rb') as f:
            ldf = pickle.load(f)

        saved_df1 = ldf[0]
        saved_df2 = ldf[1]

        compare_dataframes(saved_df1, df1)
        compare_dataframes(saved_df2, df2)

        #
        # Dump the database to SQL and then restore it to a new
        # StationList object. Compare dataframes.
        #
        sql = stations.dumpToSQL()

        stations2 = StationList.loadFromSQL(sql)

        df1 = stations2.getStationDictionary(instrumented=True)
        df2 = stations2.getStationDictionary(instrumented=False)

        compare_dataframes(saved_df1, df1)
        compare_dataframes(saved_df2, df2)
Example #7
0
def replace_dyfi(stationfile, dyfi_xml):
    """Remove any non-instrumented data from station file, add DYFI.

    Args:
        stationfile (str): Existing station data file, presumed to
                           contain old DYFI data.
        dyfi_xml (str): DYFI XML data file, which will be added to
                        instrumented station data.
    Returns:
        StationList: Object containing merged data.

    """
    stations = StationList.loadFromFiles([stationfile])
    # Reach into the internal database and find the old macroseismic
    # (non-instrumented) DYFI/CIIM stations.
    conn = stations.db
    cursor = stations.cursor
    cursor.execute('SELECT id from station WHERE instrumented = '
                   '0 and (network = "DYFI" or network="CIIM")')
    rows = cursor.fetchall()
    # Delete the amps for each station before the station row itself.
    # Use parameterized queries instead of '%' string interpolation so
    # ids containing quotes cannot break (or inject into) the SQL.
    for row in rows:
        sid = (row[0],)
        cursor.execute('DELETE FROM amp WHERE station_id=?', sid)
        cursor.execute('DELETE FROM station where id=?', sid)
    # One commit for the whole purge is sufficient.
    conn.commit()

    # now insert the dyfi data
    stations.addData([dyfi_xml])
    return stations
Example #8
0
    def setStationData(self, datafiles):
        """Insert observed ground motion data into the container.

        Args:
          datafiles (str): Path to XML- or JSON-formatted files containing
              ground motion observations, (macroseismic or instrumented).

        """
        # Build the StationList from the files and store it directly.
        self.setStationList(StationList.loadFromFiles(datafiles))
Example #9
0
    def setStationData(self, datafiles):
        """Insert observed ground motion data into the container.

        Args:
          datafiles (str): Path to an XML-formatted file containing ground
              motion observations, (macroseismic or instrumented).

        """
        # Parse the XML into a StationList and store it directly.
        self.setStationList(StationList.loadFromXML(datafiles))
Example #10
0
    def getStationList(self):
        """Retrieve the StationList object stored in the container.

        Returns:
            StationList: StationList object.
        Raises:
            AttributeError: If stationlist object has not been set in
                the container.
        """
        # The list is persisted as a SQL dump under the 'stations' key.
        if 'stations' not in self.getStrings():
            raise AttributeError('StationList object not set in container.')
        return StationList.loadFromSQL(self.getString('stations'))
Example #11
0
    def getStationList(self):
        """Reconstruct the stored StationList from its SQL dump.

        Returns:
            StationList: StationList object.
        Raises:
            AttributeError: If stationlist object has not been set in
                the container.
        """
        # Absence of the 'stations' key means nothing was ever stored.
        if 'stations' not in self.getStrings():
            raise AttributeError('StationList object not set in container.')
        sql_dump = self.getString('stations')
        return StationList.loadFromSQL(sql_dump)
Example #12
0
def test_station4():
    """A DYFI-only input must yield no instrumented stations."""
    here = os.path.dirname(os.path.abspath(__file__))
    input_dir = os.path.abspath(
        os.path.join(here, 'station_data', 'northridge', 'input'))

    dyfi_xml = os.path.join(input_dir, 'dyfi_dat.xml')
    stations = StationList.loadFromXML([dyfi_xml], ":memory:")

    # Only macroseismic data was loaded, so the instrumented
    # dictionary must come back empty (None).
    df1, _ = stations.getStationDictionary(instrumented=True)  # noqa
    df2, _ = stations.getStationDictionary(instrumented=False)  # noqa
    assert df1 is None
Example #13
0
def test_station4():
    """A DYFI-only file should produce an empty instrumented dictionary."""
    here = os.path.dirname(os.path.abspath(__file__))
    data_path = os.path.abspath(
        os.path.join(here, 'station_data', 'northridge', 'input'))

    stations = StationList.loadFromXML(
        [os.path.join(data_path, 'dyfi_dat.xml')], ":memory:")

    # No instrumented records were loaded, so this must be None.
    df1 = stations.getStationDictionary(instrumented=True)  # noqa
    df2 = stations.getStationDictionary(instrumented=False)  # noqa
    assert df1 is None
Example #14
0
    def addStationData(self, datafiles):
        """
        Add observed ground motion data into the container.

        Args:
            datafiles (sequence): Sequence of paths to XML- and/or
                JSON-formatted files containing ground motion observations,
                (macroseismic or instrumented).
        """
        if not datafiles:
            return
        # Merge into an existing list when one is stored; otherwise
        # start a fresh StationList from the given files (EAFP).
        try:
            station = self.getStationList()
            station.addData(datafiles)
        except AttributeError:
            station = StationList.loadFromFiles(datafiles)
        self.setStationList(station)
Example #15
0
def test_station3():
    """Exercise the GeoJSON generation code.

    The output can't really be compared to anything because the ordering
    is scrambled by the hashes in the XML parsing stage. Once (if) a
    loadFromJSON() method exists, a comparison could be made.
    """
    here = os.path.dirname(os.path.abspath(__file__))
    input_dir = os.path.abspath(
        os.path.join(here, 'station_data', 'wenchuan', 'input'))

    stations = StationList.loadFromXML(
        [os.path.join(input_dir, 'stationlist.xml')], ":memory:")

    myjson = stations.getGeoJson()  # noqa
Example #16
0
def test_station3():
    """Smoke-test getGeoJson() on the wenchuan station list.

    No comparison is possible: ordering is scrambled by the hashes in
    the XML parsing stage. A loadFromJSON() method would enable one.
    """
    base = os.path.dirname(os.path.abspath(__file__))
    event_dir = os.path.abspath(
        os.path.join(base, 'station_data', 'wenchuan', 'input'))
    stationlist_xml = os.path.join(event_dir, 'stationlist.xml')

    station_list = StationList.loadFromXML([stationlist_xml], ":memory:")

    myjson = station_list.getGeoJson()  # noqa
Example #17
0
def test_station():
    """Regression-test StationList on Calexico data, then addData() merging.

    Compares current dataframes against pickled baselines (regenerating
    them when SAVE is true), checks getIMTtypes(), and then merges the
    northridge data via addData() against a second baseline.
    """

    homedir = os.path.dirname(os.path.abspath(__file__))

    #
    # First test the Calexico data on its own
    #
    event = 'Calexico'

    datadir = os.path.abspath(os.path.join(homedir, 'station_data'))
    datadir = os.path.abspath(os.path.join(datadir, event, 'input'))

    inputfile = os.path.join(datadir, 'stationlist_dat.xml')
    dyfifile = os.path.join(datadir, 'ciim3_dat.xml')
    xmlfiles = [inputfile, dyfifile]

    stations = StationList.loadFromXML(xmlfiles, ":memory:")

    # Instrumented and macroseismic (non-instrumented) dictionaries.
    df1, _ = stations.getStationDictionary(instrumented=True)
    df2, _ = stations.getStationDictionary(instrumented=False)

    ppath = os.path.abspath(os.path.join(datadir, '..', 'database',
                                         'test1.pickle'))
    if SAVE:
        # Regenerate the pickled baseline from the current results.
        ldf = [df1, df2]
        with open(ppath, 'wb') as f:
            pickle.dump(ldf, f, pickle.HIGHEST_PROTOCOL)
    else:
        with open(ppath, 'rb') as f:
            ldf = pickle.load(f)

        saved_df1 = ldf[0]
        saved_df2 = ldf[1]

        compare_dataframes(saved_df1, df1)
        compare_dataframes(saved_df2, df2)

    #
    # Should at least hit this code
    #
    imtlist = stations.getIMTtypes()
    assert 'PGA' in imtlist
    assert 'PGV' in imtlist

    #
    # Add the Northridge data to the Calexico data to test
    # addData()
    #
    event = 'northridge'
    datadir = os.path.abspath(os.path.join(homedir, 'station_data'))
    datadir = os.path.abspath(os.path.join(datadir, event, 'input'))

    inputfile = os.path.join(datadir, 'hist_dat.xml')
    dyfifile = os.path.join(datadir, 'dyfi_dat.xml')
    xmlfiles = [inputfile, dyfifile]

    stations = stations.addData(xmlfiles)

    df1, _ = stations.getStationDictionary(instrumented=True)
    df2, _ = stations.getStationDictionary(instrumented=False)

    # Second baseline covers the merged (Calexico + northridge) data.
    ppath = os.path.abspath(os.path.join(datadir, '..', 'database',
                                         'test2.pickle'))
    if SAVE:
        ldf = [df1, df2]
        with open(ppath, 'wb') as f:
            pickle.dump(ldf, f, pickle.HIGHEST_PROTOCOL)
    else:
        with open(ppath, 'rb') as f:
            ldf = pickle.load(f)

        saved_df1 = ldf[0]
        saved_df2 = ldf[1]

        compare_dataframes(saved_df1, df1)
        compare_dataframes(saved_df2, df2)
Example #18
0
def test_station2():
    """Regression-test StationList on the wenchuan data.

    Checks expected IMT keys in the returned dictionaries, compares
    against (or regenerates) the pickled baseline, and exercises the
    SQL dump/restore round trip.
    """

    #
    # Test the wenchuan data
    #
    homedir = os.path.dirname(os.path.abspath(__file__))

    event = 'wenchuan'

    datadir = os.path.abspath(os.path.join(homedir, 'station_data'))
    datadir = os.path.abspath(os.path.join(datadir, event, 'input'))

    inputfile = os.path.join(datadir, 'stationlist.xml')
    xmlfiles = [inputfile]

    stations = StationList.loadFromFiles(xmlfiles, ":memory:")

    df1, _ = stations.getStationDictionary(instrumented=True)
    # Check that the expected IMT keys are present.
    assert 'PGA' in _
    assert 'PGV' in _
    assert 'SA(0.3)' in _
    assert 'SA(1.0)' in _
    assert 'SA(3.0)' in _
    assert 'PGV_sd' in df1
    assert 'PGV' in df1
    assert 'SA(0.3)' in df1
    assert 'SA(1.0)' in df1
    assert 'SA(3.0)' in df1
    assert 'id' in df1
    df2, _ = stations.getStationDictionary(instrumented=False)
    # Macroseismic data must carry an MMI key.
    assert 'MMI' in _
    ppath = os.path.abspath(
        os.path.join(datadir, '..', 'database', 'test3.pickle'))
    if SAVE:
        # Regenerate the pickled baseline from the current results.
        ldf = [df1, df2]
        with open(ppath, 'wb') as f:
            pickle.dump(ldf, f, protocol=4)
    else:
        with open(ppath, 'rb') as f:
            ldf = pickle.load(f)

        saved_df1 = ldf[0]
        saved_df2 = ldf[1]

        compare_dataframes(saved_df1, df1)
        compare_dataframes(saved_df2, df2)

        #
        # Dump the database to SQL and then restore it to a new
        # StationList object. Compare dataframes.
        #
        sql = stations.dumpToSQL()

        stations2 = StationList.loadFromSQL(sql)

        df1, _ = stations2.getStationDictionary(instrumented=True)
        df2, _ = stations2.getStationDictionary(instrumented=False)

        compare_dataframes(saved_df1, df1)
        compare_dataframes(saved_df2, df2)
Example #19
0
def test_station():
    """Regression-test StationList on Calexico data, then addData() merging
    (pre-tuple getStationDictionary API).

    Compares current dataframes against pickled baselines (regenerating
    them when SAVE is true), checks getIMTtypes(), and then merges the
    northridge data via addData() against a second baseline.
    """

    homedir = os.path.dirname(os.path.abspath(__file__))

    #
    # First test the Calexico data on its own
    #
    event = 'Calexico'

    datadir = os.path.abspath(os.path.join(homedir, 'station_data'))
    datadir = os.path.abspath(os.path.join(datadir, event, 'input'))

    inputfile = os.path.join(datadir, 'stationlist_dat.xml')
    dyfifile = os.path.join(datadir, 'ciim3_dat.xml')
    xmlfiles = [inputfile, dyfifile]

    stations = StationList.loadFromXML(xmlfiles, ":memory:")

    # Instrumented and macroseismic (non-instrumented) dictionaries.
    df1 = stations.getStationDictionary(instrumented=True)
    df2 = stations.getStationDictionary(instrumented=False)

    ppath = os.path.abspath(os.path.join(datadir, '..', 'database',
                                         'test1.pickle'))
    if SAVE:
        # Regenerate the pickled baseline from the current results.
        ldf = [df1, df2]
        with open(ppath, 'wb') as f:
            pickle.dump(ldf, f, pickle.HIGHEST_PROTOCOL)
    else:
        with open(ppath, 'rb') as f:
            ldf = pickle.load(f)

        saved_df1 = ldf[0]
        saved_df2 = ldf[1]

        compare_dataframes(saved_df1, df1)
        compare_dataframes(saved_df2, df2)

    #
    # Should at least hit this code
    #
    imtlist = stations.getIMTtypes()
    assert 'PGA' in imtlist
    assert 'PGV' in imtlist

    #
    # Add the Northridge data to the Calexico data to test
    # addData()
    #
    event = 'northridge'
    datadir = os.path.abspath(os.path.join(homedir, 'station_data'))
    datadir = os.path.abspath(os.path.join(datadir, event, 'input'))

    inputfile = os.path.join(datadir, 'hist_dat.xml')
    dyfifile = os.path.join(datadir, 'dyfi_dat.xml')
    xmlfiles = [inputfile, dyfifile]

    stations = stations.addData(xmlfiles)

    df1 = stations.getStationDictionary(instrumented=True)
    df2 = stations.getStationDictionary(instrumented=False)

    # Second baseline covers the merged (Calexico + northridge) data.
    ppath = os.path.abspath(os.path.join(datadir, '..', 'database',
                                         'test2.pickle'))
    if SAVE:
        ldf = [df1, df2]
        with open(ppath, 'wb') as f:
            pickle.dump(ldf, f, pickle.HIGHEST_PROTOCOL)
    else:
        with open(ppath, 'rb') as f:
            ldf = pickle.load(f)

        saved_df1 = ldf[0]
        saved_df2 = ldf[1]

        compare_dataframes(saved_df1, df1)
        compare_dataframes(saved_df2, df2)