Example #1
def _get_dyfi_dataframe(detail_or_url, inputfile=None):

    if inputfile:
        with open(inputfile, 'rb') as f:
            rawdata = f.read()
        if 'json' in inputfile:
            df = _parse_geocoded_json(rawdata)
        else:
            df = _parse_geocoded_csv(rawdata)
        if df is None:
            msg = 'Could not read file %s' % inputfile

    else:
        if isinstance(detail_or_url, str):
            detail = DetailEvent(detail_or_url)
        else:
            detail = detail_or_url

        df, msg = _parse_dyfi_detail(detail)

    if df is None:
        return None, msg

    df['netid'] = 'DYFI'
    df['source'] = "USGS (Did You Feel It?)"
    df.columns = df.columns.str.upper()

    return (df, '')
Example #2
def _get_dyfi_dataframe(detail_or_url, inputfile=None):

    if inputfile:
        with open(inputfile, "rb") as f:
            rawdata = f.read()
        if "json" in inputfile:
            df = _parse_geocoded_json(rawdata)
        else:
            df = _parse_geocoded_csv(rawdata)
        if df is None:
            msg = "Could not read file %s" % inputfile

    else:
        if isinstance(detail_or_url, str):
            detail = DetailEvent(detail_or_url)
        else:
            detail = detail_or_url

        df, msg = _parse_dyfi_detail(detail)

    if df is None:
        return None, msg

    df["netid"] = "DYFI"
    df["source"] = "USGS (Did You Feel It?)"
    df.columns = df.columns.str.upper()

    return (df, "")
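Both variants of _get_dyfi_dataframe above return a (dataframe, message) pair: the dataframe is None and the message is populated when nothing could be read, otherwise the dataframe comes back with upper-cased column names plus NETID and SOURCE columns. A minimal calling sketch, assuming the helper is importable from a libcomcat.dataframes-style module (the import path and detail URL are illustrative assumptions, not part of the examples above):

# Hypothetical usage sketch; the import path and detail URL are assumptions.
from libcomcat.dataframes import _get_dyfi_dataframe

url = ('https://earthquake.usgs.gov/earthquakes/feed/v1.0/'
       'detail/ci3144585.geojson')
df, msg = _get_dyfi_dataframe(url)
if df is None:
    print('No DYFI data available:', msg)
else:
    print(df.columns.tolist())  # upper-cased columns, e.g. NETID, SOURCE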
Example #3
def _search(**newargs):
    if 'starttime' in newargs:
        newargs['starttime'] = newargs['starttime'].strftime(TIMEFMT)
    if 'endtime' in newargs:
        newargs['endtime'] = newargs['endtime'].strftime(TIMEFMT)
    if 'updatedafter' in newargs:
        newargs['updatedafter'] = newargs['updatedafter'].strftime(TIMEFMT)
    if 'scenario' in newargs and newargs['scenario'] == 'true':
        template = SCENARIO_SEARCH_TEMPLATE
        template = template.replace('[HOST]', HOST)
        del newargs['scenario']
    else:
        if 'scenario' in newargs:
            del newargs['scenario']
        if 'host' in newargs and newargs['host'] is not None:
            template = SEARCH_TEMPLATE.replace('[HOST]', newargs['host'])
            del newargs['host']
        else:
            template = SEARCH_TEMPLATE.replace('[HOST]', HOST)

    paramstr = urlencode(newargs)
    url = template + '&' + paramstr
    events = []
    # handle the case when they're asking for an event id
    if 'eventid' in newargs:
        return DetailEvent(url)

    try:
        response = requests.get(url, timeout=TIMEOUT, headers=HEADERS)
        # raise_for_status() is needed so HTTP errors surface as
        # requests.HTTPError and the 503 retry below can run
        response.raise_for_status()
        jdict = response.json()
        events = []
        for feature in jdict['features']:
            events.append(SummaryEvent(feature))
    except requests.HTTPError as htpe:
        if htpe.response.status_code == 503:
            try:
                time.sleep(WAITSECS)
                response = requests.get(url, timeout=TIMEOUT, headers=HEADERS)
                response.raise_for_status()
                jdict = response.json()
                events = []
                for feature in jdict['features']:
                    events.append(SummaryEvent(feature))
            except Exception as msg:
                fmt = 'Error downloading data from url %s.  "%s".'
                raise ConnectionError(fmt % (url, msg))
    except Exception as msg:
        fmt = 'Error downloading data from url %s.  "%s".'
        raise ConnectionError(fmt % (url, msg))

    return events
Example #4
def _search(**newargs):
    if 'starttime' in newargs:
        newargs['starttime'] = newargs['starttime'].strftime(TIMEFMT)
    if 'endtime' in newargs:
        newargs['endtime'] = newargs['endtime'].strftime(TIMEFMT)
    if 'updatedafter' in newargs:
        newargs['updatedafter'] = newargs['updatedafter'].strftime(TIMEFMT)
    if 'host' in newargs and newargs['host'] is not None:
        template = SEARCH_TEMPLATE.replace('[HOST]', newargs['host'])
        del newargs['host']
    else:
        template = SEARCH_TEMPLATE.replace('[HOST]', HOST)

    paramstr = urlencode(newargs)
    url = template + '&' + paramstr
    events = []
    # handle the case when they're asking for an event id
    if 'eventid' in newargs:
        return DetailEvent(url)

    try:
        fh = request.urlopen(url, timeout=TIMEOUT)
        data = fh.read().decode('utf8')
        fh.close()
        jdict = json.loads(data)
        events = []
        for feature in jdict['features']:
            events.append(SummaryEvent(feature))
    except HTTPError as htpe:
        if htpe.code == 503:
            try:
                time.sleep(WAITSECS)
                fh = request.urlopen(url, timeout=TIMEOUT)
                data = fh.read().decode('utf8')
                fh.close()
                jdict = json.loads(data)
                events = []
                for feature in jdict['features']:
                    events.append(SummaryEvent(feature))
            except Exception as msg:
                raise Exception(
                    'Error downloading data from url %s.  "%s".' % (url, msg))
    except Exception as msg:
        raise Exception(
            'Error downloading data from url %s.  "%s".' % (url, msg))

    return events
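Both _search implementations above accept the query as keyword arguments, format any datetime values with TIMEFMT, and URL-encode the rest onto the search template, returning either a list of SummaryEvent objects or a single DetailEvent when an eventid is supplied. A short calling sketch, assuming the keyword names accepted by the ComCat search endpoint (starttime, endtime, minmagnitude) and that the helper is reached through the library's public search wrapper (both assumptions):

# Hypothetical usage sketch; the import path and wrapper name are assumptions.
from datetime import datetime
from libcomcat.search import search  # assumed to forward kwargs to _search()

events = search(starttime=datetime(1994, 1, 17),
                endtime=datetime(1994, 1, 18),
                minmagnitude=5.5)
for event in events:
    print(event)  # SummaryEvent objects returned by _search()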
Example #5
def get_dyfi_dataframe_from_comcat(self, extid):
    df = None
    msg = ''

    if isinstance(extid, DetailEvent):
        detail = extid
    else:
        config = self.config['neic']
        template = config['template']
        url = template.replace('[EID]', extid)
        print('Checking URL:', url)
        detail = DetailEvent(url)

    if detail is None:
        msg = 'Error getting data from Comcat'
        return None, msg

    df, msg = _parse_dyfi_detail(detail)
    if df is None:
        msg = msg or 'Error parsing Comcat data'
        return None, msg

    return df, None
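The only state this method needs beyond the event id is the configured URL template, into which it substitutes the id at the [EID] placeholder before constructing a DetailEvent. A standalone sketch of just that substitution step (the template value shown is an assumption, not the project's actual configuration):

# Illustrative only: the template string below is an assumed value.
template = ('https://earthquake.usgs.gov/earthquakes/feed/'
            'v1.0/detail/[EID].geojson')
extid = 'ci3144585'
url = template.replace('[EID]', extid)
print('Checking URL:', url)  # then passed to DetailEvent(url) as above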
Example #6
def test_detail():
    eventid = 'ci3144585'  #northridge
    url = 'https://earthquake.usgs.gov/earthquakes/feed/v1.0/detail/%s.geojson' % eventid
    event = DetailEvent(url)
    assert str(
        event
    ) == 'ci3144585 1994-01-17 12:30:55.390000 (34.213,-118.537) 18.2 km M6.7'
    assert event.hasProduct('shakemap')
    assert event.hasProduct('foo') == False
    assert event.hasProperty('foo') == False
    assert event.hasProperty('time')
    try:
        event['foo']
        assert 1 == 2
    except AttributeError as ae:
        pass

    try:
        event.getNumVersions('foo')
        assert 1 == 2
    except AttributeError as ae:
        pass

    try:
        event.getProducts('foo')
        assert 1 == 2
    except AttributeError as ae:
        pass

    try:
        event.getProducts('shakemap', source='foo')
        assert 1 == 2
    except AttributeError as ae:
        pass

    assert event.toDict()['magnitude'] == 6.7

    eventid = 'nc72282711'  #Napa 2014 eq, multiple origins and MTs.
    # cievent = get_event_by_id(eventid,catalog='ci')
    # usevent = get_event_by_id(eventid,catalog='us')
    # atevent = get_event_by_id(eventid,catalog='at')
    event = get_event_by_id(eventid)

    phases = event.getProducts('phase-data', source='all')

    ncdict = event.toDict(catalog='nc')
    usdict = event.toDict(catalog='us')
    atdict = event.toDict(catalog='at')

    try:
        event.toDict(catalog='foo')
        assert 1 == 2
    except AttributeError as ae:
        pass

    assert ncdict['depth'] == 11.12
    assert usdict['depth'] == 11.25
    assert atdict['depth'] == 9.0

    ncdict_allmags = event.toDict(get_all_magnitudes=True)
    assert ncdict_allmags['magtype3'] == 'Ml'

    ncdict_alltensors = event.toDict(get_tensors='all')
    assert ncdict_alltensors['us_Mwb_mrr'] == 7.63e+16
    ncdict_allfocals = event.toDict(get_focals='all')
    assert ncdict_allfocals['nc_np1_strike'] == '345.0'

    assert event.getNumVersions('shakemap') > 0
    assert isinstance(event.getProducts('shakemap')[0], Product)
    assert event.latitude == 38.2151667
    assert event.longitude == -122.3123333
    assert event.depth == 11.12
    assert event.id == eventid
    assert event.time == datetime(2014, 8, 24, 10, 20, 44, 70000)
    assert 'sources' in event.properties
    assert event['mag'] == 6.02

    #test all of the different functionality of the getProducts() method
    #first, test default behavior (get the most preferred product):
    event = get_event_by_id('nc21323712',
                            includesuperseded=True)  #2003 Central California
    pref_shakemap = event.getProducts('shakemap')[0]
    assert pref_shakemap.source == 'atlas'
    assert pref_shakemap.update_time >= datetime(2017, 4, 12, 10, 50, 9,
                                                 368000)
    assert pref_shakemap.preferred_weight >= 100000000

    #get the first Atlas shakemap
    first_shakemap = event.getProducts('shakemap',
                                       version=VersionOption.FIRST,
                                       source='atlas')[0]
    assert first_shakemap.source == 'atlas'
    assert first_shakemap.update_time >= datetime(2015, 2, 4, 6, 1, 33, 400000)
    assert first_shakemap.preferred_weight >= 81

    #get the first nc shakemap
    first_shakemap = event.getProducts('shakemap',
                                       version=VersionOption.FIRST,
                                       source='nc')[0]
    assert first_shakemap.source == 'nc'
    assert first_shakemap.update_time >= datetime(2017, 3, 8, 20, 12, 59,
                                                  380000)
    assert first_shakemap.preferred_weight >= 231

    #get the last version of the nc shakemaps
    last_shakemap = event.getProducts('shakemap',
                                      version=VersionOption.LAST,
                                      source='nc')[0]
    assert last_shakemap.source == 'nc'
    assert last_shakemap.update_time >= datetime(2017, 3, 17, 17, 40, 26,
                                                 576000)
    assert last_shakemap.preferred_weight >= 231

    #get all the nc versions of the shakemap
    shakemaps = event.getProducts('shakemap',
                                  version=VersionOption.ALL,
                                  source='nc')
    for shakemap4 in shakemaps:
        assert shakemap4.source == 'nc'

    #get all versions of all shakemaps
    shakemaps = event.getProducts('shakemap',
                                  version=VersionOption.ALL,
                                  source='all')
    assert event.getNumVersions('shakemap') == len(shakemaps)
Example #7
def test_detail():
    eventid = 'ci3144585' #northridge
    url = 'https://earthquake.usgs.gov/earthquakes/feed/v1.0/detail/%s.geojson' % eventid
    event = DetailEvent(url)
    assert str(event) == 'ci3144585 1994-01-17 12:30:55.390000 (34.213,-118.537) 18.2 km M6.7'
    assert event.hasProduct('shakemap')
    assert event.hasProduct('foo') == False
    assert event.hasProperty('foo') == False
    assert event.hasProperty('time')
    try:
        event['foo']
        assert 1 == 2
    except AttributeError as ae:
        pass

    try:
        event.getNumVersions('foo')
        assert 1 == 2
    except AttributeError as ae:
        pass

    try:
        event.getProducts('foo')
        assert 1 == 2
    except AttributeError as ae:
        pass

    try:
        event.getProducts('shakemap',source='foo')
        assert 1 == 2
    except AttributeError as ae:
        pass
    
    assert event.toDict()['magnitude'] == 6.7

    eventid = 'nc72282711' #Napa 2014 eq, multiple origins and MTs.
    # cievent = get_event_by_id(eventid,catalog='ci')
    # usevent = get_event_by_id(eventid,catalog='us')
    # atevent = get_event_by_id(eventid,catalog='at')
    event = get_event_by_id(eventid)

    phases = event.getProducts('phase-data',source='all')
    
    ncdict = event.toDict(catalog='nc')
    usdict = event.toDict(catalog='us')
    atdict = event.toDict(catalog='at')

    try:
        event.toDict(catalog='foo')
        assert 1 == 2
    except AttributeError as ae:
        pass

    assert ncdict['depth'] == 11.12
    assert usdict['depth'] == 11.25
    assert atdict['depth'] == 9.0

    ncdict_allmags = event.toDict(get_all_magnitudes=True)
    assert ncdict_allmags['magtype3'] == 'Ml'

    ncdict_alltensors = event.toDict(get_tensors='all')
    assert ncdict_alltensors['us_Mwb_mrr'] == 7.63e+16
    ncdict_allfocals = event.toDict(get_focals='all')
    assert ncdict_allfocals['nc_np1_strike'] == '345.0'

    assert event.getNumVersions('shakemap') > 0
    assert isinstance(event.getProducts('shakemap')[0],Product)
    assert event.latitude == 38.2151667
    assert event.longitude == -122.3123333
    assert event.depth == 11.12
    assert event.id == eventid
    assert event.time == datetime(2014, 8, 24, 10, 20, 44, 70000)
    assert 'sources' in event.properties
    assert event['mag'] == 6.02

    #test all of the different functionality of the getProducts() method
    #first, test default behavior (get the most preferred product):
    event = get_event_by_id('nc21323712',includesuperseded=True) #2003 Central California
    pref_shakemap = event.getProducts('shakemap')[0]
    assert pref_shakemap.source == 'atlas'
    assert pref_shakemap.update_time >= datetime(2017, 4, 12, 10, 50, 9, 368000)
    assert pref_shakemap.preferred_weight >= 100000000

    #get the first Atlas shakemap
    first_shakemap = event.getProducts('shakemap',version=VersionOption.FIRST,source='atlas')[0]
    assert first_shakemap.source == 'atlas'
    assert first_shakemap.update_time >= datetime(2015, 2, 4, 6, 1, 33, 400000)
    assert first_shakemap.preferred_weight >= 81

    #get the first nc shakemap
    first_shakemap = event.getProducts('shakemap',version=VersionOption.FIRST,source='nc')[0]
    assert first_shakemap.source == 'nc'
    assert first_shakemap.update_time >= datetime(2017, 3, 8, 20, 12, 59, 380000)
    assert first_shakemap.preferred_weight >= 231
    
    #get the last version of the nc shakemaps
    last_shakemap = event.getProducts('shakemap',version=VersionOption.LAST,source='nc')[0]
    assert last_shakemap.source == 'nc'
    assert last_shakemap.update_time >= datetime(2017, 3, 17, 17, 40, 26, 576000)
    assert last_shakemap.preferred_weight >= 231

    #get all the nc versions of the shakemap
    shakemaps = event.getProducts('shakemap',version=VersionOption.ALL,source='nc')
    for shakemap4 in shakemaps:
        assert shakemap4.source == 'nc'

    #get all versions of all shakemaps
    shakemaps = event.getProducts('shakemap',version=VersionOption.ALL,source='all')
    assert event.getNumVersions('shakemap') == len(shakemaps)
Example #8
def _get_dyfi_dataframe(detail_or_url):
    if isinstance(detail_or_url, str):
        detail = DetailEvent(detail_or_url)
    else:
        detail = detail_or_url

    if not detail.hasProduct('dyfi'):
        msg = '%s has no DYFI product at this time.' % detail.url
        dataframe = None
        return (dataframe, msg)

    dyfi = detail.getProducts('dyfi')[0]

    # search the dyfi product, see which of the geocoded
    # files (1km or 10km) it has.  We're going to select the data from
    # whichever of the two has more entries with >= 3 responses,
    # preferring 1km if there is a tie.
    df_10k = pd.DataFrame({'a': []})
    df_1k = pd.DataFrame({'a': []})

    # get 10km data set, if exists
    if len(dyfi.getContentsMatching('dyfi_geo_10km.geojson')):
        bytes_10k, _ = dyfi.getContentBytes('dyfi_geo_10km.geojson')
        tmp_df = _parse_geocoded(bytes_10k)
        if tmp_df is not None:
            df_10k = tmp_df[tmp_df['nresp'] >= MIN_RESPONSES]

    # get 1km data set, if exists
    if len(dyfi.getContentsMatching('dyfi_geo_1km.geojson')):
        bytes_1k, _ = dyfi.getContentBytes('dyfi_geo_1km.geojson')
        tmp_df = _parse_geocoded(bytes_1k)
        if tmp_df is not None:
            df_1k = tmp_df[tmp_df['nresp'] >= MIN_RESPONSES]

    if len(df_1k) >= len(df_10k):
        df = df_1k
    else:
        df = df_10k

    if not len(df):
        # try to get a text file data set
        if not len(dyfi.getContentsMatching('cdi_geo.txt')):
            return (None, 'No geocoded datasets are available for this event.')

        # the dataframe we want has columns:
        # 'intensity', 'distance', 'lat', 'lon', 'station', 'nresp'
        # the cdi geo file has:
        # Geocoded box, CDI, No. of responses, Hypocentral distance,
        # Latitude, Longitude, Suspect?, City, State

        # download the text file, turn it into a dataframe
        bytes_geo, _ = dyfi.getContentBytes('cdi_geo.txt')
        text_geo = bytes_geo.decode('utf-8')
        lines = text_geo.split('\n')
        columns = lines[0].split(':')[1].split(',')
        columns = [col.strip() for col in columns]
        fileio = StringIO(text_geo)
        df = pd.read_csv(fileio, skiprows=1, names=columns)
        if 'ZIP/Location' in columns:
            df = df.rename(index=str, columns=OLD_DYFI_COLUMNS_REPLACE)
        else:
            df = df.rename(index=str, columns=DYFI_COLUMNS_REPLACE)
        df = df.drop(['Suspect?', 'City', 'State'], axis=1)
        df = df[df['nresp'] >= MIN_RESPONSES]

    df['netid'] = 'DYFI'
    df['source'] = "USGS (Did You Feel It?)"
    df.columns = df.columns.str.upper()
    return (df, '')
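The fallback branch above parses the legacy cdi_geo.txt product: the first line lists the column names after a colon, the remaining lines are comma-separated rows, and aggregated boxes with fewer than MIN_RESPONSES responses are dropped. A self-contained sketch of that parsing step on clearly labeled toy text (the sample rows and the threshold value of 3 are assumptions made for illustration):

# Toy illustration of the cdi_geo.txt parsing step; the sample rows and the
# MIN_RESPONSES value are assumptions made for demonstration only.
from io import StringIO
import pandas as pd

MIN_RESPONSES = 3
text_geo = (
    'Columns: Geocoded box, CDI, No. of responses, Hypocentral distance, '
    'Latitude, Longitude, Suspect?, City, State\n'
    'UTM:(10S 0555 4185 10000), 4.2, 5, 12.3, 37.80, -122.27, 0, Oakland, CA\n'
    'UTM:(10S 0565 4185 10000), 3.1, 1, 15.0, 37.80, -122.16, 0, San Leandro, CA\n')
lines = text_geo.split('\n')
columns = [col.strip() for col in lines[0].split(':')[1].split(',')]
df = pd.read_csv(StringIO(text_geo), skiprows=1, names=columns,
                 skipinitialspace=True)
df = df[df['No. of responses'] >= MIN_RESPONSES]  # same filter as nresp above
print(len(df))  # 1 row survives the threshold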
Example #9
def test_detail():
    cassettes, datadir = get_datadir()
    tape_file = os.path.join(cassettes, 'classes_detailevent.yaml')
    with vcr.use_cassette(tape_file):
        eventid = 'ci3144585'  # northridge
        fmt = ('https://earthquake.usgs.gov/earthquakes/feed/v1.0/'
               'detail/%s.geojson')
        url = fmt % eventid
        event = DetailEvent(url)
        assert str(event) == ('ci3144585 1994-01-17 12:30:55.390000 '
                              '(34.213,-118.537) 18.2 km M6.7')
        assert event.hasProduct('shakemap')
        assert event.hasProduct('foo') is False
        assert event.hasProperty('foo') is False
        assert event.hasProperty('time')
        try:
            event['foo']
            assert 1 == 2
        except AttributeError:
            pass

        try:
            event.getNumVersions('foo')
            assert 1 == 2
        except ProductNotFoundError:
            pass

        try:
            event.getProducts('foo')
            assert 1 == 2
        except ProductNotFoundError:
            pass

        try:
            event.getProducts('shakemap', source='foo')
            assert 1 == 2
        except ProductNotFoundError:
            pass

        assert event.toDict()['magnitude'] == 6.7

        eventid = 'nc72282711'  # Napa 2014 eq, multiple origins and MTs.
        # cievent = get_event_by_id(eventid,catalog='ci')
        # usevent = get_event_by_id(eventid,catalog='us')
        # atevent = get_event_by_id(eventid,catalog='at')
        event = get_event_by_id(eventid)

        # smoke test
        _ = event.getProducts('phase-data', source='all')

        ncdict = event.toDict(catalog='nc')
        usdict = event.toDict(catalog='us')
        atdict = event.toDict(catalog='at')

        try:
            event.toDict(catalog='foo')
            assert 1 == 2
        except ProductNotFoundError:
            pass

        assert ncdict['depth'] == 11.12
        assert usdict['depth'] == 11.25
        assert atdict['depth'] == 9.0

        ncdict_allmags = event.toDict(get_all_magnitudes=True)
        allmags = []
        for key, value in ncdict_allmags.items():
            if key.startswith('magtype'):
                allmags.append(value)
        cmpmags = [
            'Md', 'Ml', 'Ms_20', 'Mw', 'Mwb', 'Mwc', 'Mwr', 'Mww', 'mb', 'mw'
        ]
        assert sorted(allmags) == sorted(cmpmags)

        ncdict_alltensors = event.toDict(get_tensors='all')
        assert ncdict_alltensors['us_Mwb_mrr'] == 7.63e+16
        ncdict_allfocals = event.toDict(get_focals='all')
        assert ncdict_allfocals['nc_np1_strike'] == 345.0

        assert event.getNumVersions('shakemap') > 0
        assert isinstance(event.getProducts('shakemap')[0], Product)
        assert event.latitude == 38.2151667
        assert event.longitude == -122.3123333
        assert event.depth == 11.12
        assert event.id == eventid
        assert event.time == datetime(2014, 8, 24, 10, 20, 44, 70000)
        assert 'sources' in event.properties
        assert event['mag'] == 6.02

        # test all of the different functionality of the getProducts() method
        # first, test default behavior (get the most preferred product):
        # 2003 Central California
        event = get_event_by_id('nc21323712', includesuperseded=True)
        pref_shakemap = event.getProducts('shakemap')[0]
        assert pref_shakemap.source == 'atlas'
        assert pref_shakemap.update_time >= datetime(2017, 4, 12, 10, 50, 9,
                                                     368000)
        assert pref_shakemap.preferred_weight >= 100000000

        # get the first Atlas shakemap
        first_shakemap = event.getProducts('shakemap',
                                           version='first',
                                           source='atlas')[0]
        assert first_shakemap.source == 'atlas'
        assert first_shakemap.update_time >= datetime(2015, 2, 4, 6, 1, 33,
                                                      400000)
        assert first_shakemap.preferred_weight >= 81

        # get the first nc shakemap
        first_shakemap = event.getProducts('shakemap',
                                           version='first',
                                           source='nc')[0]
        assert first_shakemap.source == 'nc'
        assert first_shakemap.update_time >= datetime(2017, 3, 8, 20, 12, 59,
                                                      380000)
        assert first_shakemap.preferred_weight >= 231

        # get the last version of the nc shakemaps
        last_shakemap = event.getProducts('shakemap',
                                          version='last',
                                          source='nc')[0]
        assert last_shakemap.source == 'nc'
        assert last_shakemap.update_time >= datetime(2017, 3, 17, 17, 40, 26,
                                                     576000)
        assert last_shakemap.preferred_weight >= 231

        # get all the nc versions of the shakemap
        shakemaps = event.getProducts('shakemap', version='all', source='nc')
        for shakemap4 in shakemaps:
            assert shakemap4.source == 'nc'

        # get all versions of all shakemaps
        shakemaps = event.getProducts('shakemap', version='all', source='all')
        assert event.getNumVersions('shakemap') == len(shakemaps)
Example #10
def test_detail():
    cassettes, datadir = get_datadir()
    tape_file = os.path.join(cassettes, "classes_detailevent.yaml")
    with vcr.use_cassette(tape_file):
        eventid = "ci3144585"  # northridge
        fmt = "https://earthquake.usgs.gov/earthquakes/feed/v1.0/" "detail/%s.geojson"
        url = fmt % eventid
        event = DetailEvent(url)
        assert str(event) == ("ci3144585 1994-01-17 12:30:55.390000 "
                              "(34.213,-118.537) 18.2 km M6.7")
        assert event.hasProduct("shakemap")
        assert event.hasProduct("foo") is False
        assert event.hasProperty("foo") is False
        assert event.hasProperty("time")
        try:
            event["foo"]
            assert 1 == 2
        except AttributeError:
            pass

        try:
            event.getNumVersions("foo")
            assert 1 == 2
        except ProductNotFoundError:
            pass

        try:
            event.getProducts("foo")
            assert 1 == 2
        except ProductNotFoundError:
            pass

        try:
            event.getProducts("shakemap", source="foo")
            assert 1 == 2
        except ProductNotFoundError:
            pass

        assert event.toDict()["magnitude"] == 6.7

        eventid = "nc72282711"  # Napa 2014 eq, multiple origins and MTs.
        # cievent = get_event_by_id(eventid,catalog='ci')
        # usevent = get_event_by_id(eventid,catalog='us')
        # atevent = get_event_by_id(eventid,catalog='at')
        event = get_event_by_id(eventid)

        # smoke test
        _ = event.getProducts("phase-data", source="all")

        ncdict = event.toDict(catalog="nc")
        usdict = event.toDict(catalog="us")
        atdict = event.toDict(catalog="at")

        try:
            event.toDict(catalog="foo")
            assert 1 == 2
        except ProductNotFoundError:
            pass

        assert ncdict["depth"] == 11.12
        assert usdict["depth"] == 11.25
        assert atdict["depth"] == 9.0

        ncdict_allmags = event.toDict(get_all_magnitudes=True)
        allmags = []
        for key, value in ncdict_allmags.items():
            if key.startswith("magtype"):
                allmags.append(value)
        cmpmags = [
            "Md", "Ml", "Ms_20", "Mw", "Mwb", "Mwc", "Mwr", "Mww", "mb", "mw"
        ]
        assert sorted(allmags) == sorted(cmpmags)

        ncdict_alltensors = event.toDict(get_tensors="all")
        assert ncdict_alltensors["us_Mwb_mrr"] == 7.63e16
        ncdict_allfocals = event.toDict(get_focals="all")
        assert ncdict_allfocals["nc_np1_strike"] == 345.0

        assert event.getNumVersions("shakemap") > 0
        assert isinstance(event.getProducts("shakemap")[0], Product)
        assert event.latitude == 38.2151667
        assert event.longitude == -122.3123333
        assert event.depth == 11.12
        assert event.id == eventid
        assert event.time == datetime(2014, 8, 24, 10, 20, 44, 70000)
        assert "sources" in event.properties
        assert event["mag"] == 6.02

        # test all of the different functionality of the getProducts() method
        # first, test default behavior (get the most preferred product):
        # 2003 Central California
        event = get_event_by_id("nc21323712", includesuperseded=True)
        pref_shakemap = event.getProducts("shakemap")[0]
        assert pref_shakemap.source == "atlas"
        assert pref_shakemap.update_time >= datetime(2017, 4, 12, 10, 50, 9,
                                                     368000)
        assert pref_shakemap.preferred_weight >= 100000000

        # get the first Atlas shakemap
        first_shakemap = event.getProducts("shakemap",
                                           version="first",
                                           source="atlas")[0]
        assert first_shakemap.source == "atlas"
        assert first_shakemap.update_time >= datetime(2015, 2, 4, 6, 1, 33,
                                                      400000)
        assert first_shakemap.preferred_weight >= 81

        # get the first nc shakemap
        first_shakemap = event.getProducts("shakemap",
                                           version="first",
                                           source="nc")[0]
        assert first_shakemap.source == "nc"
        assert first_shakemap.update_time >= datetime(2017, 3, 8, 20, 12, 59,
                                                      380000)
        assert first_shakemap.preferred_weight >= 231

        # get the last version of the nc shakemaps
        last_shakemap = event.getProducts("shakemap",
                                          version="last",
                                          source="nc")[0]
        assert last_shakemap.source == "nc"
        assert last_shakemap.update_time >= datetime(2017, 3, 17, 17, 40, 26,
                                                     576000)
        assert last_shakemap.preferred_weight >= 231

        # get all the nc versions of the shakemap
        shakemaps = event.getProducts("shakemap", version="all", source="nc")
        for shakemap4 in shakemaps:
            assert shakemap4.source == "nc"

        # get all versions of all shakemaps
        shakemaps = event.getProducts("shakemap", version="all", source="all")
        assert event.getNumVersions("shakemap") == len(shakemaps)
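Outside of a test harness, the same DetailEvent and Product calls exercised above (get_event_by_id, hasProduct, getProducts, getContentsMatching, getContentBytes) compose into a small download workflow. A sketch, with the import path, event id, and content file name as illustrative assumptions:

# Hypothetical workflow built from calls shown in the examples above;
# the import path, event id, and content name are assumptions.
from libcomcat.search import get_event_by_id

event = get_event_by_id('nc72282711')  # 2014 Napa earthquake
if event.hasProduct('shakemap'):
    shakemap = event.getProducts('shakemap')[0]  # most preferred version
    if len(shakemap.getContentsMatching('grid.xml')):
        data, _ = shakemap.getContentBytes('grid.xml')
        print('Downloaded %i bytes of grid.xml' % len(data))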