Example #1
def getEventsBeforeDate(self, beforedate):
    """Return the IDs of all events whose origin time is earlier than beforedate."""
    events = []
    event_folders = self.getAllEventFolders()
    for event_folder in event_folders:
        # compare against the most recent version of each event
        version_folder = self.getLastVersion(event_folder)
        json_folder = os.path.join(version_folder, 'json')
        pdata = PagerData()
        pdata.loadFromJSON(json_folder)
        if pdata.time < beforedate:
            events.append(pdata.id)
    return events
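A minimal usage sketch for the method above; `admin` is a hypothetical instance of the (unshown) class that defines getEventsBeforeDate(), so that name is an assumption:

import datetime

# `admin` stands in for an instance of the class defining getEventsBeforeDate()
cutoff = datetime.datetime(2017, 1, 1)
old_event_ids = admin.getEventsBeforeDate(cutoff)
print('%i events occurred before %s' % (len(old_event_ids), str(cutoff)))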
Example #2
    def query(self, start_time=datetime.datetime(1800, 1, 1), end_time=None,
              mag_threshold=0.0, alert_threshold='green', version='last', eventid=None):
        # default end_time here rather than in the signature so that utcnow()
        # is evaluated per call instead of once at import time
        if end_time is None:
            end_time = datetime.datetime.utcnow()
        levels = {'green': 0,
                  'yellow': 1,
                  'orange': 2,
                  'red': 3}
        if eventid is not None:
            all_event_folders = [self.getEventFolder(eventid)]
            version = 'all'
        else:
            all_event_folders = self.getAllEventFolders()
        # include processing-time columns only when querying a single event
        do_process_time = eventid is not None

        df = pd.DataFrame(columns=PagerData.getSeriesColumns(processtime=do_process_time))
        jsonfolders = []
        for event_folder in all_event_folders:
            vnums = self.getVersionNumbers(event_folder)
            if version == 'first':
                vnum = vnums[0]
                jsonfolders.append(os.path.join(event_folder, 'version.%03d' % vnum, 'json'))
            elif version == 'last':
                vnum = vnums[-1]
                jsonfolders.append(os.path.join(event_folder, 'version.%03d' % vnum, 'json'))
            elif version == 'eight':
                # collect every version created within eight hours of origin time
                for vnum in vnums:
                    jsonfolder = os.path.join(event_folder, 'version.%03d' % vnum, 'json')
                    pdata = PagerData()
                    try:
                        pdata.loadFromJSON(jsonfolder)
                    except Exception:
                        continue
                    if pdata.processing_time >= pdata.time + datetime.timedelta(seconds=EIGHT_HOURS):
                        break
                    jsonfolders.append(jsonfolder)
            elif version == 'all':
                for vnum in vnums:
                    jsonfolder = os.path.join(event_folder, 'version.%03d' % vnum, 'json')
                    jsonfolders.append(jsonfolder)
            else:
                raise PagerException('version option "%s" not supported.' % version)

        broken = []
        for jsonfolder in jsonfolders:
            pdata = PagerData()
            vnum = 1000
            while vnum > 1:
                try:
                    pdata.loadFromJSON(jsonfolder)
                    vnum = 0
                except Exception:
                    # handle the case where the most recent version of the event
                    # has some sort of error causing it to fail to load; fall
                    # back to the next-most-recent version
                    root, jsonfolder = os.path.split(jsonfolder)
                    root2, vfolder = os.path.split(root)
                    vt, vnums = vfolder.split('.')
                    vnum = int(vnums) - 1
                    jsonfolder = os.path.join(root2, '%s.%03d' % (vt, vnum), 'json')

            if not pdata._is_validated:
                # no version of this event could be loaded; report it and move on
                broken.append(jsonfolder)
                continue
            meetsLevel = levels[pdata.summary_alert] >= levels[alert_threshold]
            meetsMag = pdata.magnitude >= mag_threshold
            if pdata.time >= start_time and pdata.time <= end_time and meetsLevel and meetsMag:
                row = pdata.toSeries(processtime=do_process_time)
                df = df.append(row, ignore_index=True)
        df.Version = df.Version.astype(int)
        df.Elapsed = df.Elapsed.astype(int)
        df = df.sort_values('EventTime')
        df = df.set_index('EventID')
        return (df, broken)
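A hedged usage sketch for query(); `admin` is again a hypothetical instance of the defining class, and the column names come from the docstring shown in Example #4:

import datetime

# find yellow-or-worse alerts for M6.0+ events since 2016,
# using the last version of each event
df, broken = admin.query(start_time=datetime.datetime(2016, 1, 1),
                         mag_threshold=6.0,
                         alert_threshold='yellow',
                         version='last')
print(df[['Mag', 'SummaryAlert']].head())
if len(broken):
    print('%i event folders could not be loaded.' % len(broken))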
Example #3
def test():
    homedir = os.path.dirname(os.path.abspath(
        __file__))  # where is this script?
    fatfile = os.path.join(homedir, '..', 'data', 'fatality.xml')
    ecofile = os.path.join(homedir, '..', 'data', 'economy.xml')
    cityfile = os.path.join(homedir, '..', 'data', 'cities1000.txt')
    event = 'northridge'
    shakefile = os.path.join(homedir, '..', 'data',
                             'eventdata', event, '%s_grid.xml' % event)
    popfile = os.path.join(homedir, '..', 'data',
                           'eventdata', event, '%s_gpw.flt' % event)
    isofile = os.path.join(homedir, '..', 'data',
                           'eventdata', event, '%s_isogrid.bil' % event)
    urbanfile = os.path.join(homedir, '..', 'data',
                             'eventdata', 'northridge', 'northridge_urban.bil')
    oceanfile = os.path.join(
        homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_ocean.json')
    oceangridfile = os.path.join(
        homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_ocean.bil')
    timezonefile = os.path.join(
        homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_timezone.shp')

    invfile = os.path.join(homedir, '..', 'data', 'semi_inventory.hdf')
    colfile = os.path.join(homedir, '..', 'data', 'semi_collapse_mmi.hdf')
    casfile = os.path.join(homedir, '..', 'data', 'semi_casualty.hdf')
    workfile = os.path.join(homedir, '..', 'data', 'semi_workforce.hdf')

    tdir = tempfile.mkdtemp()
    basename = os.path.join(tdir, 'output')

    exp = Exposure(popfile, 2012, isofile)
    results = exp.calcExposure(shakefile)
    shakegrid = exp.getShakeGrid()
    popgrid = exp.getPopulationGrid()

    pdffile, pngfile, mapcities = draw_contour(
        shakegrid, popgrid, oceanfile, oceangridfile, cityfile, basename)
    shutil.rmtree(tdir)

    popyear = 2012

    shake_tuple = getHeaderData(shakefile)
    tsunami = shake_tuple[1]['magnitude'] >= TSUNAMI_MAG_THRESH

    semi = SemiEmpiricalFatality.fromDefault()
    semi.setGlobalFiles(popfile, popyear, urbanfile, isofile)
    semiloss, resfat, nonresfat = semi.getLosses(shakefile)

    popgrowth = PopulationGrowth.fromDefault()
    econexp = EconExposure(popfile, 2012, isofile)
    fatmodel = EmpiricalLoss.fromDefaultFatality()
    expobject = Exposure(popfile, 2012, isofile, popgrowth)

    expdict = expobject.calcExposure(shakefile)
    fatdict = fatmodel.getLosses(expdict)
    econexpdict = econexp.calcExposure(shakefile)
    ecomodel = EmpiricalLoss.fromDefaultEconomic()
    ecodict = ecomodel.getLosses(expdict)
    shakegrid = econexp.getShakeGrid()
    pagerversion = 1
    cities = Cities.loadFromGeoNames(cityfile)
    impact1 = '''Red alert level for economic losses. Extensive damage is probable 
    and the disaster is likely widespread. Estimated economic losses are less 
    than 1% of GDP of Italy. Past events with this alert level have required 
    a national or international level response.'''
    impact2 = '''Orange alert level for shaking-related fatalities. Significant 
    casualties are likely.'''
    structcomment = '''Overall, the population in this region resides in structures 
    that are a mix of vulnerable and earthquake resistant construction. The predominant 
    vulnerable building types are unreinforced brick with mud and mid-rise nonductile 
    concrete frame with infill construction.'''
    histeq = [1, 2, 3]
    struct_comment = '''Overall, the population in this region resides
    in structures that are resistant to earthquake
    shaking, though some vulnerable structures
    exist.'''
    secondary_comment = '''Recent earthquakes in this area have caused secondary hazards 
    such as landslides that might have contributed to losses.'''
    hist_comment = '''A magnitude 7.1 earthquake 240 km east of this event struck Reventador: Ecuador 
    on March 6, 1987 (UTC), with estimated population exposures of 14,000 at intensity VIII and 2,000 
    at intensity IX or greater, resulting in a reported 5,000 fatalities.'''.replace('\n', '')

    location = 'At the top of the world.'
    is_released = True

    doc = PagerData()
    eventcode = shakegrid.getEventDict()['event_id']
    versioncode = eventcode
    doc.setInputs(shakegrid, timezonefile, pagerversion,
                  versioncode, eventcode, tsunami, location, is_released)
    doc.setExposure(expdict, econexpdict)
    doc.setModelResults(fatmodel, ecomodel,
                        fatdict, ecodict,
                        semiloss, resfat, nonresfat)
    doc.setComments(impact1, impact2, struct_comment,
                    hist_comment, secondary_comment)
    doc.setMapInfo(cityfile, mapcities)
    doc.validate()

    # let's test the property methods
    tdoc(doc, shakegrid, impact1, impact2,
         expdict, struct_comment, hist_comment)

    # see if we can save this to a bunch of files then read them back in
    try:
        tdir = tempfile.mkdtemp()
        doc.saveToJSON(tdir)
        newdoc = PagerData()
        newdoc.loadFromJSON(tdir)
        tdoc(newdoc, shakegrid, impact1, impact2,
             expdict, struct_comment, hist_comment)

        # test the xml saving method
        xmlfile = doc.saveToLegacyXML(tdir)
    except Exception as e:
        raise AssertionError('saving/loading PagerData failed: %s' % str(e))
    finally:
        shutil.rmtree(tdir)
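The save/load round trip exercised by this test, distilled into a short sketch; it assumes a validated PagerData instance named doc, as constructed above:

import tempfile
import shutil

# serialize a validated PagerData document to JSON, then read it back
# into a fresh instance (mirrors the try/finally block in the test)
tdir = tempfile.mkdtemp()
try:
    doc.saveToJSON(tdir)
    newdoc = PagerData()
    newdoc.loadFromJSON(tdir)
finally:
    shutil.rmtree(tdir)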
Example #4
    def query(self, start_time=datetime.datetime(1800, 1, 1), end_time=None,
              mag_threshold=0.0, alert_threshold='green', version='last', eventid=None):
        """Query PAGER file for events matching input parameters.

        :param start_time:
          Datetime indicating the minimum date/time for the search.
        :param end_time:
          Datetime indicating the maximum date/time for the search
          (defaults to the current UTC time).
        :param mag_threshold:
          Minimum magnitude threshold.
        :param alert_threshold:
          Minimum alert level threshold ('green','yellow','orange','red').
        :param version:
          Which version(s) to select from events:
            - 'all' Get all versions.
            - 'first' Get first version.
            - 'last' Get last version.
            - 'eight' Get all versions created within eight hours of origin time.
        :param eventid:
          Return version(s) for specific event ID.
        :returns:
          Tuple of (dataframe, broken), where broken is a list of JSON folders
          that failed to load, and the Pandas dataframe contains the columns:
            - 'EventID' - event ID
            - 'Impacted Country ($)' Country with largest dollar losses.
            - 'Version' - Version number
            - 'EventTime' - Origin Time
            - 'Lat' - Origin latitude.
            - 'Lon' - Origin longitude.
            - 'Depth' - Origin depth.
            - 'Mag' - Event magnitude.
            - 'MaxMMI' - Maximum MMI value (felt by at least 1000 people)
            - 'FatalityAlert' - Fatality alert level ('green','yellow','orange','red')
            - 'EconomicAlert' - Economic alert level ('green','yellow','orange','red')
            - 'SummaryAlert' - Summary alert level ('green','yellow','orange','red')
            - 'Elapsed' - Elapsed time (minutes) between origin time and version.
        """
        # default end_time here rather than in the signature so that utcnow()
        # is evaluated per call instead of once at import time
        if end_time is None:
            end_time = datetime.datetime.utcnow()
        levels = {'green': 0,
                  'yellow': 1,
                  'orange': 2,
                  'red': 3}
        if eventid is not None:
            all_event_folders = [self.getEventFolder(eventid)]
            version = 'all'
        else:
            all_event_folders = self.getAllEventFolders()
        # include processing-time columns only when querying a single event
        do_process_time = eventid is not None

        df = pd.DataFrame(columns=PagerData.getSeriesColumns(
            processtime=do_process_time))
        jsonfolders = []
        for event_folder in all_event_folders:
            vnums = self.getVersionNumbers(event_folder)
            if version == 'first':
                vnum = vnums[0]
                jsonfolders.append(os.path.join(
                    event_folder, 'version.%03d' % vnum, 'json'))
            elif version == 'last':
                vnum = vnums[-1]
                jsonfolders.append(os.path.join(
                    event_folder, 'version.%03d' % vnum, 'json'))
            elif version == 'eight':
                # collect every version created within eight hours of origin time
                for vnum in vnums:
                    jsonfolder = os.path.join(
                        event_folder, 'version.%03d' % vnum, 'json')
                    pdata = PagerData()
                    try:
                        pdata.loadFromJSON(jsonfolder)
                    except Exception:
                        continue
                    if pdata.processing_time >= pdata.time + datetime.timedelta(seconds=EIGHT_HOURS):
                        break
                    jsonfolders.append(jsonfolder)
            elif version == 'all':
                for vnum in vnums:
                    jsonfolder = os.path.join(
                        event_folder, 'version.%03d' % vnum, 'json')
                    jsonfolders.append(jsonfolder)
            else:
                raise PagerException(
                    'version option "%s" not supported.' % version)

        broken = []
        for jsonfolder in jsonfolders:
            pdata = PagerData()
            vnum = 1000
            while vnum > 1:
                try:
                    pdata.loadFromJSON(jsonfolder)
                    vnum = 0
                except Exception:
                    # handle the case where the most recent version of the event
                    # has some sort of error causing it to fail to load; fall
                    # back to the next-most-recent version
                    root, jsonfolder = os.path.split(jsonfolder)
                    root2, vfolder = os.path.split(root)
                    vt, vnums = vfolder.split('.')
                    vnum = int(vnums) - 1
                    jsonfolder = os.path.join(
                        root2, '%s.%03d' % (vt, vnum), 'json')

            if not pdata._is_validated:
                # no version of this event could be loaded; report it and move on
                broken.append(jsonfolder)
                continue
            meetsLevel = levels[pdata.summary_alert] >= levels[alert_threshold]
            meetsMag = pdata.magnitude >= mag_threshold
            if pdata.time >= start_time and pdata.time <= end_time and meetsLevel and meetsMag:
                row = pdata.toSeries(processtime=do_process_time)
                df = df.append(row, ignore_index=True)
        df.Version = df.Version.astype(int)
        df.Elapsed = df.Elapsed.astype(int)
        df = df.sort_values('EventTime')
        df = df.set_index('EventID')
        return (df, broken)
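A second hedged sketch, this time pulling every version of a single event; `admin` and the event ID 'us2000abcd' are both hypothetical:

# passing eventid forces version='all', so we get one row per version
# and can watch the summary alert evolve over time
df, broken = admin.query(eventid='us2000abcd')
print(df[['Version', 'Elapsed', 'SummaryAlert']])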