Example #1
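This test exercises PAGER's secondary-hazard commentary: it builds a small catalog of historical events within a search radius of an epicenter, collects the secondary hazards observed in that catalog, and asserts that each one is mentioned in the generated comment string. The import sketch below is an assumption based on the losspager package layout, not confirmed by the source; SEARCH_RADIUS is assumed to be a module-level constant giving the search radius in km.

# Assumed imports (module paths are guesses):
# from losspager.utils.expocat import ExpoCat
# from losspager.onepager.comment import get_secondary_hazards, get_secondary_comment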
def test_hazards():
    clat = 0.37
    clon = -79.94
    mag = 7.8
    expocat = ExpoCat.fromDefault()
    minicat = expocat.selectByRadius(clat, clon, SEARCH_RADIUS)
    hazards = get_secondary_hazards(minicat, mag)
    comment = get_secondary_comment(clat, clon, mag)
    for hazard in hazards:
        print('Looking for %s in comment string...' % hazard)
        assert comment.find(hazard) > -1
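Note that str.find returns -1 when the substring is absent, so the assertion comment.find(hazard) > -1 succeeds only if every hazard keyword appears verbatim in the comment (equivalent to the more idiomatic "hazard in comment").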
Example #2
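This is the top-level driver for a single PAGER run: it resolves the input ShakeMap grid (local file, URL, or PDL message), computes population exposure plus empirical, economic, and semi-empirical losses, generates comments and plots, serializes everything to JSON/XML, builds the onePAGER PDF, and transfers the results. The standard-library imports below are certain from the calls in the body; the remaining names (PagerAdmin, PagerLogger, ComCatInfo, Exposure, EconExposure, EmpiricalLoss, SemiEmpiricalFatality, PagerData, Country, PagerException, and the various helpers and constants) are assumed to come from the losspager package.

# Certain from usage in the function body:
# import datetime, io, os, re, shutil, socket, traceback
# import numpy as np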
def main(pargs, config):
    # get the users home directory
    homedir = os.path.expanduser("~")

    # handle cancel messages
    if pargs.cancel:
        # we presume that pargs.gridfile in this context is an event ID.
        msg = _cancel(pargs.gridfile, config)
        print(msg)
        return True

    # what kind of thing is gridfile?
    is_file = os.path.isfile(pargs.gridfile)
    is_url, url_gridfile = _is_url(pargs.gridfile)
    is_pdl, pdl_gridfile = _check_pdl(pargs.gridfile, config)
    if is_file:
        gridfile = pargs.gridfile
    elif is_url:
        gridfile = url_gridfile
    elif is_pdl:
        gridfile = pdl_gridfile
    else:
        print("ShakeMap Grid file %s does not exist." % pargs.gridfile)
        return False

    pager_folder = os.path.join(homedir, config["output_folder"])
    pager_archive = os.path.join(homedir, config["archive_folder"])

    admin = PagerAdmin(pager_folder, pager_archive)

    # set up logging (stdout will be captured as INFO, stderr as WARNING)
    mail_host = config["mail_hosts"][0]
    mail_from = config["mail_from"]
    developers = config["developers"]
    logfile = os.path.join(pager_folder, "pager.log")
    plog = PagerLogger(logfile, developers, mail_from, mail_host, debug=pargs.debug)
    logger = plog.getLogger()

    try:
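        # eid and pager_version are initialized here so the exception handler
        # at the bottom can report them if they were set before the failure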
        eid = None
        pager_version = None
        # get all the basic event information and print it, if requested
        shake_tuple = getHeaderData(gridfile)
        eid = shake_tuple[1]["event_id"].lower()
        etime = shake_tuple[1]["event_timestamp"]
        if not eid:
            eid = shake_tuple[0]["event_id"].lower()
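        # ComCat-style event IDs are prefixed with the contributing network
        # code (falling back to "us"); enforce that convention here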
        network = shake_tuple[1]["event_network"].lower()
        if network == "":
            network = "us"
        if not eid.startswith(network):
            eid = network + eid

        # Create a ComCatInfo object to hopefully tell us a number of things about this event
        try:
            ccinfo = ComCatInfo(eid)
            location = ccinfo.getLocation()
            tsunami = ccinfo.getTsunami()
            authid, allids = ccinfo.getAssociatedIds()
            authsource, othersources = ccinfo.getAssociatedSources()
        except Exception:  # fail over to what we can determine locally
            location = shake_tuple[1]["event_description"]
            tsunami = shake_tuple[1]["magnitude"] >= TSUNAMI_MAG_THRESH
            authid = eid
            authsource = network
            allids = []

        # location field can be empty (None), which breaks a bunch of things
        if location is None:
            location = ""

        # Check to see if user wanted to override default tsunami criteria
        if pargs.tsunami != "auto":
            if pargs.tsunami == "on":
                tsunami = True
            else:
                tsunami = False

        # check to see if this event is a scenario
        is_scenario = False
        shakemap_type = shake_tuple[0]["shakemap_event_type"]
        if shakemap_type == "SCENARIO":
            is_scenario = True

        # if the event time is in the future, flag the event as a
        # scenario and yell about it.
        if etime > datetime.datetime.utcnow():
            is_scenario = True
            logger.warning(
                "Event origin time is in the future! Flagging this as a scenario."
            )

        if is_scenario:
            if re.search("scenario", location.lower()) is None:
                location = "Scenario " + location

        # create the event directory (if it does not exist), and start logging there
        logger.info("Creating event directory")
        event_folder = admin.createEventFolder(authid, etime)

        # Stop processing if there is a "stop" file in the event folder
        stopfile = os.path.join(event_folder, "stop")
        if os.path.isfile(stopfile):
            fmt = '"stop" file found in %s. Stopping processing.'
            logger.info(fmt % event_folder)
            return True

        pager_version = get_pager_version(event_folder)
        version_folder = os.path.join(event_folder, "version.%03d" % pager_version)
        os.makedirs(version_folder)
        event_logfile = os.path.join(version_folder, "event.log")

        # this will turn off the global rotating log file
        # and switch to the one in the version folder.
        plog.setVersionHandler(event_logfile)

        # Copy the grid.xml file to the version folder.
        # Sometimes (usually when testing) the input grid isn't called grid.xml; normalize the name here.
        version_grid = os.path.join(version_folder, "grid.xml")
        shutil.copyfile(gridfile, version_grid)

        # Check to see if the tsunami flag has been previously set
        tsunami_toggle = {"on": 1, "off": 0}
        tsunami_file = os.path.join(event_folder, "tsunami")
        if os.path.isfile(tsunami_file):
            with open(tsunami_file, "rt") as tfile:
                tsunami = tsunami_toggle[tfile.read().strip()]

        # get the rest of the event info
        etime = shake_tuple[1]["event_timestamp"]
        elat = shake_tuple[1]["lat"]
        elon = shake_tuple[1]["lon"]
        emag = shake_tuple[1]["magnitude"]

        # get the year of the event
        event_year = shake_tuple[1]["event_timestamp"].year

        # find the population data collected most closely to the event_year
        pop_year, popfile = _get_pop_year(
            event_year, config["model_data"]["population_data"]
        )
        logger.info("Population year: %i Population file: %s\n" % (pop_year, popfile))

        # Get exposure results
        logger.info("Calculating population exposure.")
        isofile = config["model_data"]["country_grid"]
        expomodel = Exposure(popfile, pop_year, isofile)
        exposure = expomodel.calcExposure(gridfile)

        # incidentally grab the country code of the epicenter
        numcode = expomodel._isogrid.getValue(elat, elon)
        if np.isnan(numcode):
            cdict = None
        else:
            cdict = Country().getCountry(int(numcode))
        if cdict is None:
            # no country found at the epicenter; fall back to the "unknown" code
            ccode = "UK"
        else:
            ccode = cdict["ISO2"]

        logger.info("Country code at epicenter is %s" % ccode)

        # get empirical fatality results
        logger.info("Calculating empirical fatalities.")
        fatmodel = EmpiricalLoss.fromDefaultFatality()
        fatdict = fatmodel.getLosses(exposure)

        # get economic loss results
        logger.info("Calculating economic exposure.")
        econexpmodel = EconExposure(popfile, pop_year, isofile)
        ecomodel = EmpiricalLoss.fromDefaultEconomic()
        econexposure = econexpmodel.calcExposure(gridfile)
        ecodict = ecomodel.getLosses(econexposure)
        shakegrid = econexpmodel.getShakeGrid()

        # Get semi-empirical losses
        logger.info("Calculating semi-empirical fatalities.")
        urbanfile = config["model_data"]["urban_rural_grid"]
        if not os.path.isfile(urbanfile):
            raise PagerException("Urban-rural grid file %s does not exist." % urbanfile)

        semi = SemiEmpiricalFatality.fromDefault()
        semi.setGlobalFiles(popfile, pop_year, urbanfile, isofile)
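        # getLosses is assumed here to return total semi-empirical fatalities
        # plus residential and non-residential breakdowns (used for the
        # structure comment below)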
        semiloss, resfat, nonresfat = semi.getLosses(gridfile)

        # get all of the other components of PAGER
        logger.info("Getting all comments.")
        # get the fatality and economic comments
        impact1, impact2 = get_impact_comments(
            fatdict, ecodict, econexposure, event_year, ccode
        )
        # get comment describing vulnerable structures in the region.
        struct_comment = get_structure_comment(resfat, nonresfat, semi)
        # get the comment describing historic secondary hazards
        secondary_comment = get_secondary_comment(elat, elon, emag)
        # get the comment describing historical earthquakes in the region
        historical_comment = get_historical_comment(elat, elon, emag, exposure, fatdict)

        # generate the probability plots
        logger.info("Drawing probability plots.")
        fat_probs_file, eco_probs_file = _draw_probs(
            fatmodel, fatdict, ecomodel, ecodict, version_folder
        )

        # generate the exposure map
        exposure_base = os.path.join(version_folder, "exposure")
        logger.info("Generating exposure map...")
        oceanfile = config["model_data"]["ocean_vectors"]
        oceangrid = config["model_data"]["ocean_grid"]
        cityfile = config["model_data"]["city_file"]
        borderfile = config["model_data"]["border_vectors"]
        shake_grid = expomodel.getShakeGrid()
        pop_grid = expomodel.getPopulationGrid()
        pdf_file, png_file, mapcities = draw_contour(
            shake_grid,
            pop_grid,
            oceanfile,
            oceangrid,
            cityfile,
            exposure_base,
            borderfile,
            is_scenario=is_scenario,
        )
        logger.info("Generated exposure map %s" % pdf_file)

        # figure out whether this event has been "released".
        is_released = _get_release_status(
            pargs,
            config,
            fatmodel,
            fatdict,
            ecomodel,
            ecodict,
            shake_tuple,
            event_folder,
        )

        # Create a data object to encapsulate everything we know about the PAGER
        # results, and then serialize that to disk in the form of a number of JSON files.
        logger.info("Making PAGER Data object.")
        doc = PagerData()
        timezone_file = config["model_data"]["timezones_file"]
        elapsed = pargs.elapsed
        doc.setInputs(
            shakegrid,
            timezone_file,
            pager_version,
            shakegrid.getEventDict()["event_id"],
            authid,
            tsunami,
            location,
            is_released,
            elapsed=elapsed,
        )
        logger.info("Setting inputs.")
        doc.setExposure(exposure, econexposure)
        logger.info("Setting exposure.")
        doc.setModelResults(
            fatmodel, ecomodel, fatdict, ecodict, semiloss, resfat, nonresfat
        )
        logger.info("Setting comments.")
        doc.setComments(
            impact1, impact2, struct_comment, historical_comment, secondary_comment
        )
        logger.info("Setting map info.")
        doc.setMapInfo(cityfile, mapcities)
        logger.info("Validating.")
        doc.validate()

        # if we have determined that the event is a scenario (either flagged
        # as such in the ShakeMap, or with an origin time in the future), set
        # the shakemap type in the pagerdata object to 'SCENARIO'.
        if is_scenario:
            doc.setToScenario()

        json_folder = os.path.join(version_folder, "json")
        os.makedirs(json_folder)
        logger.info("Saving output to JSON.")
        doc.saveToJSON(json_folder)
        logger.info("Saving output to XML.")
        doc.saveToLegacyXML(version_folder)

        logger.info("Creating onePAGER pdf...")
        onepager_pdf, error = create_onepager(doc, version_folder)
        if onepager_pdf is None:
            raise PagerException("Could not create onePAGER output: \n%s" % error)

        # copy the contents.xml file to the version folder
        contentsfile = get_data_path("contents.xml")
        if contentsfile is None:
            raise PagerException("Could not find contents.xml file.")
        shutil.copy(contentsfile, version_folder)

        # send pdf as attachment to internal team of PAGER users
        if not is_released and not is_scenario:
            message_pager(config, onepager_pdf, doc)

        # run transfer, as appropriate and as specified by config.
        # The PAGER product eventsource and eventsourcecode should
        # match the input ShakeMap settings for these properties.
        # This can cause confusion if a regional ShakeMap is
        # superseded by one from NEIC, but that should happen less often
        # than an NEIC origin being made authoritative over a regional one.
        eventsource = network
        eventsourcecode = eid
        res, msg = transfer(
            config,
            doc,
            eventsourcecode,
            eventsource,
            version_folder,
            is_scenario=is_scenario,
        )
        logger.info(msg)
        if not res:
            logger.critical('Error transferring PAGER content. "%s"' % msg)

        print("Created onePAGER pdf %s" % onepager_pdf)
        logger.info("Created onePAGER pdf %s" % onepager_pdf)

        logger.info("Done.")
        return True
    except Exception as e:
        f = io.StringIO()
        traceback.print_exc(file=f)
        msg = "%s\n %s" % (str(e), f.getvalue())
        hostname = socket.gethostname()
        msg += "\nError occurred on %s\n" % hostname
        if gridfile is not None:
            msg += "\nError on file: %s\n" % gridfile
        if eid is not None:
            msg += "\nError on event: %s\n" % eid
        if pager_version is not None:
            msg += "\nError on version: %i\n" % pager_version
        f.close()
        logger.critical(msg)
        # critical messages are emailed to the developers configured above
        logger.info("Sent error to email")
        return False
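For context, here is a minimal, hypothetical sketch of how main() might be invoked. The flag names are inferred from the pargs attributes the function reads (gridfile, cancel, debug, tsunami, elapsed) and the config keys it accesses; the real CLI wiring lives elsewhere in the package.

import argparse

parser = argparse.ArgumentParser(description="Run PAGER on a ShakeMap grid.")
parser.add_argument("gridfile", help="grid file path, URL, PDL string, or (with --cancel) event ID")
parser.add_argument("--cancel", action="store_true", help="send a cancel message for the event")
parser.add_argument("--debug", action="store_true", help="run the logger in debug mode")
parser.add_argument("--tsunami", choices=["auto", "on", "off"], default="auto",
                    help="override the default tsunami criteria")
parser.add_argument("--elapsed", type=int, default=None,
                    help="override elapsed time since origin (assumed to be minutes)")
pargs = parser.parse_args()

# config is assumed to be a dict loaded from the PAGER configuration, with at
# least: output_folder, archive_folder, mail_hosts, mail_from, developers,
# and a model_data sub-dict (population_data, country_grid, urban_rural_grid,
# ocean_vectors, ocean_grid, city_file, border_vectors, timezones_file).
# sys.exit(0 if main(pargs, config) else 1)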