Example #1
def test_region_serialization():
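    # create an in-memory sqlite database pre-populated with the regions defined in the schema directory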
    memurl = 'sqlite://'
    schemadir = get_data_path('schema')
    session = emailschema.create_db(memurl, schemadir)

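    # GeoJSON-style polygon ring ([lon, lat] vertices) roughly outlining California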
    ca_coords = [[[-125.771484, 41.957448], [-112.412109, 42.672339],
                  [-112.060547, 31.791221], [-126.298828, 31.416944],
                  [-125.771484, 41.957448]]]

    lon, lat = (-119.443359, 37.149371)
    regiondict = {
        'type': 'Feature',
        'properties': {
            'code': 'US_States-California',
            'desc': 'California'
        },
        'geometry': {
            'type': 'Polygon',
            'coordinates': ca_coords
        }
    }

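    # inflate a Region from the GeoJSON-like Feature dictionary and check point containment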
    region = emailschema.Region()
    region.fromDict(session, regiondict)
    assert region.containsPoint(lat, lon)

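    # serializing the Region back to a dictionary should reproduce the input exactly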
    regiondict2 = region.toDict()
    assert regiondict == regiondict2

    session.close()
Example #2
def test_create_db(userfile=None, orgfile=None):
    memurl = 'sqlite://'
    #create a user in a dictionary
    threshold = {'alertscheme': 'eis', 'value': 'red'}
    region = {'name': 'UNSOUEU'}
    profile = {'regions': [region], 'thresholds': [threshold]}
    address = {
        'email': '*****@*****.**',
        'is_primary': True,
        'priority': 1,
        'format': 'long',
        'profiles': [profile]
    }
    userdict = {
        'lastname': 'Luthor',
        'firstname': 'Alexander',
        'createdon': datetime.utcnow().strftime(emailschema.TIME_FORMAT),
        'org': 'USGS',
        'addresses': [address]
    }
    tdir = tempfile.mkdtemp()
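    # if no user file was supplied, write the user dictionary to a temporary JSON file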
    if userfile is None:
        userfile = os.path.join(tdir, 'users.json')
        users = [userdict]
        with open(userfile, 'wt') as f:
            json.dump(users, f)
    try:
        schemadir = get_data_path('schema')
        session = emailschema.create_db(memurl,
                                        schemadir,
                                        users_jsonfile=userfile,
                                        orgs_jsonfile=orgfile)
        print('Testing contents of in-memory database...')
        # one user loaded from the JSON file, plus the regions defined in the schema
        assert session.query(emailschema.User).count() == 1
        assert session.query(emailschema.Region).count() == 47
        print('Passed test.')
        session.close()
    finally:
        # always remove the temporary directory, even if an assertion fails
        shutil.rmtree(tdir)
Example #3
def test_delete_cascade():
    memurl = 'sqlite://'
    schemadir = get_data_path('schema')
    session = emailschema.create_db(memurl, schemadir)

    #create a user in a dictionary
    threshold = {'alertscheme': 'eis', 'value': 'red'}
    region = {'name': 'UN_Regions-UNSOUEU'}
    profile = {'regions': [region], 'thresholds': [threshold]}
    address = {
        'email': '*****@*****.**',
        'is_primary': True,
        'priority': 1,
        'format': 'long',
        'profiles': [profile]
    }
    userdict = {
        'lastname': 'Luthor',
        'firstname': 'Alexander',
        'createdon': datetime.utcnow().strftime(emailschema.TIME_FORMAT),
        'org': 'USGS',
        'addresses': [address]
    }

    users_before_add = session.query(emailschema.User).count()
    addresses_before_add = session.query(emailschema.Address).count()
    print('Testing deleting users...')
    assert users_before_add == 0
    assert addresses_before_add == 0
    print('No users before insert.')
    user = emailschema.User()
    #inflate the user from the dictionary
    user.fromDict(session, userdict)
    session.add(user)
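    # adding the user should cascade-insert its address as well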
    users_after_add = session.query(emailschema.User).count()
    addresses_after_add = session.query(emailschema.Address).count()
    assert users_after_add == 1
    assert addresses_after_add == 1
    print('One user, one address after insert.')
    session.delete(user)
    session.commit()
    users_after_delete = session.query(emailschema.User).count()
    addresses_after_delete = session.query(emailschema.Address).count()
    assert users_after_delete == 0
    assert addresses_after_delete == 0
    print('No users, no addresses after deleting user.')

    #test deleting cascades with events
    event = emailschema.Event(eventcode='us2017abcd')
    version = emailschema.Version(versioncode='us2017abcd',
                                  time=datetime.utcnow(),
                                  country='US',
                                  lat=34.15,
                                  lon=-118.13,
                                  depth=10.0,
                                  magnitude=6.5,
                                  number=1,
                                  fatlevel=1,
                                  ecolevel=2,
                                  summarylevel=2,
                                  released=True,
                                  was_pending=False,
                                  processtime=datetime.utcnow(),
                                  maxmmi=7.1)

    print('Test cascade deletes with events and versions...')
    events_before_add = session.query(emailschema.Event).count()
    versions_before_add = session.query(emailschema.Version).count()
    assert events_before_add == 0
    assert versions_before_add == 0

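    # attach the version to the event so inserts and deletes cascade through the relationship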
    event.versions.append(version)
    session.add(event)
    session.commit()

    events_after_add = session.query(emailschema.Event).count()
    versions_after_add = session.query(emailschema.Version).count()

    assert events_after_add == 1
    assert versions_after_add == 1

    session.delete(event)
    session.commit()

    events_after_delete = session.query(emailschema.Event).count()
    versions_after_delete = session.query(emailschema.Version).count()

    assert events_after_delete == 0
    assert versions_after_delete == 0

    session.close()
Example #4
def test_get_polygon():
    memurl = 'sqlite://'
    schemadir = get_data_path('schema')
    session = emailschema.create_db(memurl, schemadir)

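    # one or more (lat, lon) points expected to fall inside each named region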
    POINTS = {
        'UNEASTAFR': [(-18.319329, 36.408691)],
        'UNNORAFR': [(29.051368, 20.478516)],
        'UNWESTAS': [(23.216107, 45.263672)],
        'UNWESTAFR': [(10.806328, -11.513672)],
        'UNSEAS': [(1.202915, 116.279297)],
        'UNNOREU': [(61.76013, 14.853516)],
        'UNCENTAMER': [(11.817621, -84.177246)],
        'UNMIC': [(13.444304, 144.793731)],
        'UNMIDAFR': [(-9.115656, 17.094727)],
        'UNSOUAMER': [(-21.314964, -59.150391)],
        'UNSOUEU': [(40.101185, -2.504883)],
        'UNWESTEU': [(47.567261, 3.999023)],
        'UNMEL': [(-9.975613, 149.128418)],
        'UNAUSNZ': [(-23.427969, 134.912109)],
        'UNCARIB': [(18.847812, -70.466309)],
        'UNSOUAFR': [(-31.814563, 24.038086)],
        'UNEASTEU': [(50.392761, 25.708008)],
        'UNEASTAS': [(29.59973, 111.577148)],
        'UNNORAMER': [(38.505191, -100.019531)],
        'UNCENTAS': [(40.033924, 66.225586)],
        'UNSOUAS': [(14.067317, 77.607422)],
        'UNPOL': [(-21.178986, -175.198242)],
        'FEMA01': [(44.585577, -69.147949)],
        'FEMA05': [(46.342188, -88.791504)],
        'FEMA10': [(44.80425, -120.651855), (65.924072, -151.347656)],
        'FEMA07': [(38.577158, -97.668457)],
        'FEMA02': [(42.645071, -74.992676)],
        'FEMA08': [(43.384092, -107.556152)],
        'FEMA09': [(37.434522, -120.695801), (13.444304, 144.793731),
                   (-14.30164, -170.696181)],
        'FEMA03': [(37.747915, -78.112793)],
        'FEMA06': [(30.215168, -98.195801)],
        'FEMA04': [(33.703207, -84.276123)],
        'SWAN': [(22.093275, -4.262695), (-29.71191, 21.181641)],
        'LAC': [(-26.613086, -61.083984)],
        'EAP': [(-28.042895, 140.449219), (-17.751956, -149.315186)],
        'SA': [(17.595594, 76.311035)],
        'ECA': [(6.389001, 42.84668)],
        'EMCA': [(49.772396, 16.479492), (55.463285, -105.732422)],
        'Cont_US': [(37.974515, -104.501953)],
        'US_Terr': [(19.639354, -155.577393)],
        'Not_US': [(-10.541821, 25.136719)],
        'USNORTHCOM': [(25.433353, -103.535156)],
        'USEUCOM': [(51.940032, 10.700684)],
        'USPACOM': [(21.441245, -157.922974)],
        'USSOUTHCOM': [(-1.331972, -60.073242)],
        'USAFRICOM': [(14.551684, 21.269531)],
        'USCENTCOM': [(21.767152, 49.350586)]
    }

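    # look up each region by name and verify that it contains every test point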
    for regioncode, plist in POINTS.items():
        region = session.query(emailschema.Region).filter(
            emailschema.Region.name == regioncode).first()
        if region is None:
            raise Exception('Could not find region %s in database!' %
                            regioncode)
        for point in plist:
            lat, lon = point
            print('Checking region %s...' % regioncode)
            if not region.containsPoint(lat, lon):
                raise Exception(
                    'Region %s does not contain point (%.4f,%.4f).' %
                    (regioncode, lat, lon))

    session.close()
Example #5
def test_user_serialization():
    memurl = 'sqlite://'
    schemadir = get_data_path('schema')
    session = emailschema.create_db(memurl, schemadir)

    #create a user in a dictionary
    threshold = {'alertscheme': 'eis', 'value': 'red'}
    region = {'name': 'UN_Regions-UNSOUEU'}
    profile = {'regions': [region], 'thresholds': [threshold]}
    address = {
        'email': '*****@*****.**',
        'is_primary': True,
        'priority': 1,
        'format': 'long',
        'profiles': [profile]
    }
    userdict = {
        'lastname': 'Luthor',
        'firstname': 'Alexander',
        'createdon': datetime.utcnow().strftime(emailschema.TIME_FORMAT),
        'org': 'USGS',
        'addresses': [address]
    }

    user = emailschema.User()
    #inflate the user from the dictionary
    user.fromDict(session, userdict)

    #deflate the user into a dictionary
    userdict2 = user.toDict()

    #make sure the input/output dictionaries have the same content
    assert userdict['lastname'] == userdict2['lastname']
    assert userdict['firstname'] == userdict2['firstname']
    assert userdict['createdon'] == userdict2['createdon']
    assert userdict['org'] == userdict2['org']

    assert userdict['addresses'][0]['email'] == userdict2['addresses'][0][
        'email']
    assert userdict['addresses'][0]['is_primary'] == userdict2['addresses'][0][
        'is_primary']
    assert userdict['addresses'][0]['priority'] == userdict2['addresses'][0][
        'priority']
    assert userdict['addresses'][0]['format'] == userdict2['addresses'][0][
        'format']

    rname = userdict['addresses'][0]['profiles'][0]['regions'][0]['name']
    rname2 = userdict2['addresses'][0]['profiles'][0]['regions'][0]['name']
    assert rname == rname2

    tname = userdict['addresses'][0]['profiles'][0]['thresholds'][0][
        'alertscheme']
    tname2 = userdict2['addresses'][0]['profiles'][0]['thresholds'][0][
        'alertscheme']
    assert tname == tname2

    tvalue = userdict['addresses'][0]['profiles'][0]['thresholds'][0]['value']
    tvalue2 = userdict2['addresses'][0]['profiles'][0]['thresholds'][0][
        'value']
    assert tvalue == tvalue2

    session.close()
Example #6
def main(pargs, config):
    # get the user's home directory
    homedir = os.path.expanduser("~")

    # handle cancel messages
    if pargs.cancel:
        # we presume that pargs.gridfile in this context is an event ID.
        msg = _cancel(pargs.gridfile, config)
        print(msg)
        return True

    # what kind of thing is gridfile?
    is_file = os.path.isfile(pargs.gridfile)
    is_url, url_gridfile = _is_url(pargs.gridfile)
    is_pdl, pdl_gridfile = _check_pdl(pargs.gridfile, config)
    if is_file:
        gridfile = pargs.gridfile
    elif is_url:
        gridfile = url_gridfile
    elif is_pdl:
        gridfile = pdl_gridfile
    else:
        print("ShakeMap Grid file %s does not exist." % pargs.gridfile)
        return False

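    # set up the PAGER output and archive folders under the user's home directory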
    pager_folder = os.path.join(homedir, config["output_folder"])
    pager_archive = os.path.join(homedir, config["archive_folder"])

    admin = PagerAdmin(pager_folder, pager_archive)

    # stdout will now be logged as INFO, stderr will be logged as WARNING
    mail_host = config["mail_hosts"][0]
    mail_from = config["mail_from"]
    developers = config["developers"]
    logfile = os.path.join(pager_folder, "pager.log")
    plog = PagerLogger(logfile, developers, mail_from, mail_host, debug=pargs.debug)
    logger = plog.getLogger()

    try:
        eid = None
        pager_version = None
        # get all the basic event information and print it, if requested
        shake_tuple = getHeaderData(gridfile)
        eid = shake_tuple[1]["event_id"].lower()
        etime = shake_tuple[1]["event_timestamp"]
        if not len(eid):
            eid = shake_tuple[0]["event_id"].lower()
        network = shake_tuple[1]["event_network"].lower()
        if network == "":
            network = "us"
        if not eid.startswith(network):
            eid = network + eid

        # Create a ComCatInfo object to (hopefully) tell us a number of things about this event
        try:
            ccinfo = ComCatInfo(eid)
            location = ccinfo.getLocation()
            tsunami = ccinfo.getTsunami()
            authid, allids = ccinfo.getAssociatedIds()
            authsource, othersources = ccinfo.getAssociatedSources()
        except Exception:  # fail over to what we can determine locally
            location = shake_tuple[1]["event_description"]
            tsunami = shake_tuple[1]["magnitude"] >= TSUNAMI_MAG_THRESH
            authid = eid
            authsource = network
            allids = []

        # location field can be empty (None), which breaks a bunch of things
        if location is None:
            location = ""

        # Check to see if user wanted to override default tsunami criteria
        if pargs.tsunami != "auto":
            if pargs.tsunami == "on":
                tsunami = True
            else:
                tsunami = False

        # check to see if this event is a scenario
        is_scenario = False
        shakemap_type = shake_tuple[0]["shakemap_event_type"]
        if shakemap_type == "SCENARIO":
            is_scenario = True

        # if event is NOT a scenario and event time is in the future,
        # flag the event as a scenario and yell about it.
        if etime > datetime.datetime.utcnow():
            is_scenario = True
            logger.warning(
                "Event origin time is in the future! Flagging this as a scenario."
            )

        if is_scenario:
            if re.search("scenario", location.lower()) is None:
                location = "Scenario " + location

        # create the event directory (if it does not exist), and start logging there
        logger.info("Creating event directory")
        event_folder = admin.createEventFolder(authid, etime)

        # Stop processing if there is a "stop" file in the event folder
        stopfile = os.path.join(event_folder, "stop")
        if os.path.isfile(stopfile):
            fmt = '"stop" file found in %s.  Stopping processing, returning with 1.'
            logger.info(fmt % (event_folder))
            return True

        pager_version = get_pager_version(event_folder)
        version_folder = os.path.join(event_folder, "version.%03d" % pager_version)
        os.makedirs(version_folder)
        event_logfile = os.path.join(version_folder, "event.log")

        # this will turn off the global rotating log file
        # and switch to the one in the version folder.
        plog.setVersionHandler(event_logfile)

        # Copy the grid.xml file to the version folder.
        # Sometimes (usually when testing) the input grid isn't called grid.xml; give the copy that name here.
        version_grid = os.path.join(version_folder, "grid.xml")
        shutil.copyfile(gridfile, version_grid)

        # Check to see if the tsunami flag has been previously set
        tsunami_toggle = {"on": 1, "off": 0}
        tsunami_file = os.path.join(event_folder, "tsunami")
        if os.path.isfile(tsunami_file):
            with open(tsunami_file, "rt") as tfile:
                tsunami = tsunami_toggle[tfile.read().strip()]

        # get the rest of the event info
        etime = shake_tuple[1]["event_timestamp"]
        elat = shake_tuple[1]["lat"]
        elon = shake_tuple[1]["lon"]
        emag = shake_tuple[1]["magnitude"]

        # get the year of the event
        event_year = shake_tuple[1]["event_timestamp"].year

        # find the population data collected most closely to the event_year
        pop_year, popfile = _get_pop_year(
            event_year, config["model_data"]["population_data"]
        )
        logger.info("Population year: %i Population file: %s\n" % (pop_year, popfile))

        # Get exposure results
        logger.info("Calculating population exposure.")
        isofile = config["model_data"]["country_grid"]
        expomodel = Exposure(popfile, pop_year, isofile)
        exposure = expomodel.calcExposure(gridfile)

        # incidentally grab the country code of the epicenter
        numcode = expomodel._isogrid.getValue(elat, elon)
        if np.isnan(numcode):
            cdict = None
        else:
            cdict = Country().getCountry(int(numcode))
        if cdict is None:
            ccode = "UK"
        else:
            ccode = cdict["ISO2"]

        logger.info("Country code at epicenter is %s" % ccode)

        # get fatality results, if requested
        logger.info("Calculating empirical fatalities.")
        fatmodel = EmpiricalLoss.fromDefaultFatality()
        fatdict = fatmodel.getLosses(exposure)

        # get economic results, if requested
        logger.info("Calculating economic exposure.")
        econexpmodel = EconExposure(popfile, pop_year, isofile)
        ecomodel = EmpiricalLoss.fromDefaultEconomic()
        econexposure = econexpmodel.calcExposure(gridfile)
        ecodict = ecomodel.getLosses(econexposure)
        shakegrid = econexpmodel.getShakeGrid()

        # Get semi-empirical losses
        logger.info("Calculating semi-empirical fatalities.")
        urbanfile = config["model_data"]["urban_rural_grid"]
        if not os.path.isfile(urbanfile):
            raise PagerException("Urban-rural grid file %s does not exist." % urbanfile)

        semi = SemiEmpiricalFatality.fromDefault()
        semi.setGlobalFiles(popfile, pop_year, urbanfile, isofile)
        semiloss, resfat, nonresfat = semi.getLosses(gridfile)

        # get all of the other components of PAGER
        logger.info("Getting all comments.")
        # get the fatality and economic comments
        impact1, impact2 = get_impact_comments(
            fatdict, ecodict, econexposure, event_year, ccode
        )
        # get comment describing vulnerable structures in the region.
        struct_comment = get_structure_comment(resfat, nonresfat, semi)
        # get the comment describing historic secondary hazards
        secondary_comment = get_secondary_comment(elat, elon, emag)
        # get the comment describing historical comments in the region
        historical_comment = get_historical_comment(elat, elon, emag, exposure, fatdict)

        # generate the probability plots
        logger.info("Drawing probability plots.")
        fat_probs_file, eco_probs_file = _draw_probs(
            fatmodel, fatdict, ecomodel, ecodict, version_folder
        )

        # generate the exposure map
        exposure_base = os.path.join(version_folder, "exposure")
        logger.info("Generating exposure map...")
        oceanfile = config["model_data"]["ocean_vectors"]
        oceangrid = config["model_data"]["ocean_grid"]
        cityfile = config["model_data"]["city_file"]
        borderfile = config["model_data"]["border_vectors"]
        shake_grid = expomodel.getShakeGrid()
        pop_grid = expomodel.getPopulationGrid()
        pdf_file, png_file, mapcities = draw_contour(
            shake_grid,
            pop_grid,
            oceanfile,
            oceangrid,
            cityfile,
            exposure_base,
            borderfile,
            is_scenario=is_scenario,
        )
        logger.info("Generated exposure map %s" % pdf_file)

        # figure out whether this event has been "released".
        is_released = _get_release_status(
            pargs,
            config,
            fatmodel,
            fatdict,
            ecomodel,
            ecodict,
            shake_tuple,
            event_folder,
        )

        # Create a data object to encapsulate everything we know about the PAGER
        # results, and then serialize that to disk in the form of a number of JSON files.
        logger.info("Making PAGER Data object.")
        doc = PagerData()
        timezone_file = config["model_data"]["timezones_file"]
        elapsed = pargs.elapsed
        doc.setInputs(
            shakegrid,
            timezone_file,
            pager_version,
            shakegrid.getEventDict()["event_id"],
            authid,
            tsunami,
            location,
            is_released,
            elapsed=elapsed,
        )
        logger.info("Setting inputs.")
        doc.setExposure(exposure, econexposure)
        logger.info("Setting exposure.")
        doc.setModelResults(
            fatmodel, ecomodel, fatdict, ecodict, semiloss, resfat, nonresfat
        )
        logger.info("Setting comments.")
        doc.setComments(
            impact1, impact2, struct_comment, historical_comment, secondary_comment
        )
        logger.info("Setting map info.")
        doc.setMapInfo(cityfile, mapcities)
        logger.info("Validating.")
        doc.validate()

        # if we have determined that the event is a scenario (origin time is in the future)
        # and the shakemap is not flagged as such, set the shakemap type in the
        # pagerdata object to be 'SCENARIO'.
        if is_scenario:
            doc.setToScenario()

        json_folder = os.path.join(version_folder, "json")
        os.makedirs(json_folder)
        logger.info("Saving output to JSON.")
        doc.saveToJSON(json_folder)
        logger.info("Saving output to XML.")
        doc.saveToLegacyXML(version_folder)

        logger.info("Creating onePAGER pdf...")
        onepager_pdf, error = create_onepager(doc, version_folder)
        if onepager_pdf is None:
            raise PagerException("Could not create onePAGER output: \n%s" % error)

        # copy the contents.xml file to the version folder
        contentsfile = get_data_path("contents.xml")
        if contentsfile is None:
            raise PagerException("Could not find contents.xml file.")
        shutil.copy(contentsfile, version_folder)

        # send pdf as attachment to internal team of PAGER users
        if not is_released and not is_scenario:
            message_pager(config, onepager_pdf, doc)

        # run transfer, as appropriate and as specified by config
        # the PAGER product eventsource and eventsourcecode should
        # match the input ShakeMap settings for these properties.
        # This can possibly cause confusion if a regional ShakeMap is
        # trumped with one from NEIC, but this should happen less often
        # than an NEIC origin being made authoritative over a regional one.
        eventsource = network
        eventsourcecode = eid
        res, msg = transfer(
            config,
            doc,
            eventsourcecode,
            eventsource,
            version_folder,
            is_scenario=is_scenario,
        )
        logger.info(msg)
        if not res:
            logger.critical('Error transferring PAGER content. "%s"' % msg)

        print("Created onePAGER pdf %s" % onepager_pdf)
        logger.info("Created onePAGER pdf %s" % onepager_pdf)

        logger.info("Done.")
        return True
    except Exception as e:
        f = io.StringIO()
        traceback.print_exc(file=f)
        msg = "%s\n %s" % (str(e), f.getvalue())
        hostname = socket.gethostname()
        msg = msg + "\n" + "Error occurred on %s\n" % (hostname)
        if gridfile is not None:
            msg = msg + "\n" + "Error on file: %s\n" % (gridfile)
        if eid is not None:
            msg = msg + "\n" + "Error on event: %s\n" % (eid)
        if pager_version is not None:
            msg = msg + "\n" + "Error on version: %i\n" % (pager_version)
        f.close()
        logger.critical(msg)
        logger.info("Sent error to email")
        return False