def __init__(self, ratedict, default_rate=DEFAULT_RATE): """Initialize Population growth with dictionary containing rates over given time spans, per country. :param ratedict: dictionary like: {841: {'end': [1955, 1960, 1965], 'rate': [0.01, 0.02, 0.03], 'start': [1950, 1955, 1960]}, 124: {'end': [1955, 1960, 1965], 'rate': [0.02, 0.03, 0.04], 'start': [1950, 1955, 1960]}} Where 841 and 842 in this case are country codes (US and Canada), and the three "columns" for each country are the year start of each time interval, the year end of each time interval, and the growth rates for those time intervals. :param default_rate: Value to be used for growth rate when input country codes are not found in ratedict. """ # check the fields in the ratedict for key, value in ratedict.items(): if 'start' not in value or 'end' not in value or 'rate' not in value: raise PagerException( 'All country rate dictionaries must contain keys "start","end","rate"' ) if not (len(value['start']) == len(value['end']) == len( value['rate'])): raise PagerException( 'Length of start/end year arrays must match length of rate arrays.' ) self._dataframe = pd.DataFrame(ratedict) self._default = default_rate
def validate(self):
    """Verify all inputs have been set, then assemble the internal pager dictionary.

    Populates self._pagerdict with event, pager, shakemap, alert, exposure,
    model, city, historical-earthquake, and comment sections, then marks
    this object as validated.

    :raises PagerException: if any of the required set*() methods has not
      been called first.
    """
    if not self._input_set:
        raise PagerException('You must call setInputs() first.')
    if not self._exposure_set:
        raise PagerException('You must call setExposure() first.')
    if not self._models_set:
        # bug fix: message previously said setExposure() (copy-paste error)
        raise PagerException('You must call setModels() first.')
    if not self._comments_set:
        raise PagerException('You must call setComments() first.')
    if not self._mapinfo_set:
        # bug fix: message previously said setComments() (copy-paste error);
        # setter name inferred from _mapinfo_set — confirm it is setMapInfo()
        raise PagerException('You must call setMapInfo() first.')
    self._pagerdict['event_info'] = self._setEvent()
    self._pagerdict['pager'] = self._setPager()
    self._pagerdict['shake_info'] = self._setShakeInfo()
    self._pagerdict['alerts'] = self._setAlerts()
    self._pagerdict['population_exposure'] = self._setPopulationExposure()
    self._pagerdict['economic_exposure'] = self._setEconomicExposure()
    self._pagerdict['model_results'] = self._setModelResults()
    print('In pagerdata, getting city table.')
    self._pagerdict['city_table'] = self._getCityTable()
    print('In pagerdata, getting historical earthquakes.')
    self._pagerdict[
        'historical_earthquakes'] = self._getHistoricalEarthquakes()
    print('In pagerdata, getting comments.')
    self._pagerdict['comments'] = self._getComments()
    self._is_validated = True
def fromDict(self, session, addressdict):
    """Populate this address object (and its profiles) from a dictionary.

    :param session: SQLAlchemy session object.
    :param addressdict: Dictionary with keys 'email', 'is_primary',
      'priority', 'profiles', and 'format'.
    :raises PagerException: if required fields are missing, or if loading
      any contained profile fails.
    """
    reqfields = set(
        ['email', 'is_primary', 'priority', 'profiles', 'format'])
    # idiom fix: inverted guard replaces "if ok: pass else: raise"
    if not reqfields <= set(addressdict.keys()):
        missing = list(reqfields - set(addressdict.keys()))
        raise PagerException('Missing required fields for address: %s' %
                             str(missing))
    # set the fields for the address object
    self.email = addressdict['email']
    self.is_primary = addressdict['is_primary']
    self.priority = addressdict['priority']
    self.format = addressdict['format']
    if not len(addressdict['profiles']):
        print(
            'Warning: Address %s has NO profiles in the JSON file. Continuing.'
            % self.email)
    for profiledict in addressdict['profiles']:
        profile = Profile()
        try:
            profile.fromDict(session, profiledict)
        except PagerException as pe:
            raise PagerException(
                'Error: "%s" when loading profile for address %s.' %
                (str(pe), self.email))
        if not len(profile.thresholds):
            print(
                'Warning: Address %s has NO thresholds in one of the profiles. Continuing.'
                % self.email)
        self.profiles.append(profile)
def fromDict(self, session, userdict):
    """Populate this user object (and its addresses) from a dictionary,
    then add it to the session and commit.

    :param session: SQLAlchemy session object.
    :param userdict: Dictionary with keys 'lastname', 'firstname',
      'createdon', 'org', and 'addresses'.
    :raises PagerException: if required fields are missing or the named
      organization does not exist in the database.
    """
    reqfields = set(['lastname', 'firstname', 'createdon', 'org', 'addresses'])
    # idiom fix: inverted guard replaces "if ok: pass else: raise"
    if not reqfields <= set(userdict.keys()):
        missing = list(reqfields - set(userdict.keys()))
        raise PagerException('Missing required fields for user: %s' % str(missing))
    # set the user fields
    self.lastname = userdict['lastname']
    self.firstname = userdict['firstname']
    # will this be a string or a datetime?
    self.createdon = datetime.strptime(userdict['createdon'], TIME_FORMAT)
    org = session.query(Organization).filter(
        Organization.shortname == userdict['org']).first()
    if org is None:
        raise PagerException('No organization named %s exists in the database.' % userdict['org'])
    self.organization = org
    self.addresses = []
    for addressdict in userdict['addresses']:
        address = Address()
        address.fromDict(session, addressdict)
        self.addresses.append(address)
    # first add this user to the session
    session.add(self)
    # then commit all the changes
    session.commit()
def fromDict(self, session, thresholddict):
    """Populate this threshold object from a dictionary.

    :param session: SQLAlchemy session object.
    :param thresholddict: Dictionary with keys 'value' and 'alertscheme'.
    :raises PagerException: if the named alert scheme does not exist, or if
      (for continuous schemes) the value is outside [minlevel, maxlevel].
    """
    tvalue = thresholddict['value']
    scheme = session.query(AlertScheme).filter(
        AlertScheme.name == thresholddict['alertscheme']).first()
    if scheme is None:
        raise PagerException('No alert scheme named %s exists in the database.' % thresholddict['alertscheme'])
    # range checking only applies to continuous (non-discrete) schemes,
    # where the value can be converted to a number for comparison
    if not scheme.isdiscrete:
        if scheme.valuetype == 'Float':
            tvalue = float(tvalue)
        else:
            tvalue = int(tvalue)
        if tvalue < scheme.minlevel or tvalue > scheme.maxlevel:
            raise PagerException('Threshold for %s is outside range.' % scheme.name)
    self.alertscheme = scheme
    # NOTE(review): stores the raw input value, not the converted tvalue —
    # confirm that keeping the original (possibly string) form is intended
    self.value = thresholddict['value']
def __init__(self, pager_folder, archive_folder):
    """Create object managing PAGER output and archive folders.

    :param pager_folder: Top level PAGER output data folder (must exist).
    :param archive_folder: Folder where archived PAGER events should be
      written; created if it does not already exist.
    :raises PagerException: if pager_folder does not exist.
    """
    if not os.path.isdir(pager_folder):
        raise PagerException('PAGER data output folder %s does not exist.' % pager_folder)
    self._pager_folder = pager_folder
    # archive folder is created on demand rather than required up front
    if not os.path.isdir(archive_folder):
        os.makedirs(archive_folder)
    self._archive_folder = archive_folder
def archive(self, events=None, all_events=False, events_before=None):
    """Archive a list of events to the archive directory.

    :param events: List of event IDs to archive (None means no explicit list).
    :param all_events: Boolean indicating whether all events should be
      archived, in which case events can be empty.
    :param events_before: Datetime indicating time before which all events
      should be archived.
    :returns: Tuple of (number of archived events, number of errors).
    :raises PagerException: if all_events is True and events_before is also set.
    """
    # bug fix: mutable default argument [] replaced with None sentinel
    if events is None:
        events = []
    # bug fix: "== True" replaced with truthiness, consistent with the
    # "if all_events:" test below
    if all_events and events_before is not None:
        raise PagerException('You cannot choose to archive all events and some events based on time.')
    narchived = 0
    nerrors = 0
    if all_events:
        events = self.getAllEvents()
        for eventid in events:
            result = self.archiveEvent(eventid)
            if result:
                narchived += 1
            else:
                nerrors += 1
    else:
        for eventid in events:
            eventfolder = self.getEventFolder(eventid)
            if events_before is not None:
                # event folders are named <id>_<timestamp>
                t, etimestr = eventfolder.split('_')
                etime = datetime.datetime.strptime(etimestr, DATETIMEFMT)
                if etime < events_before:
                    result = self.archiveEvent(eventid)
                    if result:
                        narchived += 1
                    else:
                        nerrors += 1
                else:
                    continue
            else:
                result = self.archiveEvent(eventid)
                if result:
                    narchived += 1
                else:
                    nerrors += 1
    return (narchived, nerrors)
def runPager(self, versionfolder, release=False, cancel=False, tsunami='auto'):
    """Run the PAGER program with (optional) command line arguments.

    :param versionfolder: Folder containing desired version of PAGER to be re-run.
    :param release: Boolean indicating whether PAGER version should be
      'released' (if orange or red and currently pending).
    :param cancel: Boolean indicating whether to send a delete message
      through PDL for this PAGER product.
    :param tsunami: String with values 'on', 'off', or 'auto'. See pager
      command line documentation.
    :returns: Tuple of:
      - True if PAGER run was successful, False if not
      - stdout output from PAGER command line call.
      - stderr output from PAGER command line call.
    :raises PagerException: if the pager executable cannot be found.
    """
    pagerbin = find_executable('pager')
    if pagerbin is None:
        raise PagerException(
            'Could not find PAGER executable on this system.')
    gridfile = os.path.join(versionfolder, 'grid.xml')
    # assemble the command line as a list of pieces, then join once
    pieces = [pagerbin, gridfile]
    if release:
        pieces.append('--release')
    if cancel:
        pieces.append('--cancel')
    pieces.append('--tsunami=%s' % tsunami)
    pagercmd = ' '.join(pieces)
    res, stdout, stderr = get_command_output(pagercmd)
    return (res, stdout, stderr)
def fromDict(self, session, regiondict):
    """Populate this region object from a GeoJSON-like dictionary,
    then add it to the session and commit.

    :param session: SQLAlchemy session object.
    :param regiondict: Dictionary with keys 'type', 'geometry', and
      'properties' (properties must contain 'code' like "GROUP-NAME"
      and 'desc').
    :raises PagerException: if required fields are missing.
    """
    reqfields = set(['type', 'geometry', 'properties'])
    # idiom fix: inverted guard replaces "if ok: pass else: raise"
    if not reqfields <= set(regiondict.keys()):
        missing = list(reqfields - set(regiondict.keys()))
        raise PagerException('Missing required fields for region: %s' % str(missing))
    regioninfo = regiondict['properties']['code']
    rgroupname, regioncode = regioninfo.split('-')
    regiondesc = regiondict['properties']['desc']
    # try to find this region's group in the database, creating it if absent
    regiongroup = session.query(RegionGroup).filter(
        RegionGroup.groupname == rgroupname).first()
    if regiongroup is None:
        regiongroup = RegionGroup(groupname=rgroupname)
    poly = regiondict['geometry']
    polybytes = bytes(json.dumps(poly), 'utf-8')
    tshape = shape(poly)
    if not tshape.is_valid:
        # bug fix: original had a leftover debug placeholder (x = 1) here;
        # warn about invalid geometry instead of silently ignoring it
        print('Warning: geometry for region %s is not valid. Continuing.' % regioncode)
    xmin, ymin, xmax, ymax = tshape.bounds
    self.name = regioncode
    self.desc = regiondesc
    self.poly = polybytes
    self.xmin = xmin
    self.xmax = xmax
    self.ymin = ymin
    self.ymax = ymax
    self.regiongroup = regiongroup
    session.add(self)
    session.commit()
def getPopulationGrid(self):
    """Return the internal population grid.

    :returns: Population grid.
    :raises PagerException: if calcExposure() has not been called yet.
    """
    popgrid = self._popgrid
    if popgrid is None:
        raise PagerException('calcExposure() method must be called first.')
    return popgrid
def getCountryGrid(self):
    """Return the Grid2D object containing ISO numeric country codes.

    :returns: Grid2D object containing ISO numeric country codes.
    :raises PagerException: if calcExposure() has not been called yet.
    """
    isogrid = self._isogrid
    if isogrid is None:
        raise PagerException('calcExposure() method must be called first.')
    return isogrid
def getShakeGrid(self):
    """Return the MultiGrid object containing ShakeMap data.

    :returns: MultiGrid object containing ShakeMap data.
    :raises PagerException: if calcExposure() has not been called yet.
    """
    shakegrid = self._shakegrid
    if shakegrid is None:
        raise PagerException('calcExposure() method must be called first.')
    return shakegrid
def saveToJSON(self, jsonfolder):
    """Write the validated PAGER data dictionary out as a set of JSON files.

    Writes event.json, alerts.json, exposures.json, losses.json,
    cities.json, historical_earthquakes.json, and comments.json into
    jsonfolder.

    :param jsonfolder: Folder where the JSON files should be written.
    :raises PagerException: if validate() has not been called yet.
    """
    if not self._is_validated:
        raise PagerException(
            'PagerData object has not yet been validated.')

    def _write_json(filename, data):
        # serialize one piece of the pager dictionary to a file in jsonfolder;
        # idiom fix: "with" guarantees the file is closed even on error
        with open(os.path.join(jsonfolder, filename), 'wt') as f:
            json.dump(data, f)

    # one file to contain event summary, pager summary, and shakemap summary info
    _write_json('event.json', {
        'event': self._pagerdict['event_info'],
        'pager': self._pagerdict['pager'],
        'shakemap': self._pagerdict['shake_info']
    })
    # one file for alert information
    _write_json('alerts.json', self._pagerdict['alerts'])
    # one file for exposure information (population and economic)
    _write_json('exposures.json', {
        'population_exposure': self._pagerdict['population_exposure'],
        'economic_exposure': self._pagerdict['economic_exposure']
    })
    # one file for loss model results
    _write_json('losses.json', self._pagerdict['model_results'])
    # one file for the table of affected cities (the table writes its own JSON)
    with open(os.path.join(jsonfolder, 'cities.json'), 'wt') as f:
        self._pagerdict['city_table'].to_json(f, orient='records')
    # one file for the table of historical earthquakes (if any)
    _write_json('historical_earthquakes.json',
                self._pagerdict['historical_earthquakes'])
    # one file for all comments
    _write_json('comments.json', self._pagerdict['comments'])
def getSecondaryComment(self):
    """Return a paragraph describing the history of secondary hazards in the region.

    :returns: Paragraph of text describing the history of secondary hazards
      in the region.
    :raises PagerException: if validate() has not been called yet.
    """
    if not self._is_validated:
        raise PagerException('PagerData object has not yet been validated.')
    comments = self._pagerdict['comments']
    return comments['secondary_comment']
def getStructureComment(self):
    """Return a paragraph describing the vulnerability of buildings in the most
    impacted country.

    :returns: Paragraph of text describing the vulnerability of buildings in
      the most impacted country.
    :raises PagerException: if validate() has not been called yet.
    """
    if not self._is_validated:
        raise PagerException('PagerData object has not yet been validated.')
    comments = self._pagerdict['comments']
    return comments['struct_comment']
def getTotalExposure(self):
    """Return the array of aggregated (all countries) population exposure to shaking.

    :returns: List of aggregated (all countries) population exposure to shaking.
    :raises PagerException: if validate() has not been called yet.
    """
    if not self._is_validated:
        raise PagerException('PagerData object has not yet been validated.')
    popexp = self._pagerdict['population_exposure']
    return popexp['aggregated_exposure']
def getElapsed(self):
    """Return the string that summarizes the time elapsed between origin time and
    time of PAGER run.

    :returns: string summarizing time elapsed between origin time and time of
      PAGER run.
    :raises PagerException: if validate() has not been called yet.
    """
    if not self._is_validated:
        raise PagerException('PagerData object has not yet been validated.')
    pagerinfo = self._pagerdict['pager']
    return pagerinfo['elapsed_time']
def getSoftwareVersion(self):
    """Return the Software version used to create this data structure.

    :returns: String describing PAGER software version.
    :raises PagerException: if validate() has not been called yet.
    """
    if not self._is_validated:
        raise PagerException('PagerData object has not yet been validated.')
    pagerinfo = self._pagerdict['pager']
    return pagerinfo['software_version']
def fromDict(self, session, profiledict):
    """Populate this profile object from a dictionary.

    :param session: SQLAlchemy session object.
    :param profiledict: Dictionary with keys 'regions' and 'thresholds'.
    :raises PagerException: if required fields are missing or a named
      region is not found in the database.
    """
    reqfields = set(['regions', 'thresholds'])
    # idiom fix: inverted guard replaces "if ok: pass else: raise"
    if not reqfields <= set(profiledict.keys()):
        missing = list(reqfields - set(profiledict.keys()))
        raise PagerException('Missing required fields for profile: %s' % str(missing))
    for regiondict in profiledict['regions']:
        # region names come in as "GROUP-NAME"; lookup is by NAME only
        rgroup, rname = regiondict['name'].split('-')
        region = session.query(Region).filter(Region.name == rname).first()
        if region is None:
            raise PagerException('No region named %s found in the database.' % regiondict['name'])
        self.regions.append(region)
    for thresholddict in profiledict['thresholds']:
        threshold = Threshold()
        threshold.fromDict(session, thresholddict)
        self.thresholds.append(threshold)
def getHistoricalComment(self):
    """Return a string describing the most impactful historical earthquake near
    the current event.

    :returns: string describing the most impactful historical earthquake near
      the current event.
    :raises PagerException: if validate() has not been called yet.
    """
    if not self._is_validated:
        raise PagerException('PagerData object has not yet been validated.')
    comments = self._pagerdict['comments']
    return comments['historical_comment']
def fromDict(self, session, orgdict):
    """Populate this organization object from a dictionary, then add it to the
    session and commit.

    :param session: SQLAlchemy session object.
    :param orgdict: Dictionary with keys 'name' and 'shortname'.
    :raises PagerException: if required fields are missing.
    """
    reqfields = set(['name', 'shortname'])
    # idiom fix: inverted guard replaces "if ok: pass else: raise"
    if not reqfields <= set(orgdict.keys()):
        missing = list(reqfields - set(orgdict.keys()))
        # bug fix: message previously said "user" (copy-paste error)
        raise PagerException('Missing required fields for organization: %s' % str(missing))
    self.shortname = orgdict['shortname']
    self.name = orgdict['name']
    session.add(self)
    session.commit()
def getImpactComments(self):
    """Return a tuple of the two impact comments.

    :returns: Tuple of impact comments, where first is most impactful, second
      is least. In cases where the impact levels for fatalities and economic
      losses are the same, the second comment will be empty.
    :raises PagerException: if validate() has not been called yet.
    """
    if not self._is_validated:
        raise PagerException('PagerData object has not yet been validated.')
    comments = self._pagerdict['comments']
    return (comments['impact1'], comments['impact2'])
def fromDict(self, session, schemedict):
    """Create an AlertScheme from a dictionary and add it to the session.

    :param session: SQLAlchemy session object.
    :param schemedict: Dictionary with keys 'name', 'adesc', 'valuetype',
      'isdiscrete', 'minlevel', and 'maxlevel'.
    :raises PagerException: if required fields are missing.
    """
    reqfields = set(['name', 'adesc', 'valuetype', 'isdiscrete', 'minlevel', 'maxlevel'])
    # idiom fix: inverted guard replaces "if ok: pass else: raise"
    if not reqfields <= set(schemedict.keys()):
        missing = list(reqfields - set(schemedict.keys()))
        raise PagerException('Missing required fields for alert scheme: %s' % str(missing))
    tscheme = AlertScheme(name=schemedict['name'],
                          adesc=schemedict['adesc'],
                          valuetype=schemedict['valuetype'],
                          isdiscrete=schemedict['isdiscrete'],
                          minlevel=schemedict['minlevel'],
                          maxlevel=schemedict['maxlevel'])
    # NOTE(review): unlike sibling fromDict() methods, this adds a new object
    # (not self) and does not commit — confirm callers commit the session
    session.add(tscheme)
def __init__(self, pager_folder, archive_folder):
    """Create PagerAdmin object.

    :param pager_folder: Top level PAGER output data folder.
    :param archive_folder: Folder where archived PAGER events should be written.
    :raises PagerException: if pager_folder does not exist.
    """
    if not os.path.isdir(pager_folder):
        raise PagerException('PAGER data output folder %s does not exist.'
                             % pager_folder)
    self._pager_folder = pager_folder
    # create the archive folder if it is not already present
    os.makedirs(archive_folder, exist_ok=True)
    self._archive_folder = archive_folder
def read_config():
    """Read in configuration parameters from config .py file.

    :returns: Dictionary containing configuration parameters.
    :raises PagerException: if config file does not exist.
    """
    # get config file name, make sure it exists
    configfilename = get_config_file()
    if configfilename is None:
        # bug fix: previous message formatted None into "found at %s"
        raise PagerException('Config file could not be found.')
    # bug fix: use "with" so the file handle is closed, and safe_load()
    # instead of the unsafe/deprecated bare yaml.load()
    with open(configfilename, 'rt') as f:
        config = yaml.safe_load(f)
    return config
def archive(self, events=None, all_events=False, events_before=None):
    """Archive a list of events to archive directory.

    :param events: List of event IDs to archive (None means no explicit list).
    :param all_events: Boolean indicating whether all events should be archived,
      in which case events can be empty.
    :param events_before: Datetime indicating time before which all events
      should be archived.
    :returns: Tuple of number of archived events, and number of errors
      (events that did not exist)
    :raises PagerException: if all_events is True and events_before is also set.
    """
    # bug fix: mutable default argument [] replaced with None sentinel
    if events is None:
        events = []
    # bug fix: "== True" replaced with truthiness, consistent with the
    # "if all_events:" test below
    if all_events and events_before is not None:
        raise PagerException(
            'You cannot choose to archive all events and some events based on time.'
        )
    narchived = 0
    nerrors = 0
    if all_events:
        events = self.getAllEvents()
        for eventid in events:
            result = self.archiveEvent(eventid)
            if result:
                narchived += 1
            else:
                nerrors += 1
    else:
        for eventid in events:
            eventfolder = self.getEventFolder(eventid)
            if events_before is not None:
                # event folders are named <id>_<timestamp>
                t, etimestr = eventfolder.split('_')
                etime = datetime.datetime.strptime(etimestr, DATETIMEFMT)
                if etime < events_before:
                    result = self.archiveEvent(eventid)
                    if result:
                        narchived += 1
                    else:
                        nerrors += 1
                else:
                    continue
            else:
                result = self.archiveEvent(eventid)
                if result:
                    narchived += 1
                else:
                    nerrors += 1
    return (narchived, nerrors)
def saveToLegacyXML(self, versionfolder):
    """Write the validated PAGER data out as a legacy-format pager.xml file.

    :param versionfolder: Folder where pager.xml should be written.
    :returns: Path to the pager.xml file that was written.
    :raises PagerException: if validate() has not been called yet.
    """
    if not self._is_validated:
        raise PagerException(
            'PagerData object has not yet been validated.')
    # each __render* helper adds its section to the XML element tree
    pager = self.__renderPager()
    pager = self.__renderEvent(pager)
    pager = self.__renderAlerts(pager)
    pager = self.__renderExposure(pager)
    pager = self.__renderCities(pager)
    pager = self.__renderComments(pager)
    pager = self.__renderHistory(pager)
    outfile = os.path.join(versionfolder, 'pager.xml')
    xmlstr = etree.tostring(pager, pretty_print=True)
    # bug fix: use a context manager so the file is closed even if write fails
    with open(outfile, 'wt') as f:
        f.write(xmlstr.decode('utf-8'))
    return outfile
def getRates(self, ccode):
    """Return population growth start years and rates for a given country code.

    (doc fix: previous docstring documented a nonexistent 'year' parameter.)

    :param ccode: Numeric country code.
    :returns: Tuple of two numpy arrays: (start_years, rates).
    :raises PagerException: if ccode is not present in the growth data.
    """
    if ccode not in self._dataframe.columns:
        raise PagerException(
            'Country %s not found in PopulationGrowth data structure.' %
            ccode)
    starts = np.array(self._dataframe[ccode]['start'])
    rates = np.array(self._dataframe[ccode]['rate'])
    return (starts, rates)
def __init__(self, model_list, losstype='fatality'):
    """Instantiate EmpiricalLoss class.

    :param model_list: List of LognormalModel objects. The names of these will
      be used as keys for the getModel() method.
    :param losstype: One of 'fatality' or 'economic'.
    :returns: EmpiricalLoss instance.
    :raises PagerException: if losstype is not a recognized value.
    """
    if losstype not in ('fatality', 'economic'):
        raise PagerException('losstype must be one of ("fatality","economic").')
    self._loss_type = losstype
    # map model name -> model for getModel() lookups
    self._model_dict = {model.name: model for model in model_list}
    # object that can translate between different ISO country representations
    self._country = Country()
    # dictionary of manually set rates (not necessarily lognormal)
    self._overrides = {}
def write_config(config, make_backup=True):
    """Write out config parameters.

    :param config: Dictionary with configuration parameters.
    :param make_backup: Boolean indicating whether a backup of the current
      config file should be made before writing new one.
    :raises PagerException: if the config file does not already exist.
    """
    # get config file name, make sure it exists
    configfilename = os.path.join(os.path.expanduser('~'),
                                  '.losspager', 'config.yml')
    if not os.path.isfile(configfilename):
        raise PagerException('Config file could not be found at %s.' %
                             configfilename)
    if make_backup:
        # bug fix: the make_backup flag was previously ignored — a backup
        # was made unconditionally
        backup_name = os.path.join(os.path.expanduser('~'),
                                   '.losspager', 'config.yml.bak')
        shutil.copyfile(configfilename, backup_name)
    # idiom fix: "with" guarantees the file is closed even on error
    with open(configfilename, 'wt') as f:
        f.write(yaml.dump(config))