def sendMail(receiver_email, message):
    # Requirements in order to send an email with SMTP
    smtp_server = "smtp.office365.com"
    port = 587  # For starttls

    # Create a secure SSL context
    context = ssl.create_default_context()

    # Try to log in to server and send email
    server = None  # So the finally-block works even if the connection fails
    try:
        server = smtplib.SMTP(smtp_server, port)
        server.ehlo()  # Can be omitted
        server.starttls(context=context)  # Secure the connection
        server.ehlo()  # Can be omitted
        server.login(EMAIL_USERNAME, EMAIL_PASSWORD)
        server.sendmail(FROM_EMAIL, receiver_email, message)
    except Exception as e:
        # Print any error messages to stdout and log them
        print(e)
        logger.info(e)
    finally:
        if server is not None:
            server.quit()
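# A minimal usage sketch, assuming the EMAIL_* and FROM_EMAIL constants are
# configured elsewhere; the recipient address and subject here are hypothetical:
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

msg = MIMEMultipart()
msg["Subject"] = "IBF flood notification"
msg.attach(MIMEText("<p>Flood trigger detected.</p>", "html"))
sendMail("recipient@example.org", msg.as_string())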
def createSchema():
    con, cur, db = get_db()
    try:
        cur.execute("CREATE SCHEMA IF NOT EXISTS " + SCHEMA_NAME_INPUT)
        con.commit()
    except psycopg2.ProgrammingError as e:
        logger.info(e)
def main():
    logger.info('Started Cron')
    startTime = time.time()
    print(str(datetime.datetime.now()))

    try:
        if RUN_GLOFAS:
            storeHistoric()
        for fcStep, days in LEAD_TIMES.items():
            fc = Forecast(fcStep, days)
            if RUN_GLOFAS:
                fc.glofasData.process()
            if RUN_RAINFALL:
                fc.rainfallData.process()
            if CALCULATE_EXTENT:
                fc.floodExtent.calculate()
            if CALCULATE_EXTENT and CALCULATE_EXPOSURE:
                fc.floodExtent.callAllExposure()
            fc.db.upload()
            fc.db.processDynamicDataDb()
        notify()
    except Exception as e:
        # If a fatal exception occurs during the cron job,
        # log the full stack trace and send an email
        logger.exception("Fatal error occurred during the process")
        traceback.print_exc()

    elapsedTime = str(time.time() - startTime)
    print(elapsedTime)
    logger.info('Finished Cron in seconds %s', elapsedTime)
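# For context, LEAD_TIMES is assumed to map a forecast-step label to its lead
# time in days; a hypothetical configuration along these lines:
LEAD_TIMES = {
    '3-day': 3,
    '7-day': 7,
}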
def download(self):
    if not RAINFALL_DUMMY:
        downloadDone = False
        timeToTryDownload = 43200
        timeToRetry = 600
        start = time.time()
        end = start + timeToTryDownload
        while not downloadDone and time.time() < end:
            try:
                self.download_GFS_forecast()
                downloadDone = True
                self.downloaded = True
            except urllib.error.URLError:
                logger.info("GFS download failed. "
                            "Trying again in 10 minutes")
                time.sleep(timeToRetry)
        if not downloadDone:
            raise ValueError('GFS download failed for ' +
                             str(timeToTryDownload / 3600) +
                             ' hours, no new dataset was found')
    else:
        self.inputPath = PIPELINE_DATA + 'input/rainfall_dummy/'
def callAllExposure(self): logger.info('Started calculating affected of %s', self.disasterExtentRaster) for indicator, values in self.EXPOSURE_DATA_SOURCES.items(): print('indicator: ', indicator) self.inputRaster = GEOSERVER_INPUT + values['source'] + ".tif" self.outputRaster = GEOSERVER_OUTPUT + "0/" + \ values['source'] + self.leadTimeLabel self.calcAffected(self.disasterExtentRaster, indicator, values['rasterValue']) result = { 'countryCodeISO3': self.countryCodeISO3, 'exposurePlaceCodes': self.stats, 'leadTime': self.leadTimeLabel, 'dynamicIndicator': indicator + '_affected', 'adminLevel': SETTINGS[self.countryCodeISO3]['admin_level'] } self.statsPath = PIPELINE_OUTPUT + 'calculated_affected/affected_' + \ self.leadTimeLabel + '_' + self.countryCodeISO3 + '_' + indicator + '.json' with open(self.statsPath, 'w') as fp: json.dump(result, fp) logger.info("Saved stats for %s", self.statsPath)
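# EXPOSURE_DATA_SOURCES is assumed to map an indicator name to its raster
# source and the raster value that marks exposure; a hypothetical example:
EXPOSURE_DATA_SOURCES = {
    'population': {'source': 'population', 'rasterValue': 1},
    'cropland': {'source': 'cropland', 'rasterValue': 1},
}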
def getFloodInfo():
    con, cur, db = get_db()
    sqlString = '''
        select t1.name
            ,population_affected
            ,lead_time
            ,case
                when fc_prob >= 0.8 then 'Maximum alert'
                when fc_prob >= 0.7 then 'Medium alert'
                when fc_prob >= 0.6 then 'Minimum alert'
            end as fc_prob
        from "IBF-pipeline-output".data_adm2 t0
        left join "IBF-static-input"."ZMB_Geo_level2" t1
            on t0.pcode = t1.pcode_level2
        where current_prev = 'Current'
        and fc_trigger = 1
        order by population_affected desc
    '''
    try:
        cur.execute(sqlString)
        con.commit()
    except psycopg2.ProgrammingError as e:
        logger.info(e)

    if cur.statusmessage == 'SELECT 0':
        theData = []
    else:
        theData = cur.fetchall()

    isFlood = len(theData) > 0
    theInfo = {"flood": isFlood, "data": theData}
    return theInfo
def download(self):
    downloadDone = False
    timeToTryDownload = 43200
    timeToRetry = 600
    start = time.time()
    end = start + timeToTryDownload
    while not downloadDone and time.time() < end:
        try:
            if self.countryCodeISO3 == 'ZMB':
                # Temporarily keep using FTP for Zambia
                self.makeFtpRequest()
            else:
                self.makeApiRequest()
            downloadDone = True
        except Exception as exception:
            error = 'Download data failed. Trying again in {} minutes.\n{}'.format(
                timeToRetry // 60, exception)
            print(error)
            logger.info(error)
            time.sleep(timeToRetry)
    if not downloadDone:
        raise ValueError('GloFAS download failed for ' +
                         str(timeToTryDownload / 3600) +
                         ' hours, no new dataset was found')
def download(self):
    if not GLOFAS_DUMMY:
        downloadDone = False
        timeToTryDownload = 43200
        timeToRetry = 600
        start = time.time()
        end = start + timeToTryDownload
        while not downloadDone and time.time() < end:
            try:
                self.makeFtpRequest()
                downloadDone = True
            except urllib.error.URLError:
                logger.info(
                    "GloFAS unzip failed, probably because download failed. "
                    "Trying again in 10 minutes")
                time.sleep(timeToRetry)
        if not downloadDone:
            raise ValueError('GloFAS download failed for ' +
                             str(timeToTryDownload / 3600) +
                             ' hours, no new dataset was found')
    else:
        self.inputPath = PIPELINE_DATA + 'input/glofas_dummy/'
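# The three download methods above repeat the same retry-with-timeout pattern
# (try for 12 hours, pause 10 minutes between attempts); a hedged refactoring
# sketch — the helper name retry_until is an invention, not part of the pipeline:
import logging
import time

logger = logging.getLogger(__name__)

def retry_until(attempt, timeout_s=43200, retry_s=600, what='download'):
    """Call attempt() until it succeeds or timeout_s seconds have elapsed."""
    end = time.time() + timeout_s
    while time.time() < end:
        try:
            attempt()
            return
        except Exception as e:
            logger.info('%s failed (%s). Trying again in %d minutes',
                        what, e, retry_s // 60)
            time.sleep(retry_s)
    raise ValueError('{} failed for {} hours, no new dataset was found'
                     .format(what, timeout_s / 3600))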
def storeHistoric():
    # Shift each stored forecast one slot back, newest first
    i = NUMBER_OF_HISTORIC_FORECASTS - 1
    while i > 0:
        src = GEOSERVER_DATA + "output/" + str(i - 1)
        dst = GEOSERVER_DATA + "output/" + str(i)
        copytree(src, dst)
        logger.info('storeHistoric: Copied from ' + src + ' to ' + dst)
        i = i - 1
def downloadGeoDataFromDb(self, schema, table, countryCodeISO3=None):
    admin_gdf = None  # Avoid NameError in the return if the query fails
    try:
        self.con, self.cur, self.db = get_db()
        sql = "SELECT * FROM \"" + schema + "\".\"" + table + "\""
        if countryCodeISO3 is not None:
            sql = sql + " WHERE \"countryCodeISO3\"='" + \
                self.countryCodeISO3 + "'"
        admin_gdf = gpd.read_postgis(sql, self.con)
    except psycopg2.ProgrammingError as e:
        logger.info(e)
    return admin_gdf
def main():
    logging.basicConfig(filename='setup.log', level=logging.INFO)
    logger.info('Started ...')

    extractGlofasData()
    processGlofasData()
    currentFloodExtentPaths = makeFloodExtent()
    print(currentFloodExtentPaths)
    uploadDynamicToDb('triggers_rp_per_station', 'triggers_rp')
    uploadDynamicToDb('calculated_affected', 'affected')
    processDynamicDataDb()

    logger.info('Finished ...')
def connect():
    global con, cur
    try:
        con = psycopg2.connect(
            "dbname='" + DB_SETTINGS['db'] +
            "' user='" + DB_SETTINGS['user'] +
            "' password='" + DB_SETTINGS['password'] +
            "' host='" + DB_SETTINGS['host'] +
            "' port='" + DB_SETTINGS['port'] + "'")
        cur = con.cursor()
        db = cur.execute
    except psycopg2.OperationalError as e:
        if con:
            con.rollback()
        logger.info(e)
        sys.exit()
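# get_db() is called throughout but not defined in this section; judging from
# connect() above, it is assumed to hand back the (connection, cursor,
# execute) triple, roughly along these lines:
def get_db():
    connect()
    return con, cur, cur.execute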
def calcAffected(self, floodExtentRaster):
    shapesFlood = self.loadTiffAsShapes(floodExtentRaster)
    if shapesFlood != []:
        affectedImage, affectedMeta = self.clipTiffWithShapes(
            self.inputRaster, shapesFlood)
        with rasterio.open(self.outputRaster, "w", **affectedMeta) as dest:
            dest.write(affectedImage)
        logger.info("Wrote to " + self.outputRaster)

    adminBoundaries = EXPOSURE_BOUNDARY_DATA
    # source = self.source[self.source.find('/')+1:]
    self.stats = self.calcStatsPerAdmin(adminBoundaries, self.indicator,
                                        shapesFlood)
def calcAffected(self, floodExtentRaster):
    shapesFlood = self.loadTiffAsShapes(floodExtentRaster)
    if shapesFlood != []:
        affectedImage, affectedMeta = self.clipTiffWithShapes(
            self.inputRaster, shapesFlood)
        with rasterio.open(self.outputRaster, "w", **affectedMeta) as dest:
            dest.write(affectedImage)
        logger.info("Wrote to " + self.outputRaster)

    adminBoundaries = "data/input/vector/ZMB_adm4_mapshaper_reproj.shp"
    source = self.type[self.type.find('/') + 1:]
    self.stats = self.calcStatsPerAdmin(adminBoundaries, source, shapesFlood)
def processStaticDataDb():
    con, cur, db = get_db()
    with open('lib/setup/processStaticDataPostgres.sql', 'r',
              encoding='utf-8') as sql_file:
        sql = sql_file.read()
    try:
        cur.execute(sql)
        con.commit()
        print('SQL EXECUTED')
    except psycopg2.ProgrammingError as e:
        logger.info(e)
        print('SQL FAILED')
def processDynamicDataDb(self):
    with open('lib/pipeline/createApiTables.sql', 'r',
              encoding='utf-8') as sql_file:
        sql_create_api_tables = sql_file.read()
    try:
        self.con, self.cur, self.db = get_db()
        self.cur.execute(sql_create_api_tables)
        self.con.commit()
        self.con.close()
        print('SQL EXECUTED')
    except psycopg2.ProgrammingError as e:
        logger.info(e)
        print('SQL FAILED', e)
def processDynamicDataDb(self):
    with open('lib/cronJob/processDynamicDataPostgres.sql', 'r',
              encoding='utf-8') as sql_file:
        sql = sql_file.read()
    try:
        self.con, self.cur, self.db = get_db()
        self.cur.execute(sql)
        self.con.commit()
        self.con.close()
        print('SQL EXECUTED')
    except psycopg2.ProgrammingError as e:
        logger.info(e)
        print('SQL FAILED', e)
def callAllExposure(self): logger.info('Started calculating affected of %s', self.disasterExtentRaster) for indicator, values in self.EXPOSURE_DATA_SOURCES.items(): print('indicator: ', indicator) self.inputRaster = GEOSERVER_INPUT + values['source'] + ".tif" self.outputRaster = GEOSERVER_OUTPUT + "0/" + values[ 'source'] + self.leadTimeLabel self.calcAffected(self.disasterExtentRaster, indicator, values['rasterValue']) with open(self.statsPath, 'w') as fp: json.dump(self.stats, fp) logger.info("Saved stats for %s", self.statsPath)
def downloadDataFromDb(self, schema, table, country_code=None):
    data, colnames = None, None  # Avoid NameError in the return if the query fails
    try:
        self.con, self.cur, self.db = get_db()
        sql = "SELECT * FROM \"" + schema + "\".\"" + table + "\""
        if country_code is not None:
            sql = sql + " WHERE country_code='" + self.country_code + "'"
        self.cur.execute(sql)
        data = self.cur.fetchall()

        # Fetch column names from an empty result set
        self.cur.execute("SELECT * FROM \"" + schema + "\".\"" + table +
                         "\" LIMIT 0")
        colnames = [desc[0] for desc in self.cur.description]

        self.con.commit()
        self.con.close()
    except psycopg2.ProgrammingError as e:
        logger.info(e)
    return data, colnames
def getFloodInfo(countryCodeISO3):
    con, cur, db = get_db()
    sqlString = '''
        select aa.name
            ,population_affected
            ,aad."leadTime" as lead_time
            ,case
                when gst."forecastProbability" >= 0.8 then 'Maximum alert'
                when gst."forecastProbability" >= 0.7 then 'Medium alert'
                when gst."forecastProbability" >= 0.6 then 'Minimum alert'
                else ''
            end as fc_prob
        from (
            select "countryCodeISO3"
                ,"placeCode"
                ,"leadTime"
                ,value as population_affected
            from "IBF-app"."admin-area-dynamic-data"
            where date = current_date
            and indicator = 'population_affected'
            and value > 0
        ) aad
        left join "IBF-app"."admin-area" aa
            on aad."placeCode" = aa."placeCode"
        left join "IBF-app"."glofas-station" gs
            on aa."glofasStation" = gs."stationCode"
        left join "IBF-app"."glofas-station-forecast" gst
            on gs.id = gst."glofasStationId"
            and gst.date = current_date
            and aad."leadTime" = gst."leadTime"
        where aad."countryCodeISO3" = \'''' + countryCodeISO3 + '''\'
        and aad.population_affected > 0
        order by population_affected desc
    '''
    try:
        cur.execute(sqlString)
        con.commit()
    except psycopg2.ProgrammingError as e:
        logger.info(e)

    if cur.statusmessage == 'SELECT 0':
        theData = []
    else:
        theData = cur.fetchall()

    isFlood = len(theData) > 0
    theInfo = {"flood": isFlood, "data": theData}
    return theInfo
def notify():
    if EMAIL_NOTIFICATION:
        the_client = EmailClient(MC_API, MC_USER)
        floodInfo = getFloodInfo()
        if floodInfo["flood"] or EMAIL_WITHOUT_TRIGGER:
            formattedInfo = formatInfo(floodInfo)
            the_client.sendNotification(formattedInfo)
    else:
        logger.info("Email notifications are turned off")
def calcAffected(self, disasterExtentRaster, indicator, rasterValue):
    disasterExtentShapes = self.loadTiffAsShapes(disasterExtentRaster)
    if disasterExtentShapes != []:
        try:
            affectedImage, affectedMeta = self.clipTiffWithShapes(
                self.inputRaster, disasterExtentShapes)
            with rasterio.open(self.outputRaster, "w",
                               **affectedMeta) as dest:
                dest.write(affectedImage)
        except ValueError:
            print('Rasters do not overlap')
        logger.info("Wrote to " + self.outputRaster)

    self.ADMIN_AREA_GDF.to_file(self.ADMIN_AREA_GDF_TMP_PATH)
    stats = self.calcStatsPerAdmin(indicator, disasterExtentShapes,
                                   rasterValue)
    for item in stats:
        self.stats.append(item)
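# loadTiffAsShapes is referenced above but not shown in this section; a hedged
# sketch of what it is assumed to do — vectorize the non-zero cells of the
# extent raster with rasterio.features.shapes (dtype handling simplified):
import rasterio
from rasterio.features import shapes

def loadTiffAsShapes(tiffPath):
    with rasterio.open(tiffPath) as src:
        image = src.read(1)
        mask = image > 0  # keep only cells marked as flooded
        return [geom for geom, value
                in shapes(image, mask=mask, transform=src.transform)]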
def getFloodInfo(countryCode):
    con, cur, db = get_db()
    sqlString = '''
        select t1.name
            ,t0.population_affected
            ,t0.lead_time
            ,case
                when t0.fc_prob >= 0.8 then 'Maximum alert'
                when t0.fc_prob >= 0.7 then 'Medium alert'
                when t0.fc_prob >= 0.6 then 'Minimum alert'
            end as fc_prob
        from "IBF-pipeline-output".data_adm2 t0
        left join (
            select * from "IBF-API"."Admin_area_data2"
            union all
            select * from "IBF-API"."Admin_area_data1"
        ) t1
            on t0.pcode = t1.pcode
            and t0.lead_time = t1.lead_time
            and t0.current_prev = t1.current_prev
        where t0.country_code = \'''' + countryCode + '''\'
        and t0.current_prev = 'Current'
        and t0.fc_trigger = 1
        order by population_affected desc
    '''
    try:
        cur.execute(sqlString)
        con.commit()
    except psycopg2.ProgrammingError as e:
        logger.info(e)

    if cur.statusmessage == 'SELECT 0':
        theData = []
    else:
        theData = cur.fetchall()

    isFlood = len(theData) > 0
    theInfo = {"flood": isFlood, "data": theData}
    return theInfo
def main():
    logger.info('Started Setup')

    # Folder structure
    createSubFolders()

    # Postgres database
    createSchema()
    for COUNTRY_CODE in COUNTRY_CODES:
        COUNTRY_SETTINGS = SETTINGS[COUNTRY_CODE]
        uploadStaticToDb(COUNTRY_CODE + '_glofas_stations',
                         COUNTRY_SETTINGS['trigger_levels'])
        uploadStaticToDb(COUNTRY_CODE + '_waterstation_per_district',
                         COUNTRY_SETTINGS['district_mapping'])
        # uploadStaticToDb(COUNTRY_CODE + '_redcross_branches',
        #                  COUNTRY_SETTINGS['redcross_branches'])
    processStaticDataDb()

    logger.info('Finished Setup')
def notify(countryCode):
    if SETTINGS_SECRET[countryCode]["notify_email"]:
        floodInfo = getFloodInfo(countryCode)
        if floodInfo["flood"] or EMAIL_WITHOUT_TRIGGER:
            formattedInfo = formatInfo(floodInfo, countryCode)
            if not EMAIL_HARDCODE:
                mailchimpClient = EmailClient(MC_API, MC_USER)
                mailchimpClient.sendNotification(formattedInfo, countryCode)
            else:
                msg = MIMEMultipart()
                msg['Subject'] = formattedInfo['subject']
                part = MIMEText(formattedInfo['html'], "html")
                msg.attach(part)
                sendMailAlternative(EMAIL_LIST_HARDCODE, msg.as_string())
    else:
        logger.info("Email notifications are turned off for this country")
def callAllExposure(self): logger.info('Started calculating affected of %s', self.outputPathMerge) print(self.fcStep, " - fcStep") for source, rasterValue in EXPOSURE_DATA_SOURCES.items(): print(source) exposure = Exposure(source, rasterValue, self.fcStep) exposure.calcAffected(self.outputPathMerge) for item in exposure.stats: self.stats.append(item) print(self.statsPath) with open(self.statsPath, 'w') as fp: json.dump(self.stats, fp) logger.info("Saved stats for %s", self.statsPath)
def main():
    logger.info('Started Test')
    startTime = time.time()
    print(str(datetime.datetime.now()))

    try:
        for leadTimeLabel, leadTimeValue in LEAD_TIMES.items():
            fc = Forecast(leadTimeLabel, leadTimeValue)
            fc.rainfallData.process()
    except Exception as e:
        # If a fatal exception occurs during the test run,
        # log the full stack trace
        logger.exception("Fatal error occurred during the process")
        traceback.print_exc()

    elapsedTime = str(time.time() - startTime)
    print(elapsedTime)
    logger.info('Finished Test in seconds %s', elapsedTime)
def callAllExposure(self): logger.info('Started calculating affected of %s', self.outputPathMerge) print(self.fcStep, " - fcStep") for indicator, values in self.EXPOSURE_DATA_SOURCES.items(): print(indicator) exposure = Exposure(indicator, values['source'], values['rasterValue'], self.fcStep, self.country_code) exposure.calcAffected(self.outputPathMerge) for item in exposure.stats: self.stats.append(item) print(self.statsPath) with open(self.statsPath, 'w') as fp: json.dump(self.stats, fp) logger.info("Saved stats for %s", self.statsPath)
def uploadDynamicToDb(self, table, jsonData):
    logger.info("Uploading from %s to %s", jsonData, table)

    # Load (static) threshold values per station and add a date column
    df = pd.read_json(jsonData, orient='records')
    current_date = CURRENT_DATE.strftime('%Y-%m-%d')
    df['date'] = current_date

    # Delete existing entries with the same date
    try:
        self.con, self.cur, self.db = get_db()
        sql = "DELETE FROM " + SCHEMA_NAME + "." + table + \
              " WHERE date='" + current_date + "'"
        self.cur.execute(sql)
        self.con.commit()
        self.con.close()
    except psycopg2.ProgrammingError as e:
        logger.info(e)

    # Append new data for the current date
    df.to_sql(table, self.engine, if_exists='append', schema=SCHEMA_NAME)
    print(table + ' uploaded')
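# A hedged usage sketch: the JSON is assumed to be a records-oriented file such
# as the affected_*.json written by callAllExposure above; the instance name
# `db` and the exact file path are hypothetical:
db.uploadDynamicToDb(
    'affected',
    PIPELINE_OUTPUT + 'calculated_affected/affected_7-day_ZMB_population.json')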
def processDynamicDataDb(self):
    with open('lib/cronJob/processDynamicDataPostgresTrigger.sql', 'r',
              encoding='utf-8') as sql_file:
        sql_trigger = sql_file.read()
    with open('lib/cronJob/processEventDistricts.sql', 'r',
              encoding='utf-8') as sql_file:
        sql_event_districts = sql_file.read()
    with open('lib/cronJob/processDynamicDataPostgresExposure.sql', 'r',
              encoding='utf-8') as sql_file:
        sql_exposure = sql_file.read()

    try:
        self.con, self.cur, self.db = get_db()
        self.cur.execute(sql_trigger)
        self.cur.execute(sql_event_districts)
        self.cur.execute(psql.SQL(sql_exposure))
        self.con.commit()
        self.con.close()
        print('SQL EXECUTED')
    except psycopg2.ProgrammingError as e:
        logger.info(e)
        print('SQL FAILED', e)