def run(self):
    """
    Download the GDACS RSS feed for the configured date range, append its
    features to the PostGIS table, then publish/update the layer in
    GeoServer and GeoNode.

    :return: None
    """
    # (Removed leftover debug print of the request URL.)
    rss = self.download(self.base_url.format(self.params['sdate'],
                                             self.params['edate']),
                        filename=self.prefix + ".rss")
    db = ogc_server_settings.datastore_db
    # Append the downloaded features to the PostGIS table via ogr2ogr.
    ogr2ogr_exec("-append -skipfailures -f PostgreSQL \
        \"PG:host={db_host} user={db_user} password={db_pass} \
        dbname={db_name}\" {rss} -nln {table}".format(
        db_host=db["HOST"], db_user=db["USER"],
        db_pass=db["PASSWORD"], db_name=db["NAME"],
        rss="{}".format(os.path.join(self.tmp_dir, rss)),
        table=self.prefix))
    datastore = ogc_server_settings.server.get('DATASTORE')
    if not layer_exists(self.prefix, datastore, DEFAULT_WORKSPACE):
        # First run: add a uniqueness constraint so repeated appends do
        # not duplicate features, then register the layer in GeoServer.
        c = connections[datastore].cursor()
        try:
            c.execute(
                'ALTER TABLE {tb} ADD CONSTRAINT {tb}_guid UNIQUE (guid);'.
                format(tb=self.prefix))
        finally:
            # Fix: the original leaked the cursor on the success path and
            # used `raise e`; `finally` closes it in all cases while any
            # exception still propagates naturally.
            c.close()
        self.post_geoserver_vector(self.prefix)
    if not style_exists(self.prefix):
        with open(os.path.join(
                script_dir, 'resources/gdacs.sld')) as sld:
            self.set_default_style(self.prefix, self.prefix, sld.read())
    self.update_geonode(self.prefix,
                        title=self.layer_title,
                        description=self.description,
                        store=datastore)
    self.truncate_gs_cache(self.prefix)
    self.cleanup()
def run(self, days=1):
    """
    Download the last *days* of GPM imagery, reproject each file, move it
    into the GeoServer data directory, and publish/update the layer.

    :param days: number of days of imagery to process
    :return: None
    """
    downloaded = self.download(days=days)
    name = self.layer_name
    for source in downloaded:
        warped = self.convert(source)
        target = self.data_dir.format(gsd=GS_DATA_DIR, ws=self.workspace,
                                      layer=name, file=warped)
        target_dir = os.path.dirname(target)
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)
        if target.endswith('.tif'):
            shutil.move(os.path.join(self.tmp_dir, warped), target)
        self.post_geoserver(target, name)
    # The newest file determines the layer title and timestamp.
    layer_title, imgtime = self.parse_name(downloaded[-1])
    self.drop_old_hourly_images(imgtime, name)
    self.drop_old_daily_images(imgtime, name)
    if not style_exists(name):
        with open(os.path.join(script_dir, 'resources/gpm.sld')) as sld:
            self.set_default_style(name, name, sld.read())
    self.update_geonode(name, title=layer_title,
                        description=self.description,
                        store=name,
                        bounds=('-180.0', '180.0', '-90.0', '90.0',
                                'EPSG:4326'))
    self.truncate_gs_cache(name)
    self.cleanup()
def run(self, days=1):
    """
    Download, convert, and import into GeoNode/Geoserver the last x days
    of AirNow API Grib images.

    :param days: number of days to process
    :return: None
    """
    for grib in self.download(days=days):
        layer_title, layer_name, imgtime = self.parse_name(grib)
        tif_out = self.convert(grib, imgtime, layer_name)
        dst = self.data_dir.format(gsd=GS_DATA_DIR, ws=self.workspace,
                                   layer=layer_name, file=tif_out)
        parent = os.path.dirname(dst)
        if not os.path.exists(parent):
            os.makedirs(parent)
        if dst.endswith('.tif'):
            shutil.move(os.path.join(self.tmp_dir, tif_out), dst)
        self.post_geoserver(dst, layer_name)
        self.drop_old_hourly_images(imgtime, layer_name)
        self.drop_old_daily_images(imgtime, layer_name)
        if not style_exists(layer_name):
            with open(os.path.join(
                    script_dir, 'resources/airnow.sld')) as sld:
                self.set_default_style(layer_name, layer_name, sld.read())
        self.update_geonode(layer_name, title=layer_title,
                            store=layer_name)
        self.truncate_gs_cache(layer_name)
    self.cleanup()
def run(self):
    """
    Run the processor

    Download station/result CSVs for each indicator, update the database
    tables, and publish/update the corresponding GeoNode layers.

    :return: None
    """
    # The datastore name is constant; look it up once.
    datastore = ogc_server_settings.server.get('DATASTORE')
    for indicator in self.indicators:
        downloads = self.download(indicator)
        station_csv = downloads['Station']
        if os.path.getsize(os.path.join(self.tmp_dir, station_csv)) > 0:
            self.update_station_table(station_csv)
        result_csv = downloads['Result']
        if os.path.getsize(os.path.join(self.tmp_dir, result_csv)) > 0:
            self.update_indicator_table(result_csv)
            layer_name = '{}{}{}'.format(self.prefix,
                                         self.safe_name(indicator),
                                         self.suffix)
            layer_title = 'Water Quality - {} - Updated {}'.format(
                indicator, datetime.datetime.now().strftime('%Y-%m-%d'))
            if not layer_exists(layer_name, datastore,
                                DEFAULT_WORKSPACE):
                self.post_geoserver_vector(layer_name)
            if not style_exists(layer_name):
                with open(os.path.join(
                        script_dir,
                        'resources/{}.sld'.format(layer_name))) as sld:
                    self.set_default_style(layer_name, layer_name,
                                           sld.read())
            self.update_geonode(layer_name, title=layer_title,
                                store=datastore)
            self.truncate_gs_cache(layer_name)
    self.cleanup()
def run(self, now=None):
    """
    Retrieve and process the latest global air temperature image from
    forecast.io.

    :param now: timestamp to fetch; defaults to the current UTC hour.
    :return: None
    """
    if not now:
        now = datetime.datetime.utcnow()
    raw_name = "{prefix}_{hour}.tif".format(
        prefix=self.prefix, hour='{0:02d}'.format(now.hour))

    def _image_url(ts):
        # Build the hourly image URL for a given timestamp (factored out
        # of the previously duplicated download calls).
        return "{url}{year}/{month}/{day}/{hour}.tif".format(
            url=self.base_url,
            year=str(ts.year),
            month='{0:02d}'.format(ts.month),
            day='{0:02d}'.format(ts.day),
            hour='{0:02d}'.format(ts.hour))

    try:
        raw_file = self.download(_image_url(now), filename=raw_name)
    except requests.HTTPError:
        # The current hour's image may not be published yet; fall back
        # to the previous hour.
        now = now - datetime.timedelta(hours=1)
        raw_file = self.download(_image_url(now), filename=raw_name)
    tif_file = self.convert(raw_file, now)
    dst_file = self.data_dir.format(gsd=GS_DATA_DIR, ws=self.workspace,
                                    layer=self.layer_name, file=tif_file)
    dst_dir = os.path.dirname(dst_file)
    if not os.path.exists(dst_dir):
        os.makedirs(dst_dir)
    if dst_file.endswith('.tif'):
        shutil.move(os.path.join(self.tmp_dir, tif_file), dst_file)
    self.post_geoserver(dst_file, self.layer_name)
    if not style_exists(self.layer_name):
        with open(os.path.join(script_dir,
                               'resources/forecastio.sld')) as sld:
            self.set_default_style(self.layer_name, self.layer_name,
                                   sld.read())
    self.drop_old_hourly_images(now, self.layer_name)
    self.drop_old_daily_images(now, self.layer_name)
    self.update_geonode(self.layer_name,
                        title=self.parse_name(now),
                        description=self.description,
                        store=self.layer_name,
                        bounds=('-180.0', '180.0', '-90.0', '90.0',
                                'EPSG:4326'))
    self.truncate_gs_cache(self.layer_name)
    self.cleanup()
def run(self, rss_file=None):
    """
    Retrieve the latest USGS earthquake data and append to all PostGIS
    earthquake tables, then remove old data.

    :param rss_file: optional path to an existing GeoJSON feed; when
        omitted the feed is downloaded for the configured date range.
    :return: None
    """
    if not rss_file:
        rss = self.download(self.base_url.format(self.params['sdate'],
                                                 self.params['edate']),
                            filename=self.prefix + '.rss')
        rss_file = os.path.join(self.tmp_dir, rss)
    json_data = None
    with open(rss_file) as json_file:
        json_data = json.load(json_file)
    # USGS timestamps are epoch milliseconds; rewrite them as readable
    # UTC strings before importing into PostGIS.
    for feature in json_data['features']:
        time_original = datetime.datetime.utcfromtimestamp(
            feature['properties']['time']/1000)
        updated_original = datetime.datetime.utcfromtimestamp(
            feature['properties']['updated']/1000)
        feature['properties']['time'] = time_original.strftime(
            "%Y-%m-%d %H:%M:%S")
        feature['properties']['updated'] = updated_original.strftime(
            "%Y-%m-%d %H:%M:%S")
    with open(rss_file, 'w') as modified_file:
        json.dump(json_data, modified_file)
    db = ogc_server_settings.datastore_db
    for table, title in zip(self.tables, self.titles):
        ogr2ogr_exec("-append -skipfailures -f PostgreSQL \
            \"PG:host={db_host} user={db_user} password={db_pass} \
            dbname={db_name}\" {rss} -nln {table}".format(
            db_host=db["HOST"], db_user=db["USER"],
            db_pass=db["PASSWORD"], db_name=db["NAME"],
            rss="{}".format(rss_file), table=table))
        datastore = ogc_server_settings.server.get('DATASTORE')
        if not layer_exists(table, datastore, DEFAULT_WORKSPACE):
            c = connections[datastore].cursor()
            q = 'ALTER TABLE {tb} ADD CONSTRAINT {tb}_ids UNIQUE (ids);'
            try:
                c.execute(q.format(tb=table))
            except Exception:
                # Best-effort: tolerate failure (e.g. constraint already
                # present) and continue with the remaining tables.
                pass
            finally:
                # Fix: the original only closed the cursor on failure,
                # leaking it on the success path.
                c.close()
            self.post_geoserver_vector(table)
        if not style_exists(table):
            with open(os.path.join(
                    script_dir, 'resources/usgs.sld')) as sld:
                self.set_default_style(table, table, sld.read())
        self.update_geonode(table,
                            title="Earthquakes - {}".format(title),
                            description=self.description,
                            store=datastore)
        self.truncate_gs_cache(table)
    self.purge_old_data()
    self.cleanup()
def run(self):
    """
    Process the AQICN data, then publish/update the Air Quality Index
    layer in GeoServer and GeoNode.

    :return: None
    """
    self.process()
    name = self.prefix
    store = ogc_server_settings.server.get('DATASTORE')
    if not layer_exists(name, store, DEFAULT_WORKSPACE):
        self.post_geoserver_vector(name)
    if not style_exists(name):
        sld_path = os.path.join(script_dir, 'resources/aqicn.sld')
        with open(sld_path) as sld:
            self.set_default_style(name, name, sld.read())
    self.update_geonode(name, title='Air Quality Index', store=store)
    self.truncate_gs_cache(name)
    self.cleanup()
def run(self):
    """
    Retrieve the layers and import into Geonode.

    Each configured layer is downloaded, loaded into PostGIS, and
    published; a failure on one layer is logged and does not stop the
    remaining layers.
    """
    for layer in self.layers:
        try:
            table = '{}{}'.format(self.prefix, layer['table'])
            lyr_file = os.path.join(
                self.tmp_dir,
                self.download(layer['url'], filename=table))
            with open(lyr_file) as inf:
                result = inf.readline(24)
            if result.startswith('{"processingTime":'):
                # The server returned a "still processing" stub instead
                # of data; wait and re-download once.
                logger.info('Output is being generated,'
                            'will try again in 60 seconds')
                time.sleep(60)
                lyr_file = os.path.join(
                    self.tmp_dir,
                    self.download(layer['url'], filename=table))
            db = ogc_server_settings.datastore_db
            ogr2ogr_exec("-overwrite -skipfailures -f PostgreSQL \
                \"PG:host={db_host} user={db_user} password={db_pass} \
                dbname={db_name}\" {lyr} -nln {table}".format(
                db_host=db["HOST"], db_user=db["USER"],
                db_pass=db["PASSWORD"], db_name=db["NAME"],
                lyr="{}".format(lyr_file), table=table))
            datastore = ogc_server_settings.server.get('DATASTORE')
            if not layer_exists(table, datastore, DEFAULT_WORKSPACE):
                self.post_geoserver_vector(table)
            if not style_exists(table):
                sld_path = os.path.join(
                    script_dir,
                    'resources/{}.sld'.format(layer['sld']))
                with open(sld_path) as sld:
                    sld_text = sld.read().format(table=layer['table'],
                                                 title=layer['name'])
                self.set_default_style(table, table, sld_text)
            self.update_geonode(table, title=layer['name'],
                                description=layer['description'],
                                store=datastore)
            self.truncate_gs_cache(table)
        except Exception:
            logger.error('Error with layer {}'.format(layer['name']))
            logger.error(traceback.format_exc())
    self.cleanup()
def import_landscan(self, landscan_tiff):
    """
    Imports landscan to geonode.

    :param landscan_tiff: path to the LandScan GeoTIFF to publish
    :return: None
    """
    layer = self.layer
    self.post_geoserver(landscan_tiff, layer)
    if not style_exists(layer):
        sld_path = os.path.join(script_dir, 'resources/landscan.sld')
        with open(sld_path) as sld:
            self.set_default_style(layer, layer, sld.read())
    self.update_geonode(layer, title=layer, store=layer,
                        description=self.description)
    self.truncate_gs_cache(layer)
def import_current(self):
    """
    Retrieve and process the GFMS image closest to the current
    date/time.
    """
    flood_img = self.download(self.get_most_current())
    geotiff = self.convert(flood_img)
    title = self.parse_title(geotiff)
    target = self.layer_current
    self.post_geoserver(geotiff, target)
    if not style_exists(target):
        sld_path = os.path.join(script_dir, 'resources/gfms.sld')
        with open(sld_path) as sld:
            self.set_default_style(target, target, sld.read())
    self.update_geonode(target, title=title, store=target)
    self.truncate_gs_cache(target)
def run(self):
    """
    Create the WHISP table on first run (importing archived data), scrape
    new reports, then publish/update the layer in GeoServer and GeoNode.
    """
    if not table_exists(self.prefix):
        # First run: create the table and backfill from the archive.
        postgres_query(WHISP_TABLE.format(table=self.prefix), commit=True)
        self.import_archive()
    self.scrape()
    datastore = ogc_server_settings.server.get('DATASTORE')
    if not layer_exists(self.prefix, datastore, DEFAULT_WORKSPACE):
        self.post_geoserver_vector(self.prefix)
    if not style_exists(self.prefix):
        sld_path = os.path.join(script_dir, 'resources/whisp.sld')
        with open(sld_path) as sld:
            self.set_default_style(self.prefix, self.prefix, sld.read())
    self.update_geonode(self.prefix, title=self.title,
                        description=self.description)
    self.truncate_gs_cache(self.prefix)
    self.cleanup()
def run(self):
    """
    Process the AQICN data and publish/update the Air Quality Index
    layer, tagging it with its category keyword.

    :return: None
    """
    self.process()
    name = self.prefix
    store = ogc_server_settings.server.get('DATASTORE')
    if not layer_exists(name, store, DEFAULT_WORKSPACE):
        self.post_geoserver_vector(name)
    if not style_exists(name):
        sld_path = os.path.join(script_dir, 'resources/aqicn.sld')
        with open(sld_path) as sld:
            self.set_default_style(name, name, sld.read())
    self.update_geonode(
        name,
        title='Air Quality Index',
        description=self.description,
        store=store,
        extra_keywords=['category:Climatology Meteorology Atmosphere'])
    self.truncate_gs_cache(name)
    self.cleanup()
def import_future(self):
    """
    Retrieve and process the GFMS image furthest into the future.
    """
    flood_img = self.download(self.get_latest_future())
    geotiff = self.convert(flood_img)
    title = self.parse_title(geotiff)
    target = self.layer_future
    self.post_geoserver(geotiff, target)
    if not style_exists(target):
        sld_path = os.path.join(script_dir, 'resources/gfms.sld')
        with open(sld_path) as sld:
            self.set_default_style(target, target, sld.read())
    self.update_geonode(target, title=title, store=target)
    self.truncate_gs_cache(target)
def run(self):
    """
    Retrieve and process all SPEI image files listed in the SPEIProcess
    object's spei_files property.
    """
    for name in self.spei_files:
        self.download("{}{}.nc".format(self.base_url, name))
        geotiff = self.convert(name)
        self.post_geoserver(geotiff, name)
        if not style_exists(name):
            sld_path = os.path.join(script_dir, 'resources/spei.sld')
            with open(sld_path) as sld:
                self.set_default_style(name, name, sld.read())
        self.update_geonode(name, title=self.spei_files[name],
                            store=name)
        self.truncate_gs_cache(name)
    self.cleanup()
def publish(self, tif, name, title, desc):
    """
    Publish to Geoserver and Geonode.

    :param tif: File path/name of TIF image
    :param name: layer name
    :param title: layer title
    :param desc: layer description
    :return: None
    """
    category = TopicCategory.objects.get(
        identifier='climatologyMeteorologyAtmosphere')
    # Choose the SLD from keywords in the title; first match wins and
    # 'worldclim_bio' is the fallback.
    style = "worldclim_bio"
    if "Diurnal" in title:
        style = "worldclim_diurnal"
    elif "Isotherm" in title:
        style = "worldclim_isotherm"
    elif "Temperature" in title:
        style = ("worldclim_temp_seasonality"
                 if "Seasonality" in title else "worldclim_temp")
    elif "Precipitation" in title:
        if "Annual" in title:
            style = "worldclim_precip_annual"
        elif "Seasonality" in title:
            style = "worldclim_precip_seasonality"
        else:
            style = "worldclim_precip"
    self.post_geoserver(tif, name)
    sld_path = os.path.join(script_dir, 'resources/{}.sld'.format(style))
    with open(sld_path) as sld:
        self.set_default_style(name, style, sld.read(),
                               create=not style_exists(style))
    self.truncate_gs_cache(name)
    self.update_geonode(
        name, title, description=desc, category=category, store=name,
        extra_keywords=['category:Climatology Meteorology Atmosphere'])
def update_layer(self, layer):
    """
    Create or update the MMWR layer in GeoNode.

    :param layer: Layer to update (weekly or archive)
    :return: None
    """
    csvfile = "{}.csv".format(self.prefix)
    vrt_path = os.path.join(self.tmp_dir, '{}.vrt'.format(self.prefix))
    csvt_path = os.path.join(self.tmp_dir, '{}.csvt'.format(self.prefix))
    # Write the OGR virtual-layer definition and the column-type sidecar
    # once; later calls reuse them.
    if not os.path.exists(vrt_path):
        with open(vrt_path, 'w') as vrt:
            vrt.write(vrt_content.format(
                name=csvfile.replace('.csv', ''),
                csv=os.path.join(self.tmp_dir, csvfile)))
    if not os.path.exists(csvt_path):
        with open(csvt_path, 'w') as csvt:
            csvt.write(csvt_content)
    db = ogc_server_settings.datastore_db
    table = '{}_{}'.format(self.prefix, layer).lower()
    # The weekly layer is rebuilt each run; the archive accumulates.
    if layer.lower() == 'weekly':
        option = 'overwrite'
    else:
        option = 'append'
    ogr2ogr_exec("-{option} -skipfailures -f PostgreSQL \
        \"PG:host={db_host} user={db_user} password={db_pass} \
        dbname={db_name}\" {vrt} -nln {table}".format(
        db_host=db["HOST"], db_user=db["USER"],
        db_pass=db["PASSWORD"], db_name=db["NAME"],
        vrt="{}".format(vrt_path), option=option, table=table))
    if not layer_exists(table,
                        ogc_server_settings.server.get('DATASTORE'),
                        DEFAULT_WORKSPACE):
        constraint = 'ALTER TABLE {table} ADD CONSTRAINT ' \
                     '{table}_unique UNIQUE (place, report_date)'\
            .format(table=table)
        postgres_query(constraint, commit=True)
        self.post_geoserver_vector(table)
    if not style_exists(table):
        with open(os.path.join(
                script_dir, 'resources/mmwr.sld')) as sldfile:
            sld = sldfile.read().format(layername=table)
        self.set_default_style(table, table, sld)
    self.update_geonode(
        table,
        title='{} {}'.format(self.base_title, layer),
        description=self.description)
    self.truncate_gs_cache(table)
def import_current(self):
    """
    Retrieve and process the GFMS image closest to the current
    date/time.
    """
    flood_img = self.download(self.get_most_current())
    geotiff = self.convert(flood_img)
    title = self.parse_title(geotiff)
    target = self.layer_current
    self.post_geoserver(geotiff, target)
    if not style_exists(target):
        sld_path = os.path.join(script_dir, 'resources/gfms.sld')
        with open(sld_path) as sld:
            self.set_default_style(target, target, sld.read())
    self.update_geonode(target, title=title, store=target,
                        description=self.description)
    self.truncate_gs_cache(target)
def run(self):
    """
    Retrieve and process all SPEI image files listed in the SPEIProcess
    object's spei_files property.
    """
    for name in self.spei_files:
        self.download("{}{}.nc".format(self.base_url, name))
        geotiff = self.convert(name)
        self.post_geoserver(geotiff, name)
        if not style_exists(name):
            sld_path = os.path.join(script_dir, 'resources/spei.sld')
            with open(sld_path) as sld:
                self.set_default_style(name, name, sld.read())
        self.update_geonode(name, title=self.spei_files[name],
                            description=self.description, store=name)
        self.truncate_gs_cache(name)
    self.cleanup()
def run(self):
    """
    Retrieve and process the latest NetCDF file.

    Downloads the gzipped GISTEMP NetCDF, converts it, extracts every
    band not already in the mosaic as a dated GeoTIFF, publishes those to
    GeoServer, and finally updates the GeoNode layer metadata.
    """
    gzfile = self.download(self.base_url,
                           '{}.nc.gz'.format(self.layer_name))
    ncfile = gunzip(os.path.join(self.tmp_dir, gzfile))
    cdf_file = self.convert(ncfile)
    bands = get_band_count(cdf_file)
    img_list = self.get_mosaic_filenames(self.layer_name)
    for band in range(1, bands + 1):
        # Raw string fixes the invalid '\-'/'\.' escape sequences the
        # original non-raw pattern produced (DeprecationWarning).
        band_date = re.sub(r'[\-\.]+', '',
                           self.get_date(band).isoformat())
        img_name = '{}_{}T000000000Z.tif'.format(self.layer_name,
                                                 band_date)
        if img_name not in img_list:
            band_tif = self.extract_band(cdf_file, band, img_name)
            dst_file = self.data_dir.format(gsd=GS_DATA_DIR,
                                            ws=self.workspace,
                                            layer=self.layer_name,
                                            file=img_name)
            dst_dir = os.path.dirname(dst_file)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            if dst_file.endswith('.tif'):
                shutil.move(os.path.join(self.tmp_dir, band_tif),
                            dst_file)
            self.post_geoserver(dst_file, self.layer_name)
    if not style_exists(self.layer_name):
        with open(os.path.join(script_dir,
                               'resources/gistemp.sld')) as sld:
            self.set_default_style(self.layer_name, self.layer_name,
                                   sld.read().format(latest_band=bands))
    self.update_geonode(
        self.layer_name,
        title=self.get_title(bands),
        description=self.abstract,
        store=self.layer_name,
        bounds=('-180.0', '180.0', '-90.0', '90.0', 'EPSG:4326'),
        extra_keywords=['category:Climatology Meteorology Atmosphere'])
    self.truncate_gs_cache(self.layer_name)
    self.cleanup()
def publish(self, tif, name, title, desc):
    """
    Publish to Geoserver and Geonode.

    :param tif: File path/name of TIF image
    :param name: layer name
    :param title: layer title
    :param desc: layer description
    :return: None
    """
    category = TopicCategory.objects.get(
        identifier='climatologyMeteorologyAtmosphere')

    def _style_for(t):
        # Map keywords in the layer title to an SLD name; order matters
        # (first match wins), 'worldclim_bio' is the fallback.
        if "Diurnal" in t:
            return "worldclim_diurnal"
        if "Isotherm" in t:
            return "worldclim_isotherm"
        if "Temperature" in t:
            if "Seasonality" in t:
                return "worldclim_temp_seasonality"
            return "worldclim_temp"
        if "Precipitation" in t:
            if "Annual" in t:
                return "worldclim_precip_annual"
            if "Seasonality" in t:
                return "worldclim_precip_seasonality"
            return "worldclim_precip"
        return "worldclim_bio"

    style = _style_for(title)
    self.post_geoserver(tif, name)
    with open(os.path.join(
            script_dir, 'resources/{}.sld'.format(style))) as sld:
        self.set_default_style(name, style, sld.read(),
                               create=not style_exists(style))
    self.truncate_gs_cache(name)
    self.update_geonode(name, title, description=desc,
                        category=category, store=name)
def run(self):
    """
    Retrieve and process the latest NetCDF file.

    Downloads the CMAP NetCDF, converts it, extracts every band not
    already in the mosaic as a dated GeoTIFF, publishes those to
    GeoServer, and finally updates the GeoNode layer metadata.
    """
    ncfile = self.download(
        self.base_url, filename='{}.nc'.format(self.layer_name))
    cdf_file = self.convert(os.path.join(self.tmp_dir, ncfile))
    bands = get_band_count(cdf_file)
    img_list = self.get_mosaic_filenames(self.layer_name)
    for band in range(1, bands + 1):
        # Raw string fixes the invalid '\-'/'\.' escape sequences the
        # original non-raw pattern produced (DeprecationWarning).
        band_date = re.sub(r'[\-\.]+', '',
                           self.get_date(band).isoformat())
        img_name = '{}_{}T000000000Z.tif'.format(self.layer_name,
                                                 band_date)
        if img_name not in img_list:
            band_tif = self.extract_band(cdf_file, band, img_name)
            dst_file = self.data_dir.format(gsd=GS_DATA_DIR,
                                            ws=self.workspace,
                                            layer=self.layer_name,
                                            file=img_name)
            dst_dir = os.path.dirname(dst_file)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            if dst_file.endswith('.tif'):
                shutil.move(os.path.join(self.tmp_dir, band_tif),
                            dst_file)
            self.post_geoserver(dst_file, self.layer_name)
    if not style_exists(self.layer_name):
        with open(os.path.join(script_dir,
                               'resources/cmap.sld')) as sld:
            self.set_default_style(self.layer_name, self.layer_name,
                                   sld.read().format(latest_band=bands))
    self.update_geonode(self.layer_name,
                        title=self.get_title(bands),
                        description=self.abstract,
                        store=self.layer_name,
                        bounds=('-178.75', '178.75', '-88.75', '88.75',
                                'EPSG:4326'))
    self.truncate_gs_cache(self.layer_name)
    self.cleanup()
def run(self):
    """
    Download the GDACS RSS feed for the configured date range, append its
    features to the PostGIS table, then publish/update the layer in
    GeoServer and GeoNode with its category keyword.

    :return: None
    """
    # (Removed leftover debug print of the request URL.)
    rss = self.download(self.base_url.format(self.params['sdate'],
                                             self.params['edate']),
                        filename=self.prefix + ".rss")
    db = ogc_server_settings.datastore_db
    # Append the downloaded features to the PostGIS table via ogr2ogr.
    ogr2ogr_exec("-append -skipfailures -f PostgreSQL \
        \"PG:host={db_host} user={db_user} password={db_pass} \
        dbname={db_name}\" {rss} -nln {table}".format(
        db_host=db["HOST"], db_user=db["USER"],
        db_pass=db["PASSWORD"], db_name=db["NAME"],
        rss="{}".format(os.path.join(self.tmp_dir, rss)),
        table=self.prefix))
    datastore = ogc_server_settings.server.get('DATASTORE')
    if not layer_exists(self.prefix, datastore, DEFAULT_WORKSPACE):
        # First run: add a uniqueness constraint so repeated appends do
        # not duplicate features, then register the layer in GeoServer.
        c = connections[datastore].cursor()
        try:
            c.execute(
                'ALTER TABLE {tb} ADD CONSTRAINT {tb}_guid UNIQUE (guid);'.
                format(tb=self.prefix))
        finally:
            # Fix: the original leaked the cursor on the success path and
            # used `raise e`; `finally` closes it in all cases while any
            # exception still propagates naturally.
            c.close()
        self.post_geoserver_vector(self.prefix)
    if not style_exists(self.prefix):
        with open(os.path.join(script_dir,
                               'resources/gdacs.sld')) as sld:
            self.set_default_style(self.prefix, self.prefix, sld.read())
    self.update_geonode(self.prefix,
                        title=self.layer_title,
                        description=self.description,
                        store=datastore,
                        extra_keywords=['category:Disaster Alerts'])
    self.truncate_gs_cache(self.prefix)
    self.cleanup()
def run(self):
    """
    Run the processor

    Download station/result CSVs for each indicator, update the database
    tables, and publish/update the corresponding GeoNode layers with
    their category keywords.

    :return: None
    """
    # The datastore name is constant; look it up once.
    datastore = ogc_server_settings.server.get('DATASTORE')
    for indicator in self.indicators:
        downloads = self.download(indicator)
        station_csv = downloads['Station']
        if os.path.getsize(os.path.join(self.tmp_dir, station_csv)) > 0:
            self.update_station_table(station_csv)
        result_csv = downloads['Result']
        if os.path.getsize(os.path.join(self.tmp_dir, result_csv)) > 0:
            self.update_indicator_table(result_csv)
            layer_name = '{}{}{}'.format(self.prefix,
                                         self.safe_name(indicator),
                                         self.suffix)
            layer_title = 'Water Quality - {} - Updated {}'.format(
                indicator, datetime.datetime.now().strftime('%Y-%m-%d'))
            if not layer_exists(layer_name, datastore,
                                DEFAULT_WORKSPACE):
                self.post_geoserver_vector(layer_name)
            if not style_exists(layer_name):
                with open(os.path.join(
                        script_dir,
                        'resources/{}.sld'.format(layer_name))) as sld:
                    self.set_default_style(layer_name, layer_name,
                                           sld.read())
            self.update_geonode(
                layer_name, title=layer_title,
                description=self.description, store=datastore,
                extra_keywords=['category:Water Quality'])
            self.truncate_gs_cache(layer_name)
    self.cleanup()