def run(self):
    """
    Run the processor: for each water-quality indicator, download the
    station/result CSVs, load any non-empty ones into PostGIS, and
    publish/update the corresponding GeoServer layer and GeoNode record.

    :return: None
    """
    for indicator in self.indicators:
        # download() returns a dict of CSV filenames keyed by type.
        csv_dict = self.download(indicator)
        station_csv = csv_dict['Station']
        # Skip empty downloads — a zero-byte CSV means no new stations.
        if os.path.getsize(os.path.join(self.tmp_dir, station_csv)) > 0:
            self.update_station_table(station_csv)
        result_csv = csv_dict['Result']
        datastore = ogc_server_settings.server.get('DATASTORE')
        if os.path.getsize(os.path.join(self.tmp_dir, result_csv)) > 0:
            self.update_indicator_table(result_csv)
            layer_name = '{}{}{}'.format(self.prefix,
                                         self.safe_name(indicator),
                                         self.suffix)
            layer_title = 'Water Quality - {} - Updated {}'.format(
                indicator, datetime.datetime.now().strftime('%Y-%m-%d'))
            # Create the GeoServer layer only on first run.
            if not layer_exists(layer_name, datastore, DEFAULT_WORKSPACE):
                self.post_geoserver_vector(layer_name)
            # Each indicator layer has its own SLD resource file.
            if not style_exists(layer_name):
                with open(os.path.join(
                        script_dir,
                        'resources/{}.sld'.format(layer_name))) as sld:
                    self.set_default_style(layer_name, layer_name,
                                           sld.read())
            self.update_geonode(layer_name, title=layer_title,
                                store=datastore)
            # Invalidate the GeoWebCache tiles so new data is visible.
            self.truncate_gs_cache(layer_name)
    self.cleanup()
def run(self):
    """
    Download the GDACS RSS feed for the configured date range, append it
    to the PostGIS table with ogr2ogr, then publish/update the GeoServer
    layer, default style and GeoNode metadata.

    :return: None
    """
    print(self.base_url.format(self.params['sdate'], self.params['edate']))
    rss = self.download(self.base_url.format(self.params['sdate'],
                                             self.params['edate']),
                        filename=self.prefix + ".rss")
    db = ogc_server_settings.datastore_db
    ogr2ogr_exec("-append -skipfailures -f PostgreSQL \
        \"PG:host={db_host} user={db_user} password={db_pass} \
        dbname={db_name}\" {rss} -nln {table}".format(
        db_host=db["HOST"], db_user=db["USER"],
        db_pass=db["PASSWORD"], db_name=db["NAME"],
        rss=os.path.join(self.tmp_dir, rss), table=self.prefix))
    datastore = ogc_server_settings.server.get('DATASTORE')
    if not layer_exists(self.prefix, datastore, DEFAULT_WORKSPACE):
        # First run: add a unique constraint on guid so subsequent
        # -append imports skip duplicate records.
        c = connections[datastore].cursor()
        try:
            c.execute(
                'ALTER TABLE {tb} ADD CONSTRAINT {tb}_guid UNIQUE (guid);'.
                format(tb=self.prefix))
        finally:
            # Fix: the original closed the cursor only on the exception
            # path, leaking it on success. Exceptions still propagate.
            c.close()
        self.post_geoserver_vector(self.prefix)
    if not style_exists(self.prefix):
        with open(os.path.join(script_dir, 'resources/gdacs.sld')) as sld:
            self.set_default_style(self.prefix, self.prefix, sld.read())
    self.update_geonode(self.prefix, title=self.layer_title,
                        description=self.description, store=datastore)
    self.truncate_gs_cache(self.prefix)
    self.cleanup()
def run(self):
    """
    Download the GDACS RSS feed for the configured date range, append it
    to the PostGIS table with ogr2ogr, then publish/update the GeoServer
    layer, default style and GeoNode metadata.

    :return: None
    """
    print(self.base_url.format(
        self.params['sdate'], self.params['edate']))
    rss = self.download(self.base_url.format(
        self.params['sdate'], self.params['edate']),
        filename=self.prefix + ".rss")
    db = ogc_server_settings.datastore_db
    ogr2ogr_exec("-append -skipfailures -f PostgreSQL \
        \"PG:host={db_host} user={db_user} password={db_pass} \
        dbname={db_name}\" {rss} -nln {table}".format(
        db_host=db["HOST"], db_user=db["USER"],
        db_pass=db["PASSWORD"], db_name=db["NAME"],
        rss=os.path.join(self.tmp_dir, rss), table=self.prefix))
    datastore = ogc_server_settings.server.get('DATASTORE')
    if not layer_exists(self.prefix, datastore, DEFAULT_WORKSPACE):
        # First run: add a unique constraint on guid so subsequent
        # -append imports skip duplicate records.
        c = connections[datastore].cursor()
        try:
            c.execute(
                'ALTER TABLE {tb} ADD CONSTRAINT {tb}_guid UNIQUE (guid);'.
                format(tb=self.prefix))
        finally:
            # Fix: the original closed the cursor only on the exception
            # path, leaking it on success. Exceptions still propagate.
            c.close()
        self.post_geoserver_vector(self.prefix)
    if not style_exists(self.prefix):
        with open(os.path.join(
                script_dir, 'resources/gdacs.sld')) as sld:
            self.set_default_style(self.prefix, self.prefix, sld.read())
    self.update_geonode(self.prefix, title=self.layer_title,
                        description=self.description, store=datastore)
    self.truncate_gs_cache(self.prefix)
    self.cleanup()
def run(self):
    """
    Run the processor: for each water-quality indicator, download the
    station/result CSVs, load any non-empty ones into PostGIS, and
    publish/update the corresponding GeoServer layer and GeoNode record.

    :return: None
    """
    for indicator in self.indicators:
        # download() returns a dict of CSV filenames keyed by type.
        csv_dict = self.download(indicator)
        station_csv = csv_dict['Station']
        # Skip empty downloads — a zero-byte CSV means no new stations.
        if os.path.getsize(os.path.join(self.tmp_dir, station_csv)) > 0:
            self.update_station_table(station_csv)
        result_csv = csv_dict['Result']
        datastore = ogc_server_settings.server.get('DATASTORE')
        if os.path.getsize(os.path.join(self.tmp_dir, result_csv)) > 0:
            self.update_indicator_table(result_csv)
            layer_name = '{}{}{}'.format(self.prefix,
                                         self.safe_name(indicator),
                                         self.suffix)
            layer_title = 'Water Quality - {} - Updated {}'.format(
                indicator, datetime.datetime.now().strftime('%Y-%m-%d'))
            # Create the GeoServer layer only on first run.
            if not layer_exists(layer_name, datastore, DEFAULT_WORKSPACE):
                self.post_geoserver_vector(layer_name)
            # Each indicator layer has its own SLD resource file.
            if not style_exists(layer_name):
                with open(
                        os.path.join(
                            script_dir,
                            'resources/{}.sld'.format(layer_name))) as sld:
                    self.set_default_style(layer_name, layer_name,
                                           sld.read())
            self.update_geonode(layer_name, title=layer_title,
                                description=self.description,
                                store=datastore)
            # Invalidate the GeoWebCache tiles so new data is visible.
            self.truncate_gs_cache(layer_name)
    self.cleanup()
def run(self, rss_file=None):
    """
    Retrieve the latest USGS earthquake data and append to all PostGIS
    earthquake tables, then remove old data.

    :param rss_file: optional path to an already-downloaded GeoJSON
        feed; when None the feed is downloaded first.
    :return: None
    """
    if not rss_file:
        rss = self.download(self.base_url.format(self.params['sdate'],
                                                 self.params['edate']),
                            filename=self.prefix + '.rss')
        rss_file = os.path.join(self.tmp_dir, rss)
    json_data = None
    with open(rss_file) as json_file:
        json_data = json.load(json_file)
    # USGS timestamps are epoch milliseconds; rewrite them as
    # 'YYYY-MM-DD HH:MM:SS' text before loading into the tables.
    for feature in json_data['features']:
        time_original = datetime.datetime.utcfromtimestamp(
            feature['properties']['time'] / 1000)
        updated_original = datetime.datetime.utcfromtimestamp(
            feature['properties']['updated'] / 1000)
        feature['properties']['time'] = time_original.strftime(
            "%Y-%m-%d %H:%M:%S")
        feature['properties']['updated'] = updated_original.strftime(
            "%Y-%m-%d %H:%M:%S")
    with open(rss_file, 'w') as modified_file:
        json.dump(json_data, modified_file)
    db = ogc_server_settings.datastore_db
    for table, title in zip(self.tables, self.titles):
        ogr2ogr_exec("-append -skipfailures -f PostgreSQL \
            \"PG:host={db_host} user={db_user} password={db_pass} \
            dbname={db_name}\" {rss} -nln {table}".format(
            db_host=db["HOST"], db_user=db["USER"],
            db_pass=db["PASSWORD"], db_name=db["NAME"],
            rss=rss_file, table=table))
        datastore = ogc_server_settings.server.get('DATASTORE')
        if not layer_exists(table, datastore, DEFAULT_WORKSPACE):
            c = connections[datastore].cursor()
            q = 'ALTER TABLE {tb} ADD CONSTRAINT {tb}_ids UNIQUE (ids);'
            try:
                c.execute(q.format(tb=table))
            except Exception:
                # Best-effort: the constraint may already exist.
                pass
            finally:
                # Fix: the original closed the cursor only in the except
                # branch, leaking it whenever the ALTER succeeded.
                c.close()
            self.post_geoserver_vector(table)
        if not style_exists(table):
            with open(os.path.join(script_dir,
                                   'resources/usgs.sld')) as sld:
                self.set_default_style(table, table, sld.read())
        self.update_geonode(table,
                            title="Earthquakes - {}".format(title),
                            description=self.description,
                            store=datastore)
        self.truncate_gs_cache(table)
    self.purge_old_data()
    self.cleanup()
def run(self, rss_file=None):
    """
    Retrieve the latest USGS earthquake data and append to all PostGIS
    earthquake tables, then remove old data.

    :param rss_file: optional path to an already-downloaded GeoJSON
        feed; when None the feed is downloaded first.
    :return: None
    """
    if not rss_file:
        rss = self.download(self.base_url.format(self.params['sdate'],
                                                 self.params['edate']),
                            filename=self.prefix + '.rss')
        rss_file = os.path.join(self.tmp_dir, rss)
    json_data = None
    with open(rss_file) as json_file:
        json_data = json.load(json_file)
    # USGS timestamps are epoch milliseconds; rewrite them as
    # 'YYYY-MM-DD HH:MM:SS' text before loading into the tables.
    for feature in json_data['features']:
        time_original = datetime.datetime.utcfromtimestamp(
            feature['properties']['time']/1000)
        updated_original = datetime.datetime.utcfromtimestamp(
            feature['properties']['updated']/1000)
        feature['properties']['time'] = time_original.strftime(
            "%Y-%m-%d %H:%M:%S")
        feature['properties']['updated'] = updated_original.strftime(
            "%Y-%m-%d %H:%M:%S")
    with open(rss_file, 'w') as modified_file:
        json.dump(json_data, modified_file)
    db = ogc_server_settings.datastore_db
    for table, title in zip(self.tables, self.titles):
        ogr2ogr_exec("-append -skipfailures -f PostgreSQL \
            \"PG:host={db_host} user={db_user} password={db_pass} \
            dbname={db_name}\" {rss} -nln {table}".format(
            db_host=db["HOST"], db_user=db["USER"],
            db_pass=db["PASSWORD"], db_name=db["NAME"],
            rss=rss_file, table=table))
        datastore = ogc_server_settings.server.get('DATASTORE')
        if not layer_exists(table, datastore, DEFAULT_WORKSPACE):
            c = connections[datastore].cursor()
            q = 'ALTER TABLE {tb} ADD CONSTRAINT {tb}_ids UNIQUE (ids);'
            try:
                c.execute(q.format(tb=table))
            except Exception:
                # Best-effort: the constraint may already exist.
                pass
            finally:
                # Fix: the original closed the cursor only in the except
                # branch, leaking it whenever the ALTER succeeded.
                c.close()
            self.post_geoserver_vector(table)
        if not style_exists(table):
            with open(os.path.join(
                    script_dir, 'resources/usgs.sld')) as sld:
                self.set_default_style(table, table, sld.read())
        self.update_geonode(table,
                            title="Earthquakes - {}".format(title),
                            description=self.description,
                            store=datastore)
        self.truncate_gs_cache(table)
    self.purge_old_data()
    self.cleanup()
def run(self):
    """
    Retrieve the layers and import into Geonode.

    Failures on one layer are logged and do not stop the others.

    :return: None
    """
    for layer in self.layers:
        try:
            table = '{}{}'.format(self.prefix, layer['table'])
            lyr_file = os.path.join(self.tmp_dir,
                                    self.download(layer['url'],
                                                  filename=table))
            # The service may return a 'still processing' stub instead
            # of data; in that case wait and download once more.
            with open(lyr_file) as inf:
                result = inf.readline(24)
                if result.startswith('{"processingTime":'):
                    # Fix: original implicit string concatenation was
                    # missing a space ("generated,will try again").
                    logger.info('Output is being generated, '
                                'will try again in 60 seconds')
                    time.sleep(60)
                    lyr_file = os.path.join(
                        self.tmp_dir,
                        self.download(layer['url'], filename=table))
            db = ogc_server_settings.datastore_db
            ogr2ogr_exec("-overwrite -skipfailures -f PostgreSQL \
                \"PG:host={db_host} user={db_user} password={db_pass} \
                dbname={db_name}\" {lyr} -nln {table}".format(
                db_host=db["HOST"], db_user=db["USER"],
                db_pass=db["PASSWORD"], db_name=db["NAME"],
                lyr=lyr_file, table=table))
            datastore = ogc_server_settings.server.get('DATASTORE')
            if not layer_exists(table, datastore, DEFAULT_WORKSPACE):
                self.post_geoserver_vector(table)
            if not style_exists(table):
                # The SLD template is parameterized by table and title.
                with open(os.path.join(
                        script_dir,
                        'resources/{}.sld'.format(layer['sld']))) as sld:
                    sld_text = sld.read().format(table=layer['table'],
                                                 title=layer['name'])
                    self.set_default_style(table, table, sld_text)
            keywords = self.layer_category_mapping[layer['table']]
            self.update_geonode(
                table,
                title=layer['name'],
                description=layer['description'],
                store=datastore,
                extra_keywords=[keywords])
            self.truncate_gs_cache(table)
        except Exception:
            logger.error('Error with layer {}'.format(layer['name']))
            logger.error(traceback.format_exc())
    self.cleanup()
def run(self):
    """Process the AQICN data, then publish/update the GeoNode layer."""
    self.process()
    table = self.prefix
    store = ogc_server_settings.server.get('DATASTORE')
    # First run only: register the vector layer with GeoServer.
    if not layer_exists(table, store, DEFAULT_WORKSPACE):
        self.post_geoserver_vector(table)
    # First run only: install the bundled default SLD style.
    if not style_exists(table):
        sld_path = os.path.join(script_dir, 'resources/aqicn.sld')
        with open(sld_path) as sld:
            sld_body = sld.read()
        self.set_default_style(table, table, sld_body)
    self.update_geonode(table, title='Air Quality Index', store=store)
    # Drop cached tiles so the refreshed data becomes visible.
    self.truncate_gs_cache(table)
    self.cleanup()
def run(self):
    """
    Retrieve the layers and import into Geonode.

    Failures on one layer are logged and do not stop the others.

    :return: None
    """
    for layer in self.layers:
        try:
            table = '{}{}'.format(self.prefix, layer['table'])
            lyr_file = os.path.join(self.tmp_dir,
                                    self.download(layer['url'],
                                                  filename=table))
            # The service may return a 'still processing' stub instead
            # of data; in that case wait and download once more.
            with open(lyr_file) as inf:
                result = inf.readline(24)
                if result.startswith('{"processingTime":'):
                    # Fix: original implicit string concatenation was
                    # missing a space ("generated,will try again").
                    logger.info('Output is being generated, '
                                'will try again in 60 seconds')
                    time.sleep(60)
                    lyr_file = os.path.join(
                        self.tmp_dir,
                        self.download(layer['url'], filename=table))
            db = ogc_server_settings.datastore_db
            ogr2ogr_exec("-overwrite -skipfailures -f PostgreSQL \
                \"PG:host={db_host} user={db_user} password={db_pass} \
                dbname={db_name}\" {lyr} -nln {table}".format(
                db_host=db["HOST"], db_user=db["USER"],
                db_pass=db["PASSWORD"], db_name=db["NAME"],
                lyr=lyr_file, table=table))
            datastore = ogc_server_settings.server.get('DATASTORE')
            if not layer_exists(table, datastore, DEFAULT_WORKSPACE):
                self.post_geoserver_vector(table)
            if not style_exists(table):
                # The SLD template is parameterized by table and title.
                with open(os.path.join(
                        script_dir,
                        'resources/{}.sld'.format(layer['sld']))) as sld:
                    sld_text = sld.read().format(table=layer['table'],
                                                 title=layer['name'])
                    self.set_default_style(table, table, sld_text)
            self.update_geonode(table,
                                title=layer['name'],
                                description=layer['description'],
                                store=datastore)
            self.truncate_gs_cache(table)
        except Exception:
            logger.error('Error with layer {}'.format(layer['name']))
            logger.error(traceback.format_exc())
    self.cleanup()
def update_layer(self, layer):
    """
    Create or update the MMWR layer in GeoNode
    :param layer: Layer to update (weekly or archive)
    :return: None
    """
    csvfile = "{}.csv".format(self.prefix)
    vrt_file = os.path.join(self.tmp_dir, '{}.vrt'.format(self.prefix))
    csvt_file = os.path.join(self.tmp_dir, '{}.csvt'.format(self.prefix))
    # Write the OGR virtual-format (.vrt) and column-type (.csvt)
    # sidecar files once, so ogr2ogr can read the CSV as a layer.
    if not os.path.exists(vrt_file):
        with open(vrt_file, 'w') as vrt:
            vrt.write(
                vrt_content.format(name=csvfile.replace('.csv', ''),
                                   csv=os.path.join(self.tmp_dir,
                                                    csvfile)))
    if not os.path.exists(csvt_file):
        with open(csvt_file, 'w') as csvt:
            csvt.write(csvt_content)
    db = ogc_server_settings.datastore_db
    table = '{}_{}'.format(self.prefix, layer).lower()
    # The weekly table is replaced wholesale; the archive accumulates.
    option = 'overwrite' if layer.lower() == 'weekly' else 'append'
    ogr2ogr_exec("-{option} -skipfailures -f PostgreSQL \
        \"PG:host={db_host} user={db_user} password={db_pass} \
        dbname={db_name}\" {vrt} -nln {table}".format(
        db_host=db["HOST"], db_user=db["USER"],
        db_pass=db["PASSWORD"], db_name=db["NAME"],
        vrt="{}".format(vrt_file), option=option, table=table))
    if not layer_exists(table,
                        ogc_server_settings.server.get('DATASTORE'),
                        DEFAULT_WORKSPACE):
        # Unique constraint so appended rows cannot duplicate an
        # existing (place, report_date) record.
        constraint = 'ALTER TABLE {table} ADD CONSTRAINT ' \
                     '{table}_unique UNIQUE (place, report_date)'\
            .format(table=table)
        postgres_query(constraint, commit=True)
        self.post_geoserver_vector(table)
    if not style_exists(table):
        # The SLD template is parameterized by the layer name.
        with open(os.path.join(script_dir,
                               'resources/mmwr.sld')) as sldfile:
            sld = sldfile.read().format(layername=table)
            self.set_default_style(table, table, sld)
    self.update_geonode(table,
                        title='{} {}'.format(self.base_title, layer),
                        description=self.description,
                        extra_keywords=['category:Population'])
    # Invalidate the GeoWebCache tiles so new data is visible.
    self.truncate_gs_cache(table)
def run(self):
    """Process the AQICN data, then publish/update the GeoNode layer."""
    self.process()
    table = self.prefix
    store = ogc_server_settings.server.get('DATASTORE')
    # First run only: register the vector layer with GeoServer.
    if not layer_exists(table, store, DEFAULT_WORKSPACE):
        self.post_geoserver_vector(table)
    # First run only: install the bundled default SLD style.
    if not style_exists(table):
        sld_path = os.path.join(script_dir, 'resources/aqicn.sld')
        with open(sld_path) as sld:
            sld_body = sld.read()
        self.set_default_style(table, table, sld_body)
    self.update_geonode(table, title='Air Quality Index',
                        description=self.description, store=store)
    # Drop cached tiles so the refreshed data becomes visible.
    self.truncate_gs_cache(table)
    self.cleanup()
def run(self):
    """Scrape WHISP reports into PostGIS and publish them to GeoNode."""
    layer = self.prefix
    if not table_exists(layer):
        # First run: create the table, then backfill from the archive.
        postgres_query(WHISP_TABLE.format(table=layer), commit=True)
        self.import_archive()
    self.scrape()
    store = ogc_server_settings.server.get('DATASTORE')
    # Register the vector layer with GeoServer if not already present.
    if not layer_exists(layer, store, DEFAULT_WORKSPACE):
        self.post_geoserver_vector(layer)
    # Install the bundled default SLD style on first run.
    if not style_exists(layer):
        sld_path = os.path.join(script_dir, 'resources/whisp.sld')
        with open(sld_path) as sld:
            self.set_default_style(layer, layer, sld.read())
    self.update_geonode(layer, title=self.title,
                        description=self.description)
    # Drop cached tiles so the refreshed data becomes visible.
    self.truncate_gs_cache(layer)
    self.cleanup()
def update_layer(self, layer):
    """
    Create or update the MMWR layer in GeoNode
    :param layer: Layer to update (weekly or archive)
    :return: None
    """
    csvfile = "{}.csv".format(self.prefix)
    vrt_file = os.path.join(self.tmp_dir, '{}.vrt'.format(self.prefix))
    csvt_file = os.path.join(self.tmp_dir, '{}.csvt'.format(self.prefix))
    # Write the OGR virtual-format (.vrt) and column-type (.csvt)
    # sidecar files once, so ogr2ogr can read the CSV as a layer.
    if not os.path.exists(vrt_file):
        with open(vrt_file, 'w') as vrt:
            vrt.write(vrt_content.format(
                name=csvfile.replace('.csv', ''),
                csv=os.path.join(self.tmp_dir, csvfile)))
    if not os.path.exists(csvt_file):
        with open(csvt_file, 'w') as csvt:
            csvt.write(csvt_content)
    db = ogc_server_settings.datastore_db
    table = '{}_{}'.format(self.prefix, layer).lower()
    # The weekly table is replaced wholesale; the archive accumulates.
    option = 'overwrite' if layer.lower() == 'weekly' else 'append'
    ogr2ogr_exec("-{option} -skipfailures -f PostgreSQL \
        \"PG:host={db_host} user={db_user} password={db_pass} \
        dbname={db_name}\" {vrt} -nln {table}".format(
        db_host=db["HOST"], db_user=db["USER"],
        db_pass=db["PASSWORD"], db_name=db["NAME"],
        vrt="{}".format(vrt_file), option=option, table=table))
    if not layer_exists(table,
                        ogc_server_settings.server.get('DATASTORE'),
                        DEFAULT_WORKSPACE):
        # Unique constraint so appended rows cannot duplicate an
        # existing (place, report_date) record.
        constraint = 'ALTER TABLE {table} ADD CONSTRAINT ' \
                     '{table}_unique UNIQUE (place, report_date)'\
            .format(table=table)
        postgres_query(constraint, commit=True)
        self.post_geoserver_vector(table)
    if not style_exists(table):
        # The SLD template is parameterized by the layer name.
        with open(os.path.join(
                script_dir, 'resources/mmwr.sld')) as sldfile:
            sld = sldfile.read().format(layername=table)
            self.set_default_style(table, table, sld)
    self.update_geonode(
        table, title='{} {}'.format(self.base_title, layer),
        description=self.description)
    # Invalidate the GeoWebCache tiles so new data is visible.
    self.truncate_gs_cache(table)