Example #1
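GDACS alert harvester: download a date-bounded RSS feed, append it to a PostGIS table with ogr2ogr, enforce a unique guid constraint the first time the layer is created, then publish the layer to GeoServer and GeoNode with the default GDACS style.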
    def run(self):
        print(self.base_url.format(
            self.params['sdate'], self.params['edate']))
        rss = self.download(self.base_url.format(
            self.params['sdate'], self.params['edate']),
            filename=self.prefix + ".rss")
        db = ogc_server_settings.datastore_db
        ogr2ogr_exec("-append -skipfailures -f PostgreSQL \
            \"PG:host={db_host} user={db_user} password={db_pass} \
            dbname={db_name}\" {rss} -nln {table}".format(
            db_host=db["HOST"],
            db_user=db["USER"],
            db_pass=db["PASSWORD"],
            db_name=db["NAME"],
            rss=os.path.join(self.tmp_dir, rss),
            table=self.prefix))
        datastore = ogc_server_settings.server.get('DATASTORE')
        if not layer_exists(self.prefix, datastore, DEFAULT_WORKSPACE):
            c = connections[datastore].cursor()
            try:
                c.execute(
                    'ALTER TABLE {tb} ADD CONSTRAINT {tb}_guid UNIQUE (guid);'
                    .format(tb=self.prefix))
            finally:
                c.close()
            self.post_geoserver_vector(self.prefix)
        if not style_exists(self.prefix):
            with open(os.path.join(script_dir, 'resources/gdacs.sld')) as sld:
                self.set_default_style(self.prefix, self.prefix, sld.read())
        self.update_geonode(self.prefix,
                            title=self.layer_title,
                            description=self.description,
                            store=datastore)
        self.truncate_gs_cache(self.prefix)
        self.cleanup()
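All of these examples call an ogr2ogr_exec helper that is not defined in the excerpts. A minimal sketch of what such a helper could look like, assuming it simply shells out to the GDAL ogr2ogr binary (the real helper may use the GDAL/OGR Python bindings instead):

    import shlex
    import subprocess

    def ogr2ogr_exec(argstring):
        # Hypothetical stand-in for the helper used above. shlex.split keeps
        # the double-quoted PG connection string together as a single token.
        subprocess.check_call(['ogr2ogr'] + shlex.split(argstring))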
Example #2
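USGS earthquake harvester: download a GeoJSON feed (or accept one via rss_file), rewrite the epoch-millisecond time and updated properties as UTC timestamp strings, then append the features to each configured PostGIS earthquake table, publish them, and purge old records.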
    def run(self, rss_file=None):
        """
        Retrieve the latest USGS earthquake data and append to all PostGIS
        earthquake tables, then remove old data
        :return:
        """
        if not rss_file:
            rss = self.download(self.base_url.format(self.params['sdate'],
                                                     self.params['edate']),
                                filename=self.prefix + '.rss')
            rss_file = os.path.join(self.tmp_dir, rss)

        json_data = None
        with open(rss_file) as json_file:
            json_data = json.load(json_file)
            for feature in json_data['features']:
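                # USGS GeoJSON 'time' and 'updated' values are epoch
                # milliseconds; convert them to UTC timestamp strings.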
                time_original = datetime.datetime.utcfromtimestamp(
                    feature['properties']['time'] / 1000)
                updated_original = datetime.datetime.utcfromtimestamp(
                    feature['properties']['updated'] / 1000)
                feature['properties']['time'] = time_original.strftime(
                    "%Y-%m-%d %H:%M:%S")
                feature['properties']['updated'] = updated_original.strftime(
                    "%Y-%m-%d %H:%M:%S")
        with open(rss_file, 'w') as modified_file:
            json.dump(json_data, modified_file)

        db = ogc_server_settings.datastore_db
        for table, title in zip(self.tables, self.titles):
            ogr2ogr_exec("-append -skipfailures -f PostgreSQL \
                \"PG:host={db_host} user={db_user} password={db_pass} \
                dbname={db_name}\" {rss} -nln {table}".format(
                db_host=db["HOST"],
                db_user=db["USER"],
                db_pass=db["PASSWORD"],
                db_name=db["NAME"],
                rss="{}".format(rss_file),
                table=table))
            datastore = ogc_server_settings.server.get('DATASTORE')
            if not layer_exists(table, datastore, DEFAULT_WORKSPACE):
                c = connections[datastore].cursor()
                q = 'ALTER TABLE {tb} ADD CONSTRAINT {tb}_ids UNIQUE (ids);'
                try:
                    c.execute(q.format(tb=table))
                except Exception:
                    pass  # the constraint probably exists already
                finally:
                    c.close()
                self.post_geoserver_vector(table)
            if not style_exists(table):
                with open(os.path.join(script_dir,
                                       'resources/usgs.sld')) as sld:
                    self.set_default_style(table, table, sld.read())
            self.update_geonode(table,
                                title="Earthquakes - {}".format(title),
                                description=self.description,
                                store=datastore)
            self.truncate_gs_cache(table)
        self.purge_old_data()
        self.cleanup()
Example #3
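Generic layer importer: download each configured layer, retrying once after 60 seconds if the remote service is still generating output, overwrite the matching PostGIS table, and publish the layer with a templated SLD style and a category keyword. Failures are logged per layer so one bad layer does not abort the rest.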
    def run(self):
        """
        Retrieve the layers and import into Geonode
        """

        for layer in self.layers:
            try:
                table = '{}{}'.format(self.prefix, layer['table'])
                lyr_file = os.path.join(self.tmp_dir,
                                        self.download(layer['url'],
                                                      filename=table))
                with open(lyr_file) as inf:
                    result = inf.readline(24)
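                    # The service returns a {"processingTime": ...} JSON stub
                    # while the requested output is still being generated.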
                    if result.startswith('{"processingTime":'):
                        logger.info('Output is being generated, '
                                    'will try again in 60 seconds')
                        time.sleep(60)
                        lyr_file = os.path.join(
                            self.tmp_dir,
                            self.download(layer['url'], filename=table))
                db = ogc_server_settings.datastore_db
                ogr2ogr_exec("-overwrite -skipfailures -f PostgreSQL \
                    \"PG:host={db_host} user={db_user} password={db_pass} \
                    dbname={db_name}\" {lyr} -nln {table}".format(
                    db_host=db["HOST"],
                    db_user=db["USER"],
                    db_pass=db["PASSWORD"],
                    db_name=db["NAME"],
                    lyr="{}".format(lyr_file),
                    table=table))
                datastore = ogc_server_settings.server.get('DATASTORE')
                if not layer_exists(table, datastore, DEFAULT_WORKSPACE):
                    self.post_geoserver_vector(table)
                if not style_exists(table):
                    with open(os.path.join(
                            script_dir, 'resources/{}.sld'.format(
                            layer['sld']))) as sld:
                        sld_text = sld.read().format(table=layer['table'],
                                                     title=layer['name'])
                        self.set_default_style(table, table, sld_text)
                keywords = self.layer_category_mapping[layer['table']]
                self.update_geonode(
                    table,
                    title=layer['name'],
                    description=layer['description'],
                    store=datastore,
                    extra_keywords=[keywords])
                self.truncate_gs_cache(table)
            except Exception:
                logger.error('Error with layer {}'.format(layer['name']))
                logger.error(traceback.format_exc())
        self.cleanup()
Example #4
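MMWR layer updater: write VRT and CSVT sidecar files for a downloaded CSV, load it into PostGIS (overwriting the weekly table, appending to the archive), add a unique (place, report_date) constraint for new tables, and publish the layer. A sketch of the vrt_content and csvt_content constants follows the code.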
    def update_layer(self, layer):
        """
        Create or update the MMWR layer in GeoNode
        :param layer: Layer to update (weekly or archive)
        :return: None
        """
        csvfile = "{}.csv".format(self.prefix)
        vrt_file = os.path.join(self.tmp_dir, '{}.vrt'.format(self.prefix))
        csvt_file = os.path.join(self.tmp_dir, '{}.csvt'.format(self.prefix))
        if not os.path.exists(vrt_file):
            with open(vrt_file, 'w') as vrt:
                vrt.write(
                    vrt_content.format(name=csvfile.replace('.csv', ''),
                                       csv=os.path.join(self.tmp_dir,
                                                        csvfile)))
        if not os.path.exists(csvt_file):
            with open(csvt_file, 'w') as csvt:
                csvt.write(csvt_content)

        db = ogc_server_settings.datastore_db
        table = '{}_{}'.format(self.prefix, layer).lower()
        option = 'overwrite' if layer.lower() == 'weekly' else 'append'
        ogr2ogr_exec("-{option} -skipfailures -f PostgreSQL \
            \"PG:host={db_host} user={db_user} password={db_pass} \
            dbname={db_name}\" {vrt} -nln {table}".format(
            db_host=db["HOST"],
            db_user=db["USER"],
            db_pass=db["PASSWORD"],
            db_name=db["NAME"],
            vrt="{}".format(vrt_file),
            option=option,
            table=table))
        if not layer_exists(table, ogc_server_settings.server.get('DATASTORE'),
                            DEFAULT_WORKSPACE):
            constraint = 'ALTER TABLE {table} ADD CONSTRAINT ' \
                         '{table}_unique UNIQUE (place, report_date)'\
                .format(table=table)
            postgres_query(constraint, commit=True)
            self.post_geoserver_vector(table)
        if not style_exists(table):
            with open(os.path.join(script_dir,
                                   'resources/mmwr.sld')) as sldfile:
                sld = sldfile.read().format(layername=table)
                self.set_default_style(table, table, sld)
        self.update_geonode(table,
                            title='{} {}'.format(self.base_title, layer),
                            description=self.description,
                            extra_keywords=['category:Population'])
        self.truncate_gs_cache(table)
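The vrt_content and csvt_content constants used above are defined elsewhere in the module and are not shown. A plausible reconstruction, with the VRT modeled on the one defined inline in Example #5 and purely illustrative CSVT column types (both are assumptions, not the module's actual values):

    # Hypothetical reconstruction; the real template is not in these excerpts.
    vrt_content = """<OGRVRTDataSource>
        <OGRVRTLayer name="{name}">
            <SrcDataSource>{csv}</SrcDataSource>
        </OGRVRTLayer>
    </OGRVRTDataSource>
    """

    # A .csvt sidecar declares one GDAL field type per CSV column.
    csvt_content = '"String","Date","Integer"'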
Example #5
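Water-quality station loader: wrap a station CSV in an OGR VRT that builds point geometry from the LongitudeMeasure/LatitudeMeasure columns, append it to the station table, and add a unique monitoringlocationidentifier constraint when the table is first created.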
    def update_station_table(self, csvfile):
        """
        Insert data on water quality monitoring stations
        from a csv file into the database
        :param csvfile: CSV file containing station data
        :return: None
        """
        vrt_content = ("""<OGRVRTDataSource>
                <OGRVRTLayer name="{name}">
                    <SrcDataSource>{csv}</SrcDataSource>
                    <GeometryType>wkbPoint</GeometryType>
                    <LayerSRS>WGS84</LayerSRS>
                    <GeometryField encoding="PointFromColumns"
                    x="LongitudeMeasure" y="LatitudeMeasure"/>
                </OGRVRTLayer>
            </OGRVRTDataSource>
            """)
        station_table = self.station_table
        needs_index = not table_exists(station_table)

        db = ogc_server_settings.datastore_db
        vrt_file = os.path.join(self.tmp_dir, csvfile.replace('.csv', '.vrt'))
        csv_name = os.path.basename(csvfile).replace(".csv", "")
        if not os.path.exists(vrt_file):
            with open(vrt_file, 'w') as vrt:
                vrt.write(
                    vrt_content.format(name=csv_name,
                                       csv=os.path.join(self.tmp_dir,
                                                        csvfile)))
        ogr2ogr_exec("-append -skipfailures -f PostgreSQL \
            \"PG:host={db_host} user={db_user} password={db_pass} \
            dbname={db_name}\" {vrt} -nln {table}".format(
            db_host=db["HOST"],
            db_user=db["USER"],
            db_pass=db["PASSWORD"],
            db_name=db["NAME"],
            vrt="{}".format(vrt_file),
            table=station_table))
        if needs_index:
            sql = 'ALTER TABLE {} '.format(station_table) + \
                  'ADD CONSTRAINT monitoringlocationidentifier_key ' + \
                  'UNIQUE (monitoringlocationidentifier)'
            logger.debug(sql)
            postgres_query(sql, commit=True)
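postgres_query and table_exists are helpers that are not shown in these excerpts. A minimal sketch of postgres_query, assuming it runs raw SQL on the same Django datastore connection used in Examples #1 and #2 (the real helper may differ):

    from django.db import connections

    def postgres_query(query, commit=False):
        # Hypothetical sketch: execute raw SQL against the GeoNode datastore
        # connection and optionally commit the transaction.
        conn = connections[ogc_server_settings.server.get('DATASTORE')]
        cursor = conn.cursor()
        try:
            cursor.execute(query)
            if commit:
                conn.commit()
        finally:
            cursor.close()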