Example #1
0
def _run_ogr_command(command, errmsg):
    """Run an ogr command line and raise on failure.

    Any output on stderr is treated as failure (so GDAL warnings abort the
    import), matching the strictness of the original inline checks.

    :param command: full command line string, split with shlex.
    :param errmsg: caller-supplied message prefix; stderr is appended.
    :raises Exception: when the process exits non-zero or writes to stderr.
    """
    p = subprocess.Popen(
        shlex.split(command),
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    _output, err = p.communicate()
    if p.returncode != 0 or len(err) > 0:
        raise Exception("{}. {}".format(errmsg, err))


def import_into_db(dbfname, schemaname):
    """Import dataset file *dbfname* into the DB server under *schemaname*.

    Creates the schema if it does not exist, then loads the dataset with
    ogr2ogr using 'fid' as the FID column and 'geom' as the geometry column.

    :raises Exception: wrapping any failure from either ogr command.
    """
    try:
        # create schema first so ogr2ogr has a target to import into
        command = 'ogrinfo -so PG:"{dbconn}" -sql "create schema if not exists {schema}"'.format(
            dbconn=DatabaseManager.connection_details(), schema=schemaname)
        _run_ogr_command(
            command, "Fail to create schema '{}'".format(schemaname))

        # import the dataset. Set FID and geometry name.
        # NOTE(review): schemaname/dbfname are interpolated into a command
        # line; they are derived from fetched filenames — verify upstream
        # sanitisation before exposing to untrusted input.
        command = 'ogr2ogr -f PostgreSQL PG:"{dbconn}" -lco schema={schema} -lco FID=fid -lco GEOMETRY_NAME=geom  {dbfile} -skipfailures -overwrite --config PG_USE_COPY YES'.format(
            dbconn=DatabaseManager.connection_details(),
            schema=schemaname,
            dbfile=dbfname)
        # fixed: closing quote belongs after the dataset name, before the
        # error text (was "Fail to import dataset '{}. {}'")
        _run_ogr_command(
            command, "Fail to import dataset '{}'".format(dbfname))

    except Exception as e:
        raise Exception("Failed to import dataset into DB server. {0}".format(
            str(e)))
Example #2
0
def import_into_db(dbfname, schemaname):
    """Load a dataset file into the PostGIS server.

    Ensures the destination schema exists, then runs ogr2ogr to import the
    dataset, naming the FID column 'fid' and the geometry column 'geom'.
    Raises Exception on any failure.
    """
    try:
        # Step 1: make sure the destination schema exists.
        schema_cmd = 'ogrinfo -so PG:"{dbconn}" -sql "create schema if not exists {schema}"'.format(
            dbconn=DatabaseManager.connection_details(), schema=schemaname)
        proc = subprocess.Popen(shlex.split(schema_cmd),
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        _out, errtext = proc.communicate()
        # Non-zero exit OR any stderr output counts as failure.
        if proc.returncode != 0 or len(errtext) > 0:
            raise Exception("Fail to create schema '{}'. {}".format(
                schemaname, errtext))

        # Step 2: run the actual import with fixed FID/geometry column names.
        import_cmd = 'ogr2ogr -f PostgreSQL PG:"{dbconn}" -lco schema={schema} -lco FID=fid -lco GEOMETRY_NAME=geom  {dbfile} -skipfailures -overwrite --config PG_USE_COPY YES'.format(
            dbconn=DatabaseManager.connection_details(),
            schema=schemaname,
            dbfile=dbfname)
        proc = subprocess.Popen(shlex.split(import_cmd),
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        _out, errtext = proc.communicate()
        if proc.returncode != 0 or len(errtext) > 0:
            raise Exception("Fail to import dataset '{}. {}'".format(
                dbfname, errtext))

    except Exception as e:
        # Re-wrap every failure with a uniform, caller-facing message.
        raise Exception("Failed to import dataset into DB server. {0}".format(
            str(e)))
Example #3
0
 def get_table_details(self, filename, tablename):
     """Return (table, id column, geometry column, base extent) for the
     given layer, or (None, None, None, None) when no metadata is found."""
     mddata = DatabaseManager.get_metadata(filename)
     layer_info = mddata.get(tablename, None) if mddata else None
     if not layer_info:
         return (None, None, None, None)
     # Pull the four well-known metadata keys in a fixed order.
     keys = ('table', 'id_column', 'geometry_column', 'base_extent')
     return tuple(layer_info.get(key, None) for key in keys)
Example #4
0
 def get_table_details(self, filename, tablename):
     """Look up the table name, id column, geometry column and extent of a
     layer; every element of the tuple is None when unavailable."""
     not_found = (None, None, None, None)
     mddata = DatabaseManager.get_metadata(filename)
     # Guard clauses: bail out early when either lookup level is missing.
     if not mddata:
         return not_found
     layer_info = mddata.get(tablename, None)
     if not layer_info:
         return not_found
     return (layer_info.get('table', None),
             layer_info.get('id_column', None),
             layer_info.get('geometry_column', None),
             layer_info.get('base_extent', None))
Example #5
0
    def get_minmax_value(self, attrname, table):
        """Return (min, max) of column *attrname* over *table* from the DB.

        :raises Exception: when the query yields no result set.
        """
        connect = "PG:{dbconn}".format(dbconn=DatabaseManager.connection_details())
        dbconn = ogr.Open(connect)

        # NOTE(review): attrname/table are interpolated straight into SQL and
        # ultimately come from request parameters — open to SQL injection;
        # consider validating them against the known table metadata.
        sql = "select min({attr}), max({attr}) from {table}".format(attr=attrname, table=table)
        result = dbconn.ExecuteSQL(sql)

        if result is None:
            raise Exception("Error in getting min/max value for {} from table {}".format(attrname, table))

        try:
            # GetNextFeature is the canonical OGR call; layer.next() is a
            # Python-2-only iterator shim removed in newer GDAL bindings.
            row = result.GetNextFeature()
            return row.GetField(0), row.GetField(1)  # min, max
        finally:
            # Result layers from ExecuteSQL must be released explicitly,
            # otherwise the underlying cursor leaks (per GDAL docs).
            dbconn.ReleaseResultSet(result)
Example #6
0
    def get_minmax_value(self, attrname, table):
        """Query the DB server for the minimum and maximum of a column.

        Returns a (min, max) tuple for *attrname* in *table*; raises
        Exception when the SQL query produces no result set.
        """
        conn_str = 'PG:{dbconn}'.format(
            dbconn=DatabaseManager.connection_details())
        datasource = ogr.Open(conn_str)

        query = 'select min({attr}), max({attr}) from {table}'.format(
            attr=attrname, table=table)
        resultset = datasource.ExecuteSQL(query)

        if resultset is None:
            message = (
                "Error in getting min/max value for {} from table {}".format(
                    attrname, table))
            raise Exception(message)

        # Single-row result: field 0 is the min, field 1 the max.
        feature = resultset.next()
        return feature.GetField(0), feature.GetField(1)
Example #7
0
def get_layerinfo_from_db(tablename):
    """Return (extent, fieldnames) for *tablename* on the DB server.

    *extent* is None for tables without a geometry column; *fieldnames* is
    the list of user-defined column names.

    :raises Exception: when the table does not exist on the server.
    """
    command = 'PG:{dbconn}'.format(dbconn=DatabaseManager.connection_details())
    dbconn = ogr.Open(command)
    try:
        layerdata = dbconn.GetLayer(str(tablename))
        if layerdata is None:
            # GetLayer returns None for unknown tables; fail with a clear
            # message instead of an AttributeError further down.
            raise Exception(
                "No such table '{}' on DB server".format(tablename))

        # Get a list of user defined column names
        layerdefn = layerdata.GetLayerDefn()
        fieldnames = [
            layerdefn.GetFieldDefn(i).GetName()
            for i in range(layerdefn.GetFieldCount())
        ]

        extent = None
        if len(layerdata.GetGeometryColumn()) > 0:
            extent = layerdata.GetExtent()
        return extent, fieldnames
    finally:
        # release the datasource even when an error occurred above
        if dbconn is not None:
            dbconn.Destroy()
Example #8
0
def get_layerinfo_from_db(tablename):
    """Fetch the extent and user-defined column names of a table on the DB
    server; extent is None for tables without a geometry column."""
    conn_str = 'PG:{dbconn}'.format(dbconn=DatabaseManager.connection_details())
    datasource = ogr.Open(conn_str)
    layer = datasource.GetLayer(str(tablename))

    # Collect the names of all user-defined fields on the layer.
    defn = layer.GetLayerDefn()
    fieldnames = []
    for idx in range(defn.GetFieldCount()):
        fieldnames.append(defn.GetFieldDefn(idx).GetName())

    # Only tables with a geometry column have a meaningful extent.
    extent = layer.GetExtent() if len(layer.GetGeometryColumn()) > 0 else None
    datasource.Destroy()
    return extent, fieldnames
Example #9
0
def configure_database_manager(settings):
    """Initialise the DatabaseManager from application *settings*."""
    # All interpretation of the settings is delegated to the manager.
    DatabaseManager.configure_from_config(settings)
Example #10
0
def configure_database_manager(settings):
    """Configure the global DatabaseManager using the given settings."""
    # Thin wrapper: the manager parses the settings itself.
    DatabaseManager.configure_from_config(settings)
Example #11
0
def fetch_worker(request, data_url, job):
    """Fetch the dataset at *data_url* and optionally import it into the DB.

    Marks *job* IN_PROGRESS at the start and COMPLETE at the end; on any
    failure the job is marked FAILED with a reason and nothing is raised.
    When the INSTALL_TO_DB request parameter is truthy and the database is
    configured, zip datasets are extracted, their .dbf / .gdb.zip files are
    imported into PostGIS, and the collected layer metadata is written to
    layer_info.json next to the downloaded file.
    """
    # get location of local data file
    job.update(
        status=FetchJob.STATUS_IN_PROGRESS,
        start_timestamp=datetime.datetime.now())

    try:
        loc = fetch_file(request, data_url)
        # FIXME: have to import here due to circular import
        from pyramid.settings import asbool
        install_to_db = asbool(request.params.get('INSTALL_TO_DB', False))

        # Check the dataset is to be imported to database server
        if install_to_db:
            datadir, filename = os.path.split(loc)
            fname, fext = os.path.splitext(filename)
            if fext == '.zip':
                # Check if shape file exists. If so, already unzip.
                with zipfile.ZipFile(loc, 'r') as zipf:
                    zipf.extractall(datadir)

            # Import dataset to postgis server
            schemaname = None
            if DatabaseManager.is_configured():
                # TODO: To support other file type?
                # Check for shape file, then for gdb directories.
                # glob.glob already returns a list; no comprehension needed.
                dbFiles = glob.glob(
                    os.path.join(datadir, fname, 'data/*.dbf'))
                if len(dbFiles) == 0:
                    dbFiles = glob.glob(
                        os.path.join(datadir, fname, 'data/*.gdb.zip'))
                if len(dbFiles) == 0:
                    raise Exception("Unsupported dataset type")

                # Import each db file into database
                metadata = {}
                for dbFilename in dbFiles:
                    dbfname = os.path.basename(dbFilename)

                    # Skip this db file if it has been imported before
                    # if DatabaseManager.get_metadata(filename) is not None:
                    #    continue

                    # Import db file into the database
                    schemaname = fname.replace('.', '_').replace('-',
                                                                 '_').lower()
                    import_into_db(dbFilename, schemaname)

                    # get the layer info as metadata.
                    md = get_dataset_metadata(schemaname, dbFilename)
                    metadata[dbfname] = md

                    # update the metadata stored in DatabaseManager
                    DatabaseManager.update_metadata(dbfname, md)

                # Save the metadata as a json file for loading when the
                # visualiser starts; context manager guarantees the file is
                # closed even if json.dump raises.
                with open(os.path.join(datadir, "layer_info.json"),
                          'w') as jsonfile:
                    json.dump(metadata, jsonfile, indent=4)

        job.update(
            status=FetchJob.STATUS_COMPLETE,
            end_timestamp=datetime.datetime.now())
    except Exception as e:
        reason = 'Failed to fetch data from {0}. {1}'.format(data_url, str(e))
        LOG.warning(reason)
        job.update(
            status=FetchJob.STATUS_FAILED,
            end_timestamp=datetime.datetime.now(),
            reason=reason)
Example #12
0
    def add_layer_obj(self, map):
        """
        Create a mapserver layer object and insert it into *map*.

        Builds a POSTGIS-backed polygon layer from the 'layers' request
        parameter, optionally joining an attribute table onto a base
        geometry table via the 'foreignKey' request parameter.  The chosen
        attribute column is exposed as a column named 'value'.  Applies an
        SLD from the 'SLD'/'SLD_BODY' request parameters when present.

        Returns the result of map.insertLayer().

        Raises:
            Exception: when required request parameters are missing or do
                not resolve to known tables, or when no database server is
                configured.
        """
        # inspect data if we haven't yet
        self._inspect_data()
        # create a layer object
        layer = mapscript.layerObj()

        # NAME
        layer.name = "DEFAULT"  # TODO: filename?
        # TYPE
        layer.type = mapscript.MS_LAYER_POLYGON
        # STATUS
        layer.status = mapscript.MS_ON
        # mark layer as queryable
        layer.template = "query"  # anything non null and with length > 0 works here

        # Extract the base table and attribute table from layers in the format below.
        # {base_filename}-{base tablename}.{attrinute_filename}-{attribute tablename}.{column name}
        # i.e.
        # SH_Network.gdb.zip:catchment.stream_attributesv1.1.5.gdb.zip:climate.catannrad
        layers = self.request.params.get('layers', None)
        if layers is None:
            raise Exception("Missing layers parameter")

        layer.name = layers

        # Parse to get filenames and short table names so that we can get the
        # corresponding geometry and attribute tables.
        base_fname, base_table, attr_fname, attr_table, property_name = self.parse_layers(
            layers)

        if attr_fname is None or attr_table is None or property_name is None:
            raise Exception("Invalid layers '{layer}'".format(layer=layers))

        # get the attribute table and its metadata
        db_attr_table, id_col, geom_col, extent = self.get_table_details(
            attr_fname, attr_table)
        if db_attr_table is None:
            raise Exception("Invalid 'layers' parameter in request: no such table '{tablename}'".format(
                tablename=attr_table))

        # Get the corresposning base table, and its id and geometry column
        # names
        db_base_table = None
        common_col = None
        if base_table:
            # NOTE: id_col/geom_col/extent from the attribute table are
            # deliberately overwritten with the base table's values here.
            db_base_table, id_col, geom_col, extent = self.get_table_details(
                base_fname, base_table)

            # Get the foreign key i.e. the joinable column
            # This is only required for joining tables
            common_col = self.request.params.get('foreignKey', None)
            if common_col is None:
                raise Exception("Missing 'foreignKey' parameter in request")
        else:
            db_base_table = db_attr_table

        if db_base_table is None or id_col is None or geom_col is None:
            raise Exception("Missing or invalid table for {layer}".format(
                layer=base_table or attr_table))

        # DATA, in the format of "<column> from <tablename> using unique fid using srid=xxxx"
        # table must have fid and geom
        # table_attribute in the form 'table-name:attrubute1'
        # TODO: Shall we check that the column and table exists??

        # Set extent to improve performance of getting data from DB server
        if extent:
            layer.setExtent(extent['xmin'], extent['ymin'],
                            extent['xmax'], extent['ymax'])

        if DatabaseManager.is_configured():
            # Connection to POSTGIS DB server
            layer.connectiontype = mapscript.MS_POSTGIS
            layer.connection = DatabaseManager.connection_details()

            # To speed up DB performance, simplify geometry for low resolution
            # i.e. <= 300000 pixel per degree.
            resolution, tolerance = self.resolution_tolerance(layer)
            # 'b' is the alias of the base/geometry table in the SQL below.
            the_geom = 'b.{geomcol}'.format(geomcol=geom_col)
            if resolution <= 300000 and tolerance > 0.001:
                the_geom = "ST_Simplify(b.{geomcol}, {tol}) as {geomcol}".format(
                    geomcol=geom_col, tol=tolerance)

            # Get property as value
            # NOTE(review): table/column names derived from request params
            # are interpolated into SQL — verify upstream validation.
            if db_attr_table != db_base_table:
                newtable = "(select a.{colname} as value, b.{idcol}, {geom} from {layer} a join {base} b on a.{ccol} = b.{ccol})".format(
                    colname=property_name, layer=db_attr_table, base=db_base_table, ccol=common_col, idcol=id_col, geom=the_geom)
            else:
                newtable = "(select b.{colname} as value, b.{idcol}, {geom} from {base} b)".format(
                    colname=property_name, base=db_base_table, idcol=id_col, geom=the_geom)

            srid = self.request.params.get('SRID', '4326')
            layer.data = "{geom} from {table} as new_layer using unique {idcol} using srid={srid}".format(
                geom=geom_col, table=newtable, idcol=id_col, srid=srid)

            # Defer closing connection
            layer.addProcessing("CLOSE_CONNECTION=DEFER")

        else:
            # TO DO: read from file
            raise Exception("Database server is not configured")

        # PROJECTION ... should we set this properly?
        crs = self._data['crs']
        # set the min and max of the attribute
        self._data['min'], self._data['max'] = self.get_minmax_value(
            property_name, db_attr_table)

        layer.setProjection("init={}".format(crs))

        # METADATA
        # TODO: check return value of setMetaData MS_SUCCESS/MS_FAILURE
        layer.setMetaData("gml_types", "auto")
        # Shall be the id column of the base table
        layer.setMetaData("gml_featureid", id_col)
        layer.setMetaData("gml_include_items", "all")  # allow raster queries
        layer.setMetaData("wfs_include_items", "all")
        # projection to serve
        layer.setMetaData("wfs_srs", "EPSG:4326 EPSG:3857")
        # title required for GetCapabilities
        layer.setMetaData("wfs_title", "BCCVL Layer")

        # TODO: metadata
        #       other things like title, author, attribution etc...

        # TODO: if we have a STYLES parameter we should add a STYLES element
        # here
        if not (self.request.params.get('STYLES') or
                'SLD' in self.request.params or
                'SLD_BODY' in self.request.params):
            # set some default style if the user didn't specify any' STYLE
            layer.insertClass(self.default_class_style('value'))

        ret = map.insertLayer(layer)

        # Apply an SLD after the layer is part of the map; URL takes
        # precedence over an inline SLD body.
        sld = self.request.params.get('SLD_BODY')
        sld_url = self.request.params.get('SLD')
        if sld_url:
            map.applySLDURL(sld_url)
        elif sld:
            map.applySLD(sld)
        return ret
Example #13
0
    def add_layer_obj(self, map):
        """
        Create a mapserver layer object for a WFS request and insert it
        into *map*.

        Builds a POSTGIS-backed polygon layer from the 'typeNames' request
        parameter, optionally joining an attribute table onto a base
        geometry table via the 'foreignKey' parameter, and restricting to
        specific features via the 'featureID' parameter.

        Returns the result of map.insertLayer().

        Raises:
            Exception: when required request parameters are missing or do
                not resolve to known tables, or when the database is not
                configured.
        """
        # inspect data if we haven't yet
        self._inspect_data()
        # create a layer object
        layer = mapscript.layerObj()

        # NAME
        layer.name = "DEFAULT"  # TODO: filename?
        # TYPE
        layer.type = mapscript.MS_LAYER_POLYGON
        # STATUS
        layer.status = mapscript.MS_ON
        # mark layer as queryable
        layer.template = "query"  # anything non null and with length > 0 works here

        # Extract the base table and attribute table from typeNames
        # typeNames = {base_filename}-{base tanlename}.{attribute_filename}-{attribute tablename}
        # i.e. SH_Network.gdb-ahgfcatchment.stream_attributesv1.1.5.gdb-climate_lut
        type_name = self.request.params.get('typeNames', None)
        if type_name is None:
            raise Exception("Missing typeNames")

        # Parse to get filenames and layer names so that we can get the corresponding geometry and attribute tables.
        base_fname, base_table, attr_fname, attr_table = self.parse_typeName(
            type_name)

        if attr_fname is None or attr_table is None:
            raise Exception(
                "Invalid typeNames in request: no such layer '{layer}'".format(
                    layer=attr_table))

        # set layer name
        layer.name = type_name

        # get the attribute table and its metadata
        db_attr_table, id_col, geom_col, extent = self.get_table_details(
            attr_fname, attr_table)
        if db_attr_table is None:
            raise Exception(
                "Invalid typeNames in request: no such layer '{layer}'".format(
                    layer=attr_table))

        # Get the corresposning base table, and its id and geometry column names
        db_base_table = None
        common_col = None
        if base_table:
            # NOTE: id_col/geom_col/extent are overwritten with the base
            # table's values when a base table is given.
            db_base_table, id_col, geom_col, extent = self.get_table_details(
                base_fname, base_table)

            if db_base_table is None or id_col is None or geom_col is None:
                raise Exception(
                    "Missing or invalid base table for {layer}".format(
                        layer=base_table))

            # Get the foreign key i.e. the joinable column
            common_col = self.request.params.get('foreignKey', None)
            if common_col is None:
                raise Exception("Missing foreignKey in request")
        else:
            db_base_table = db_attr_table

        # Set extent to limit the data fetched from the DB server.
        if extent:
            layer.setExtent(extent['xmin'], extent['ymin'], extent['xmax'],
                            extent['ymax'])

        # get the feature ids. It must be in the format: {layername}.fid
        # Entries not matching '<layername>.<fid>' are silently dropped.
        featureId = self.request.params.get('featureID', '')
        fidlist = featureId.split(',')
        fid = ','.join([
            v.split('{layername}.'.format(layername=layer.name))[1]
            for v in fidlist if len(v.split('.')) > 1
        ])
        if len(fid) == 0:
            fid = None

        # get the attribute names.
        attributeNames = self.request.params.get('attributes', None)

        # DATA, in the format of "<column> from <tablename> using unique fid using srid=xxxx"
        # table must have fid and geom
        # table_attribute in the form 'table-name:attrubute1'
        if DatabaseManager.is_configured():
            # Connection to POSTGIS DB server
            layer.connectiontype = mapscript.MS_POSTGIS
            layer.connection = DatabaseManager.connection_details()

            if db_attr_table != db_base_table:
                # Get all attributes of the attribute table if not specified
                layer_info = None
                if attributeNames is None:
                    mddata = DatabaseManager.get_metadata(attr_fname)
                    if mddata:
                        layer_info = mddata.get(attr_table, None)

                    if layer_info:
                        fields = [i.lower() for i in layer_info.get('fields')]
                        # Remove the id column and foreign key
                        a_idcol = layer_info.get('id_column')
                        for item in [a_idcol.lower(), common_col.lower()]:
                            if item in fields:
                                fields.remove(item)
                        attributeNames = ','.join(fields)
                # NOTE(review): request-derived identifiers are interpolated
                # into SQL below — verify upstream validation.
                if fid:
                    # NOTE(review): the 'geom' kwarg is unused by this
                    # template string.
                    newtable = "(select b.*, {attributes} from {atable} a join {base} b on a.{ccol} = b.{ccol} and b.{idcol} in ({ids}))".format(
                        attributes=attributeNames,
                        atable=db_attr_table,
                        base=db_base_table,
                        ccol=common_col,
                        idcol=id_col,
                        ids=fid,
                        geom=geom_col)
                else:
                    # NOTE(review): the 'idcol' and 'geom' kwargs are unused
                    # by this template string.
                    newtable = "(select b.*, {attributes} from {atable} a join {base} b on a.{ccol} = b.{ccol})".format(
                        attributes=attributeNames,
                        atable=db_attr_table,
                        base=db_base_table,
                        ccol=common_col,
                        idcol=id_col,
                        geom=geom_col)
            else:
                if fid:
                    newtable = "(select * from {base} where {idcol} in ({ids}))".format(
                        base=db_base_table, idcol=id_col, ids=fid)
                else:
                    newtable = "(select * from {base})".format(
                        base=db_base_table)

            srid = self.request.params.get('SRID', '4326')
            layer.data = "{geom} from {table} as new_layer using unique {idcol} using srid={srid}".format(
                geom=geom_col, table=newtable, idcol=id_col, srid=srid)

            # Defer closing connection
            layer.addProcessing("CLOSE_CONNECTION=DEFER")

        else:
            # TO DO: read from file
            raise Exception("Database is not configured.")

        # PROJECTION ... should we set this properly?
        crs = self._data['crs']
        layer.setProjection("init={}".format(crs))

        # METADATA
        # TODO: check return value of setMetaData MS_SUCCESS/MS_FAILURE
        layer.setMetaData("gml_types", "auto")
        layer.setMetaData("gml_featureid",
                          id_col)  # Shall be the id column of the base table
        layer.setMetaData("gml_include_items", "all")  # allow raster queries
        layer.setMetaData("wfs_include_items", "all")
        layer.setMetaData("wfs_srs",
                          "EPSG:4326 EPSG:3857")  # projection to serve
        layer.setMetaData("wfs_title",
                          "BCCVL Layer")  # title required for GetCapabilities

        # TODO: metadata
        #       other things like title, author, attribution etc...

        return map.insertLayer(layer)
Example #14
0
def fetch_worker(request, data_url, job):
    """Fetch the dataset at *data_url* and optionally import it into the DB.

    Marks *job* IN_PROGRESS at the start and COMPLETE at the end; any
    failure is logged and recorded on the job as FAILED rather than raised.
    When the INSTALL_TO_DB request parameter is truthy and the database is
    configured, zip datasets are extracted, their .dbf / .gdb.zip files are
    imported into PostGIS, and the collected layer metadata is written to
    layer_info.json next to the downloaded file.
    """
    # get location of local data file
    job.update(status=FetchJob.STATUS_IN_PROGRESS,
               start_timestamp=datetime.datetime.now())

    try:
        loc = fetch_file(request, data_url)
        # FIXME: have to import here due to circular import
        from pyramid.settings import asbool
        install_to_db = asbool(request.params.get('INSTALL_TO_DB', False))

        # Check the dataset is to be imported to database server
        if install_to_db:
            datadir, filename = os.path.split(loc)
            fname, fext = os.path.splitext(filename)
            if fext == '.zip':
                # Check if shape file exists. If so, already unzip.
                with zipfile.ZipFile(loc, 'r') as zipf:
                    zipf.extractall(datadir)

            # Import dataset to postgis server
            schemaname = None
            if DatabaseManager.is_configured():
                # TODO: To support other file type?
                # Check for shape file, then for gdb directories
                # NOTE(review): glob.glob already returns a list — the
                # surrounding comprehensions are redundant.
                dbFiles = [
                    dbfile for dbfile in glob.glob(
                        os.path.join(datadir, fname, 'data/*.dbf'))
                ]
                if len(dbFiles) == 0:
                    dbFiles = [
                        dbfile for dbfile in glob.glob(
                            os.path.join(datadir, fname, 'data/*.gdb.zip'))
                    ]
                if len(dbFiles) == 0:
                    raise Exception("Unsupported dataset type")

                # Import each db file into database
                metadata = {}
                for dbFilename in dbFiles:
                    dbfname = os.path.basename(dbFilename)

                    # Skip this db file if it has been imported before
                    # if DatabaseManager.get_metadata(filename) is not None:
                    #    continue

                    # Import db file into the database
                    schemaname = fname.replace('.', '_').replace('-',
                                                                 '_').lower()
                    import_into_db(dbFilename, schemaname)

                    # get the layer info as metadata.
                    md = get_dataset_metadata(schemaname, dbFilename)
                    metadata[dbfname] = md
                    # metadata.update(md)

                    # update the metadata stored in DatabaseManager
                    DatabaseManager.update_metadata(dbfname, md)

                # Save the metadata as json file for loading when visualiser
                # start
                # NOTE(review): the handle leaks if json.dump raises —
                # consider a 'with open(...)' block.
                jsonfile = open(os.path.join(datadir, "layer_info.json"), 'w')
                json.dump(metadata, jsonfile, indent=4)
                jsonfile.close()

        job.update(status=FetchJob.STATUS_COMPLETE,
                   end_timestamp=datetime.datetime.now())
    except Exception as e:
        reason = 'Failed to fetch data from {0}. {1}'.format(data_url, str(e))
        LOG.warning(reason)
        job.update(status=FetchJob.STATUS_FAILED,
                   end_timestamp=datetime.datetime.now(),
                   reason=reason)
Example #15
0
    def add_layer_obj(self, map):
        """
        Create a mapserver layer object and insert it into *map*.

        Builds a POSTGIS-backed polygon layer from the 'layers' request
        parameter, optionally joining an attribute table onto a base
        geometry table via the 'foreignKey' request parameter.  The chosen
        attribute column is exposed as a column named 'value'.  Applies an
        SLD from the 'SLD'/'SLD_BODY' request parameters when present.

        Returns the result of map.insertLayer().

        Raises:
            Exception: when required request parameters are missing or do
                not resolve to known tables, or when no database server is
                configured.
        """
        # inspect data if we haven't yet
        self._inspect_data()
        # create a layer object
        layer = mapscript.layerObj()

        # NAME
        layer.name = "DEFAULT"  # TODO: filename?
        # TYPE
        layer.type = mapscript.MS_LAYER_POLYGON
        # STATUS
        layer.status = mapscript.MS_ON
        # mark layer as queryable
        layer.template = "query"  # anything non null and with length > 0 works here

        # Extract the base table and attribute table from layers in the format below.
        # {base_filename}-{base tablename}.{attrinute_filename}-{attribute tablename}.{column name}
        # i.e.
        # SH_Network.gdb.zip:catchment.stream_attributesv1.1.5.gdb.zip:climate.catannrad
        layers = self.request.params.get('layers', None)
        if layers is None:
            raise Exception("Missing layers parameter")

        layer.name = layers

        # Parse to get filenames and short table names so that we can get the
        # corresponding geometry and attribute tables.
        base_fname, base_table, attr_fname, attr_table, property_name = self.parse_layers(
            layers)

        if attr_fname is None or attr_table is None or property_name is None:
            raise Exception("Invalid layers '{layer}'".format(layer=layers))

        # get the attribute table and its metadata
        db_attr_table, id_col, geom_col, extent = self.get_table_details(
            attr_fname, attr_table)
        if db_attr_table is None:
            raise Exception(
                "Invalid 'layers' parameter in request: no such table '{tablename}'"
                .format(tablename=attr_table))

        # Get the corresposning base table, and its id and geometry column
        # names
        db_base_table = None
        common_col = None
        if base_table:
            # NOTE: id_col/geom_col/extent from the attribute table are
            # deliberately overwritten with the base table's values here.
            db_base_table, id_col, geom_col, extent = self.get_table_details(
                base_fname, base_table)

            # Get the foreign key i.e. the joinable column
            # This is only required for joining tables
            common_col = self.request.params.get('foreignKey', None)
            if common_col is None:
                raise Exception("Missing 'foreignKey' parameter in request")
        else:
            db_base_table = db_attr_table

        if db_base_table is None or id_col is None or geom_col is None:
            raise Exception("Missing or invalid table for {layer}".format(
                layer=base_table or attr_table))

        # DATA, in the format of "<column> from <tablename> using unique fid using srid=xxxx"
        # table must have fid and geom
        # table_attribute in the form 'table-name:attrubute1'
        # TODO: Shall we check that the column and table exists??

        # Set extent to improve performance of getting data from DB server
        if extent:
            layer.setExtent(extent['xmin'], extent['ymin'], extent['xmax'],
                            extent['ymax'])

        if DatabaseManager.is_configured():
            # Connection to POSTGIS DB server
            layer.connectiontype = mapscript.MS_POSTGIS
            layer.connection = DatabaseManager.connection_details()

            # To speed up DB performance, simplify geometry for low resolution
            # i.e. <= 300000 pixel per degree.
            resolution, tolerance = self.resolution_tolerance(layer)
            # 'b' is the alias of the base/geometry table in the SQL below.
            the_geom = 'b.{geomcol}'.format(geomcol=geom_col)
            if resolution <= 300000 and tolerance > 0.001:
                the_geom = "ST_Simplify(b.{geomcol}, {tol}) as {geomcol}".format(
                    geomcol=geom_col, tol=tolerance)

            # Get property as value
            # NOTE(review): table/column names derived from request params
            # are interpolated into SQL — verify upstream validation.
            if db_attr_table != db_base_table:
                newtable = "(select a.{colname} as value, b.{idcol}, {geom} from {layer} a join {base} b on a.{ccol} = b.{ccol})".format(
                    colname=property_name,
                    layer=db_attr_table,
                    base=db_base_table,
                    ccol=common_col,
                    idcol=id_col,
                    geom=the_geom)
            else:
                newtable = "(select b.{colname} as value, b.{idcol}, {geom} from {base} b)".format(
                    colname=property_name,
                    base=db_base_table,
                    idcol=id_col,
                    geom=the_geom)

            srid = self.request.params.get('SRID', '4326')
            layer.data = "{geom} from {table} as new_layer using unique {idcol} using srid={srid}".format(
                geom=geom_col, table=newtable, idcol=id_col, srid=srid)

            # Defer closing connection
            layer.addProcessing("CLOSE_CONNECTION=DEFER")

        else:
            # TO DO: read from file
            raise Exception("Database server is not configured")

        # PROJECTION ... should we set this properly?
        crs = self._data['crs']
        # set the min and max of the attribute
        self._data['min'], self._data['max'] = self.get_minmax_value(
            property_name, db_attr_table)

        layer.setProjection("init={}".format(crs))

        # METADATA
        # TODO: check return value of setMetaData MS_SUCCESS/MS_FAILURE
        layer.setMetaData("gml_types", "auto")
        # Shall be the id column of the base table
        layer.setMetaData("gml_featureid", id_col)
        layer.setMetaData("gml_include_items", "all")  # allow raster queries
        layer.setMetaData("wfs_include_items", "all")
        # projection to serve
        layer.setMetaData("wfs_srs", "EPSG:4326 EPSG:3857")
        # title required for GetCapabilities
        layer.setMetaData("wfs_title", "BCCVL Layer")

        # TODO: metadata
        #       other things like title, author, attribution etc...

        # TODO: if we have a STYLES parameter we should add a STYLES element
        # here
        if not (self.request.params.get('STYLES') or 'SLD'
                in self.request.params or 'SLD_BODY' in self.request.params):
            # set some default style if the user didn't specify any' STYLE
            layer.insertClass(self.default_class_style('value'))

        ret = map.insertLayer(layer)

        # Apply an SLD after the layer is part of the map; URL takes
        # precedence over an inline SLD body.
        sld = self.request.params.get('SLD_BODY')
        sld_url = self.request.params.get('SLD')
        if sld_url:
            map.applySLDURL(sld_url)
        elif sld:
            map.applySLD(sld)
        return ret
Example #16
0
    def add_layer_obj(self, map):
        """
        Create a mapserver layer object for a WFS request and insert it into *map*.

        The layer is backed by the configured PostGIS database. The request's
        ``typeNames`` parameter identifies the attribute table and (optionally)
        a base/geometry table to join it with; optional ``featureID`` entries
        of the form ``{layername}.{fid}`` restrict the result to specific
        features, and an optional ``attributes`` parameter selects which
        attribute columns to expose.

        :param map: mapscript.mapObj to insert the new layer into.
        :return: the result of ``map.insertLayer(layer)`` (the layer index).
        :raises Exception: if ``typeNames``/``foreignKey`` are missing or
            invalid, if the attribute columns cannot be determined, or if no
            database server is configured.
        """
        # inspect data if we haven't yet
        self._inspect_data()

        # create a layer object and set the basic properties
        layer = mapscript.layerObj()
        layer.name = "DEFAULT"  # replaced with typeNames below
        layer.type = mapscript.MS_LAYER_POLYGON
        layer.status = mapscript.MS_ON
        # anything non null and with length > 0 marks the layer as queryable
        layer.template = "query"

        # Extract the base table and attribute table from typeNames
        # typeNames = {base_filename}-{base tablename}.{attribute_filename}-{attribute tablename}
        # i.e. SH_Network.gdb-ahgfcatchment.stream_attributesv1.1.5.gdb-climate_lut
        type_name = self.request.params.get("typeNames", None)
        if type_name is None:
            raise Exception("Missing typeNames")

        # Parse to get filenames and layer names so that we can get the
        # corresponding geometry and attribute tables.
        base_fname, base_table, attr_fname, attr_table = self.parse_typeName(type_name)
        if attr_fname is None or attr_table is None:
            raise Exception("Invalid typeNames in request: no such layer '{layer}'".format(layer=attr_table))

        # set layer name
        layer.name = type_name

        # get the attribute table and its metadata
        db_attr_table, id_col, geom_col, extent = self.get_table_details(attr_fname, attr_table)
        if db_attr_table is None:
            raise Exception("Invalid typeNames in request: no such layer '{layer}'".format(layer=attr_table))

        # Get the corresponding base table, and its id and geometry column names
        db_base_table = None
        common_col = None
        if base_table:
            db_base_table, id_col, geom_col, extent = self.get_table_details(base_fname, base_table)
            if db_base_table is None or id_col is None or geom_col is None:
                raise Exception("Missing or invalid base table for {layer}".format(layer=base_table))

            # The foreign key i.e. the column the two tables are joined on
            common_col = self.request.params.get("foreignKey", None)
            if common_col is None:
                raise Exception("Missing foreignKey in request")
        else:
            # no join needed; the attribute table carries the geometry itself
            db_base_table = db_attr_table

        # Set extent to improve performance of getting data from DB server
        if extent:
            layer.setExtent(extent["xmin"], extent["ymin"], extent["xmax"], extent["ymax"])

        # Get the requested feature ids. Each entry must be in the format
        # {layername}.fid; entries that don't carry this layer's prefix are
        # skipped (previously such entries raised an IndexError).
        featureId = self.request.params.get("featureID", "")
        prefix = "{layername}.".format(layername=layer.name)
        id_values = []
        for entry in featureId.split(","):
            _, found, id_value = entry.partition(prefix)
            if found:
                id_values.append(id_value)
        fid = ",".join(id_values)
        if len(fid) == 0:
            fid = None

        # get the attribute (column) names; optional.
        attributeNames = self.request.params.get("attributes", None)

        if not DatabaseManager.is_configured():
            # TO DO: read from file
            raise Exception("Database is not configured.")

        # Connection to POSTGIS DB server
        layer.connectiontype = mapscript.MS_POSTGIS
        layer.connection = DatabaseManager.connection_details()

        # Build the sub-query backing the layer.
        # DATA is in the format "<column> from <tablename> using unique fid using srid=xxxx"
        # and the table must provide the fid and geom columns.
        # NOTE(review): table/column names and fids come from request
        # parameters and are interpolated into SQL unescaped -- potential
        # SQL injection if this endpoint serves untrusted clients.
        if db_attr_table != db_base_table:
            # Use all attributes of the attribute table if not specified
            if attributeNames is None:
                layer_info = None
                mddata = DatabaseManager.get_metadata(attr_fname)
                if mddata:
                    layer_info = mddata.get(attr_table, None)

                if layer_info:
                    fields = [i.lower() for i in layer_info.get("fields")]
                    # Remove the id column and foreign key; the base table's
                    # b.* already provides them
                    a_idcol = layer_info.get("id_column")
                    for item in [a_idcol.lower(), common_col.lower()]:
                        if item in fields:
                            fields.remove(item)
                    attributeNames = ",".join(fields)
            if attributeNames is None:
                # Without this guard the literal string "None" would be
                # interpolated into the SQL below and fail with an obscure
                # database error.
                raise Exception("Unable to determine attributes for layer '{layer}'".format(layer=attr_table))
            if fid:
                newtable = "(select b.*, {attributes} from {atable} a join {base} b on a.{ccol} = b.{ccol} and b.{idcol} in ({ids}))".format(
                    attributes=attributeNames,
                    atable=db_attr_table,
                    base=db_base_table,
                    ccol=common_col,
                    idcol=id_col,
                    ids=fid,
                )
            else:
                newtable = "(select b.*, {attributes} from {atable} a join {base} b on a.{ccol} = b.{ccol})".format(
                    attributes=attributeNames,
                    atable=db_attr_table,
                    base=db_base_table,
                    ccol=common_col,
                )
        else:
            if fid:
                newtable = "(select * from {base} where {idcol} in ({ids}))".format(
                    base=db_base_table, idcol=id_col, ids=fid
                )
            else:
                newtable = "(select * from {base})".format(base=db_base_table)

        srid = self.request.params.get("SRID", "4326")
        layer.data = "{geom} from {table} as new_layer using unique {idcol} using srid={srid}".format(
            geom=geom_col, table=newtable, idcol=id_col, srid=srid
        )

        # Defer closing the DB connection so mapserver can reuse it
        layer.addProcessing("CLOSE_CONNECTION=DEFER")

        # PROJECTION ... should we set this properly?
        crs = self._data["crs"]
        layer.setProjection("init={}".format(crs))

        # METADATA
        # TODO: check return value of setMetaData MS_SUCCESS/MS_FAILURE
        layer.setMetaData("gml_types", "auto")
        layer.setMetaData("gml_featureid", id_col)  # Shall be the id column of the base table
        layer.setMetaData("gml_include_items", "all")  # allow raster queries
        layer.setMetaData("wfs_include_items", "all")
        layer.setMetaData("wfs_srs", "EPSG:4326 EPSG:3857")  # projections to serve
        layer.setMetaData("wfs_title", "BCCVL Layer")  # title required for GetCapabilities

        # TODO: metadata
        #       other things like title, author, attribution etc...

        return map.insertLayer(layer)