Example #1
    def get_stores(self, names=None, workspaces=None):
        '''
          Returns a list of stores in the catalog. If workspaces is specified, will only return stores in those workspaces.
          If names is specified, will only return stores that match.
          names can either be a comma delimited string or an array.
          Will return an empty list if no stores are found.
        '''

        if isinstance(workspaces, Workspace):
            workspaces = [workspaces]
        elif isinstance(workspaces, list) and [w for w in workspaces if isinstance(w, Workspace)]:
            # already a list of Workspace objects; use it as-is
            pass
        else:
            workspaces = self.get_workspaces(names=workspaces)

        stores = []
        for ws in workspaces:
            ds_list = self.get_xml(ws.datastore_url)
            cs_list = self.get_xml(ws.coveragestore_url)
            wms_list = self.get_xml(ws.wmsstore_url)
            stores.extend([datastore_from_index(self, ws, n) for n in ds_list.findall("dataStore")])
            stores.extend([coveragestore_from_index(self, ws, n) for n in cs_list.findall("coverageStore")])
            stores.extend([wmsstore_from_index(self, ws, n) for n in wms_list.findall("wmsStore")])

        if names is None:
            names = []
        elif isinstance(names, basestring):
            names = [s.strip() for s in names.split(',') if s.strip()]

        if stores and names:
            return ([store for store in stores if store.name in names])

        return stores
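
A minimal usage sketch for the method above, assuming the gsconfig Catalog class and a GeoServer instance at http://localhost:8080/geoserver with default credentials; the workspace and store names are placeholders:

    from geoserver.catalog import Catalog

    cat = Catalog("http://localhost:8080/geoserver/rest", "admin", "geoserver")

    # All stores across every workspace
    all_stores = cat.get_stores()

    # Only matching stores in the "topp" workspace (placeholder names)
    some_stores = cat.get_stores(names="states_shapefile, sf", workspaces="topp")
    for store in some_stores:
        print(store.name, store.resource_type)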
Example #2
 def get_stores(self, workspace=None):
     if workspace is not None:
         if isinstance(workspace, basestring):
             workspace = self.get_workspace(workspace)
         ds_list = self.get_xml(workspace.datastore_url)
         cs_list = self.get_xml(workspace.coveragestore_url)
         wms_list = self.get_xml(workspace.wmsstore_url)
         datastores = [
             datastore_from_index(self, workspace, n)
             for n in ds_list.findall("dataStore")
         ]
         coveragestores = [
             coveragestore_from_index(self, workspace, n)
             for n in cs_list.findall("coverageStore")
         ]
         wmsstores = [
             wmsstore_from_index(self, workspace, n)
             for n in wms_list.findall("wmsStore")
         ]
         return datastores + coveragestores + wmsstores
     else:
         stores = []
         for ws in self.get_workspaces():
             a = self.get_stores(ws)
             stores.extend(a)
         return stores
Example #3
    def get_stores(self, names=None, workspace=None):
        """
          Returns a list of stores in the catalog. If workspace is specified, will only return stores in that workspace.
          If names is specified, will only return stores that match.
          names can either be a comma delimited string or an array.
          Will return an empty list if no stores are found.
        """

        workspaces = []
        if workspace is not None:
            if isinstance(workspace, basestring):
                ws = self.get_workspaces(workspace)
                if ws:
                    # There can only be one workspace with this name
                    workspaces.append(ws[0])
            elif getattr(
                    workspace, 'resource_type', None
            ) is not None and workspace.resource_type == "workspace":
                workspaces.append(workspace)
        else:
            workspaces = self.get_workspaces()

        stores = []
        if workspaces:
            for ws in workspaces:
                ds_list = self.get_xml(ws.datastore_url)
                cs_list = self.get_xml(ws.coveragestore_url)
                wms_list = self.get_xml(ws.wmsstore_url)
                stores.extend([
                    datastore_from_index(self, ws, n)
                    for n in ds_list.findall("dataStore")
                ])
                stores.extend([
                    coveragestore_from_index(self, ws, n)
                    for n in cs_list.findall("coverageStore")
                ])
                stores.extend([
                    wmsstore_from_index(self, ws, n)
                    for n in wms_list.findall("wmsStore")
                ])

        if names is None:
            names = []
        elif isinstance(names, basestring):
            names = list(map(str.strip, str(names).split(',')))
        if stores and names:
            named_stores = []
            for store in stores:
                if store.name in names:
                    named_stores.append(store)
            return named_stores

        return stores
Example #4
 def get_stores(self, workspace=None):
     if workspace is not None:
         ds_list = self.get_xml(workspace.datastore_url)
         cs_list = self.get_xml(workspace.coveragestore_url)
         datastores = [datastore_from_index(self, workspace, n) for n in ds_list.findall("dataStore")]
         coveragestores = [coveragestore_from_index(self, workspace, n) for n in cs_list.findall("coverageStore")]
         return datastores + coveragestores
     else:
         stores = []
         for ws in self.get_workspaces():
             a = self.get_stores(ws)
             stores.extend(a)
         return stores
Example #5
    def get_store(self, name, workspace=None):
        #stores = [s for s in self.get_stores(workspace) if s.name == name]
        if workspace is None:
            store = None
            for ws in self.get_workspaces():
                found = None
                try:
                    found = self.get_store(name, ws)
                except FailedRequestError:
                    # don't expect every workspace to contain the named store
                    pass
                if found:
                    if store:
                        raise AmbiguousRequestError(
                            "Multiple stores found named: " + name)
                    else:
                        store = found
            if not store:
                raise FailedRequestError("No store found named: " + name)
            return store
        else:  # workspace is not None
            if isinstance(workspace, basestring):
                workspace = self.get_workspace(workspace)
                if workspace is None:
                    return None
            logger.debug("datastore url is [%s]", workspace.datastore_url)
            ds_list = self.get_xml(workspace.datastore_url)
            cs_list = self.get_xml(workspace.coveragestore_url)
            datastores = [
                n for n in ds_list.findall("dataStore")
                if n.find("name").text == name
            ]
            coveragestores = [
                n for n in cs_list.findall("coverageStore")
                if n.find("name").text == name
            ]
            ds_len, cs_len = len(datastores), len(coveragestores)

            if ds_len == 1 and cs_len == 0:
                return datastore_from_index(self, workspace, datastores[0])
            elif ds_len == 0 and cs_len == 1:
                return coveragestore_from_index(self, workspace,
                                                coveragestores[0])
            elif ds_len == 0 and cs_len == 0:
                raise FailedRequestError("No store found in " +
                                         str(workspace) + " named: " + name)
            else:
                raise AmbiguousRequestError(
                    str(workspace) + " and name: " + name +
                    " do not uniquely identify a layer")
Example #6
    def get_stores(self, names=None, workspaces=None):
        '''
          Returns a list of stores in the catalog. If workspaces is specified, will only return stores in those workspaces.
          If names is specified, will only return stores that match.
          names can either be a comma delimited string or an array.
          Will return an empty list if no stores are found.
        '''

        if workspaces:
            if isinstance(workspaces, Workspace):
                workspaces = [workspaces]
            elif isinstance(workspaces, list) and [
                    w for w in workspaces if isinstance(w, Workspace)
            ]:
                # already a list of Workspace objects; use it as-is
                pass
            else:
                workspaces = self.get_workspaces(names=workspaces)
        else:
            workspaces = []

        stores = []
        for ws in workspaces:
            ds_list = self.get_xml(ws.datastore_url)
            cs_list = self.get_xml(ws.coveragestore_url)
            wms_list = self.get_xml(ws.wmsstore_url)
            stores.extend([
                datastore_from_index(self, ws, n)
                for n in ds_list.findall("dataStore")
            ])
            stores.extend([
                coveragestore_from_index(self, ws, n)
                for n in cs_list.findall("coverageStore")
            ])
            stores.extend([
                wmsstore_from_index(self, ws, n)
                for n in wms_list.findall("wmsStore")
            ])

        if names is None:
            names = []
        elif isinstance(names, string_types):
            names = [s.strip() for s in names.split(',') if s.strip()]

        if stores and names:
            return ([store for store in stores if store.name in names])

        return stores
Example #7
 def get_stores(self, workspace=None):
     if workspace is not None:
         if isinstance(workspace, basestring):
             workspace = self.get_workspace(workspace)
         ds_list = self.get_xml(workspace.datastore_url)
         cs_list = self.get_xml(workspace.coveragestore_url)
         wms_list = self.get_xml(workspace.wmsstore_url)
         datastores = [datastore_from_index(self, workspace, n) for n in ds_list.findall("dataStore")]
         coveragestores = [coveragestore_from_index(self, workspace, n) for n in cs_list.findall("coverageStore")]
         wmsstores = [wmsstore_from_index(self, workspace, n) for n in wms_list.findall("wmsStore")]
         return datastores + coveragestores + wmsstores
     else:
         stores = []
         for ws in self.get_workspaces():
             a = self.get_stores(ws)
             stores.extend(a)
         return stores
Example #8
  def get_store(self, name, workspace=None):
      #stores = [s for s in self.get_stores(workspace) if s.name == name]
      if workspace is None:
          store = None
          for ws in self.get_workspaces():
              found = None
              try:
                  found = self.get_store(name, ws)
              except FailedRequestError:
                  # don't expect every workspace to contain the named store
                  pass
              if found:
                  if store:
                      raise AmbiguousRequestError("Multiple stores found named: " + name)
                  else:
                      store = found

          if not store:
              raise FailedRequestError("No store found named: " + name)
          return store
      else: # workspace is not None
          logger.debug("datastore url is [%s]", workspace.datastore_url )
          ds_list = self.get_xml(workspace.datastore_url)
          cs_list = self.get_xml(workspace.coveragestore_url)
          ws_list = self.get_xml(workspace.wmsstore_url)
          datastores = [n for n in ds_list.findall("dataStore") if n.find("name").text == name]
          coveragestores = [n for n in cs_list.findall("coverageStore") if n.find("name").text == name]
          wmsstores = [n for n in ws_list.findall("wmsStore") if n.find("name").text == name]
          ds_len, cs_len, ws_len = len(datastores), len(coveragestores), len(wmsstores)

          if ds_len == 1 and cs_len == 0 and ws_len == 0:
              return datastore_from_index(self, workspace, datastores[0])
          elif ds_len == 0 and cs_len == 1 and ws_len == 0:
              return coveragestore_from_index(self, workspace, coveragestores[0])
          elif ds_len == 0 and cs_len == 0 and ws_len == 1:
              return wmsstore_from_index(self, workspace, wmsstores[0])
          elif ds_len == 0 and cs_len == 0 and ws_len == 0:
              raise FailedRequestError("No store found in " + str(workspace) + " named: " + name)
          else:
              raise AmbiguousRequestError(str(workspace) + " and name: " + name + " do not uniquely identify a layer")
Example #9
def get_store(cat, name, workspace=None):
    # Make sure workspace is a workspace object and not a string.
    # If the workspace does not exist, continue as if no workspace had been defined.
    if isinstance(workspace, six.string_types):
        workspace = cat.get_workspace(workspace)

    if workspace is None:
        workspace = cat.get_default_workspace()

    if workspace:
        try:
            store = cat.get_xml('%s/%s.xml' %
                                (workspace.datastore_url[:-4], name))
        except FailedRequestError:
            try:
                store = cat.get_xml('%s/%s.xml' %
                                    (workspace.coveragestore_url[:-4], name))
            except FailedRequestError:
                try:
                    store = cat.get_xml('%s/%s.xml' %
                                        (workspace.wmsstore_url[:-4], name))
                except FailedRequestError:
                    raise FailedRequestError("No store found named: " + name)
        if store:
            if store.tag == 'dataStore':
                store = datastore_from_index(cat, workspace, store)
            elif store.tag == 'coverageStore':
                store = coveragestore_from_index(cat, workspace, store)
            elif store.tag == 'wmsStore':
                store = wmsstore_from_index(cat, workspace, store)

            return store
        else:
            raise FailedRequestError("No store found named: " + name)
    else:
        raise FailedRequestError("No store found named: " + name)
Example #10
def create_point_col_from_lat_lon(new_table_owner, table_name, lat_column,
                                  lng_column):
    """
    Using a new DataTable and specified lat/lng column names, map the points

    :param new_table_owner:
    :param table_name:
    :param lat_column:
    :param lng_column:
    :return:
    """

    LOGGER.info('create_point_col_from_lat_lon')
    assert isinstance(new_table_owner,
                      User), "new_table_owner must be a User object"
    assert table_name is not None, "table_name cannot be None"
    assert lat_column is not None, "lat_column cannot be None"
    assert lng_column is not None, "lng_column cannot be None"

    # ----------------------------------------------------
    # Retrieve the DataTable and check for lat/lng columns
    # ----------------------------------------------------
    try:
        dt = DataTable.objects.get(table_name=table_name)
    except DataTable.DoesNotExist:
        err_msg = "Could not find DataTable with name: %s" % (table_name)
        LOGGER.error(err_msg)
        return False, err_msg

    # ----------------------------------------------------
    # Latitude attribute
    # ----------------------------------------------------
    lat_col_attr = dt.get_attribute_by_name(
        standardize_column_name(lat_column))
    if lat_col_attr is None:
        err_msg = 'DataTable "%s" does not have a latitude column named "%s" (formatted: %s)'\
                  % (table_name, lat_column, standardize_column_name(lat_column))
        LOGGER.error(err_msg)
        return False, err_msg

    is_valid, err_msg = is_valid_lat_lng_attribute(lat_col_attr)
    if not is_valid:
        LOGGER.error(err_msg)
        return False, err_msg

    # ----------------------------------------------------
    # Longitude attribute
    # ----------------------------------------------------
    lng_col_attr = dt.get_attribute_by_name(
        standardize_column_name(lng_column))
    if lng_col_attr is None:
        err_msg = 'DataTable "%s" does not have a longitude column named "%s" (formatted: %s)'\
                  % (table_name, lng_column, standardize_column_name(lng_column))
        LOGGER.error(err_msg)
        return False, err_msg

    is_valid, err_msg = is_valid_lat_lng_attribute(lng_col_attr,
                                                   lng_check=True)
    if not is_valid:
        LOGGER.error(err_msg)
        return False, err_msg

    # ----------------------------------------------------
    # Start mapping record
    # ----------------------------------------------------
    lat_lnt_map_record = LatLngTableMappingRecord(datatable=dt,
                                                  lat_attribute=lat_col_attr,
                                                  lng_attribute=lng_col_attr)

    msg('create_point_col_from_lat_lon - 2')

    # ----------------------------------------------------
    # Yank bad columns out of the DataTable
    # ----------------------------------------------------
    # See https://github.com/IQSS/dataverse/issues/2949

    (success,
     row_cnt_or_err_msg) = remove_bad_lat_lng_numbers(lat_lnt_map_record)
    if not success:
        if lat_lnt_map_record.id:
            lat_lnt_map_record.delete()
        return False, 'Failed to remove bad lat/lng values.'

    # The bad rows were not mapped
    lat_lnt_map_record.unmapped_record_count = row_cnt_or_err_msg

    # ---------------------------------------------
    # Format SQL to:
    #   (a) Add the Geometry column to the Datatable
    #   (b) Populate the column using the lat/lng attributes
    #   (c) Create column index
    # ---------------------------------------------
    # (a) Add column SQL
    alter_table_sql = "ALTER TABLE %s ADD COLUMN geom geometry(POINT,4326);" % (
        table_name)  # PostGIS 2.x

    # (b) Populate column SQL
    update_table_sql = "UPDATE %s SET geom = ST_SetSRID(ST_MakePoint(%s,%s),4326);" \
                    % (table_name, lng_col_attr.attribute, lat_col_attr.attribute)
    #update_table_sql = "UPDATE %s SET geom = ST_SetSRID(ST_MakePoint(cast(%s AS float), cast(%s as float)),4326);" % (table_name, lng_column, lat_column)

    # (c) Index column SQL
    create_index_sql = "CREATE INDEX idx_%s_geom ON %s USING GIST(geom);" % (
        table_name, table_name)

    # ---------------------------------------------
    # Run the SQL
    # ---------------------------------------------
    try:
        conn = psycopg2.connect(
            get_datastore_connection_string(is_dataverse_db=False))

        cur = conn.cursor()

        LOGGER.debug('Run alter table SQL: %s', alter_table_sql)
        cur.execute(alter_table_sql)

        LOGGER.debug('Run update table SQL: %s', update_table_sql)
        cur.execute(update_table_sql)

        LOGGER.debug('Run create index SQL: %s', create_index_sql)
        cur.execute(create_index_sql)

        conn.commit()
        conn.close()
    except Exception as ex_obj:
        conn.close()
        err_msg = "Error Creating Point Column from Latitude and Longitude %s" % (
            ex_obj)
        LOGGER.error(err_msg)
        return False, err_msg

    msg('create_point_col_from_lat_lon - 4')

    # ------------------------------------------------------
    # Create the Layer in GeoServer from the table
    # ------------------------------------------------------
    try:
        cat = Catalog(settings.GEOSERVER_BASE_URL + "rest",\
                    settings.GEOSERVER_CREDENTIALS[0],\
                    settings.GEOSERVER_CREDENTIALS[1])
        #      "admin", "geoserver")
        workspace = cat.get_workspace("geonode")
        ds_list = cat.get_xml(workspace.datastore_url)
        datastores = [
            datastore_from_index(cat, workspace, n)
            for n in ds_list.findall("dataStore")
        ]
        #----------------------------
        # Find the datastore
        #----------------------------
        ds = None
        from geonode.maps.utils import get_db_store_name
        for datastore in datastores:
            if datastore.name == get_db_store_name():
                ds = datastore

        if ds is None:
            err_msg = "Datastore not found: '%s' (lat/lng)" % (
                settings.DB_DATASTORE_NAME)
            return False, err_msg
        ft = cat.publish_featuretype(table_name,
                                     ds,
                                     "EPSG:4326",
                                     srs="EPSG:4326")
        cat.save(ft)
    except Exception as e:
        lat_lnt_map_record.delete()
        traceback.print_exc(sys.exc_info())
        err_msg = "Error creating GeoServer layer for %s: %s" % (table_name,
                                                                 str(e))
        return False, err_msg

    msg('create_point_col_from_lat_lon - 5 - add style')

    # ------------------------------------------------------
    # Set the Layer's default Style
    # ------------------------------------------------------
    set_default_style_for_latlng_layer(cat, ft)

    # ------------------------------------------------------
    # Create the Layer in GeoNode from the GeoServer Layer
    # ------------------------------------------------------
    try:
        layer, created = Layer.objects.get_or_create(
            name=table_name,
            defaults={
                "workspace":
                workspace.name,
                "store":
                ds.name,
                "storeType":
                ds.resource_type,
                "typename":
                "%s:%s" %
                (workspace.name.encode('utf-8'), ft.name.encode('utf-8')),
                "title":
                dt.title or 'No title provided',
                #"name" : dt.title or 'No title provided',
                "abstract":
                dt.abstract or 'No abstract provided',
                "uuid":
                str(uuid.uuid4()),
                "owner":
                new_table_owner,
                #"bbox_x0": Decimal(ft.latlon_bbox[0]),
                #"bbox_x1": Decimal(ft.latlon_bbox[1]),
                #"bbox_y0": Decimal(ft.latlon_bbox[2]),
                #"bbox_y1": Decimal(ft.latlon_bbox[3])
            })
        #set_attributes(layer, overwrite=True)
    except Exception as e:
        traceback.print_exc(sys.exc_info())
        err_msg = "Error creating GeoNode layer for %s: %s" % (table_name,
                                                               str(e))
        return False, err_msg

    # ----------------------------------
    # Set default permissions (public)
    # ----------------------------------
    layer.set_default_permissions()

    # ------------------------------------------------------------------
    # Create LayerAttributes for the new Layer (not done in GeoNode 2.x)
    # ------------------------------------------------------------------
    (attributes_created,
     err_msg) = create_layer_attributes_from_datatable(dt, layer)
    if not attributes_created:
        LOGGER.error(err_msg)
        layer.delete()  # Delete the layer
        return False, "Sorry there was an error creating the Datatable. (s:ll)"

    # ----------------------------------
    # Save a related LatLngTableMappingRecord
    # ----------------------------------
    lat_lnt_map_record.layer = layer

    # ----------------------------------
    # Retrieve matched/unmatched counts
    # ----------------------------------
    # Get datatable feature count - total record to map
    (success_datatable,
     datatable_feature_count) = get_layer_feature_count(dt.table_name)

    # Get layer feature count - mapped records
    (success_layer, layer_feature_count) = get_layer_feature_count(layer.name)

    # Set Record counts
    if success_layer and success_datatable:
        lat_lnt_map_record.mapped_record_count = layer_feature_count
        new_missed_records = datatable_feature_count - layer_feature_count
        if lat_lnt_map_record.unmapped_record_count and lat_lnt_map_record.unmapped_record_count > 0:
            lat_lnt_map_record.unmapped_record_count += new_missed_records
        else:
            lat_lnt_map_record.unmapped_record_count = new_missed_records

    else:
        LOGGER.error('Failed to calculate Lat/Lng record counts')

    lat_lnt_map_record.save()

    return True, lat_lnt_map_record
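
A hedged sketch of how create_point_col_from_lat_lon might be called; the owner lookup, table name, and column names are hypothetical and assume the default Django User model implied by the assert at the top of the function:

    from django.contrib.auth.models import User

    owner = User.objects.get(username='admin')
    success, result = create_point_col_from_lat_lon(
        new_table_owner=owner,
        table_name='earthquake_sites_2016',
        lat_column='latitude',
        lng_column='longitude')

    if success:
        # result is the saved LatLngTableMappingRecord
        print(result.mapped_record_count, result.unmapped_record_count)
    else:
        # result is an error message string
        print(result)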
Example #11
def setup_join(new_table_owner, table_name, layer_typename,
               table_attribute_name, layer_attribute_name):
    """
    Setup the Table Join in GeoNode
    """
    LOGGER.info('setup_join')

    assert isinstance(new_table_owner,
                      User), "new_table_owner must be a User object"
    assert table_name is not None, "table_name cannot be None"
    assert layer_typename is not None, "layer_typename cannot be None"
    assert table_attribute_name is not None, "table_attribute_name cannot be None"
    assert layer_attribute_name is not None, "layer_attribute_name cannot be None"

    LOGGER.info('setup_join. Step (1): Retrieve the DataTable object')
    try:
        dt = DataTable.objects.get(table_name=table_name)
    except DataTable.DoesNotExist:
        err_msg = 'No DataTable object found for table_name "%s"' % table_name
        LOGGER.error(err_msg)
        return None, err_msg

    LOGGER.info('setup_join. Step (2): Retrieve the Layer object')

    try:
        layer = Layer.objects.get(typename=layer_typename)
    except Layer.DoesNotExist:
        err_msg = 'No Layer object found for layer_typename "%s"' % layer_typename
        LOGGER.error(err_msg)
        return None, err_msg

    LOGGER.info('setup_join. Step (3): Retrieve the DataTableAttribute object')
    try:
        table_attribute = DataTableAttribute.objects.get(\
                                        datatable=dt,
                                        attribute=table_attribute_name)
    except DataTableAttribute.DoesNotExist:
        err_msg = 'No DataTableAttribute object found for table/attribute (%s/%s)' \
                  % (dt,
                     table_attribute_name)
        LOGGER.error(err_msg)
        return None, err_msg

    LOGGER.info('setup_join. Step (4): Retrieve the LayerAttribute object')
    try:
        layer_attribute = LayerAttribute.objects.get(\
                                layer=layer,
                                attribute=layer_attribute_name)
    except LayerAttribute.DoesNotExist:
        err_msg = 'No LayerAttribute object found for layer/attribute (%s/%s)'\
                  % (layer, layer_attribute_name)
        LOGGER.error(err_msg)
        return None, err_msg

    LOGGER.info('setup_join. Step (5): Build SQL statement to create view')

    layer_name = layer.typename.split(':')[1]

    # ------------------------------------------------------------------
    # (5) Check if the layer and the table are in the same store (database)
    # ------------------------------------------------------------------
    if layer.store != dt.tablespace:
        err_msg = 'layer (%s) and tablespace (%s) must be in the same database.'\
            % (layer.store, dt.tablespace)
        LOGGER.error(err_msg)
        return None, err_msg

    # ------------------------------------------------------------------
    # (5a) Check if the join columns compatible
    # ------------------------------------------------------------------
    column_checker = ColumnChecker(\
                                layer_name,
                                layer_attribute.attribute,
                                dt.table_name,
                                table_attribute.attribute)
    (are_cols_compatible,
     err_msg) = column_checker.are_join_columns_compatible()
    if not are_cols_compatible:  # Doesn't look good, return an error message
        return None, err_msg

    # ------------------------------------------------------------------
    # (5b) Create SQL statement for the tablejoin
    # ------------------------------------------------------------------
    view_name = get_unique_viewname(dt.table_name, layer_name)

    # SQL to create the view
    view_sql = ('CREATE VIEW {0}'
                ' AS SELECT {1}.{2}, {3}.*'
                ' FROM {1} INNER JOIN {3}'
                ' ON {1}."{4}" = {3}."{5}";'.format(\
                    view_name,  # 0
                    layer_name, # 1
                    THE_GEOM_LAYER_COLUMN, # 2
                    dt.table_name,  # 3
                    layer_attribute.attribute, # 4
                    table_attribute.attribute)) # 5

    # Materialized view for next version of Postgres
    """view_sql = ('create materialized view {0} as'
                   ' select {1}.the_geom, {2}.*'
                   ' from {1} inner join {2}'
                   ' on {1}."{3}" = {2}."{4}";').format(\
                       view_name,
                       layer_name,
                       dt.table_name,
                       layer_attribute.attribute,
                       table_attribute.attribute)
    """
    LOGGER.info('setup_join. Step (6): Retrieve stats')

    # ------------------------------------------------------------------
    # Retrieve stats
    # ------------------------------------------------------------------
    matched_count_sql = ('select count({0}) from {1} where {1}.{0}'
                         ' in (select "{2}" from {3});').format(\
                            table_attribute.attribute,
                            dt.table_name,
                            layer_attribute.attribute,
                            layer_name)

    unmatched_count_sql = ('select count({0}) from {1} where {1}.{0}'
                           ' not in (select "{2}" from {3});').format(\
                            table_attribute.attribute,
                            dt.table_name,
                            layer_attribute.attribute,
                            layer_name)

    unmatched_list_sql = ('select {0} from {1} where {1}.{0}'
                          ' not in (select "{2}" from {3}) limit 500;').format(\
                            table_attribute.attribute,
                            dt.table_name,
                            layer_attribute.attribute,
                            layer_name)

    # ------------------------------------------------------------------
    # Create a TableJoin object
    # ------------------------------------------------------------------
    LOGGER.info('setup_join. Step (7): Create a TableJoin object')
    tj, created = TableJoin.objects.get_or_create(\
                                source_layer=layer,
                                datatable=dt,
                                table_attribute=table_attribute,
                                layer_attribute=layer_attribute,
                                view_name=view_name,
                                view_sql=view_sql)
    tj.save()
    msgt('table join created! :) %s' % tj.id)

    # ------------------------------------------------------------------
    # Create the View (and double view)
    # ------------------------------------------------------------------
    LOGGER.info('setup_join. Step (8): Create the View (and double view)')

    # Convenience method to drop a view
    #
    drop_view_by_name(view_name)

    try:
        conn = psycopg2.connect(get_datastore_connection_string())
        cur = conn.cursor()

        # Create the new view
        #
        msg('view_sql: %s' % view_sql)
        cur.execute(view_sql)
        #cur.execute(double_view_sql)   # For later version of postgres

        # Record the counts for matched records and
        # add unmatched records to the TableJoin object

        # Unmatched count
        cur.execute(matched_count_sql)
        tj.matched_records_count = cur.fetchone()[0]

        # Matched count
        cur.execute(unmatched_count_sql)
        tj.unmatched_records_count = int(cur.fetchone()[0])

        # Unmatched records list
        if tj.unmatched_records_count > 0:
            cur.execute(unmatched_list_sql)
            tj.unmatched_records_list = ",".join(
                ['%s' % r[0] for r in cur.fetchall()])

        conn.commit()
        cur.close()

        # If no records match, then delete the TableJoin
        #
        if tj.matched_records_count == 0:
            # Delete the table join
            tj.delete()

            # Create an error message, log it, and send it back
            err_msg = ('No records matched.  Make sure that you chose'
                       ' the correct column and that the chosen layer'
                       ' is in the same geographic area.')
            LOGGER.error(err_msg)
            return None, err_msg

    except Exception as ex_obj:
        tj.delete()  # comment this out when debugging to keep the TableJoin for inspection

        traceback.print_exc(sys.exc_info())
        err_msg = "Error Joining table %s to layer %s: %s" % (\
                        table_name,
                        layer_typename,
                        str(ex_obj))
        LOGGER.error(err_msg)

        if err_msg.find('You might need to add explicit type casts.') > -1:
            user_msg = "The chosen column is a different data type than the one expected."
        else:
            user_msg = err_msg

        return None, user_msg

    finally:
        conn.close()

    #--------------------------------------------------
    # Create the Layer in GeoServer from the view
    #--------------------------------------------------
    LOGGER.info(
        'setup_join. Step (9): Create the Layer in GeoServer from the view')
    try:
        LOGGER.info('setup_join. Step (9a): Find the datastore')
        #----------------------------
        # Find the datastore
        #----------------------------
        cat = Catalog(
            settings.GEOSERVER_BASE_URL + "rest",
            settings.GEOSERVER_CREDENTIALS[0],
            settings.GEOSERVER_CREDENTIALS[1])  # "admin", "geoserver")

        workspace = cat.get_workspace('geonode')
        ds_list = cat.get_xml(workspace.datastore_url)
        datastores = [
            datastore_from_index(cat, workspace, n)
            for n in ds_list.findall("dataStore")
        ]
        ds = None

        # Iterate through datastores
        #
        for datastore in datastores:
            #msg ('datastore name:', datastore.name)
            if datastore.name == layer.store:
                ds = datastore

        if ds is None:
            tj.delete()
            err_msg = "Datastore name not found: '%s' (join)" % settings.DB_DATASTORE_NAME
            LOGGER.error(str(ds))
            return None, err_msg

        # Publish the feature
        #
        LOGGER.info('setup_join. Step (9b): Publish the feature type')
        ft = cat.publish_featuretype(view_name, ds, layer.srs, srs=layer.srs)
        #ft = cat.publish_featuretype(double_view_name, ds, layer.srs, srs=layer.srs)

        LOGGER.info('setup_join. Step (9c): Save the feature type')
        cat.save(ft)

    except Exception as e:
        tj.delete()
        traceback.print_exc(sys.exc_info())
        err_msg = "Error creating GeoServer layer for %s: %s" % (view_name,
                                                                 str(e))
        LOGGER.error(err_msg)
        return None, err_msg

    # ------------------------------------------------------
    # Set the Layer's default Style
    # ------------------------------------------------------
    sld_success, err_msg = set_style_for_new_join_layer(cat, ft, layer)
    if not sld_success:
        return None, err_msg

    # ------------------------------------------------------------------
    # Create the Layer in GeoNode from the GeoServer Layer
    # ------------------------------------------------------------------
    LOGGER.info(
        'setup_join. Step (10): Create the Layer in GeoNode from the GeoServer Layer'
    )
    try:
        layer_params = {
            "workspace":
            workspace.name,
            "store":
            ds.name,
            "storeType":
            ds.resource_type,
            "typename":
            "%s:%s" %
            (workspace.name.encode('utf-8'), ft.name.encode('utf-8')),
            "title":
            dt.title or 'No title provided',
            "abstract":
            dt.abstract or 'No abstract provided',
            "uuid":
            str(uuid.uuid4()),
            "owner":
            new_table_owner,
            #"bbox_x0": Decimal(ft.latlon_bbox[0]),
            #"bbox_x1": Decimal(ft.latlon_bbox[1]),
            #"bbox_y0": Decimal(ft.latlon_bbox[2]),
            #"bbox_y1": Decimal(ft.latlon_bbox[3])
        }

        layer, created = Layer.objects.get_or_create(name=view_name,
                                                     defaults=layer_params)

        # Set default permissions (public)
        layer.set_default_permissions()
        #set_attributes(layer, overwrite=True)

        tj.join_layer = layer
        tj.save()
    except Exception as e:
        tj.delete()
        traceback.print_exc(sys.exc_info())
        err_msg = "Error creating GeoNode layer for %s: %s" % (view_name,
                                                               str(e))
        LOGGER.error(err_msg)
        return None, err_msg

    # ------------------------------------------------------------------
    # Create LayerAttributes for the new Layer (not done in GeoNode 2.x)
    # ------------------------------------------------------------------
    LOGGER.info(
        'setup_join. Step (11): Create Layer Attributes from the Datatable')
    (attributes_created,
     err_msg) = create_layer_attributes_from_datatable(dt, layer)
    if not attributes_created:
        LOGGER.error(err_msg)
        tj.delete()  # Delete the table join object
        return None, "Sorry there was an error creating the Datatable (s11)"

    return tj, ""