Example #1
def sync_geometry_columns(schema=None, table=None):
    """
    Adds one or more entries to the PostGIS geometry_columns table
    :param schema: Optional database schema to which to limit the search
    :param table: Optional table name to which to limit the search
    :return: None
    """
    tables_with_geometry = InformationSchema.objects.tables_with_geometry(schema=schema, table=table)

    # One autocommit connection serves every table; there is no need to
    # reconnect inside the loop
    conn = psycopg2.connect(**pg_connection_parameters(settings.DATABASES['default']))
    conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
    cursor = conn.cursor()

    for information_scheme in tables_with_geometry:
        sql = "select ST_CoordDim({2}), ST_SRID({2}), ST_GeometryType({2}) from {1}.{0}".format(information_scheme.table_name, information_scheme.table_schema, information_scheme.column_name)
        ret = cursor.execute(sql)
        if ret and len(ret) > 0:
            coord, srid, geom_type = ret[0]
        else:
            coord, srid, geom_type = (2, 4326, 'GEOMETRY')
        geometry_record, new_record = GeometryColumns.objects.get_or_create(
            f_table_name=information_scheme.table_name,
            f_geometry_column=information_scheme.column_name,
            f_table_schema=information_scheme.table_schema,
            defaults=dict(
                coord_dimension=coord,
                srid=srid,
                type=geom_type,
            ))
        if not new_record:
            geometry_record.coord_dimension = coord
            geometry_record.srid = srid
            geometry_record.type = geom_type
            geometry_record.save()
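
Every example in this listing relies on a pg_connection_parameters helper that is never shown. A minimal sketch of what it plausibly does, assuming it simply maps a Django DATABASES entry onto psycopg2.connect() keyword arguments (the project's real helper may differ):

def pg_connection_parameters(database_settings):
    # Hypothetical mapping from Django settings keys to psycopg2 keyword arguments
    return dict(
        database=database_settings['NAME'],
        user=database_settings['USER'],
        password=database_settings['PASSWORD'],
        host=database_settings.get('HOST') or 'localhost',
        port=database_settings.get('PORT') or 5432,
    )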
Example #2
    def import_db(self, database_name, local_dump_file):
        # Build connection parameters for the target database
        main_db = pg_connection_parameters(settings.DATABASES['default'])
        db = merge(main_db, dict(database=database_name))

        db_conn_string = "--host={host} --port={port} --user={user}".format(**db)
        # Create the target database, then enable the extensions it needs
        self.run_as_pg('createdb {db_conn_string} {name}'.format(
            db_conn_string=db_conn_string,
            name=database_name), **db)

        self.run_as_pg('psql {db_conn_string} -c "CREATE EXTENSION IF NOT EXISTS POSTGIS" {name}'.format(
            db_conn_string=db_conn_string,
            name=database_name), **db)

        self.run_as_pg('psql {db_conn_string} -c "CREATE EXTENSION IF NOT EXISTS DBLINK" {name}'.format(
            db_conn_string=db_conn_string,
            name=database_name), **db)

        self.run_as_pg('''psql {db_conn_string} -c 'ALTER DATABASE {name} SET search_path = "$user",public,postgis;' postgres'''.format(
            db_conn_string=db_conn_string,
            name=database_name), **db)

        self.run_as_pg('psql {db_conn_string} -f {local_dump_file} {name}'.format(
            db_conn_string=db_conn_string,
            local_dump_file=local_dump_file,
            name=database_name), **db)
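
The run_as_pg helper above is also assumed rather than shown. A plausible sketch, in which the Postgres password travels via the PGPASSWORD environment variable so psql and createdb do not prompt (an assumption, not the project's actual implementation):

import os
import subprocess

def run_as_pg(self, command, **db):
    # Hypothetical: run a shell command with the password supplied in the environment
    env = dict(os.environ, PGPASSWORD=str(db.get('password', '')))
    subprocess.check_call(command, shell=True, env=env)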
Example #3
def report_sql_values(pSQL, fetch_type):
    # with transaction.commit_manually():
    try:
        conn = psycopg2.connect(**pg_connection_parameters(settings.DATABASES['default']))
        conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        curs = conn.cursor()
    except Exception as E:
        print(str(E))
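
The snippet above is cut off before it uses its arguments. A plausible continuation (an assumption, not the original code) would execute pSQL inside the try block and fetch according to fetch_type:

        curs.execute(pSQL)
        if fetch_type == 'fetchall':
            return curs.fetchall()
        return curs.fetchone()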
Example #4
    def drop_schema(self, schema, connection=connection):
        if self.schema_exists(schema):
            logger.info("Dropping schema %s" % schema)

            conn = psycopg2.connect(**pg_connection_parameters(settings.DATABASES['default']))
            conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
            # Use the autocommit psycopg2 connection created above, not the
            # Django connection argument, so the drop takes effect immediately
            cursor = conn.cursor()
            cursor.execute('drop schema {0} cascade'.format(schema))
            logger.info("Schema %s dropped" % schema)
Example #5
def copy_from_text_to_db(text_file, table_name):

    conn = psycopg2.connect(**pg_connection_parameters(settings.DATABASES['default']))
    conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
    curs = conn.cursor()
    try:
        logger.debug("Text file: {0}, Table name: {1}".format(text_file, table_name))
        # text_file must be an open, file-like object; COPY reads
        # tab-separated rows from it by default
        curs.copy_from(text_file, table_name)
    except Exception as E:
        print(str(E))
        raise Exception('Original Message: {0}'.format(E))
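
Usage sketch (the path and table name here are hypothetical): the rows of the input file must match the table's columns.

with open('/tmp/parcels.tsv') as f:
    copy_from_text_to_db(f, 'parcels')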
Example #6
def execute_sql(pSQL):

    conn = psycopg2.connect(**pg_connection_parameters(settings.DATABASES['default']))
    conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
    curs = conn.cursor()

    try:
        logger.debug("executing custom sql: {0}".format(pSQL))
        curs.execute(pSQL)
    except Exception as E:
        print(str(E))
        raise Exception('SQL: {0}. Original Message: {1}'.format(pSQL, E))
Example #7
def drop_db(database_name):
    """
        Drops and recreates the given database, which must be a database that present
        in the default database server
    """

    # Try to connect
    db = pg_connection_parameters(settings.DATABASES['default'])
    conn = psycopg2.connect(**db)

    cur = conn.cursor()
    # DROP DATABASE cannot run inside a transaction block, hence autocommit
    conn.set_isolation_level(0)
    cur.execute("""DROP DATABASE %s""" % database_name)
Example #8
    def create_schema(self, schema, connection=connection):
        if not self.schema_exists(schema):
            logger.info("Creating schema %s" % schema)

            conn = psycopg2.connect(**pg_connection_parameters(settings.DATABASES['default']))
            conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
            # Use the autocommit psycopg2 connection created above, not the
            # Django connection argument, so the schema appears immediately
            cursor = conn.cursor()
            cursor.execute('create schema {0}'.format(schema))
            logger.info("Schema %s created" % schema)
            # The autocommit connection creates the schema immediately; also
            # commit any managed Django transaction for good measure
            if transaction.is_managed():
                transaction.commit()
        else:
            logger.info("Schema %s already exists" % schema)
Example #9
def register_geometry_columns(schema=None):
    # Truncate geometry_columns, then find every table that has a
    # wkb_geometry column, optionally limited to the given schema
    select_tables = "Truncate geometry_columns cascade; \n" \
                    "select table_name, table_schema from information_schema.columns " \
                    "where column_name = 'wkb_geometry'"
    if schema:
        select_tables += " and table_schema = '{0}'".format(schema)
    select_tables += ";"

    params = pg_connection_parameters(settings.DATABASES['default'])
    connection = psycopg2.connect(**params)
    gCurs = connection.cursor()

    try:
        print(select_tables)
        gCurs.execute(select_tables)
    except Exception as E:
        print(str(E))
        raise
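
On PostGIS 1.x, where geometry_columns is a real table, the same bookkeeping can be delegated to the built-in populate_geometry_columns() function; a sketch of that alternative:

def rebuild_geometry_columns():
    conn = psycopg2.connect(**pg_connection_parameters(settings.DATABASES['default']))
    curs = conn.cursor()
    curs.execute("select populate_geometry_columns();")
    conn.commit()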
Example #10
    def import_data(self, **kwargs):
        """
            Imports data from an external source to create the test data
            :return a two item tuple containing the region that was imported and a list of the imported projects
        """

        # Calculate a sample lat/lon box of the config_entity
        config_entity = self.config_entity
        if self.test:
            bounds = chop_geom(config_entity.bounds, 0.90)
            logger.info(u"Creating subselection with extents: {0}. This will be used to crop any table that doesn't have a sample version".format(bounds))

        conn = psycopg2.connect(**pg_connection_parameters(settings.DATABASES['default']))
        conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        cursor = conn.cursor()

        for db_entity in self.db_entities:

            # This is the index on wkb_geometry.
            spatial_index_name = '{schema}_{key}_geom_idx'.format(schema=db_entity.schema, key=db_entity.key)

            table = db_entity.table

            if db_entity.has_file_url:
                # Remove any table of the same name from the import schema. This is unlikely since imported
                # tables have timestamps
                drop_table('"%s"."%s"' % (settings.IMPORT_SCHEMA, db_entity.key))
                sql_file_path = file_url_to_path(db_entity.url)
                # Create a command that pipes shp2pgsql to psql
                db_entity.srid = db_entity.srid or '4326'
                logger.info("verifying SRID {0}".format(db_entity.srid))
                verify_srid(db_entity.srid)

                # Create the import schema if needed
                PGNamespace.objects.create_schema(settings.IMPORT_SCHEMA)

                # Import the table
                import_sql_command = '/usr/bin/psql {0} -f {1}'.format(self.target_database_connection, sql_file_path)
                stdin = "{0}\n{1}".format(self.arguments.get('password', None), self.target_database.get('PASSWORD', None))
                results = self.command_execution.run(import_sql_command, stdin=stdin)
                if results.returncode:
                    raise Exception(results.stderr.text)

                # We expect a table in the public schema with a name based on db_entity.key
                # Move the table from the public schema to the db_entity schema
                move_to_schema = "alter table {0}.{1} set schema {2};".format(settings.IMPORT_SCHEMA, db_entity.key, db_entity.schema)
                logger.info("Moving import file table to schema: %s" % move_to_schema)
                cursor.execute(move_to_schema)
                # Drop the constraint that enforces the srid of the wkb_geometry if one exists
                drop_constraint = '''alter table {0}.{1} drop constraint if exists enforce_srid_wkb_geometry'''.format(db_entity.schema, db_entity.key)
                logger.info("Dropping constraint on wkb_geometry: %s" % drop_constraint)
                cursor.execute(drop_constraint)

                # Note we're not creating an index on wkb_geometry
                # here because imported files already have an index
                # created.

            elif db_entity.has_db_url:
                # The import database currently stores tables as
                # public.[config_entity.key]_[feature_class._meta.db_table (with schema removed)][_sample (for samples)]
                #
                # We always use the table name without the word sample for the target table name
                if settings.USE_SAMPLE_DATA_SETS or self.test:
                    source_table = "{0}_{1}_{2}".format(
                        config_entity.import_key or config_entity.key, db_entity.table, 'sample')
                else:
                    source_table = "{0}_{1}".format(config_entity.import_key or config_entity.key, db_entity.table)

                connection_dict = postgres_url_to_connection_dict(db_entity.url)
                self._dump_tables_to_target(
                    '-t %s' % source_table,
                    source_schema='public',
                    target_schema=db_entity.schema,
                    source_table=source_table,
                    target_table=table,
                    connection_dict=connection_dict)

                # Create a spatial index
                spatial_index = '''create index {index_name} on {schema}.{key} using GIST (wkb_geometry);'''.format(
                    index_name=spatial_index_name,
                    schema=db_entity.schema, key=db_entity.key)
                cursor.execute(spatial_index)

            # Whether the table comes from our server or an upload, we want to transform the SRID to 4326
            transform_to_4326 = 'ALTER TABLE {schema}.{table} ALTER COLUMN wkb_geometry ' \
                                'TYPE Geometry(geometry, 4326) ' \
                                'USING ST_Transform(ST_Force_2d(wkb_geometry), 4326);'.format
            logger.info("Transforming to 4326: %s" % transform_to_4326(schema=db_entity.schema, table=db_entity.table))

            cursor.execute(transform_to_4326(schema=db_entity.schema, table=db_entity.table))

            # Now cluster the data and vacuum so that future joins are faster:
            # * CLUSTER rewrites the data on disk so that rows that are spatially near each
            #   other are also near each other on disk
            # * VACUUM cleans up disk space, removing sparse holes on disk.
            # * ANALYZE regenerates statistics about wkb_geometry so that the query planner can make
            #   better decisions.

            logger.info('Clustering %s.%s to optimize spatial joins', db_entity.schema, table)
            cluster = 'CLUSTER {index_name} ON {target_schema}.{target_table};'.format(
                index_name=spatial_index_name,
                target_schema=db_entity.schema,
                target_table=table)
            cursor.execute(cluster)

            logger.info('Vacuuming and analyzing %s.%s.', db_entity.schema, table)
            analyze = 'VACUUM ANALYZE {target_schema}.{target_table};'.format(
                target_schema=db_entity.schema,
                target_table=table)

            cursor.execute(analyze)

            logger.info("Finished importing data for DbEntity table {0}.{1}".format(db_entity.schema, db_entity.key))
Example #11
    def create_primary_key_column_from_another_column(cls, schema, table, primary_key_column, from_column=None):
        """
            Adds the column of the given type to the given table if absent
        :param schema: The database schema name
        :param table: The table name
        :param primary_key_column: Name of primary key column to create. If a primary key already exists it will be
        renamed from this, unless from_column is specified, in which case the existing primary_key will lose its constraint
        """
        full_tablename = '"{schema}"."{table}"'.format(schema=schema, table=table)
        conn = psycopg2.connect(**pg_connection_parameters(settings.DATABASES['default']))
        conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        cursor = conn.cursor()

        existing_primary_key = InformationSchema.get_primary_key_name(schema, table)
        if existing_primary_key:
            logger.info('Found existing primary key %s' % existing_primary_key)

        if not InformationSchema.objects.has_column(schema, table, primary_key_column):
            # If not, create a primary key or rename the existing one,
            # copying values from from_column to the new primary_key_column if given
            if existing_primary_key and not from_column:
                # Rename the primary key to primary_key_column and end
                alter_source_id_sql = 'alter table {full_tablename} rename column {existing_primary_key} to {primary_key_column}'.format(
                    full_tablename=full_tablename, existing_primary_key=existing_primary_key, primary_key_column=primary_key_column)
                logger.info('Existing primary key exists and no from column is specified. Executing: %s' % alter_source_id_sql)
                cursor.execute(alter_source_id_sql)
                return

            if from_column:
                # Create a new primary key column without values
                create_column_sql = 'alter table {full_tablename} add column {primary_key_column} integer'.format(
                    full_tablename=full_tablename, primary_key_column=primary_key_column)
                logger.info('From column is specified to be primary key source. Executing: %s' % create_column_sql)
                cursor.execute(create_column_sql)
                # Copy values from the from_column, always casting to integer
                update_sql = 'update {full_tablename} set {primary_key_column} = cast({from_column} AS integer)'.format(
                    full_tablename=full_tablename, primary_key_column=primary_key_column, from_column=from_column)
                logger.info('Copying values from column to primary key. Executing: %s' % update_sql)
                cursor.execute(update_sql)
            else:
                # Populate with a serial primary key
                alter_source_id_sql = 'alter table {full_tablename} add column {primary_key_column} serial primary key'.format(
                    full_tablename=full_tablename, primary_key_column=primary_key_column)
                logger.info('Adding serial primary key column. Executing: %s' % alter_source_id_sql)
                cursor.execute(alter_source_id_sql)
            # Drop the original_primary_key column if it exists
            if existing_primary_key:
                alter_source_id_sql = 'alter table {full_tablename} drop column {existing_primary_key}'.format(
                    full_tablename=full_tablename, existing_primary_key=existing_primary_key)
                logger.info('Existing primary key being removed. Executing: %s' % alter_source_id_sql)
                cursor.execute(alter_source_id_sql)
            if from_column:
                # Create the primary key constraint if we haven't yet
                alter_source_id_sql = 'alter table {full_tablename} add constraint {table}_{schema}_{primary_key_column}_pk primary key ({primary_key_column})'.format(
                    full_tablename=full_tablename, table=table, schema=schema, primary_key_column=primary_key_column)
                logger.info('Adding constraint to primary key. Executing: %s' % alter_source_id_sql)
                cursor.execute(alter_source_id_sql)
        elif existing_primary_key != primary_key_column:
            # If there is a column matching primary_key_column that isn't the primary key, we need to rename
            # that column to something unique and then rename existing_primary_key to primary_key_column

            # Find a unique name to which to rename primary_key_column (e.g. id to id_1, id_2, etc.)
            unique_column = increment_key(primary_key_column)
            while InformationSchema.objects.has_column(schema, table, unique_column):
                unique_column = increment_key(unique_column)

            # Rename the primary_key_column
            rename_primary_key_column_name_sql = 'alter table {full_tablename} rename column {primary_key_column} to {unique_column}'.format(
                full_tablename=full_tablename, primary_key_column=primary_key_column, unique_column=unique_column)
            logger.info('Existing column with primary key name exists that needs to be renamed: %s' % rename_primary_key_column_name_sql)
            cursor.execute(rename_primary_key_column_name_sql)

            # Rename the existing_primary_key to primary_key_column (e.g. rename ogc_fid to id for uploaded tables)
            rename_existing_primary_key_sql = 'alter table {full_tablename} rename column {existing_primary_key} to {primary_key_column}'.format(
                full_tablename=full_tablename, existing_primary_key=existing_primary_key, primary_key_column=primary_key_column)
            logger.info('Existing primary key exists that needs to be renamed to desired primary key column name. Executing: %s' % rename_existing_primary_key_sql)
            cursor.execute(rename_existing_primary_key_sql)
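
increment_key is assumed above to produce a fresh candidate name on each call (id -> id_1 -> id_2, ...); a plausible sketch:

import re

def increment_key(key):
    # Hypothetical: append a numeric suffix, or bump an existing one
    match = re.match(r'^(.*)_(\d+)$', key)
    if match:
        return '{0}_{1}'.format(match.group(1), int(match.group(2)) + 1)
    return '{0}_1'.format(key)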