Example #1
    def insert_data_from_file(self, filename):
        """Use PostgreSQL's "COPY FROM" statement to perform a bulk insert."""
        self.get_cursor()
        ct = len([True for c in self.table.columns if c[1][0][:3] == "ct-"]) != 0
        if (([self.table.cleanup.function, self.table.delimiter,
              self.table.header_rows] == [no_cleanup, ",", 1])
            and not self.table.fixed_width
            and not ct
            and (not hasattr(self.table, "do_not_bulk_insert") or not self.table.do_not_bulk_insert)):
            columns = self.table.get_insert_columns()
            filename = os.path.abspath(filename)
            statement = """
COPY """ + self.table_name() + " (" + columns + """)
FROM '""" + filename.replace("\\", "\\\\") + """'
WITH DELIMITER ','
CSV HEADER;"""
            try:
                self.execute("BEGIN")
                self.execute(statement)
                self.execute("COMMIT")
            except BaseException:
                self.connection.rollback()
                return Engine.insert_data_from_file(self, filename)
        else:
            return Engine.insert_data_from_file(self, filename)
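
For comparison, a minimal standalone sketch (not part of the engine) of the same bulk load done with psycopg2's client-side copy_expert, which streams the file over the connection instead of requiring the server to read it; the DSN, table, and column names below are placeholders.

import psycopg2

def bulk_copy_csv(dsn, table, columns, csv_path, delimiter=","):
    """Stream a local CSV file into `table` with COPY ... FROM STDIN."""
    sql = ("COPY " + table + " (" + ", ".join(columns) + ") "
           "FROM STDIN WITH (FORMAT csv, DELIMITER '" + delimiter + "', HEADER true)")
    with psycopg2.connect(dsn) as conn:
        with conn.cursor() as cur, open(csv_path) as data_file:
            cur.copy_expert(sql, data_file)  # the file is read on the client side

# bulk_copy_csv("dbname=testdb user=postgres", "public.sites",
#               ["site_id", "site_name"], "/tmp/sites.csv")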
Example #2
    def insert_raster(self, path=None, srid=4326):
        """Import Raster into Postgis Table
        Uses raster2pgsql -Y -M -d -I -s <SRID> <PATH> <SCHEMA>.<DBTABLE>
        | psql -d <DATABASE>
        The sql processed by raster2pgsql is run
        as psql -U postgres -d <gisdb> -f <elev>.sql
        -Y uses COPY to insert data,
        -M VACUUM table,
        -d  Drops the table, recreates insert raster data
        """

        if not path:
            path = Engine.format_data_dir(self)

        raster_sql = "raster2pgsql -Y -M -d -I -s {SRID} \"{path}\" -F -t 100x100 {SCHEMA_DBTABLE}".format(
            SRID=srid,
            path=os.path.normpath(path),
            SCHEMA_DBTABLE=self.table_name())

        cmd_string = """ | psql -U {USER} -d {DATABASE} --port {PORT} --host {HOST} > {nul_dev} """.format(
            USER=self.opts["user"],
            DATABASE=self.opts["database"],
            PORT=self.opts["port"],
            HOST=self.opts["host"],
            nul_dev=os.devnull
        )

        cmd_stmt = raster_sql + cmd_string
        if self.debug:
            print(cmd_stmt)
        Engine.register_tables(self)
        try:
            subprocess.call(cmd_stmt, shell=True)
        except BaseException:
            pass
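
A sketch of the same raster2pgsql | psql pipeline built with explicit subprocess pipes instead of shell=True; it assumes both executables are on PATH, and the table, database, and user names are placeholders.

import subprocess

def load_raster(tif_path, schema_table, srid=4326, database="testdb", user="postgres"):
    raster2pgsql = subprocess.Popen(
        ["raster2pgsql", "-Y", "-M", "-d", "-I", "-s", str(srid),
         "-F", "-t", "100x100", tif_path, schema_table],
        stdout=subprocess.PIPE)
    psql = subprocess.Popen(["psql", "-U", user, "-d", database],
                            stdin=raster2pgsql.stdout,
                            stdout=subprocess.DEVNULL)
    raster2pgsql.stdout.close()  # let raster2pgsql receive SIGPIPE if psql exits early
    psql.communicate()
    return psql.returncode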
Example #3
    def insert_data_from_file(self, filename):
        """Calls MySQL "LOAD DATA LOCAL INFILE" statement to perform a bulk 
        insert."""
        self.get_cursor()
        ct = len([True for c in self.table.columns if c[1][0][:3] == "ct-"]) != 0
        if (self.table.cleanup.function == no_cleanup 
            and not self.table.fixed_width 
            and not ct
            and (not hasattr(self.table, "do_not_bulk_insert") or not self.table.do_not_bulk_insert)
            ):
            print ("Inserting data from " + os.path.basename(filename) + "...")
            
            columns = self.table.get_insert_columns()
            statement = """        
LOAD DATA LOCAL INFILE '""" + filename.replace("\\", "\\\\") + """'
INTO TABLE """ + self.table_name() + """
FIELDS TERMINATED BY '""" + self.table.delimiter + """'
OPTIONALLY ENCLOSED BY '"'
LINES TERMINATED BY '\\n'
IGNORE """ + str(self.table.header_rows) + """ LINES
(""" + columns + ")"
            try:
                self.cursor.execute(statement)
            except Exception as e:
                print "Failed bulk insert (%s), inserting manually" % e
                self.disconnect() # If the execute fails the database connection can get hung up
                return Engine.insert_data_from_file(self, filename)
        else:
            return Engine.insert_data_from_file(self, filename)
Example #4
 def create_db(self):
     """Creates the database"""
     try:
         Engine.create_db(self)
     except BaseException:
         self.connection.rollback()
Example #5
    def insert_data_from_file(self, filename):
        """Call MySQL "LOAD DATA LOCAL INFILE" statement to perform a bulk insert."""

        mysql_set_autocommit_off = """SET autocommit=0; SET UNIQUE_CHECKS=0; SET FOREIGN_KEY_CHECKS=0; SET sql_log_bin=0;"""
        mysql_set_autocommit_on = """SET GLOBAL innodb_flush_log_at_trx_commit=1; COMMIT; SET autocommit=1; SET unique_checks=1; SET foreign_key_checks=1;"""

        self.get_cursor()
        ct = len([True for c in self.table.columns if c[1][0][:3] == "ct-"]) != 0
        if (self.table.cleanup.function == no_cleanup and
                not self.table.fixed_width and
                not ct and
                (not hasattr(self.table, "do_not_bulk_insert") or not self.table.do_not_bulk_insert)):

            print("Inserting data from " + os.path.basename(filename) + "...")

            columns = self.table.get_insert_columns()
            statement = """
LOAD DATA LOCAL INFILE '""" + filename.replace("\\", "\\\\") + """'
INTO TABLE """ + self.table_name() + """
FIELDS TERMINATED BY '""" + self.table.delimiter + """'
OPTIONALLY ENCLOSED BY '"'
LINES TERMINATED BY '\\n'
IGNORE """ + str(self.table.header_rows) + """ LINES
(""" + columns + ")"
            try:
                self.cursor.execute(mysql_set_autocommit_off)
                self.cursor.execute(statement)

                self.cursor.execute(mysql_set_autocommit_on)
            except Exception as e:
                self.disconnect()  # If the execute fails the database connection can get hung up
                self.cursor.execute(mysql_set_autocommit_on)
                return Engine.insert_data_from_file(self, filename)
        else:
            return Engine.insert_data_from_file(self, filename)
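
A minimal sketch of the same LOAD DATA LOCAL INFILE call issued through a plain pymysql connection (local_infile must also be enabled on the server); the connection parameters and table name are placeholders.

import pymysql

def bulk_load_mysql(csv_path, table, delimiter=",", header_rows=1):
    connection = pymysql.connect(host="localhost", user="root", password="",
                                 database="testdb", local_infile=True)
    statement = (
        "LOAD DATA LOCAL INFILE '" + csv_path.replace("\\", "\\\\") + "' "
        "INTO TABLE " + table +
        " FIELDS TERMINATED BY '" + delimiter + "'"
        " OPTIONALLY ENCLOSED BY '\"'"
        " LINES TERMINATED BY '\\n'"
        " IGNORE " + str(header_rows) + " LINES")
    try:
        with connection.cursor() as cursor:
            cursor.execute(statement)
        connection.commit()
    finally:
        connection.close()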
Example #6
    def insert_data_from_file(self, filename):
        """Use executemany to perform a high speed bulk insert

        Checks to see if a given file can be bulk inserted, and if so loads
        it in chunks and inserts those chunks into the database using
        executemany.

        """
        CHUNK_SIZE = 1000000
        self.get_cursor()
        ct = len([True for c in self.table.columns if c[1][0][:3] == "ct-"]) != 0
        if (([self.table.cleanup.function, self.table.header_rows] == [no_cleanup, 1])
            and not self.table.fixed_width
            and not ct
            and (not hasattr(self.table, "do_not_bulk_insert") or not self.table.do_not_bulk_insert)
            ):
            columns = self.table.get_insert_columns()
            filename = os.path.abspath(filename)
            try:
                bulk_insert_statement = self.get_bulk_insert_statement()
                with open(filename, 'r') as data_file:
                    data_chunk = data_file.readlines(CHUNK_SIZE)
                    del data_chunk[:self.table.header_rows]
                    while data_chunk:
                        data_chunk_split = [row.split(self.table.delimiter)
                                            for row in data_chunk]
                        self.cursor.executemany(bulk_insert_statement, data_chunk_split)
                        data_chunk = data_file.readlines(CHUNK_SIZE)
                self.connection.commit()
            except:
                self.connection.rollback()
                return Engine.insert_data_from_file(self, filename)
        else:
            return Engine.insert_data_from_file(self, filename)
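
The same chunked readlines/executemany pattern works against any DB-API connection; below is a self-contained sketch using the standard-library sqlite3 module, with an illustrative two-column table (the CSV is assumed to have matching columns).

import sqlite3

CHUNK_SIZE = 1000000  # size hint, in bytes, passed to readlines()

def chunked_bulk_insert(db_path, csv_path, delimiter=",", header_rows=1):
    connection = sqlite3.connect(db_path)
    connection.execute("CREATE TABLE IF NOT EXISTS sites (site_id TEXT, site_name TEXT)")
    insert_statement = "INSERT INTO sites (site_id, site_name) VALUES (?, ?)"
    with open(csv_path) as data_file:
        data_chunk = data_file.readlines(CHUNK_SIZE)
        del data_chunk[:header_rows]  # skip the header only once
        while data_chunk:
            rows = [line.rstrip("\r\n").split(delimiter) for line in data_chunk]
            connection.executemany(insert_statement, rows)
            data_chunk = data_file.readlines(CHUNK_SIZE)
    connection.commit()
    connection.close()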
Example #7
    def create_table(self):
        """Create the table by creating an empty csv file"""
        self.auto_column_number = 1
        table_path = os.path.join(self.opts["data_dir"], self.table_name())
        self.file = open_fw(table_path)
        self.output_file = open_csvw(self.file)
        column_list = self.table.get_insert_columns(join=False, create=True)
        self.output_file.writerow([u'{}'.format(val) for val in column_list])
        self.table_names.append((self.file, table_path))

        # Register all tables created to enable
        # testing python files having custom download function
        Engine.register_tables(self)
Example #8
 def find_file(self, filename):
     """Checks for the given file and adds it to the list of all files"""
     result = Engine.find_file(self, filename)
     if not hasattr(self, "all_files"):
         self.all_files = set()
     if result:
         self.all_files.add(result)
     return result
Example #9
 def format_insert_value(self, value, datatype):
     v = Engine.format_insert_value(self, value, datatype)
     if v == 'null': return ""
     try:
         if len(v) > 1 and v[0] == v[-1] == "'":
             v = '"%s"' % v[1:-1]
     except:
         pass
     return v
Example #10
    def insert_vector(self, path=None, srid=4326):
        """Import Vector into Postgis Table

        -- Enable PostGIS (includes raster)
        CREATE EXTENSION postgis;

        -- Enable Topology
        CREATE EXTENSION postgis_topology;

        -- fuzzy matching needed for Tiger
        CREATE EXTENSION fuzzystrmatch;

        -- Enable US Tiger Geocoder
        CREATE EXTENSION postgis_tiger_geocoder;
        Uses shp2pgsql -I -s <SRID> <PATH/TO/SHAPEFILE> <SCHEMA>.<DBTABLE>
        | psql -U postgres -d <DBNAME>

        The SQL produced by shp2pgsql is run
        as psql -U postgres -d <DBNAME>
        e.g. shp2pgsql -c -D -s 4269 -i -I
        """
        if not path:
            path = Engine.format_data_dir(self)
        vector_sql = "shp2pgsql -d -I -W \"{encd}\"  -s {SRID} \"{path}\" \"{SCHEMA_DBTABLE}\"".format(
            encd=ENCODING,
            SRID=srid,
            path=os.path.normpath(path),
            SCHEMA_DBTABLE=self.table_name())

        cmd_string = """ | psql -U {USER} -d {DATABASE} --port {PORT} --host {HOST} > {nul_dev} """.format(
            USER=self.opts["user"],
            DATABASE=self.opts["database"],
            PORT=self.opts["port"],
            HOST=self.opts["host"],
            nul_dev=os.devnull
        )
        cmd_stmt = vector_sql + cmd_string
        if self.debug:
            print(cmd_stmt)
        Engine.register_tables(self)
        try:
            subprocess.call(cmd_stmt, shell=True)
        except BaseException:
            pass
Example #11
 def format_insert_value(self, value, datatype):
     if datatype == "bool":
         try:
             if int(value) == 1:
                 return "TRUE"
             elif int(value) == 0:
                 return "FALSE"
         except:
             pass
     return Engine.format_insert_value(self, value, datatype)
Example #12
 def format_insert_value(self, value, datatype):
     """Format value for an insert statement."""
     if datatype == "bool":
         try:
             if int(value) == 1:
                 return "TRUE"
             elif int(value) == 0:
                 return "FALSE"
         except BaseException:
             pass
     return Engine.format_insert_value(self, value, datatype)
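
Illustrative behaviour of the bool branch above; anything that does not parse as 0 or 1 falls through to the parent implementation (the engine instance name below is hypothetical, and the fall-through result depends on Engine.format_insert_value):

# engine.format_insert_value("1", "bool")    -> "TRUE"
# engine.format_insert_value("0", "bool")    -> "FALSE"
# engine.format_insert_value("yes", "bool")  -> whatever Engine.format_insert_value returns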
Example #13
 def format_insert_value(self, value, datatype):
     """Formats a value for an insert statement"""
     v = Engine.format_insert_value(self, value, datatype, escape=False, processed=True)
     if v == 'null':
         return ""
     try:
         if len(v) > 1 and v[0] == v[-1] == "'":
             v = '"%s"' % v[1:-1]
     except:
         pass
     return v
Example #14
 def format_insert_value(self, value, datatype):
     """Formats a value for an insert statement"""
     v = Engine.format_insert_value(self, value, datatype)
     if v == 'null':
         return ""
     try:
         if len(v) > 1 and v[0] == v[-1] == "'":
             v = '"%s"' % v[1:-1]
     except BaseException:
         pass
     return v
Example #15
 def format_insert_value(self, value, datatype):
     """Format value for an insert statement."""
     v = Engine.format_insert_value(self, value, datatype)
     if v is None:
         return ""
     try:
         if len(v) > 1 and v[0] == v[-1] == "'":
             v = '"%s"' % v[1:-1]
     except:
         pass
     return v
Example #16
    def create_table(self):
        """Create a table and commit.

        PostgreSQL needs to commit operations individually.
        Enable PostGIS extensions if a script has a non-tabular table.
        """
        if self.table and self.table.dataset_type and \
                not self.table.dataset_type == "TabularDataset":
            try:
                # Check if PostGIS is installed and its extensions are loaded
                self.execute("SELECT PostGIS_full_version();")
            except BaseException as e:
                print(e)
                print("Make sure that you have PostGIS installed\n"
                      "Open Postgres CLI or GUI(PgAdmin) and run:\n"
                      "CREATE EXTENSION postgis;\n"
                      "CREATE EXTENSION postgis_topology;")
                exit()
            return
        Engine.create_table(self)
        self.connection.commit()
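
A standalone sketch of the PostGIS availability check and setup that the error message describes, using psycopg2 directly (creating extensions normally requires superuser rights); the DSN is a placeholder.

import psycopg2

def ensure_postgis(dsn):
    """Enable PostGIS if needed and return its full version string."""
    with psycopg2.connect(dsn) as conn:
        with conn.cursor() as cur:
            cur.execute("CREATE EXTENSION IF NOT EXISTS postgis;")
            cur.execute("CREATE EXTENSION IF NOT EXISTS postgis_topology;")
            cur.execute("SELECT PostGIS_full_version();")
            return cur.fetchone()[0]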
Example #17
    def create_table(self):
        """Create a table and commit.

        PostgreSQL needs to commit operations individually.
        Enable PostGIS extensions if a script has a non-tabular table.
        """
        if self.table and self.table.dataset_type and \
                not self.table.dataset_type == "TabularDataset":
            try:
                # Check if PostGIS is installed and its extensions are loaded
                if self.execute("SELECT PostGIS_full_version();") or \
                        self.execute("SELECT PostGIS_version()"):
                    pass
            except BaseException:
                print("Make sure that you have PostGIS installed\n"
                      "Open Postgres CLI or GUI(PgAdmin) and run:\n"
                      "CREATE EXTENSION postgis;\n"
                      "CREATE EXTENSION postgis_topology;")
                exit()
            return
        Engine.create_table(self)
        self.connection.commit()
Example #18
 def convert_data_type(self, datatype):
     """MS Access can't handle complex Decimal types"""
     converted = Engine.convert_data_type(self, datatype)
     if "NUMERIC" in converted:
         converted = "NUMERIC"
     elif "VARCHAR" in converted:
         try:
             length = int(converted.split('(')[1].split(')')[0].split(',')[0])
             if length > 255:
                 converted = "TEXT"
         except:
             pass
     return converted
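
Illustrative before/after values for the conversion above, assuming Engine.convert_data_type returns SQL type strings of this form:

# "NUMERIC(20,4)" -> "NUMERIC"
# "VARCHAR(100)"  -> "VARCHAR(100)"   (lengths of 255 or less are kept)
# "VARCHAR(500)"  -> "TEXT"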
Example #19
 def convert_data_type(self, datatype):
     """MS Access can't handle complex Decimal types"""
     converted = Engine.convert_data_type(self, datatype)
     if "NUMERIC" in converted:
         converted = "NUMERIC"
     elif "VARCHAR" in converted:
         try:
             length = int(converted.split('(')[1].split(')')[0].split(',')[0])
             if length > 255:
                 converted = "TEXT"
         except:
             pass
     return converted
Example #20
    def insert_data_from_file(self, filename):
        """Calls MySQL "LOAD DATA LOCAL INFILE" statement to perform a bulk
        insert."""

        mysql_set_autocommit_off = """SET autocommit=0; SET UNIQUE_CHECKS=0; SET FOREIGN_KEY_CHECKS=0; SET sql_log_bin=0;"""
        mysql_set_autocommit_on = """SET GLOBAL innodb_flush_log_at_trx_commit=1; COMMIT; SET autocommit=1; SET unique_checks=1; SET foreign_key_checks=1;"""

        self.get_cursor()
        ct = len([True
                  for c in self.table.columns if c[1][0][:3] == "ct-"]) != 0
        if (self.table.cleanup.function == no_cleanup
                and not self.table.fixed_width and not ct
                and (not hasattr(self.table, "do_not_bulk_insert")
                     or not self.table.do_not_bulk_insert)):

            print("Inserting data from " + os.path.basename(filename) + "...")

            columns = self.table.get_insert_columns()
            statement = """
LOAD DATA LOCAL INFILE '""" + filename.replace("\\", "\\\\") + """'
INTO TABLE """ + self.table_name() + """
FIELDS TERMINATED BY '""" + self.table.delimiter + """'
OPTIONALLY ENCLOSED BY '"'
LINES TERMINATED BY '\\n'
IGNORE """ + str(self.table.header_rows) + """ LINES
(""" + columns + ")"
            try:
                self.cursor.execute(mysql_set_autocommit_off)
                self.cursor.execute(statement)

                self.cursor.execute(mysql_set_autocommit_on)
            except Exception as e:
                self.disconnect()  # If the execute fails the database connection can get hung up
                self.cursor.execute(mysql_set_autocommit_on)
                return Engine.insert_data_from_file(self, filename)
        else:
            return Engine.insert_data_from_file(self, filename)
Example #21
    def insert_raster(self, path=None, srid=4326):
        """Import Raster into Postgis Table
        Uses raster2pgsql -Y -M -d -I -s <SRID> <PATH> <SCHEMA>.<DBTABLE>
        | psql -d <DATABASE>
        The sql processed by raster2pgsql is run
        as psql -U postgres -d <gisdb> -f <elev>.sql
        -Y uses COPY to insert data,
        -M VACUUM table,
        -d  Drops the table, recreates insert raster data
        """

        if not path:
            path = Engine.format_data_dir(self)

        raster_sql = ('raster2pgsql -Y -M -d -I -s {SRID} "{path}"'
                      " -F -t 100x100 {SCHEMA_DBTABLE}".format(
                          SRID=srid,
                          path=os.path.normpath(path),
                          SCHEMA_DBTABLE=self.table_name()))

        cmd_string = (" | psql -U {USER} -d {DATABASE} "
                      "--port {PORT} --host {HOST} > {nul_dev} ".format(
                          USER=self.opts["user"],
                          DATABASE=self.opts["database"],
                          PORT=self.opts["port"],
                          HOST=self.opts["host"],
                          nul_dev=os.devnull,
                      ))

        cmd_stmt = raster_sql + cmd_string
        if self.debug:
            print(cmd_stmt)
        Engine.register_tables(self)
        try:
            subprocess.call(cmd_stmt, shell=True)
        except BaseException:
            pass
Example #22
    def insert_data_from_file(self, filename):
        """Use executemany to perform a high speed bulk insert

        Checks to see if a given file can be bulk inserted, and if so loads
        it in chunks and inserts those chunks into the database using
        executemany.

        """
        CHUNK_SIZE = 1000000
        self.get_cursor()
        ct = len([True
                  for c in self.table.columns if c[1][0][:3] == "ct-"]) != 0
        if (([self.table.cleanup.function, self.table.header_rows]
             == [no_cleanup, 1]) and not self.table.fixed_width and not ct
                and (not hasattr(self.table, "do_not_bulk_insert")
                     or not self.table.do_not_bulk_insert)):
            columns = self.table.get_insert_columns()
            filename = os.path.abspath(filename)
            try:
                bulk_insert_statement = self.get_bulk_insert_statement()
                with open(filename, 'r') as data_file:
                    data_chunk = data_file.readlines(CHUNK_SIZE)
                    del (data_chunk[:self.table.header_rows])
                    while data_chunk:
                        data_chunk_split = [
                            row.split(self.table.delimiter)
                            for row in data_chunk
                        ]
                        self.cursor.executemany(bulk_insert_statement,
                                                data_chunk_split)
                        data_chunk = data_file.readlines(CHUNK_SIZE)
                self.connection.commit()
            except:
                self.connection.rollback()
                return Engine.insert_data_from_file(self, filename)
        else:
            return Engine.insert_data_from_file(self, filename)
Example #23
    def insert_data_from_file(self, filename):
        """Perform a high speed bulk insert

        Checks to see if a given file can be bulk inserted, and if so loads
        it in chunks and inserts those chunks into the database using
        executemany.
        """
        chunk_size = 1000000
        self.get_cursor()

        # Determine if the dataset includes cross-tab data
        crosstab = len([True for c in self.table.columns if c[1][0][:3] == "ct-"]) != 0

        if (([self.table.cleanup.function, self.table.header_rows] == [no_cleanup, 1])
            and not self.table.fixed_width
            and not crosstab
            and (not hasattr(self.table, "do_not_bulk_insert") or not self.table.do_not_bulk_insert)):
            filename = os.path.abspath(filename)
            try:
                bulk_insert_statement = self.get_bulk_insert_statement()
                line_endings = set(['\n', '\r', '\r\n'])
                with open(filename, 'r') as data_file:
                    data_chunk = data_file.readlines(chunk_size)
                    data_chunk = [line.rstrip('\r\n') for line in data_chunk if line not in line_endings]
                    del data_chunk[:self.table.header_rows]
                    while data_chunk:
                        data_chunk_split = [row.split(self.table.delimiter)
                                            for row in data_chunk]
                        self.cursor.executemany(bulk_insert_statement, data_chunk_split)
                        data_chunk = data_file.readlines(chunk_size)
                self.connection.commit()
            except:
                self.connection.rollback()
                return Engine.insert_data_from_file(self, filename)
        else:
            return Engine.insert_data_from_file(self, filename)
Example #24
    def insert_data_from_file(self, filename):
        """Use PostgreSQL's "COPY FROM" statement to perform a bulk insert."""
        self.get_cursor()
        ct = len([True for c in self.table.columns if c[1][0][:3] == "ct-"]) != 0
        if (([self.table.cleanup.function, self.table.delimiter,
             self.table.header_rows] == [no_cleanup, ",", 1])
            and not self.table.fixed_width
            and not ct
            and (not hasattr(self.table, "do_not_bulk_insert") or not self.table.do_not_bulk_insert)
            ):
            columns = self.table.get_insert_columns()
            filename = os.path.abspath(filename)
            statement = """
COPY """ + self.table_name() + " (" + columns + """)
FROM '""" + filename.replace("\\", "\\\\") + """'
WITH DELIMITER ','
CSV HEADER"""
            try:
                self.execute(statement)
            except:
                self.connection.rollback()
                return Engine.insert_data_from_file(self, filename)
        else:
            return Engine.insert_data_from_file(self, filename)
Example #25
    def auto_create_table(self, table, url=None, filename=None, pk=None):
        """Create a table automatically.

        Overwrites the main Engine class. Identifies the type of table to create.
        For a Raster or vector (Gis) dataset, create the table from the contents
        downloaded from the url or from the contents in the filename.
        Otherwise, use the Engine function for a tabular table.
        """
        if table.dataset_type in ["RasterDataset", "VectorDataset"]:
            self.table = table
            if url and not filename:
                filename = Engine.filename_from_url(url)

            if url and not self.find_file(filename):
                # If the file doesn't exist, download it
                self.download_file(url, filename)

            file_path = self.find_file(filename)
            if file_path:
                filename, _ = os.path.splitext(os.path.basename(file_path))

                self.create_table()
        else:
            Engine.auto_create_table(self, table, url, filename, pk)
Example #26
    def insert_data_from_file(self, filename):
        """Perform a high speed bulk insert

        Checks to see if a given file can be bulk inserted, and if so loads
        it in chunks and inserts those chunks into the database using
        executemany.
        """
        chunk_size = 1000000
        self.get_cursor()

        # Determine if the dataset includes cross-tab data
        crosstab = len([True for c in self.table.columns if c[1][0][:3] == "ct-"]) != 0

        if (([self.table.cleanup.function, self.table.header_rows] == [no_cleanup, 1])
            and not self.table.fixed_width
            and not crosstab
            and (not hasattr(self.table, "do_not_bulk_insert") or not self.table.do_not_bulk_insert)):
            filename = os.path.abspath(filename)
            try:
                bulk_insert_statement = self.get_bulk_insert_statement()
                line_endings = set(['\n', '\r', '\r\n'])
                with open(filename, 'r') as data_file:
                    data_chunk = data_file.readlines(chunk_size)
                    data_chunk = [line.rstrip('\r\n') for line in data_chunk if line not in line_endings]
                    del data_chunk[:self.table.header_rows]
                    while data_chunk:
                        data_chunk_split = [row.split(self.table.delimiter)
                                            for row in data_chunk]
                        self.cursor.executemany(bulk_insert_statement, data_chunk_split)
                        data_chunk = data_file.readlines(chunk_size)
                self.connection.commit()
            except:
                self.connection.rollback()
                return Engine.insert_data_from_file(self, filename)
        else:
            return Engine.insert_data_from_file(self, filename)
Example #27
 def format_insert_value(self, value, datatype):
     """Formats a value for an insert statement"""
     v = Engine.format_insert_value(self,
                                    value,
                                    datatype,
                                    escape=False,
                                    processed=True)
     if v == 'null':
         return ""
     try:
         if len(v) > 1 and v[0] == v[-1] == "'":
             v = '"%s"' % v[1:-1]
     except:
         pass
     return v
Example #28
    def auto_create_table(self, table, url=None, filename=None, pk=None):
        """Create a table automatically.

        Overwrites the main Engine class. Identifies the type of table to create.
        For a Raster or vector (Gis) dataset, create the table from the contents
        downloaded from the url or from the contents in the filename.
        Otherwise, use the Engine function for a tabular table.
        """
        if table.dataset_type in ["RasterDataset", "VectorDataset"]:
            self.table = table
            if url and not filename:
                filename = Engine.filename_from_url(url)

            if url and not self.find_file(filename):
                # If the file doesn't exist, download it
                self.download_file(url, filename)

            file_path = self.find_file(filename)
            if file_path:
                filename, _ = os.path.splitext(os.path.basename(file_path))

                self.create_table()
        else:
            Engine.auto_create_table(self, table, url, filename, pk)
Example #29
    def insert_raster(self, path=None, srid=4326):
        """Import Raster into Postgis Table
        Uses raster2pgsql -I -C -s <SRID> <PATH> <SCHEMA>.<DBTABLE>
        | psql -d <DATABASE>
        The sql processed by raster2pgsql is run
        as psql -U postgres -d <gisdb> -f <elev>.sql
        """

        if not path:
            path = Engine.format_data_dir(self)

        raster_sql = "raster2pgsql -M -d -I -s {SRID} {path} -F -t 100x100 {SCHEMA_DBTABLE}".format(
            SRID=srid, path=path, SCHEMA_DBTABLE=self.table_name())

        cmd_string = """ | psql -U {USER} -d {DATABASE}""".format(
            USER=self.opts["user"], DATABASE=self.opts["database"])

        os.system(raster_sql + cmd_string)
Example #30
    def insert_vector(self, path=None, srid=4326):
        """Import Vector into Postgis Table

        -- Enable PostGIS (includes raster)
        CREATE EXTENSION postgis;

        -- Enable Topology
        CREATE EXTENSION postgis_topology;

        -- fuzzy matching needed for Tiger
        CREATE EXTENSION fuzzystrmatch;

        -- Enable US Tiger Geocoder
        CREATE EXTENSION postgis_tiger_geocoder;
        Uses shp2pgsql -I -s <SRID> <PATH/TO/SHAPEFILE> <SCHEMA>.<DBTABLE>
        | psql -U postgres -d <DBNAME>

        The SQL produced by shp2pgsql is run
        as psql -U postgres -d <DBNAME>
        """
        if not path:
            path = Engine.format_data_dir(self)
        vector_sql = "shp2pgsql -d -I -s {SRID} \"{path}\ {SCHEMA_DBTABLE}".format(
            SRID=srid,
            path=os.path.normpath(path),
            SCHEMA_DBTABLE=self.table_name())

        cmd_string = """ | psql -U {USER} -d {DATABASE} --port {PORT} --host {HOST}""".format(
            USER=self.opts["user"],
            DATABASE=self.opts["database"],
            PORT=self.opts["port"],
            HOST=self.opts["host"])
        cmd_stmt = vector_sql + cmd_string
        if self.debug:
            print(cmd_stmt)
        subprocess.call(cmd_stmt, shell=True, stdout=subprocess.PIPE)
Example #31
 def insert_statement(self, values):
     statement = Engine.insert_statement(self, values)
     if isinstance(statement, basestring):
         statement = statement.decode("utf-8", "ignore")
     return statement
Example #32
 def find_file(self, filename):
     result = Engine.find_file(self, filename)
     if not hasattr(self, "all_files"): self.all_files = set()
     if result: self.all_files.add(result)
     return result
Example #33
 def set_engine_encoding(self):
     """Set MySQL database encoding to match data encoding"""
     db_encoding = self.lookup_encoding()
     self.execute("SET NAMES '{0}';".format(db_encoding))
     Engine.set_engine_encoding(self)
Example #34
 def drop_statement(self, objecttype, objectname):
     """In PostgreSQL, the equivalent of a SQL database is a schema."""
     statement = Engine.drop_statement(self, objecttype, objectname)
     statement += " CASCADE;"
     return statement.replace(" DATABASE ", " SCHEMA ")
Example #35
 def create_db_statement(self):
     """In PostgreSQL, the equivalent of a SQL database is a schema."""
     return Engine.create_db_statement(self).replace("DATABASE", "SCHEMA")
Example #36
    def insert_data_from_file(self, filename):
        """Perform a bulk insert."""
        self.get_cursor()
        if self.check_bulk_insert() and self.table.header_rows < 2 and (
                self.table.delimiter in ["\t", ","]):
            print("Inserting data from " + os.path.basename(filename) + "...")

            if self.table.delimiter == "\t":
                fmt = "TabDelimited"
            elif self.table.delimiter == ",":
                fmt = "CSVDelimited"

            if self.table.header_rows == 1:
                hdr = "Yes"
            else:
                hdr = "No"

            columns = self.table.get_insert_columns()

            need_to_delete = False
            add_to_record_id = 0

            if self.table.pk and not self.table.contains_pk:
                if '.' in os.path.basename(filename):
                    proper_name = filename.split('.')
                    len_name = len(proper_name)
                    newfilename = '.'.join(
                        proper_name[0:-1] if len_name > 0 else proper_name[0]
                    ) + "_new." + filename.split(".")[-1]
                else:
                    newfilename = filename + "_new"

                if not os.path.isfile(newfilename):
                    print("Adding index to " + os.path.abspath(newfilename) +
                          "...")
                    read = open(filename, "r")
                    write = open(newfilename, "w")
                    to_write = ""

                    for line in read:
                        line = line.strip()
                        to_write += str(id) + self.table.delimiter + line + os.linesep
                        add_to_record_id += 1
                    self.table.record_id += add_to_record_id

                    write.write(to_write)
                    write.close()
                    read.close()
                    need_to_delete = True
                columns = "record_id, " + columns
            else:
                newfilename = filename

            newfilename = os.path.abspath(newfilename)
            filename_length = (len(os.path.basename(newfilename)) * -1) - 1
            filepath = newfilename[:filename_length]
            statement = """
INSERT INTO """ + self.table_name() + " (" + columns + """)
SELECT * FROM [""" + os.path.basename(newfilename) + ''']
IN "''' + filepath + '''" "Text;FMT=''' + fmt + ''';HDR=''' + hdr + ''';"'''
            try:
                self.execute(statement)
                return True
            except BaseException:
                print("Couldn't bulk insert. Trying manual insert.")
                self.connection.rollback()
                self.table.record_id -= add_to_record_id
                return None
            finally:
                if need_to_delete:
                    os.remove(newfilename)

        return Engine.insert_data_from_file(self, filename)
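
A sketch of issuing the same Jet/ACE text-driver bulk insert through pyodbc on Windows; the ODBC driver name, database path, and column list are placeholders, and the Access ODBC driver must be installed for the connection string to work.

import os
import pyodbc

def access_bulk_insert(accdb_path, table, columns, csv_path, fmt="CSVDelimited", hdr="Yes"):
    connection = pyodbc.connect(
        r"DRIVER={Microsoft Access Driver (*.mdb, *.accdb)};DBQ=" + accdb_path)
    folder = os.path.dirname(os.path.abspath(csv_path))
    statement = (
        'INSERT INTO ' + table + ' (' + columns + ') '
        'SELECT * FROM [' + os.path.basename(csv_path) + '] '
        'IN "' + folder + '" "Text;FMT=' + fmt + ';HDR=' + hdr + ';"')
    connection.execute(statement)
    connection.commit()
    connection.close()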
Example #37
 def create_db(self):
     """Create Engine database."""
     try:
         Engine.create_db(self)
     except BaseException:
         self.connection.rollback()
Example #38
 def to_csv(self):
     self.connection.text_factory = str
     Engine.to_csv(self)
Example #39
 def create_table(self):
     """PostgreSQL needs to commit operations individually."""
     Engine.create_table(self)
     self.connection.commit()
Example #40
 def insert_statement(self, values):
     statement = Engine.insert_statement(self, values)
     if isinstance(statement, basestring):
         statement = statement.decode("utf-8", "ignore")
     return statement
Example #41
 def create_db(self):
     try:
         Engine.create_db(self)
     except BaseException:
         self.connection.rollback()
Example #42
 def find_file(self, filename):
     result = Engine.find_file(self, filename)
     if not hasattr(self, "all_files"): self.all_files = set()
     if result: self.all_files.add(result)
     return result
Example #43
    def Draw(self, evt):
        """When the page is drawn, it may need to update its fields if
        the selected database has changed."""

        if not evt.GetDirection():
            btn = self.parent.FindWindowById(wx.ID_FORWARD)
            if btn: btn.Enable()

        if len(self.Parent.CHOOSEDB.dblist.GetStringSelection()
               ) == 0 and evt.Direction:
            evt.Veto()
        else:
            if self.sel != self.Parent.CHOOSEDB.dblist.GetStringSelection():
                self.sel = self.Parent.CHOOSEDB.dblist.GetStringSelection()
                self.engine = Engine()
                for db in self.Parent.engine_list:
                    if db.name == self.sel:
                        self.engine = db
                self.fields.Clear(True)
                self.fields = wx.BoxSizer(wx.VERTICAL)
                if self.engine.instructions:
                    self.fields.Add(
                        StaticText(self, -1,
                                   '\n' + self.engine.instructions + '\n\n'))
                self.fieldset = dict()
                self.option = dict()
                saved_opts = get_saved_connection(self.engine.name)
                for opt in self.engine.required_opts:
                    if opt[0] in saved_opts.keys():
                        default = saved_opts[opt[0]]
                    else:
                        default = opt[2]
                    self.fieldset[opt[0]] = wx.BoxSizer(wx.HORIZONTAL)
                    label = StaticText(self,
                                       -1,
                                       opt[0] + ": ",
                                       size=wx.Size(90, 35))
                    style = wx.TE_PASSWORD if opt[0] == "password" else 0
                    txt = TextCtrl(self,
                                   -1,
                                   str(default),
                                   size=wx.Size(200, -1),
                                   style=style)
                    self.option[opt[0]] = txt
                    self.fieldset[opt[0]].AddMany([label, self.option[opt[0]]])
                    if opt[0] == "file":
                        file_opt = opt

                        def open_file_dialog(evt):
                            filter = ""
                            if file_opt[3]:
                                filter = file_opt[3] + "|"
                            filter += "All files (*.*)|*.*"
                            dialog = wx.FileDialog(None,
                                                   style=wx.OPEN,
                                                   wildcard=filter)
                            if dialog.ShowModal() == wx.ID_OK:
                                self.option[file_opt[0]].SetValue(
                                    dialog.GetPath())

                        self.browse = wx.Button(self, -1, "Choose...")
                        self.fieldset[file_opt[0]].Add(self.browse)
                        self.browse.Bind(wx.EVT_BUTTON, open_file_dialog)
                    self.fieldset[opt[0]].Layout()
                    self.fields.Add(self.fieldset[opt[0]])
                self.sizer.Add(self.fields)
                self.sizer.Layout()
Example #44
 def create_db(self):
     try:
         Engine.create_db(self)
     except BaseException:
         self.connection.rollback()
Example #45
    def insert_raster(self, path=None, srid=4326):
        """Import Raster into Postgis Table
        Uses raster2pgsql -Y -M -d -I -s <SRID> <PATH> <SCHEMA>.<DBTABLE>
        | psql -d <DATABASE>
        The sql processed by raster2pgsql is run
        as psql -U postgres -d <gisdb> -f <elev>.sql
        -Y uses COPY to insert data,
        -M VACUUM table,
        -d  Drops the table, recreates insert raster data
        """

        if not path:
            path = Engine.format_data_dir(self)
        is_cli_extent = hasattr(self, 'opts') and self.opts["bbox"]
        is_resource_extent = hasattr(self.script.tables[self.table.name],
                                     'extent')
        is_global_script_extent = hasattr(self.script, 'extent')
        if any([is_cli_extent, is_resource_extent, is_global_script_extent]):
            # bounding box array: bbox[xmin, ymax, xmax, ymin]
            bbox = []

            if self.opts["bbox"]:
                bbox.append(self.opts["bbox"][0])
                bbox.append(self.opts["bbox"][1])
                bbox.append(self.opts["bbox"][2])
                bbox.append(self.opts["bbox"][3])

            else:
                if is_global_script_extent and self.script.extent:
                    bbox.append(self.script.extent["xMin"])
                    bbox.append(self.script.extent["yMax"])
                    bbox.append(self.script.extent["xMax"])
                    bbox.append(self.script.extent["yMin"])

                else:
                    if is_resource_extent and self.script.tables[
                            self.table.name].extent:
                        bbox.append(
                            self.script.tables[self.table.name].extent["xMin"])
                        bbox.append(
                            self.script.tables[self.table.name].extent["yMax"])
                        bbox.append(
                            self.script.tables[self.table.name].extent["xMax"])
                        bbox.append(
                            self.script.tables[self.table.name].extent["yMin"])

            bbox = [int(i) for i in bbox]

            if gdal and bbox and bbox[2] > bbox[0] and bbox[1] > bbox[3]:
                if self.script.tables[self.table.name].extensions:
                    converted_tif = path[:-3] + "tif"
                    if self.script.tables[
                            self.table.name].extensions[0] == "bil":
                        conversion = "gdal_translate -co 'COMPRESS=LZW' {path} {converted_tif}".format(
                            path=os.path.normpath(path),
                            converted_tif=converted_tif)
                        os.system(conversion)

                    ds = gdal.Open(converted_tif)

                    info = gdal.Info(converted_tif, format='json')
                    coordinates = info['wgs84Extent']['coordinates'][0]

                    if bbox[0] < coordinates[2][0] and bbox[2] > coordinates[
                            0][0] and bbox[1] > coordinates[1][1] and bbox[
                                3] < coordinates[0][1]:

                        if bbox[0] < coordinates[0][0]:
                            bbox[0] = coordinates[0][0]

                        if bbox[1] > coordinates[0][1]:
                            bbox[1] = coordinates[0][1]

                        if bbox[2] > coordinates[2][0]:
                            bbox[2] = coordinates[2][0]

                        if bbox[3] < coordinates[2][1]:
                            bbox[3] = coordinates[2][1]

                        i = converted_tif.find(".tif")
                        location_of_cropped_tif = converted_tif[:i] + \
                            "crop" + converted_tif[i:]
                        ds = gdal.Translate(location_of_cropped_tif,
                                            ds,
                                            projWin=bbox)
                        ds = None
                        path = location_of_cropped_tif
                    else:
                        print("Bounding Box exceds image boundaries")
                elif bbox:
                    print(
                        "Invalid value of Extent, bbox[xmin, ymax, xmax, ymin]"
                    )
        raster_sql = ('raster2pgsql -Y -M -d -I -l 2 -s {SRID} "{path}"'
                      " -F -t 100x100 {SCHEMA_DBTABLE}".format(
                          SRID=srid,
                          path=os.path.normpath(path),
                          SCHEMA_DBTABLE=self.table_name()))

        cmd_string = (" | psql -U {USER} -d {DATABASE} "
                      "--port {PORT} --host {HOST} > {nul_dev} ".format(
                          USER=self.opts["user"],
                          DATABASE=self.opts["database"],
                          PORT=self.opts["port"],
                          HOST=self.opts["host"],
                          nul_dev=os.devnull,
                      ))

        cmd_stmt = raster_sql + cmd_string
        if self.debug:
            print(cmd_stmt)
        Engine.register_tables(self)
        try:
            subprocess.call(cmd_stmt, shell=True)
        except BaseException:
            pass
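
A small sketch of the gdal.Translate window crop used above, isolated from the engine; the file names are placeholders, and projWin expects [ulx, uly, lrx, lry] (here xmin, ymax, xmax, ymin).

from osgeo import gdal

def crop_raster(src_tif, dst_tif, bbox):
    """Crop src_tif to bbox = [xmin, ymax, xmax, ymin] and write dst_tif."""
    dataset = gdal.Open(src_tif)
    gdal.Translate(dst_tif, dataset, projWin=bbox)
    dataset = None  # close the GDAL dataset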
Example #46
 def create_db_statement(self):
     """In PostgreSQL, the equivalent of a SQL database is a schema."""
     return Engine.create_db_statement(self).replace("DATABASE", "SCHEMA")
Example #47
 def drop_statement(self, objecttype, objectname):
     """In PostgreSQL, the equivalent of a SQL database is a schema."""
     statement = Engine.drop_statement(self, objecttype, objectname)
     statement += " CASCADE;"
     return statement.replace(" DATABASE ", " SCHEMA ")
Example #48
 def to_csv(self):
     self.connection.text_factory = str
     Engine.to_csv(self)
Example #49
 def to_csv(self):
     Engine.to_csv(self)
Example #50
 def create_db(self):
     """Create Engine database."""
     try:
         Engine.create_db(self)
     except BaseException:
         self.connection.rollback()
Example #51
 def insert_statement(self, values):
     """Return SQL statement to insert a set of values."""
     statement = Engine.insert_statement(self, values)
     if isinstance(statement, bytes):
         statement = statement.decode("utf-8", "ignore")
     return statement
Example #52
 def create_table(self):
     """PostgreSQL needs to commit operations individually."""
     Engine.create_table(self)
     self.connection.commit()
Example #53
 def to_csv(self):
     Engine.to_csv(self)
Example #54
 def insert_statement(self, values):
     """Return SQL statement to insert a set of values."""
     statement = Engine.insert_statement(self, values)
     if isinstance(statement, bytes):
         statement = statement.decode("utf-8", "ignore")
     return statement
Example #55
    def insert_data_from_file(self, filename):
        """Perform a bulk insert."""
        self.get_cursor()
        ct = len([True for c in self.table.columns if c[1][0][:3] == "ct-"]) != 0
        if ((self.table.cleanup.function == no_cleanup and not self.table.fixed_width and
                     self.table.header_rows < 2)
            and (self.table.delimiter in ["\t", ","])
            and not ct
            and (not hasattr(self.table, "do_not_bulk_insert") or not self.table.do_not_bulk_insert)
            ):
            print("Inserting data from " + os.path.basename(filename) + "...")

            if self.table.delimiter == "\t":
                fmt = "TabDelimited"
            elif self.table.delimiter == ",":
                fmt = "CSVDelimited"

            if self.table.header_rows == 1:
                hdr = "Yes"
            else:
                hdr = "No"

            columns = self.table.get_insert_columns()

            need_to_delete = False
            add_to_record_id = 0

            if self.table.pk and not self.table.contains_pk:
                if '.' in os.path.basename(filename):
                    proper_name = filename.split('.')
                    newfilename = '.'.join((proper_name[0:-1]) if len(proper_name) > 0 else proper_name[0]
                                           ) + "_new." + filename.split(".")[-1]
                else:
                    newfilename = filename + "_new"

                if not os.path.isfile(newfilename):
                    print("Adding index to " + os.path.abspath(newfilename) + "...")
                    read = open(filename, "r")
                    write = open(newfilename, "w")
                    to_write = ""

                    for line in read:
                        to_write += str(id) + self.table.delimiter + line.replace("\n", "\r\n")
                        add_to_record_id += 1
                    self.table.record_id += add_to_record_id

                    write.write(to_write)
                    write.close()
                    read.close()
                    need_to_delete = True
                columns = "record_id, " + columns
            else:
                newfilename = filename

            newfilename = os.path.abspath(newfilename)
            filename_length = (len(os.path.basename(newfilename)) * -1) - 1
            filepath = newfilename[:filename_length]
            statement = """
INSERT INTO """ + self.table_name() + " (" + columns + """)
SELECT * FROM [""" + os.path.basename(newfilename) + ''']
IN "''' + filepath + '''" "Text;FMT=''' + fmt + ''';HDR=''' + hdr + ''';"'''

            try:
                self.execute(statement)
            except:
                print("Couldn't bulk insert. Trying manual insert.")
                self.connection.rollback()

                self.table.record_id -= add_to_record_id

                return Engine.insert_data_from_file(self, filename)

            if need_to_delete:
                os.remove(newfilename)

        else:
            return Engine.insert_data_from_file(self, filename)