def connect_to_database_using_jdbc_driver(self, jdbc_connection_string, user, password, jdbc_driver, jdbc_jar_path): """ Example parameters for CIS databases Cisto Integration Services, using java drivers is done using library jdbc_connection_string: jdbc:compositesw:dbapi@localhost:9401?domain=composite&dataSource=Example jdbc_driver: cs.jdbc.driver.CompositeDriver jdbc_jar_path: Binaries/csjdbc.jar """ dbl().connect_to_database_using_custom_params("jaydebeapi", "'%s',['%s', '%s', '%s'],'%s'" % (jdbc_driver, jdbc_connection_string, user, password, jdbc_jar_path))
    def csv_read_file_to_database(
        self, table_name, csv_file, encoding="UTF-8", encoding_errors="strict"
    ):
        """Intended for temporary use only: connects to an in-memory sqlite3
        database if there is no open connection, creates a table with the given
        name and imports the data from the CSV file. Returns the table name."""
        if dbl()._dbconnection is None:
            dbl().connect_to_database_using_custom_params("sqlite3", "':memory:'")
        array_table = ttmkl().csv_read_file(csv_file, encoding, encoding_errors)
        self.insert_data_to_table(table_name, array_table)
        return table_name
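    # Example usage from a Robot Framework test (a sketch; "users_tmp" and
    # "users.csv" are hypothetical names):
    # | ${table}= | Csv Read File To Database | users_tmp | users.csv |
    # | ${rows}=  | Query Many Rows | SELECT * FROM users_tmp |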
def connect_to_database_using_jdbc_driver(self, jdbc_connection_string, user, password, jdbc_driver, jdbc_jar_path): """ Example parameters for CIS databases Cisto Integration Services, using java drivers is done using library jdbc_connection_string: jdbc:compositesw:dbapi@localhost:9401?domain=composite&dataSource=Example jdbc_driver: cs.jdbc.driver.CompositeDriver jdbc_jar_path: Binaries/csjdbc.jar """ dbl().connect_to_database_using_custom_params( "jaydebeapi", "'%s',['%s', '%s', '%s'],'%s'" % (jdbc_driver, jdbc_connection_string, user, password, jdbc_jar_path))
def execute_sql_string_with_logs(self, sql_string, append_to_logs=ADD_LOGS_FLAG): """ To switch output file with logs use | Set Sql Log Output File | ./myFile.sql | :param sql_string: :param append_to_logs: :return: """ if append_to_logs: self._add_query_to_log_file(sql_string) dbl().execute_sql_string(sql_string) if append_to_logs: self._add_results_to_log_file(None)
def insert_data_to_table(self, table_name, data): """ return table name, table will have columns with names 'c0', 'c1' for all columns table will be also reindex after insert """ cur = dbl()._dbconnection.cursor() size = len(data) if size < 1: raise AssertionError("missing data 0 rows") row_size = len(data[0]) if row_size < 1: raise AssertionError("missing data 0 columns") columns_desc = "" for index in range(len(data[0])): columns_desc += """"c""" + str(index) + """" VARCHAR""" if index < (row_size - 1): columns_desc += "," create_sql = 'CREATE TABLE "%s" (%s)' % (table_name, columns_desc) logger.info(create_sql) cur.execute(create_sql) columns_names = "" values = "" for index in range(len(data[0])): columns_names += "c" + str(index) values += """?""" if index < (row_size - 1): columns_names += ", " values += ", " insert_data_sql = 'INSERT INTO "main"."%s" (%s) VALUES (%s) ' % ( table_name, columns_names, values, ) logger.info(insert_data_sql) cur.executemany(insert_data_sql, data) reindex_sql = 'REINDEX "main"."%s"' % table_name cur.execute(reindex_sql) dbl()._dbconnection.commit() return
def query_many_rows(self, select_statement, append_to_logs=ADD_LOGS_FLAG): """ To switch output file with logs use | Set Sql Log Output File | ./myFile.sql | :param select_statement: :param append_to_logs: :return: """ if append_to_logs: self._add_query_to_log_file(select_statement) results = dbl().query(select_statement) if append_to_logs: self._add_results_to_log_file(results) return results