def _create_cache_table(self):
    """Create the parquet cache table, using the DB2 or the PostgreSQL DDL variant.

    The table keys parquet blobs by (entity type id, parquet name); the unique
    constraint named 'uc_<tablename>' backs the upsert logic used elsewhere.
    """
    if not self.is_postgre_sql:
        # DB2 flavour: 2 GB BLOB column, row organization, enforced unique constraint.
        sql_statement = ("CREATE TABLE %s.%s ( "
                         "ENTITY_TYPE_ID BIGINT NOT NULL, "
                         "PARQUET_NAME VARCHAR(2048) NOT NULL, "
                         "PARQUET_FILE BLOB(2G), "
                         "UPDATED_TS TIMESTAMP NOT NULL DEFAULT CURRENT TIMESTAMP, "
                         "CONSTRAINT %s UNIQUE(ENTITY_TYPE_ID, PARQUET_NAME) ENFORCED ) "
                         "ORGANIZE BY ROW"
                         % (self.quoted_schema, self.quoted_cache_tablename,
                            dbhelper.quotingTableName('uc_%s' % self.cache_tablename, self.is_postgre_sql)))
        try:
            stmt = ibm_db.exec_immediate(self.db_connection, sql_statement)
            ibm_db.free_result(stmt)
        except Exception as ex:
            raise Exception('Execution of sql statement "%s" failed.' % sql_statement) from ex
    else:
        # PostgreSQL flavour: BYTEA blob column, default heap organization.
        sql_statement = ("CREATE TABLE %s.%s ( "
                         "entity_type_id BIGINT NOT NULL, "
                         "parquet_name VARCHAR(2048) NOT NULL, "
                         "parquet_file BYTEA, "
                         "updated_ts TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "
                         "CONSTRAINT %s UNIQUE(entity_type_id, parquet_name))"
                         % (self.quoted_schema, self.quoted_cache_tablename,
                            dbhelper.quotingTableName('uc_%s' % self.cache_tablename, self.is_postgre_sql)))
        try:
            dbhelper.execute_postgre_sql_query(self.db_connection, sql_statement)
        except Exception as ex:
            raise Exception('Execution of sql statement "%s" failed.' % sql_statement) from ex
    logger.info('Table %s.%s has been created.' % (self.quoted_schema, self.quoted_cache_tablename))
def run_test_018(self): conn = ibm_db.connect(config.database, config.user, config.password) ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_ON) if conn: stmt = ibm_db.prepare(conn, "SELECT * from animals WHERE weight < 10.0" ) ibm_db.set_option(stmt, {ibm_db.SQL_ATTR_ROWCOUNT_PREFETCH : ibm_db.SQL_ROWCOUNT_PREFETCH_ON}, 2) result = ibm_db.execute(stmt) if result: rows = ibm_db.num_rows(stmt) print "affected row:", rows ibm_db.free_result(stmt) else: print ibm_db.stmt_errormsg() ibm_db.set_option(stmt, {ibm_db.SQL_ATTR_ROWCOUNT_PREFETCH : ibm_db.SQL_ROWCOUNT_PREFETCH_OFF}, 2) result = ibm_db.execute(stmt) if result: rows = ibm_db.num_rows(stmt) print "affected row:", rows ibm_db.free_result(stmt) else: print ibm_db.stmt_errormsg() ibm_db.set_option(stmt, {ibm_db.SQL_ATTR_ROWCOUNT_PREFETCH : ibm_db.SQL_ROWCOUNT_PREFETCH_ON}, 2) result = ibm_db.execute(stmt) if result: rows = ibm_db.num_rows(stmt) print "affected row:", rows else: print ibm_db.stmt_errormsg() ibm_db.close(conn) else: print "no connection:", ibm_db.conn_errormsg()
def _cache_table_exists(self):
    """Return True when the cache table is present in the target database."""
    exists = False
    try:
        if not self.is_postgre_sql:
            # DB2: probe the catalog through ibm_db.tables() and look for one row.
            stmt = ibm_db.tables(self.db_connection, None, self.schema, self.cache_tablename, None)
            try:
                fetch_value = ibm_db.fetch_row(stmt, 0)
                if fetch_value:
                    exists = True
            finally:
                ibm_db.free_result(stmt)
        else:
            exists = dbhelper.check_table_exist(self.db_connection, self.db_type,
                                                self.schema, self.cache_tablename)
    except Exception as ex:
        raise Exception(
            'Error while probing for table %s.%s' % (self.quoted_schema, self.quoted_cache_tablename)) from ex
    logger.debug('Table %s.%s %s.' % (self.quoted_schema, self.quoted_cache_tablename,
                                      'exists' if exists else 'does not exist'))
    return exists
def delete_model(self, model_name):
    """Delete the given model for this entity type from the model store table."""
    if not self.is_postgre_sql:
        sql_statement = "DELETE FROM %s.%s where ENTITY_TYPE_ID = ? and MODEL_NAME = ?" % (
            self.quoted_schema, self.quoted_store_tablename)
        try:
            stmt = ibm_db.prepare(self.db_connection, sql_statement)
            try:
                ibm_db.bind_param(stmt, 1, self.entity_type_id)
                ibm_db.bind_param(stmt, 2, model_name)
                ibm_db.execute(stmt)
            finally:
                # Always release the statement handle, even when execute fails.
                ibm_db.free_result(stmt)
        except Exception as ex:
            raise Exception(
                'Deletion of model %s failed with sql statement "%s"' % (model_name, sql_statement)) from ex
    else:
        sql_statement = "DELETE FROM %s.%s" % (self.quoted_schema, self.quoted_store_tablename)
        sql_statement += ' where entity_type_id = %s and model_name = %s'
        try:
            dbhelper.execute_postgre_sql_query(self.db_connection, sql_statement,
                                               (self.entity_type_id, model_name))
        except Exception as ex:
            raise Exception(
                'Deletion of model %s failed with sql statement "%s"' % (model_name, sql_statement)) from ex
    logger.info('Model %s has been deleted from table %s.%s' % (
        model_name, self.quoted_schema, self.quoted_store_tablename))
def run_test_setgetOption(self):
    """Exercise connection- and statement-level set_option()/get_option().

    z/OS skips SQL_ATTR_INFO_PROGRAMNAME and SQL_ATTR_QUERY_TIMEOUT, which are
    not supported there.
    """
    if sys.platform == 'zos':
        options = {}
    else:
        options = {ibm_db.SQL_ATTR_INFO_PROGRAMNAME: 'TestProgram'}
    conn = ibm_db.connect(config.database, config.user, config.password, options)
    # Get the server type
    serverinfo = ibm_db.server_info(conn)
    if conn:
        if sys.platform != 'zos':
            value = ibm_db.get_option(conn, ibm_db.SQL_ATTR_INFO_PROGRAMNAME, 1)
            print("Connection options:\nSQL_ATTR_INFO_PROGRAMNAME = ", end="")
            print(value)
        else:
            print("Connection options:\n", end="")
        returncode = ibm_db.set_option(conn, {ibm_db.SQL_ATTR_AUTOCOMMIT: 0}, 1)
        value = ibm_db.get_option(conn, ibm_db.SQL_ATTR_AUTOCOMMIT, 1)
        print("SQL_ATTR_AUTOCOMMIT = ", end="")
        print(str(value) + "\n")

        drop = "DROP TABLE TEMP_TEST"
        try:
            result = ibm_db.exec_immediate(conn, drop)
        except:
            pass  # best effort: table may not exist yet
        # Create the table temp_test
        create = "CREATE TABLE TEMP_TEST (id INTEGER, name CHAR(16))"
        result = ibm_db.exec_immediate(conn, create)
        insert = "INSERT INTO temp_test values (1, 'cat')"
        ibm_db.exec_immediate(conn, insert)

        stmt = ibm_db.prepare(conn, "SELECT * FROM temp_test WHERE id > 1")
        if sys.platform != 'zos':
            returnCode = ibm_db.set_option(stmt, {ibm_db.SQL_ATTR_QUERY_TIMEOUT: 20}, 0)
            value = ibm_db.get_option(stmt, ibm_db.SQL_ATTR_QUERY_TIMEOUT, 0)
            print("Statement options:\nSQL_ATTR_QUERY_TIMEOUT = ", end="")
            print(str(value) + "\n")
        # BUG FIX: the original tested the stale 'result' handle left over from
        # the CREATE TABLE above; test the outcome of executing this statement.
        exec_rc = ibm_db.execute(stmt)
        if exec_rc:
            ibm_db.free_result(stmt)
        else:
            print(ibm_db.stmt_errormsg())

        ibm_db.rollback(conn)
        ibm_db.close(conn)
    else:
        print("Connection failed.")
def run_test_064(self): conn = ibm_db.connect(config.database, config.user, config.password) server = ibm_db.server_info(conn) create = 'CREATE SCHEMA AUTHORIZATION t' try: result = ibm_db.exec_immediate(conn, create) except: pass create = 'CREATE TABLE t.t1( c1 integer, c2 varchar(40))' try: result = ibm_db.exec_immediate(conn, create) except: pass create = 'CREATE TABLE t.t2( c1 integer, c2 varchar(40))' try: result = ibm_db.exec_immediate(conn, create) except: pass create = 'CREATE TABLE t.t3( c1 integer, c2 varchar(40))' try: result = ibm_db.exec_immediate(conn, create) except: pass create = 'CREATE TABLE t.t4( c1 integer, c2 varchar(40))' try: result = ibm_db.exec_immediate(conn, create) except: pass if (server.DBMS_NAME[0:3] == 'IDS'): result = ibm_db.tables(conn, None, 't') else: result = ibm_db.tables(conn, None, 'T') for i in range(0, ibm_db.num_fields(result)): print "%s, " % ibm_db.field_name(result, i) print print i = 0 row = ibm_db.fetch_tuple(result) while (row): ibm_db.num_fields(result) if (i < 4): print ", " + row[1] + ", " + row[2] + ", " + row[3] + ", , \n" i = i + 1 row = ibm_db.fetch_tuple(result) ibm_db.free_result(result) ibm_db.exec_immediate(conn, 'DROP TABLE t.t1') ibm_db.exec_immediate(conn, 'DROP TABLE t.t2') ibm_db.exec_immediate(conn, 'DROP TABLE t.t3') ibm_db.exec_immediate(conn, 'DROP TABLE t.t4')
def run_test_064(self):
    """List tables in schema 't'/'T' via ibm_db.tables(), print the first four
    rows, then drop the fixture tables (Python 3 variant)."""
    conn = ibm_db.connect(config.database, config.user, config.password)
    server = ibm_db.server_info(conn)

    create = 'CREATE SCHEMA AUTHORIZATION t'
    try:
        result = ibm_db.exec_immediate(conn, create)
    except:
        pass
    for tab in ('t1', 't2', 't3', 't4'):
        create = 'CREATE TABLE t.%s( c1 integer, c2 varchar(40))' % tab
        try:
            result = ibm_db.exec_immediate(conn, create)
        except:
            pass

    # Informix (IDS) folds unquoted identifiers to lower case.
    if server.DBMS_NAME[0:3] == 'IDS':
        result = ibm_db.tables(conn, None, 't')
    else:
        result = ibm_db.tables(conn, None, 'T')

    for i in range(0, ibm_db.num_fields(result)):
        print("%s, " % ibm_db.field_name(result, i))
    print()
    print()
    i = 0
    row = ibm_db.fetch_tuple(result)
    while row:
        ibm_db.num_fields(result)
        if i < 4:
            print(", " + row[1] + ", " + row[2] + ", " + row[3] + ", , \n")
        i = i + 1
        row = ibm_db.fetch_tuple(result)
    ibm_db.free_result(result)

    for tab in ('t1', 't2', 't3', 't4'):
        ibm_db.exec_immediate(conn, 'DROP TABLE t.%s' % tab)
def _push_cache(self, cache_filename, cache_pathname):
    """Upsert the parquet file at cache_pathname into the cache table.

    DB2 uses MERGE with the blob streamed from the file (PARAM_FILE binding);
    PostgreSQL reads the file into memory and relies on the 'uc_<tablename>'
    unique constraint for INSERT ... ON CONFLICT.
    """
    if not self.is_postgre_sql:
        sql_statement = ("MERGE INTO %s.%s AS TARGET "
                         "USING (VALUES (?, ?, ?, CURRENT_TIMESTAMP)) "
                         "AS SOURCE (ENTITY_TYPE_ID, PARQUET_NAME, PARQUET_FILE, UPDATED_TS) "
                         "ON TARGET.ENTITY_TYPE_ID = SOURCE.ENTITY_TYPE_ID "
                         "AND TARGET.PARQUET_NAME = SOURCE.PARQUET_NAME "
                         "WHEN MATCHED THEN "
                         "UPDATE SET TARGET.PARQUET_FILE = SOURCE.PARQUET_FILE, "
                         "TARGET.UPDATED_TS = SOURCE.UPDATED_TS "
                         "WHEN NOT MATCHED THEN "
                         "INSERT (ENTITY_TYPE_ID, PARQUET_NAME, PARQUET_FILE, UPDATED_TS) "
                         "VALUES (SOURCE.ENTITY_TYPE_ID, SOURCE.PARQUET_NAME, SOURCE.PARQUET_FILE, "
                         "SOURCE.UPDATED_TS)" % (self.quoted_schema, self.quoted_cache_tablename))
        try:
            stmt = ibm_db.prepare(self.db_connection, sql_statement)
            try:
                ibm_db.bind_param(stmt, 1, self.entity_type_id)
                ibm_db.bind_param(stmt, 2, cache_filename)
                # PARAM_FILE lets the driver stream the blob from disc.
                ibm_db.bind_param(stmt, 3, cache_pathname, ibm_db.PARAM_FILE, ibm_db.SQL_BLOB)
                ibm_db.execute(stmt)
            finally:
                ibm_db.free_result(stmt)
        except Exception as ex:
            raise Exception('Storing cache file %s under name %s failed with sql statement "%s"' % (
                cache_pathname, cache_filename, sql_statement)) from ex
    else:
        try:
            # IMPROVEMENT: context manager replaces the manual try/finally close.
            with open(cache_pathname, 'rb') as f:
                blob = f.read()
        except Exception as ex:
            raise Exception('The cache file %s could not be read from disc.' % cache_pathname) from ex
        statement1 = "INSERT INTO %s.%s (entity_type_id, parquet_name, parquet_file, updated_ts) " % (
            self.quoted_schema, self.quoted_cache_tablename)
        statement3 = ("ON CONFLICT ON CONSTRAINT %s DO update set entity_type_id = EXCLUDED.entity_type_id, "
                      "parquet_name = EXCLUDED.parquet_name, parquet_file = EXCLUDED.parquet_file, "
                      "updated_ts = EXCLUDED.updated_ts" % dbhelper.quotingTableName(
                          ('uc_%s' % self.cache_tablename), self.is_postgre_sql))
        sql_statement = statement1 + " values (%s, %s, %s, current_timestamp) " + statement3
        try:
            dbhelper.execute_postgre_sql_query(self.db_connection, sql_statement,
                                               (self.entity_type_id, cache_filename, psycopg2.Binary(blob)))
        except Exception as ex:
            raise Exception('Storing cache under name %s failed with sql statement "%s"' % (
                cache_filename, sql_statement)) from ex
    logger.info('Cache has been stored under name %s in table %s.%s' % (
        cache_filename, self.quoted_schema, self.quoted_cache_tablename))
def insert_data_into_alert_table(self, key_and_msg_and_db_parameter=None):
    """Insert alerts into the alert table, skipping duplicate-key violations.

    Each element of key_and_msg_and_db_parameter is a (key, msg, db_params)
    triple; db_params holds the seven insert values in column order. Returns
    the (key, msg) pairs that were actually inserted (duplicates excluded).
    """
    # BUG FIX: avoid a mutable default argument; None now behaves like [].
    if key_and_msg_and_db_parameter is None:
        key_and_msg_and_db_parameter = []
    logger.info("Processing %s alerts. This alert may contain duplicates, "
                "so need to process the alert before inserting into Database."
                % len(key_and_msg_and_db_parameter))
    updated_key_and_msg = []
    postgres_sql = "insert into " + self.quotedSchema + "." + self.quotedTableName + \
                   " (entity_id, timestamp, entity_type_id, data_item_name, severity, priority,domain_status)" \
                   " values (%s, %s, %s, %s, %s, %s, %s)"
    db2_sql = "insert into " + self.quotedSchema + "." + self.quotedTableName + \
              " (ENTITY_ID, TIMESTAMP, ENTITY_TYPE_ID, DATA_ITEM_NAME, SEVERITY, PRIORITY,DOMAIN_STATUS)" \
              " values (?, ?, ?, ?, ?, ?, ?) "
    total_count = 0
    count = 0
    start_time = dt.datetime.now()
    if self.is_postgre_sql:
        for key, msg, db_params in key_and_msg_and_db_parameter:
            count += 1
            try:
                dbhelper.execute_postgre_sql_query(self.db_connection, sql=postgres_sql, params=db_params)
                updated_key_and_msg.append((key, msg))
            except Exception as ex:
                # BUG FIX: the caught exception may not be a psycopg2 error and
                # then has no pgcode attribute; getattr avoids an AttributeError
                # masking the real failure. 23505 = unique_violation (duplicate).
                if getattr(ex, 'pgcode', None) != '23505':
                    raise ex
            if count == 500:
                total_count += count
                count = 0
                logger.info('Alerts that have been processed so far: %d' % total_count)
    else:
        try:
            stmt = ibm_db.prepare(self.db_connection, db2_sql)
            try:
                for key, msg, db_params in key_and_msg_and_db_parameter:
                    count += 1
                    for i, param in enumerate(iterable=db_params, start=1):
                        ibm_db.bind_param(stmt, i, param)
                    try:
                        ibm_db.execute(stmt)
                        updated_key_and_msg.append((key, msg))
                    except Exception as ex:
                        # SQLSTATE 23505 = duplicate key; anything else is fatal.
                        if "SQLSTATE=23505" not in ex.args[0]:
                            raise Exception('Inserting alert %s into table %s.%s failed.' % (
                                str(db_params), self.quotedSchema, self.quotedTableName)) from ex
                    if count == 500:
                        total_count += count
                        count = 0
                        logger.info('Alerts that have been processed so far: %d' % total_count)
            finally:
                ibm_db.free_result(stmt)
        except Exception as ex:
            raise Exception(
                'Inserting alerts into table %s.%s failed.'
                % (self.quotedSchema, self.quotedTableName)) from ex
    logger.info('%d new alerts out of %d processed alerts have been inserted into table %s.%s in %d seconds.' % (
        len(updated_key_and_msg), len(key_and_msg_and_db_parameter), self.quotedSchema, self.quotedTableName,
        (dt.datetime.now() - start_time).total_seconds()))
    return updated_key_and_msg
def _get_cache(self, cache_filename, cache_pathname):
    """Fetch the named parquet blob from the cache table and write it to disc.

    Any stale file at cache_pathname is removed first. Returns True when a
    database row for the cache entry exists (even if its blob is empty).
    """
    # Remove file on disc if there is one
    try:
        if os.path.exists(cache_pathname):
            os.remove(cache_pathname)
    except Exception as ex:
        raise Exception('Removal of old cache file %s failed.' % cache_pathname) from ex
    if not self.is_postgre_sql:
        sql_statement = "SELECT PARQUET_FILE FROM %s.%s WHERE ENTITY_TYPE_ID = ? AND PARQUET_NAME = ?" % (
            self.quoted_schema, self.quoted_cache_tablename)
        stmt = ibm_db.prepare(self.db_connection, sql_statement)
        try:
            ibm_db.bind_param(stmt, 1, self.entity_type_id)
            ibm_db.bind_param(stmt, 2, cache_filename)
            ibm_db.execute(stmt)
            row = ibm_db.fetch_tuple(stmt)
            if row is False:
                # ibm_db signals "no row" as False; normalize to None.
                row = None
        except Exception as ex:
            raise Exception(
                'Retrieval of cache %s failed with sql statement "%s"' % (cache_filename, sql_statement)) from ex
        finally:
            ibm_db.free_result(stmt)
    else:
        sql_statement = 'SELECT parquet_file FROM %s.%s' % (self.quoted_schema, self.quoted_cache_tablename)
        sql_statement += ' WHERE entity_type_id = %s AND parquet_name = %s'
        try:
            row = dbhelper.execute_postgre_sql_select_query(self.db_connection, sql_statement,
                                                            (self.entity_type_id, cache_filename),
                                                            fetch_one_only=True)
        except Exception as ex:
            raise Exception(
                'Retrieval of cache %s failed with sql statement "%s"' % (cache_filename, sql_statement)) from ex
    cache_found = False
    if row is not None:
        cache_found = True
        parquet = row[0]
        if parquet is not None and len(parquet) > 0:
            try:
                # IMPROVEMENT: context manager replaces the manual try/finally close.
                with open(cache_pathname, "wb") as f:
                    f.write(parquet)
                logger.info('Cache %s has been retrieved from table %s.%s and stored under %s' % (
                    cache_filename, self.quoted_schema, self.quoted_cache_tablename, cache_pathname))
            except Exception as ex:
                raise Exception('Writing cache file %s to disc failed.' % cache_pathname) from ex
        else:
            logger.info('The cache %s is empty' % cache_filename)
    else:
        logger.info('No cache found for %s' % cache_filename)
    return cache_found
def run_test_065(self):
    """List table T3/t3 via ibm_db.tables() with a '%' schema pattern and print
    the last fetched row, then drop the fixture tables (Python 3 variant)."""
    conn = ibm_db.connect(config.database, config.user, config.password)
    server = ibm_db.server_info(conn)

    create = 'CREATE SCHEMA AUTHORIZATION t'
    try:
        result = ibm_db.exec_immediate(conn, create)
    except:
        pass
    for tab in ('t1', 't2', 't3', 't4'):
        create = 'CREATE TABLE t.%s( c1 integer, c2 varchar(40))' % tab
        try:
            result = ibm_db.exec_immediate(conn, create)
        except:
            pass

    # Informix (IDS) folds unquoted identifiers to lower case.
    if server.DBMS_NAME[0:3] == 'IDS':
        result = ibm_db.tables(conn, None, '%', "t3")
    else:
        result = ibm_db.tables(conn, None, '%', "T3")

    columns = ibm_db.num_fields(result)
    for i in range(0, columns):
        print("%s, " % ibm_db.field_name(result, i))
    print("\n\n")
    row = ibm_db.fetch_tuple(result)
    while row:
        # Only the most recently formatted row survives the loop; it is
        # printed once afterwards (assumes at least one row is returned).
        final = ", " + row[1] + ", " + row[2] + ", " + row[3] + ", , "
        row = ibm_db.fetch_tuple(result)
    print(final)
    ibm_db.free_result(result)

    for tab in ('t1', 't2', 't3', 't4'):
        ibm_db.exec_immediate(conn, 'DROP TABLE t.%s' % tab)
def run_test_065(self): conn = ibm_db.connect(config.database, config.user, config.password) server = ibm_db.server_info( conn ) create = 'CREATE SCHEMA AUTHORIZATION t' try: result = ibm_db.exec_immediate(conn, create) except: pass create = 'CREATE TABLE t.t1( c1 integer, c2 varchar(40))' try: result = ibm_db.exec_immediate(conn, create) except: pass create = 'CREATE TABLE t.t2( c1 integer, c2 varchar(40))' try: result = ibm_db.exec_immediate(conn, create) except: pass create = 'CREATE TABLE t.t3( c1 integer, c2 varchar(40))' try: result = ibm_db.exec_immediate(conn, create) except: pass create = 'CREATE TABLE t.t4( c1 integer, c2 varchar(40))' try: result = ibm_db.exec_immediate(conn, create) except: pass if (server.DBMS_NAME[0:3] == 'IDS'): result = ibm_db.tables(conn, None, '%', "t3") else: result = ibm_db.tables(conn, None, '%', "T3") columns = ibm_db.num_fields(result) for i in range(0, columns): print "%s, " % ibm_db.field_name(result, i) print "\n\n" row = ibm_db.fetch_tuple(result) while ( row ): final = ", " + row[1] + ", " + row[2] + ", " + row[3] + ", , "; row = ibm_db.fetch_tuple(result) print final ibm_db.free_result(result) ibm_db.exec_immediate(conn, 'DROP TABLE t.t1') ibm_db.exec_immediate(conn, 'DROP TABLE t.t2') ibm_db.exec_immediate(conn, 'DROP TABLE t.t3') ibm_db.exec_immediate(conn, 'DROP TABLE t.t4')
def run_test_063(self): conn = ibm_db.connect(config.database, config.user, config.password) result = ibm_db.tables(conn, None, "SYSIBM", "", "VIEW") if (type(result) == ibm_db.IBM_DBStatement): print "Resource is a DB2 Statement" ibm_db.free_result(result)
def run_test_063(self):
    """ibm_db.tables() must hand back an IBM_DBStatement resource (Python 3 variant)."""
    conn = ibm_db.connect(config.database, config.user, config.password)
    result = ibm_db.tables(conn, None, "SYSIBM", "", "VIEW")
    if type(result) == ibm_db.IBM_DBStatement:
        print("Resource is a DB2 Statement")
    ibm_db.free_result(result)
def delete_all_caches(self):
    """Delete every cache entry for this entity type, on disc and in the database."""
    # Delete all cache entries for this entity type locally
    cache_filename, cache_pathname, base_path = self._get_cache_filename(None, None)
    if os.path.exists(base_path):
        try:
            file_listing = os.listdir(base_path)
        except Exception as ex:
            raise Exception('Failure to list content of directory %s' % base_path) from ex
        for filename in file_listing:
            if filename.startswith(DBDataCache.CACHE_FILE_STEM):
                # IMPROVEMENT: build the path portably instead of hard-coding '/'.
                full_path = os.path.join(base_path, filename)
                try:
                    os.remove(full_path)
                except Exception as ex:
                    raise Exception('Removal of file %s failed' % full_path) from ex
    # Delete all cache entries for this entity type in database
    if not self.is_postgre_sql:
        sql_statement = "DELETE FROM %s.%s where ENTITY_TYPE_ID = ?" % (
            self.quoted_schema, self.quoted_cache_tablename)
        try:
            stmt = ibm_db.prepare(self.db_connection, sql_statement)
            try:
                ibm_db.bind_param(stmt, 1, self.entity_type_id)
                ibm_db.execute(stmt)
            finally:
                ibm_db.free_result(stmt)
        except Exception as ex:
            raise Exception(
                'Deletion of cache files failed with sql statement "%s"' % sql_statement) from ex
    else:
        sql_statement = "DELETE FROM %s.%s" % (self.quoted_schema, self.quoted_cache_tablename, )
        sql_statement += ' where entity_type_id = %s'
        try:
            dbhelper.execute_postgre_sql_query(self.db_connection, sql_statement, (self.entity_type_id, ))
        except Exception as ex:
            raise Exception(
                'Deletion of cache files failed with sql statement %s' % sql_statement) from ex
    logger.info(
        'All caches have been deleted from table %s.%s for entity type id %d' % (
            self.quoted_schema, self.quoted_cache_tablename, self.entity_type_id))
def main(): conn = None stmt = None if not conf.has_option('passwd'): conf.conf.set(conf.section, 'passwd', unicode(getpass.getpass('Пароль: '), 'utf-8')) try: conn = ibm_db.connect('DATABASE=%s;HOSTNAME=%s;PORT=%d;PROTOCOL=%s;UID=%s;PWD=%s;' % (conf.get('database'), conf.get('hostname'), conf.getint('port'), conf.get('protocol'), conf.get('user'), conf.get('passwd')), '', '') stmt = ibm_db.exec_immediate(conn, unicode(args.request[0], 'utf-8')) try: result = ibm_db.fetch_tuple(stmt) except: rows = ibm_db.num_rows(stmt) if rows != -1: print u'Обработано строк %d' % rows else: print u'Команда выполнена' return if result: column_conv = [] head = u'' underline=u'' for i in xrange(len(result)): if i != 0: head += u'|' underline += u'+' name = ibm_db.field_name(stmt, i) size = ibm_db.field_display_size(stmt, i) if len(name) > size: size = len(name) if ibm_db.field_nullable(stmt, i) and len(u'NULL') > size: size = len(u'NULL') type_field = ibm_db.field_type(stmt, i) if type_field == 'float' or type_field == 'real' or type_field == 'decimal': column_conv.append({'size': size, 'format': u'{0:%d.%df}' % (size, (size - ibm_db.field_precision(stmt, i))), 'fn': convert_to_float}) elif type_field == 'int' or type_field == 'bigint': column_conv.append({'size': size, 'format': u'{0:%dd}' % size, 'fn': convert_to_int}) else: column_conv.append({'size': size, 'format': u'{0:%ds}' % size, 'fn': without_convert}) head += name.center(size) underline += u'-' * size print head print underline while( result ): print conv(result, column_conv) result = ibm_db.fetch_tuple(stmt) else: print u'Результата не возвращено' except Exception as e: print >> sys.stderr, e sys.exit(-1) finally: if stmt: ibm_db.free_result(stmt) if conn: ibm_db.close(conn)
def store_model(self, model_name, model, user_name=None, serialize=True):
    """Upsert a model blob for this entity type into the model store table.

    When serialize is True the model object is pickled first. DB2 uses MERGE;
    PostgreSQL uses INSERT ... ON CONFLICT on the 'uc_<tablename>' constraint.
    """
    if serialize:
        try:
            model = pickle.dumps(model)
        except Exception as ex:
            raise Exception(
                'Serialization of model %s that is supposed to be stored in ModelStore failed.'
                % model_name) from ex
    if not self.is_postgre_sql:
        sql_statement = ("MERGE INTO %s.%s AS TARGET "
                         "USING (VALUES (?, ?, ?, CURRENT_TIMESTAMP, ?)) "
                         "AS SOURCE (ENTITY_TYPE_ID, MODEL_NAME, MODEL, UPDATED_TS, LAST_UPDATED_BY) "
                         "ON TARGET.ENTITY_TYPE_ID = SOURCE.ENTITY_TYPE_ID "
                         "AND TARGET.MODEL_NAME = SOURCE.MODEL_NAME "
                         "WHEN MATCHED THEN "
                         "UPDATE SET TARGET.MODEL = SOURCE.MODEL, "
                         "TARGET.UPDATED_TS = SOURCE.UPDATED_TS "
                         "WHEN NOT MATCHED THEN "
                         "INSERT (ENTITY_TYPE_ID, MODEL_NAME, MODEL, UPDATED_TS, LAST_UPDATED_BY) "
                         "VALUES (SOURCE.ENTITY_TYPE_ID, SOURCE.MODEL_NAME, SOURCE.MODEL, "
                         "SOURCE.UPDATED_TS, SOURCE.LAST_UPDATED_BY)" % (
                             self.quoted_schema, self.quoted_store_tablename))
        try:
            stmt = ibm_db.prepare(self.db_connection, sql_statement)
            try:
                ibm_db.bind_param(stmt, 1, self.entity_type_id)
                ibm_db.bind_param(stmt, 2, model_name)
                ibm_db.bind_param(stmt, 3, model)
                ibm_db.bind_param(stmt, 4, user_name)
                ibm_db.execute(stmt)
            finally:
                ibm_db.free_result(stmt)
        except Exception as ex:
            raise Exception('Storing model %s failed with sql statement "%s"'
                            % (model_name, sql_statement)) from ex
    else:
        statement1 = "INSERT INTO %s.%s (entity_type_id, model_name, model, updated_ts, last_updated_by) " % (
            self.quoted_schema, self.quoted_store_tablename)
        statement3 = ("ON CONFLICT ON CONSTRAINT %s DO update set entity_type_id = EXCLUDED.entity_type_id, "
                      "model_name = EXCLUDED.model_name, model = EXCLUDED.model, "
                      "updated_ts = EXCLUDED.updated_ts, last_updated_by = EXCLUDED.last_updated_by"
                      % dbhelper.quotingTableName(('uc_%s' % self.store_tablename), self.is_postgre_sql))
        sql_statement = statement1 + " values (%s, %s, %s, current_timestamp, %s) " + statement3
        try:
            dbhelper.execute_postgre_sql_query(self.db_connection, sql_statement,
                                               (self.entity_type_id, model_name,
                                                psycopg2.Binary(model), user_name))
        except Exception as ex:
            raise Exception('Storing model %s failed with sql statement "%s"'
                            % (model_name, sql_statement)) from ex
    logger.info('Model %s of size %d bytes has been stored in table %s.%s.' % (
        model_name, len(model) if model is not None else 0,
        self.quoted_schema, self.quoted_store_tablename))
def retrieve_model(self, model_name, deserialize=True):
    """Load a model blob for this entity type from the model store table.

    Returns None when no row exists; otherwise the raw bytes, or the unpickled
    object when deserialize is True.
    """
    if not self.is_postgre_sql:
        sql_statement = "SELECT MODEL FROM %s.%s WHERE ENTITY_TYPE_ID = ? AND MODEL_NAME = ?" % (
            self.quoted_schema, self.quoted_store_tablename)
        stmt = ibm_db.prepare(self.db_connection, sql_statement)
        try:
            ibm_db.bind_param(stmt, 1, self.entity_type_id)
            ibm_db.bind_param(stmt, 2, model_name)
            ibm_db.execute(stmt)
            row = ibm_db.fetch_tuple(stmt)
            # ibm_db signals "no row" as False.
            model = None if row is False else row[0]
        except Exception as ex:
            raise Exception(
                'Retrieval of model %s failed with sql statement "%s"' % (model_name, sql_statement)) from ex
        finally:
            ibm_db.free_result(stmt)
    else:
        sql_statement = 'SELECT model FROM %s.%s' % (self.quoted_schema, self.quoted_store_tablename)
        sql_statement += ' WHERE entity_type_id = %s AND model_name = %s'
        try:
            row = dbhelper.execute_postgre_sql_select_query(self.db_connection, sql_statement,
                                                            (self.entity_type_id, model_name),
                                                            fetch_one_only=True)
            # psycopg2 returns a memoryview/buffer; normalize to bytes.
            model = None if row is None else bytes(row[0])
        except Exception as ex:
            raise Exception(
                'Retrieval of model %s failed with sql statement "%s"' % (model_name, sql_statement)) from ex
    if model is not None:
        logger.info('Model %s of size %d bytes has been retrieved from table %s.%s' % (
            model_name, len(model) if model is not None else 0,
            self.quoted_schema, self.quoted_store_tablename))
    else:
        logger.info('Model %s does not exist in table %s.%s' % (
            model_name, self.quoted_schema, self.quoted_store_tablename))
    if model is not None and deserialize:
        try:
            model = pickle.loads(model)
        except Exception as ex:
            raise Exception(
                'Deserialization of model %s that has been retrieved from ModelStore failed.'
                % model_name) from ex
    return model
def run_test_250(self):
    """free_result() on three distinct open result sets; print each return code."""
    conn = ibm_db.connect(config.database, config.user, config.password)
    sales_rs = ibm_db.exec_immediate(conn, "select * from sales")
    staff_rs = ibm_db.exec_immediate(conn, "select * from staff")
    photo_rs = ibm_db.exec_immediate(conn, "select * from emp_photo")
    for handle in (sales_rs, staff_rs, photo_rs):
        print(ibm_db.free_result(handle))
def run_test_251(self): conn = ibm_db.connect(config.database, config.user, config.password) result = ibm_db.exec_immediate(conn, "select * from sales") r1 = ibm_db.free_result(result) r2 = ibm_db.free_result(result) r3 = '' try: r3 = ibm_db.free_result(result99) except: r3 = None print r1 print r2 print r3
def run_test_251(self):
    """Double-free a result handle and free an undefined one (Python 3 variant)."""
    conn = ibm_db.connect(config.database, config.user, config.password)
    result = ibm_db.exec_immediate(conn, "select * from sales")
    r1 = ibm_db.free_result(result)
    r2 = ibm_db.free_result(result)  # second free on the same handle
    r3 = ''
    try:
        r3 = ibm_db.free_result(result99)  # undefined name -> exception expected
    except:
        r3 = None
    print(r1)
    print(r2)
    print(r3)
def delete_cache(self, dep_grain, grain, old_name=False):
    """Delete one cache entry for the given grains, on disc and in the database."""
    # Delete single cache entry locally
    cache_filename, cache_pathname, base_path = self._get_cache_filename(dep_grain, grain, old_name)
    if os.path.exists(cache_pathname):
        try:
            os.remove(cache_pathname)
        except Exception as ex:
            raise Exception('Removal of cache file %s failed' % cache_pathname) from ex
    # Delete single cache entry in database
    if not self.is_postgre_sql:
        sql_statement = "DELETE FROM %s.%s WHERE ENTITY_TYPE_ID = ? AND PARQUET_NAME = ?" % (
            self.quoted_schema, self.quoted_cache_tablename)
        try:
            stmt = ibm_db.prepare(self.db_connection, sql_statement)
            try:
                ibm_db.bind_param(stmt, 1, self.entity_type_id)
                ibm_db.bind_param(stmt, 2, cache_filename)
                ibm_db.execute(stmt)
            finally:
                ibm_db.free_result(stmt)
        except Exception as ex:
            raise Exception(
                'Deletion of cache file %s failed with sql statement "%s"'
                % (cache_filename, sql_statement)) from ex
    else:
        sql_statement = "DELETE FROM %s.%s" % (self.quoted_schema, self.quoted_cache_tablename)
        sql_statement += ' where entity_type_id = %s and parquet_name = %s'
        try:
            dbhelper.execute_postgre_sql_query(self.db_connection, sql_statement,
                                               (self.entity_type_id, cache_filename))
        except Exception as ex:
            raise Exception(
                'Deletion of cache file %s failed with sql statement %s'
                % (cache_filename, sql_statement)) from ex
    logger.info('Cache file %s has been deleted from table %s.%s'
                % (cache_filename, self.quoted_schema, self.quoted_cache_tablename))
def run_test_019(self):
    """Row-count prefetch requested via prepare() options (not on z/OS)."""
    conn = ibm_db.connect(config.database, config.user, config.password)
    ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_ON)
    if conn:
        query = "SELECT * from animals WHERE weight < 10.0"
        if 'zos' in sys.platform:
            stmt = ibm_db.prepare(conn, query)
        else:
            stmt = ibm_db.prepare(conn, query,
                                  {ibm_db.SQL_ATTR_ROWCOUNT_PREFETCH: ibm_db.SQL_ROWCOUNT_PREFETCH_ON})
        result = ibm_db.execute(stmt)
        if result:
            rows = ibm_db.num_rows(stmt)
            print("affected row:", rows)
            ibm_db.free_result(stmt)
        else:
            print(ibm_db.stmt_errormsg())
        ibm_db.close(conn)
    else:
        print("no connection:", ibm_db.conn_errormsg())
# Determine how many rows the prepared query returned.
# BUG FIX: numRows/returnCode were left undefined when the guarded ibm_db call
# raised, causing a NameError in the checks below; initialize fallbacks first.
numRows = -1
try:
    numRows = ibm_db.num_rows(preparedStmt)
except Exception:
    pass

# Display An Appropriate Message, Based On The Information Returned
if numRows <= 0:
    print("Unable to obtain information about the number of rows returned.\n")
else:
    print("Number of rows returned by the query: " + str(numRows) + "\n")

# Free System Resources That Are Associated With The Prepared Statement And Result Set Produced
print("Freeing system resources associated with the prepared statement ... ", end="")
returnCode = False
try:
    returnCode = ibm_db.free_result(preparedStmt)
except Exception:
    pass

# If The Appropriate System Resources Could Not Be Freed, Display An Error Message And Exit
if returnCode is False:
    print("\nERROR: Unable to free the appropriate system resources.\n")
    conn.closeConnection()
    exit(-1)

# Otherwise, Complete The Status Message
else:
    print("Done!\n")

# Create A Dictionary That Contains The Value Needed To Turn Row Prefetch Behavior On;
# This Enables Db2 To Determine The Number Of Rows That Are Returned By A Query (So The
def run_test_064(self): conn = ibm_db.connect(config.database, config.user, config.password) server = ibm_db.server_info( conn ) create = 'CREATE SCHEMA AUTHORIZATION t' try: result = ibm_db.exec_immediate(conn, create) except: pass create = 'CREATE TABLE t.t1( c1 integer, c2 varchar(40))' try: result = ibm_db.exec_immediate(conn, create) except: pass create = 'CREATE TABLE t.t2( c1 integer, c2 varchar(40))' try: result = ibm_db.exec_immediate(conn, create) except: pass create = 'CREATE TABLE t.t3( c1 integer, c2 varchar(40))' try: result = ibm_db.exec_immediate(conn, create) except: pass create = 'CREATE TABLE t.t4( c1 integer, c2 varchar(40))' try: result = ibm_db.exec_immediate(conn, create) except: pass if (server.DBMS_NAME[0:3] == 'IDS'): result = ibm_db.tables(conn, None, 't') else: result = ibm_db.tables(conn, None, 'T') for i in range(0, ibm_db.num_fields(result)): print "%s, " % ibm_db.field_name(result, i) print print i = 0 row = ibm_db.fetch_tuple(result) while ( row ): ibm_db.num_fields(result) if (i < 4): print ", " + row[1] + ", " + row[2] + ", " + row[3] + ", , \n" i = i + 1 row = ibm_db.fetch_tuple(result) ibm_db.free_result(result) ibm_db.exec_immediate(conn, 'DROP TABLE t.t1') ibm_db.exec_immediate(conn, 'DROP TABLE t.t2') ibm_db.exec_immediate(conn, 'DROP TABLE t.t3') ibm_db.exec_immediate(conn, 'DROP TABLE t.t4') #__END__ #__LUW_EXPECTED__ #TABLE_CAT, TABLE_SCHEM, TABLE_NAME, TABLE_TYPE, REMARKS, # #, T, T1, TABLE, , #, T, T2, TABLE, , #, T, T3, TABLE, , #, T, T4, TABLE, , #__ZOS_EXPECTED__ #TABLE_CAT, TABLE_SCHEM, TABLE_NAME, TABLE_TYPE, REMARKS, # #, T, T1, TABLE, , #, T, T2, TABLE, , #, T, T3, TABLE, , #, T, T4, TABLE, , #__SYSTEMI_EXPECTED__ #TABLE_CAT, TABLE_SCHEM, TABLE_NAME, TABLE_TYPE, REMARKS, # #, T, T1, TABLE, , #, T, T2, TABLE, , #, T, T3, TABLE, , #, T, T4, TABLE, , #__IDS_EXPECTED__ #table_cat, table_schem, table_name, table_type, remarks, # #, t, t1, TABLE%s, , #, t, t2, TABLE%s, , #, t, t3, TABLE%s, , #, t, t4, TABLE%s, ,
def run_test_066(self):
    """Exercise ibm_db.tables() with an explicit table-name filter.

    Looks up the ANIMALS, ANIMAL_PICS and ANIME_CAT tables owned by the
    configured test user and prints Schema/Name/Type/Remarks for every
    match.  Informix (IDS) servers return lower-case result-set keys and
    need lower-case filter arguments; all other servers use upper case.
    """
    conn = ibm_db.connect(config.database, config.user, config.password)
    server = ibm_db.server_info(conn)
    is_ids = server.DBMS_NAME[0:3] == 'IDS'

    # NOTE: This is a workaround — fetch_object() is not implemented yet,
    # so each row is rebuilt from a fetch_assoc() dictionary onto a bare
    # object.
    class Row:
        pass

    def _show(table_name):
        """Query ibm_db.tables() for one table and print all its rows."""
        if is_ids:
            res = ibm_db.tables(conn, None, config.user.lower(),
                                table_name.lower())
        else:
            res = ibm_db.tables(conn, None, config.user.upper(),
                                table_name.upper())
        data = ibm_db.fetch_assoc(res)
        while data:
            row = Row()
            # IDS returns lower-case column keys, other servers upper-case.
            for attr in ('table_schem', 'table_name', 'table_type',
                         'remarks'):
                setattr(row, attr, data[attr if is_ids else attr.upper()])
            print("Schema: %s" % row.table_schem)
            print("Name: %s" % row.table_name)
            print("Type: %s" % row.table_type)
            print("Remarks: %s\n" % row.remarks)
            data = ibm_db.fetch_assoc(res)
        # Free every result set (the original leaked all but the last one).
        ibm_db.free_result(res)

    for table in ('ANIMALS', 'ANIMAL_PICS', 'ANIME_CAT'):
        _show(table)
    ibm_db.close(conn)
def run_test_066(self):
    """Print Schema/Name/Type/Remarks for the ANIMALS, ANIMAL_PICS and
    ANIME_CAT tables of the configured user, via ibm_db.tables().

    Informix (IDS) servers report lower-case result-set keys and expect
    lower-case filter arguments; every other server uses upper case.
    """
    conn = ibm_db.connect(config.database, config.user, config.password)
    server = ibm_db.server_info(conn)
    ids_server = (server.DBMS_NAME[0:3] == 'IDS')

    # NOTE: This is a workaround — fetch_object() is not implemented yet,
    # so rows are rebuilt from fetch_assoc() dictionaries onto a bare
    # object.
    class Row:
        pass

    fields = ('table_schem', 'table_name', 'table_type', 'remarks')
    labels = ('Schema: %s', 'Name: %s', 'Type: %s', 'Remarks: %s\n')

    result = None
    for table in ('animals', 'animal_pics', 'anime_cat'):
        if ids_server:
            result = ibm_db.tables(conn, None, config.user.lower(), table)
        else:
            result = ibm_db.tables(conn, None, config.user.upper(),
                                   table.upper())
        data = ibm_db.fetch_assoc(result)
        while data:
            row = Row()
            for field in fields:
                # Column-key case follows the server flavour.
                key = field if ids_server else field.upper()
                setattr(row, key, data[key])
            for label, field in zip(labels, fields):
                key = field if ids_server else field.upper()
                print(label % getattr(row, key))
            data = ibm_db.fetch_assoc(result)

    # As in the original flow, only the final result set is freed here.
    ibm_db.free_result(result)
    ibm_db.close(conn)