def setUp(self):
    """Create an empty test table (serial id only) in the 'discovery0' database.

    Verifies connection env vars, ensures the database exists, drops any
    leftover copy of the test table, installs the hstore extension if it
    is missing, then creates the fresh table.
    """
    db_utils.ensure_environment_variables_set()
    db_utils.ensure_db('discovery0')

    with db_utils.get_test_connection('discovery0') as conn:
        conn.autocommit = True
        with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
            # cursor.execute() always returns None, so the EXISTS result is
            # read via fetchone(); no point assigning the execute() call.
            cur.execute(
                """SELECT EXISTS (
                       SELECT 1
                       FROM information_schema.tables
                       WHERE table_schema = %s
                       AND table_name = %s);""",
                [test_schema_name, test_table_name])
            old_table = cur.fetchone()[0]

            if old_table:
                cur.execute("DROP TABLE {}".format(
                    canonicalized_table_name(test_schema_name, test_table_name, cur)))

            cur = conn.cursor()
            cur.execute(
                """ SELECT installed_version FROM pg_available_extensions WHERE name = 'hstore' """)
            if cur.fetchone()[0] is None:
                # hstore is required by other tables in this suite
                cur.execute(""" CREATE EXTENSION hstore; """)

            create_table_sql = 'CREATE TABLE {} (id SERIAL PRIMARY KEY)'.format(
                canonicalized_table_name(test_schema_name, test_table_name, cur))
            cur.execute(create_table_sql)
def setUp(self):
    """Create the array-datatypes test table in the 'dev' database.

    Drops any leftover copy of the test table, ensures the hstore and
    citext extensions plus the ALIGNMENT enum exist, then creates a table
    with one array column per Postgres array type under test.
    """
    db_utils.ensure_environment_variables_set()
    db_utils.ensure_db()
    self.maxDiff = None

    with db_utils.get_test_connection('dev') as conn:
        conn.autocommit = True
        with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
            # cursor.execute() returns None; the EXISTS flag comes from fetchone().
            cur.execute("""SELECT EXISTS (
                               SELECT 1
                               FROM information_schema.tables
                               WHERE table_schema = %s
                               AND table_name = %s);""",
                        [test_schema_name, test_table_name])
            old_table = cur.fetchone()[0]

            if old_table:
                cur.execute("DROP TABLE {}".format(
                    canonicalized_table_name(test_schema_name, test_table_name, cur)))

            cur = conn.cursor()
            cur.execute(""" SELECT installed_version FROM pg_available_extensions WHERE name = 'hstore' """)
            if cur.fetchone()[0] is None:
                cur.execute(""" CREATE EXTENSION hstore; """)

            cur.execute(""" CREATE EXTENSION IF NOT EXISTS citext WITH SCHEMA public;""")

            # enum type must be rebuilt so the definition is always current
            cur.execute(""" DROP TYPE IF EXISTS ALIGNMENT CASCADE """)
            cur.execute(""" CREATE TYPE ALIGNMENT AS ENUM ('good', 'bad', 'ugly') """)

            create_table_sql = """
            CREATE TABLE {} (id SERIAL PRIMARY KEY,
                our_bit_array BIT(1)[],
                our_boolean_array BOOLEAN[],
                our_cidr_array CIDR[],
                our_citext_array CITEXT[],
                our_date_array DATE[],
                our_decimal_array NUMERIC(12,2)[],
                our_double_array DOUBLE PRECISION[],
                our_enum_array ALIGNMENT[],
                our_float_array FLOAT[],
                our_hstore_array HSTORE[],
                our_inet_array INET[],
                our_int_array INTEGER[][],
                our_json_array JSON[],
                our_jsonb_array JSONB[],
                our_mac_array MACADDR[],
                our_money_array MONEY[],
                our_real_array REAL[],
                our_smallint_array SMALLINT[],
                our_string_array VARCHAR[],
                our_text_array TEXT[],
                our_time_array TIME[],
                our_ts_tz_array TIMESTAMP WITH TIME ZONE[],
                our_uuid_array UUID[])
            """.format(canonicalized_table_name(test_schema_name, test_table_name, cur))
            cur.execute(create_table_sql)
def setUp(self):
    """Create two joined test tables plus a view over them, with one row each.

    rec_1 goes into table 1; its generated serial id is then used as the
    foreign key for rec_2 in table 2, so the view join produces one row.
    """
    db_utils.ensure_environment_variables_set()
    db_utils.ensure_db()
    self.maxDiff = None

    with db_utils.get_test_connection() as conn:
        conn.autocommit = True
        with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
            for table in [test_table_name_1, test_table_name_2]:
                # cursor.execute() returns None; read the EXISTS flag via fetchone().
                cur.execute("""SELECT EXISTS (
                                   SELECT 1
                                   FROM information_schema.tables
                                   WHERE table_schema = %s
                                   AND table_name = %s)""",
                            [test_schema_name, table])
                old_table = cur.fetchone()[0]
                if old_table:
                    # CASCADE also removes the dependent view
                    cur.execute("DROP TABLE {} CASCADE".format(
                        canonicalized_table_name(test_schema_name, table, cur)))

            cur.execute("""DROP VIEW IF EXISTS {} """.format(quote_ident(test_view, cur)))

            cur.execute("""CREATE TABLE {} (id SERIAL PRIMARY KEY, name VARCHAR, size VARCHAR) """.format(
                canonicalized_table_name(test_schema_name, test_table_name_1, cur)))
            cur.execute("""CREATE TABLE {} (fk_id bigint, age integer) """.format(
                canonicalized_table_name(test_schema_name, test_table_name_2, cur)))
            cur.execute("""CREATE VIEW {} AS (SELECT * FROM {} join {} on {}.id = {}.fk_id )""".format(
                quote_ident(test_view, cur),
                canonicalized_table_name(test_schema_name, test_table_name_1, cur),
                canonicalized_table_name(test_schema_name, test_table_name_2, cur),
                canonicalized_table_name(test_schema_name, test_table_name_1, cur),
                canonicalized_table_name(test_schema_name, test_table_name_2, cur)))

            self.rec_1 = {'name': 'fred', 'size': 'big'}
            insert_record(cur, test_table_name_1, self.rec_1)

            # fetch the serial id just generated so rec_2 can reference it
            cur.execute("SELECT id FROM {}".format(
                canonicalized_table_name(test_schema_name, test_table_name_1, cur)))
            fk_id = cur.fetchone()[0]

            self.rec_2 = {'fk_id': fk_id, 'age': 99}
            insert_record(cur, test_table_name_2, self.rec_2)
def setUp(self):
    """Create an empty test table (serial id only) in the 'discovery0' database.

    Uses the shared db_utils env-var check (this replaces a hand-rolled
    check that compared with `== None` and left an unused `creds` dict),
    drops any leftover table, ensures hstore, then creates the table.
    """
    db_utils.ensure_environment_variables_set()
    db_utils.ensure_db('discovery0')

    with db_utils.get_test_connection('discovery0') as conn:
        conn.autocommit = True
        with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
            # cursor.execute() returns None; the EXISTS result is read via fetchone()
            cur.execute(
                """SELECT EXISTS (
                       SELECT 1
                       FROM information_schema.tables
                       WHERE table_schema = %s
                       AND table_name = %s);""",
                [test_schema_name, test_table_name])
            old_table = cur.fetchone()[0]

            if old_table:
                cur.execute("DROP TABLE {}".format(
                    canonicalized_table_name(test_schema_name, test_table_name, cur)))

            cur = conn.cursor()
            cur.execute(
                """ SELECT installed_version FROM pg_available_extensions WHERE name = 'hstore' """)
            if cur.fetchone()[0] is None:
                cur.execute(""" CREATE EXTENSION hstore; """)

            create_table_sql = 'CREATE TABLE {} (id SERIAL PRIMARY KEY)'.format(
                canonicalized_table_name(test_schema_name, test_table_name, cur))
            cur.execute(create_table_sql)
def setUp(self):
    """Build the scalar-datatypes table in 'dev' and load three fixture rows.

    Creates a table covering every scalar Postgres type under test, then
    inserts three records: two fully populated rows and one NaN/Inf edge
    case. Raw inserted values and the values the tap is expected to emit
    are tracked in self.inserted_records / self.expected_records.
    """
    db_utils.ensure_environment_variables_set()
    db_utils.ensure_db("dev")
    self.maxDiff = None

    with db_utils.get_test_connection('dev') as conn:
        conn.autocommit = True
        with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
            canon_table_name = db_utils.canonicalized_table_name(
                cur, test_schema_name, test_table_name)
            cur = db_utils.ensure_fresh_table(
                conn, cur, test_schema_name, test_table_name)

            create_table_sql = """
            CREATE TABLE {} (id SERIAL PRIMARY KEY,
                our_varchar VARCHAR,
                our_varchar_10 VARCHAR(10),
                our_text TEXT,
                our_integer INTEGER,
                our_smallint SMALLINT,
                our_bigint BIGINT,
                our_decimal NUMERIC({},{}),
                "OUR TS" TIMESTAMP WITHOUT TIME ZONE,
                "OUR TS TZ" TIMESTAMP WITH TIME ZONE,
                "OUR TIME" TIME WITHOUT TIME ZONE,
                "OUR TIME TZ" TIME WITH TIME ZONE,
                "OUR DATE" DATE,
                our_double DOUBLE PRECISION,
                our_real REAL,
                our_boolean BOOLEAN,
                our_bit BIT(1),
                our_json JSON,
                our_jsonb JSONB,
                our_uuid UUID,
                our_store HSTORE,
                our_citext CITEXT,
                our_inet inet,
                our_cidr cidr,
                our_mac macaddr,
                our_alignment_enum ALIGNMENT,
                our_money money)
            """.format(canon_table_name, NUMERIC_PRECISION, NUMERIC_SCALE)
            cur.execute(create_table_sql)

            # insert fixture data and track expected records
            self.inserted_records = []
            self.expected_records = []

            # record 1: fully populated
            our_ts = datetime.datetime(1997, 2, 2, 2, 2, 2, 722184)
            nyc_tz = pytz.timezone('America/New_York')
            our_ts_tz = nyc_tz.localize(our_ts)
            our_time = datetime.time(12, 11, 10)
            our_time_tz = our_time.isoformat() + "-04:00"
            our_date = datetime.date(1998, 3, 4)
            my_uuid = str(uuid.uuid1())

            self.inserted_records.append({
                'our_varchar': "our_varchar",
                'our_varchar_10': "varchar_10",
                'our_text': "some text",
                'our_integer': 44100,
                'our_smallint': 1,
                'our_bigint': 1000000,
                'our_decimal': decimal.Decimal('.01'),
                quote_ident('OUR TS', cur): our_ts,
                quote_ident('OUR TS TZ', cur): our_ts_tz,
                quote_ident('OUR TIME', cur): our_time,
                quote_ident('OUR TIME TZ', cur): our_time_tz,
                quote_ident('OUR DATE', cur): our_date,
                'our_double': decimal.Decimal('1.1'),
                'our_real': 1.2,
                'our_boolean': True,
                'our_bit': '0',
                'our_json': json.dumps({'secret': 55}),
                'our_jsonb': json.dumps({'burgers': 'good'}),
                'our_uuid': my_uuid,
                'our_store': 'size=>"small",name=>"betty"',
                'our_citext': 'maGICKal 4',
                'our_cidr': '192.168.100.128/25',
                'our_inet': '192.168.100.128/24',
                'our_mac': '08:00:2b:01:02:03',
                'our_alignment_enum': 'good',
                'our_money': '100.1122',
            })
            self.expected_records.append({
                'our_decimal': decimal.Decimal('.01'),
                'our_text': 'some text',
                'our_bit': False,
                'our_integer': 44100,
                'our_double': decimal.Decimal('1.1'),
                'id': 1,
                'our_json': '{"secret": 55}',
                'our_boolean': True,
                'our_jsonb': '{"burgers": "good"}',
                'our_bigint': 1000000,
                'OUR TS': self.expected_ts(our_ts),
                'OUR TS TZ': self.expected_ts_tz(our_ts_tz),
                'OUR TIME': str(our_time),
                'OUR TIME TZ': str(our_time_tz),
                'our_store': {"name": "betty", "size": "small"},
                'our_smallint': 1,
                'OUR DATE': '1998-03-04T00:00:00+00:00',
                'our_varchar': 'our_varchar',
                'our_uuid': self.inserted_records[0]['our_uuid'],
                'our_real': decimal.Decimal('1.2'),
                'our_varchar_10': 'varchar_10',
                'our_citext': self.inserted_records[0]['our_citext'],
                'our_inet': self.inserted_records[0]['our_inet'],
                'our_cidr': self.inserted_records[0]['our_cidr'],
                'our_mac': self.inserted_records[0]['our_mac'],
                'our_alignment_enum': self.inserted_records[0]['our_alignment_enum'],
                'our_money': '$100.11'
            })

            # record 2: fully populated, different values, NULL money
            our_ts = datetime.datetime(1987, 3, 3, 3, 3, 3, 733184)
            nyc_tz = pytz.timezone('America/New_York')
            our_ts_tz = nyc_tz.localize(our_ts)
            our_time = datetime.time(10, 9, 8)
            our_time_tz = our_time.isoformat() + "-04:00"
            our_date = datetime.date(1964, 7, 1)
            my_uuid = str(uuid.uuid1())

            self.inserted_records.append({
                'our_varchar': "our_varchar 2",
                'our_varchar_10': "varchar_10",
                'our_text': "some text 2",
                'our_integer': 44101,
                'our_smallint': 2,
                'our_bigint': 1000001,
                'our_decimal': decimal.Decimal('.02'),
                quote_ident('OUR TS', cur): our_ts,
                quote_ident('OUR TS TZ', cur): our_ts_tz,
                quote_ident('OUR TIME', cur): our_time,
                quote_ident('OUR TIME TZ', cur): our_time_tz,
                quote_ident('OUR DATE', cur): our_date,
                'our_double': decimal.Decimal('1.1'),
                'our_real': decimal.Decimal('1.2'),
                'our_boolean': True,
                'our_bit': '1',
                'our_json': json.dumps(["nymn 77"]),
                'our_jsonb': json.dumps({'burgers': 'good++'}),
                'our_uuid': my_uuid,
                'our_store': 'dances=>"floor",name=>"betty"',
                'our_citext': 'maGICKal 2',
                'our_cidr': '192.168.101.128/25',
                'our_inet': '192.168.101.128/24',
                'our_mac': '08:00:2b:01:02:04',
                'our_money': None
            })
            self.expected_records.append({
                'our_decimal': decimal.Decimal('.02'),
                'OUR TIME': str(our_time),
                'our_text': 'some text 2',
                'our_bit': True,
                'our_integer': 44101,
                'our_double': decimal.Decimal('1.1'),
                'id': 2,
                'our_json': '["nymn 77"]',
                'our_boolean': True,
                'our_jsonb': '{"burgers": "good++"}',
                'our_bigint': 1000001,
                'OUR TIME TZ': str(our_time_tz),
                'our_store': {"name": "betty", "dances": "floor"},
                'OUR TS TZ': self.expected_ts_tz(our_ts_tz),
                'our_smallint': 2,
                'OUR DATE': '1964-07-01T00:00:00+00:00',
                'our_varchar': 'our_varchar 2',
                'OUR TS': self.expected_ts(our_ts),
                'our_uuid': self.inserted_records[1]['our_uuid'],
                'our_real': decimal.Decimal('1.2'),
                'our_varchar_10': 'varchar_10',
                'our_citext': self.inserted_records[1]['our_citext'],
                'our_inet': self.inserted_records[1]['our_inet'],
                'our_cidr': self.inserted_records[1]['our_cidr'],
                'our_mac': self.inserted_records[1]['our_mac'],
                'our_alignment_enum': self.inserted_records[1]['our_alignment_enum'],
                'our_money': None
            })

            # record 3: non-finite numerics only
            self.inserted_records.append({
                'our_decimal': decimal.Decimal('NaN'),
                'our_double': float('nan'),
                'our_real': float('-inf')
            })
            self.expected_records.append({
                'id': 3,
                # We cast NaN's, +Inf, -Inf to NULL as wal2json does not support
                # them and now we are at least consistent(ly wrong).
                'our_decimal': None,
                'our_double': None,
                'our_real': None,
                # any field without a set value will be set to NULL
                'OUR TIME': None,
                'our_text': None,
                'our_bit': None,
                'our_integer': None,
                'our_json': None,
                'our_boolean': None,
                'our_jsonb': None,
                'our_bigint': None,
                'OUR TIME TZ': None,
                'our_store': None,
                'OUR TS TZ': None,
                'our_smallint': None,
                'OUR DATE': None,
                'our_varchar': None,
                'OUR TS': None,
                'our_uuid': None,
                'our_varchar_10': None,
                'our_citext': None,
                'our_inet': None,
                'our_cidr': None,
                'our_mac': None,
                'our_alignment_enum': None,
                'our_money': None
            })

            for record in self.inserted_records:
                db_utils.insert_record(cur, test_table_name, record)
def setUp(self):
    """Recreate the 'stitch' replication slot and cows/chickens tables in 'dev'.

    Drops and recreates the wal2json replication slot, rebuilds both test
    tables, and inserts one row into each.
    """
    db_utils.ensure_environment_variables_set()
    db_utils.ensure_db("dev")
    self.maxDiff = None

    with db_utils.get_test_connection('dev') as conn:
        conn.autocommit = True
        with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
            cur.execute(""" SELECT EXISTS (SELECT 1
                                           FROM  pg_replication_slots
                                           WHERE slot_name = 'stitch') """)
            old_slot = cur.fetchone()[0]

            # replication-slot management needs a separate replication connection
            with db_utils.get_test_connection('dev', True) as conn2:
                with conn2.cursor() as cur2:
                    if old_slot:
                        cur2.drop_replication_slot("stitch")
                    cur2.create_replication_slot('stitch', output_plugin='wal2json')

            for t in [test_table_name_cows, test_table_name_chickens]:
                # cursor.execute() returns None; the EXISTS flag comes from fetchone()
                cur.execute(
                    """SELECT EXISTS (
                           SELECT 1
                           FROM information_schema.tables
                           WHERE table_schema = %s
                           AND table_name = %s);""",
                    [test_schema_name, t])
                old_table = cur.fetchone()[0]
                if old_table:
                    cur.execute("DROP TABLE {}".format(
                        canonicalized_table_name(test_schema_name, t, cur)))

            cur = conn.cursor()
            create_table_sql = """
            CREATE TABLE {} (id SERIAL PRIMARY KEY, cow_age integer, cow_name varchar)
            """.format(canonicalized_table_name(test_schema_name, test_table_name_cows, cur))
            cur.execute(create_table_sql)

            create_table_sql = """
            CREATE TABLE {} (id SERIAL PRIMARY KEY, chicken_age integer, chicken_name varchar)
            """.format(canonicalized_table_name(test_schema_name, test_table_name_chickens, cur))
            cur.execute(create_table_sql)

            # insert a cow
            self.cows_rec_1 = {'cow_name': "anne_cow", 'cow_age': 30}
            insert_record(cur, test_table_name_cows, self.cows_rec_1)

            # insert a chicken
            self.chickens_rec_1 = {'chicken_name': "alfred_chicken", 'chicken_age': 4}
            insert_record(cur, test_table_name_chickens, self.chickens_rec_1)
def setUp(self):
    """Recreate the 'stitch' slot and the scalar-datatypes table in test_db.

    Rebuilds the wal2json replication slot, drops/recreates the test table
    (installing hstore, citext, and the ALIGNMENT enum as needed), then
    inserts four fixture records stored on self.rec_1 .. self.rec_4.
    """
    db_utils.ensure_environment_variables_set()
    db_utils.ensure_db(test_db)
    self.maxDiff = None

    with db_utils.get_test_connection(test_db) as conn:
        conn.autocommit = True
        with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
            cur.execute(""" SELECT EXISTS (SELECT 1
                                           FROM  pg_replication_slots
                                           WHERE slot_name = 'stitch') """)
            old_slot = cur.fetchone()[0]

            # replication-slot management needs a separate replication connection
            with db_utils.get_test_connection(test_db, True) as conn2:
                with conn2.cursor() as cur2:
                    if old_slot:
                        cur2.drop_replication_slot("stitch")
                    cur2.create_replication_slot('stitch', output_plugin='wal2json')

            # cursor.execute() returns None; the EXISTS flag comes from fetchone()
            cur.execute(
                """SELECT EXISTS (
                       SELECT 1
                       FROM information_schema.tables
                       WHERE table_schema = %s
                       AND table_name = %s);""",
                [test_schema_name, test_table_name])
            old_table = cur.fetchone()[0]

            if old_table:
                cur.execute("DROP TABLE {}".format(
                    canonicalized_table_name(test_schema_name, test_table_name, cur)))

            cur = conn.cursor()
            cur.execute(
                """ SELECT installed_version FROM pg_available_extensions WHERE name = 'hstore' """)
            if cur.fetchone()[0] is None:
                cur.execute(""" CREATE EXTENSION hstore; """)

            cur.execute(""" CREATE EXTENSION IF NOT EXISTS citext WITH SCHEMA public;""")
            # enum type must be rebuilt so the definition is always current
            cur.execute(""" DROP TYPE IF EXISTS ALIGNMENT CASCADE """)
            cur.execute(""" CREATE TYPE ALIGNMENT AS ENUM ('good', 'bad', 'ugly') """)

            create_table_sql = """
            CREATE TABLE {} (id SERIAL PRIMARY KEY,
                our_varchar VARCHAR,
                our_varchar_10 VARCHAR(10),
                our_text TEXT,
                our_text_2 TEXT,
                our_integer INTEGER,
                our_smallint SMALLINT,
                our_bigint BIGINT,
                our_decimal NUMERIC(12,2),
                "OUR TS" TIMESTAMP WITHOUT TIME ZONE,
                "OUR TS TZ" TIMESTAMP WITH TIME ZONE,
                "OUR TIME" TIME WITHOUT TIME ZONE,
                "OUR TIME TZ" TIME WITH TIME ZONE,
                "OUR DATE" DATE,
                our_double DOUBLE PRECISION,
                our_real REAL,
                our_boolean BOOLEAN,
                our_bit BIT(1),
                our_json JSON,
                our_jsonb JSONB,
                our_uuid UUID,
                our_store HSTORE,
                our_citext CITEXT,
                our_cidr cidr,
                our_inet inet,
                our_mac macaddr,
                our_alignment_enum ALIGNMENT,
                our_money money)
            """.format(canonicalized_table_name(test_schema_name, test_table_name, cur))
            cur.execute(create_table_sql)

            # insert fixture data 1
            our_ts = datetime.datetime(1997, 2, 2, 2, 2, 2, 722184)
            nyc_tz = pytz.timezone('America/New_York')
            our_ts_tz = nyc_tz.localize(our_ts)
            our_time = datetime.time(12, 11, 10)
            our_time_tz = our_time.isoformat() + "-04:00"
            our_date = datetime.date(1998, 3, 4)
            my_uuid = str(uuid.uuid1())

            self.rec_1 = {
                'our_varchar': "our_varchar",
                'our_varchar_10': "varchar_10",
                'our_text': "some text",
                'our_text_2': "NOT SELECTED",
                'our_integer': 44100,
                'our_smallint': 1,
                'our_bigint': 1000000,
                'our_decimal': decimal.Decimal('1234567890.01'),
                quote_ident('OUR TS', cur): our_ts,
                quote_ident('OUR TS TZ', cur): our_ts_tz,
                quote_ident('OUR TIME', cur): our_time,
                quote_ident('OUR TIME TZ', cur): our_time_tz,
                quote_ident('OUR DATE', cur): our_date,
                'our_double': 1.1,
                'our_real': 1.2,
                'our_boolean': True,
                'our_bit': '0',
                'our_json': json.dumps({'secret': 55}),
                'our_jsonb': json.dumps(['burgers are good']),
                'our_uuid': my_uuid,
                'our_store': 'size=>"small",name=>"betty"',
                'our_citext': 'maGICKal',
                'our_cidr': '192.168.100.128/25',
                'our_inet': '192.168.100.128/24',
                'our_mac': '08:00:2b:01:02:03',
                'our_alignment_enum': 'bad'
            }
            insert_record(cur, test_table_name, self.rec_1)

            # insert fixture data 2
            our_ts = datetime.datetime(1987, 3, 3, 3, 3, 3, 733184)
            nyc_tz = pytz.timezone('America/New_York')
            our_ts_tz = nyc_tz.localize(our_ts)
            our_time = datetime.time(10, 9, 8)
            our_time_tz = our_time.isoformat() + "-04:00"
            our_date = datetime.date(1964, 7, 1)
            my_uuid = str(uuid.uuid1())

            self.rec_2 = {
                'our_varchar': "our_varchar 2",
                'our_varchar_10': "varchar_10",
                'our_text': "some text 2",
                'our_text_2': "NOT SELECTED",
                'our_integer': 44101,
                'our_smallint': 2,
                'our_bigint': 1000001,
                'our_decimal': decimal.Decimal('9876543210.02'),
                quote_ident('OUR TS', cur): our_ts,
                quote_ident('OUR TS TZ', cur): our_ts_tz,
                quote_ident('OUR TIME', cur): our_time,
                quote_ident('OUR TIME TZ', cur): our_time_tz,
                quote_ident('OUR DATE', cur): our_date,
                'our_double': 1.1,
                'our_real': 1.2,
                'our_boolean': True,
                'our_bit': '1',
                'our_json': json.dumps({'nymn': 77}),
                'our_jsonb': json.dumps({'burgers': 'good++'}),
                'our_uuid': my_uuid,
                'our_store': 'dances=>"floor",name=>"betty"',
                'our_citext': 'maGICKal 2',
                'our_cidr': '192.168.101.128/25',
                'our_inet': '192.168.101.128/24',
                'our_mac': '08:00:2b:01:02:04',
            }
            insert_record(cur, test_table_name, self.rec_2)

            # insert fixture data 3
            our_ts = datetime.datetime(1997, 2, 2, 2, 2, 2, 722184)
            nyc_tz = pytz.timezone('America/New_York')
            our_ts_tz = nyc_tz.localize(our_ts)
            our_time = datetime.time(12, 11, 10)
            our_time_tz = our_time.isoformat() + "-04:00"
            our_date = datetime.date(1998, 3, 4)
            my_uuid = str(uuid.uuid1())

            self.rec_3 = {
                'our_varchar': "our_varchar 3",
                'our_varchar_10': "varchar_10",
                'our_text': "some text",
                'our_text_2': "NOT SELECTED",
                'our_integer': 44100,
                'our_smallint': 1,
                'our_bigint': 1000000,
                'our_decimal': decimal.Decimal('1234567890.01'),
                quote_ident('OUR TS', cur): our_ts,
                quote_ident('OUR TS TZ', cur): our_ts_tz,
                quote_ident('OUR TIME', cur): our_time,
                quote_ident('OUR TIME TZ', cur): our_time_tz,
                quote_ident('OUR DATE', cur): our_date,
                'our_double': 1.1,
                'our_real': 1.2,
                'our_boolean': True,
                'our_bit': '0',
                'our_json': json.dumps({'secret': 55}),
                'our_jsonb': json.dumps(['burgers are good']),
                'our_uuid': my_uuid,
                'our_store': 'size=>"small",name=>"betty"',
                'our_citext': 'maGICKal',
                'our_cidr': '192.168.100.128/25',
                'our_inet': '192.168.100.128/24',
                'our_mac': '08:00:2b:01:02:03',
                'our_alignment_enum': 'bad'
            }
            insert_record(cur, test_table_name, self.rec_3)

            # insert fixture data 4
            our_ts = datetime.datetime(1987, 3, 3, 3, 3, 3, 733184)
            nyc_tz = pytz.timezone('America/New_York')
            our_ts_tz = nyc_tz.localize(our_ts)
            our_time = datetime.time(10, 9, 8)
            our_time_tz = our_time.isoformat() + "-04:00"
            our_date = datetime.date(1964, 7, 1)
            my_uuid = str(uuid.uuid1())

            self.rec_4 = {
                'our_varchar': "our_varchar 4",
                'our_varchar_10': "varchar_10",
                'our_text': "some text 2",
                'our_text_2': "NOT SELECTED",
                'our_integer': 44101,
                'our_smallint': 2,
                'our_bigint': 1000001,
                'our_decimal': decimal.Decimal('9876543210.02'),
                quote_ident('OUR TS', cur): our_ts,
                quote_ident('OUR TS TZ', cur): our_ts_tz,
                quote_ident('OUR TIME', cur): our_time,
                quote_ident('OUR TIME TZ', cur): our_time_tz,
                quote_ident('OUR DATE', cur): our_date,
                'our_double': 1.1,
                'our_real': 1.2,
                'our_boolean': True,
                'our_bit': '1',
                'our_json': json.dumps({'nymn': 77}),
                'our_jsonb': json.dumps({'burgers': 'good++'}),
                'our_uuid': my_uuid,
                'our_store': 'dances=>"floor",name=>"betty"',
                'our_citext': 'maGICKal 2',
                'our_cidr': '192.168.101.128/25',
                'our_inet': '192.168.101.128/24',
                'our_mac': '08:00:2b:01:02:04',
            }
            insert_record(cur, test_table_name, self.rec_4)
def setUp(self):
    """Recreate the 'stitch' slot and the array-datatypes table in 'dev'.

    Uses the shared db_utils env-var check (this replaces a hand-rolled
    check that compared with `== None` and left an unused `creds` dict),
    rebuilds the wal2json replication slot, and recreates the array table
    (installing hstore, citext, and the ALIGNMENT enum as needed).
    """
    db_utils.ensure_environment_variables_set()
    db_utils.ensure_db('dev')
    self.maxDiff = None

    with db_utils.get_test_connection('dev') as conn:
        conn.autocommit = True
        with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
            cur.execute(""" SELECT EXISTS (SELECT 1
                                           FROM  pg_replication_slots
                                           WHERE slot_name = 'stitch') """)
            old_slot = cur.fetchone()[0]

            # replication-slot management needs a separate replication connection
            with db_utils.get_test_connection('dev', True) as conn2:
                with conn2.cursor() as cur2:
                    if old_slot:
                        cur2.drop_replication_slot("stitch")
                    cur2.create_replication_slot('stitch', output_plugin='wal2json')

            # cursor.execute() returns None; the EXISTS flag comes from fetchone()
            cur.execute(
                """SELECT EXISTS (
                       SELECT 1
                       FROM information_schema.tables
                       WHERE table_schema = %s
                       AND table_name = %s);""",
                [test_schema_name, test_table_name])
            old_table = cur.fetchone()[0]

            if old_table:
                cur.execute("DROP TABLE {}".format(
                    canonicalized_table_name(test_schema_name, test_table_name, cur)))

            cur = conn.cursor()
            cur.execute(
                """ SELECT installed_version FROM pg_available_extensions WHERE name = 'hstore' """)
            if cur.fetchone()[0] is None:
                cur.execute(""" CREATE EXTENSION hstore; """)

            cur.execute(""" CREATE EXTENSION IF NOT EXISTS citext WITH SCHEMA public;""")
            # enum type must be rebuilt so the definition is always current
            cur.execute(""" DROP TYPE IF EXISTS ALIGNMENT CASCADE """)
            cur.execute(""" CREATE TYPE ALIGNMENT AS ENUM ('good', 'bad', 'ugly') """)

            create_table_sql = """
            CREATE TABLE {} (id SERIAL PRIMARY KEY,
                our_bit_array BIT(1)[],
                our_boolean_array BOOLEAN[],
                our_cidr_array CIDR[],
                our_citext_array CITEXT[],
                our_date_array DATE[],
                our_decimal_array NUMERIC(12,2)[],
                our_double_array DOUBLE PRECISION[],
                our_enum_array ALIGNMENT[],
                our_float_array FLOAT[],
                our_hstore_array HSTORE[],
                our_inet_array INET[],
                our_int_array INTEGER[][],
                our_int8_array INT8[],
                our_json_array JSON[],
                our_jsonb_array JSONB[],
                our_mac_array MACADDR[],
                our_money_array MONEY[],
                our_real_array REAL[],
                our_smallint_array SMALLINT[],
                our_string_array VARCHAR[],
                our_text_array TEXT[],
                our_time_array TIME[],
                our_ts_tz_array TIMESTAMP WITH TIME ZONE[],
                our_uuid_array UUID[])
            """.format(canonicalized_table_name(test_schema_name, test_table_name, cur))
            cur.execute(create_table_sql)
def setUp(self):
    """Create the unsupported-datatypes table in test_db and load 500 rows.

    The table mixes supported scalar columns with 'invalid_*' columns whose
    types the tap does not support. 500 identical fixture records (kept on
    self.recs) are inserted, then ANALYZE refreshes planner statistics.

    Fixes vs. previous version: the local table-name string no longer
    shadows the module-level canonicalized_table_name() helper, and the
    dead pre-loop fixture computations were removed (the loop recomputes
    every one of them on its first iteration).
    """
    db_utils.ensure_environment_variables_set()
    db_utils.ensure_db(test_db)
    self.maxDiff = None

    with db_utils.get_test_connection(test_db) as conn:
        conn.autocommit = True
        with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
            canon_table_name = db_utils.canonicalized_table_name(
                cur, test_schema_name, test_table_name)

            create_table_sql = """
            CREATE TABLE {} (id SERIAL PRIMARY KEY,
                our_varchar VARCHAR,
                our_varchar_10 VARCHAR(10),
                our_text TEXT,
                our_text_2 TEXT,
                our_integer INTEGER,
                our_smallint SMALLINT,
                our_bigint BIGINT,
                our_decimal NUMERIC(12,2),
                "OUR TS" TIMESTAMP WITHOUT TIME ZONE,
                "OUR TS TZ" TIMESTAMP WITH TIME ZONE,
                "OUR TIME" TIME WITHOUT TIME ZONE,
                "OUR TIME TZ" TIME WITH TIME ZONE,
                "OUR DATE" DATE,
                our_double DOUBLE PRECISION,
                our_real REAL,
                our_boolean BOOLEAN,
                our_bit BIT(1),
                our_json JSON,
                our_jsonb JSONB,
                our_uuid UUID,
                our_store HSTORE,
                our_citext CITEXT,
                our_cidr cidr,
                our_inet inet,
                our_mac macaddr,
                our_alignment_enum ALIGNMENT,
                our_money money,
                invalid_bigserial BIGSERIAL,
                invalid_bit_varying BIT VARYING,
                invalid_box BOX,
                invalid_bytea BYTEA,
                invalid_circle CIRCLE,
                invalid_interval INTERVAL,
                invalid_line LINE,
                invalid_lseg LSEG,
                invalid_path PATH,
                invalid_pg_lsn PG_LSN,
                invalid_point POINT,
                invalid_polygon POLYGON,
                invalid_serial SERIAL,
                invalid_smallserial SMALLSERIAL,
                invalid_tsquery TSQUERY,
                invalid_tsvector TSVECTOR,
                invalid_txid_snapshot TXID_SNAPSHOT,
                invalid_xml XML)
            """.format(canon_table_name)

            cur = db_utils.ensure_fresh_table(conn, cur, test_schema_name, test_table_name)
            cur.execute(create_table_sql)

            self.recs = []
            for _ in range(500):
                our_ts = datetime.datetime(1987, 3, 3, 3, 3, 3, 733184)
                nyc_tz = pytz.timezone('America/New_York')
                our_ts_tz = nyc_tz.localize(our_ts)
                our_time = datetime.time(10, 9, 8)
                our_time_tz = our_time.isoformat() + "-04:00"
                our_date = datetime.date(1964, 7, 1)
                my_uuid = str(uuid.uuid1())

                record = {
                    'our_varchar': "our_varchar 4",
                    'our_varchar_10': "varchar_10",
                    'our_text': "some text 2",
                    'our_text_2': "NOT SELECTED",
                    'our_integer': 44101,
                    'our_smallint': 2,
                    'our_bigint': 1000001,
                    'our_decimal': decimal.Decimal('9876543210.02'),
                    quote_ident('OUR TS', cur): our_ts,
                    quote_ident('OUR TS TZ', cur): our_ts_tz,
                    quote_ident('OUR TIME', cur): our_time,
                    quote_ident('OUR TIME TZ', cur): our_time_tz,
                    quote_ident('OUR DATE', cur): our_date,
                    'our_double': 1.1,
                    'our_real': 1.2,
                    'our_boolean': True,
                    'our_bit': '1',
                    'our_json': json.dumps({'nymn': 77}),
                    'our_jsonb': json.dumps({'burgers': 'good++'}),
                    'our_uuid': my_uuid,
                    'our_store': 'dances=>"floor",name=>"betty"',
                    'our_citext': 'maGICKal 2',
                    'our_cidr': '192.168.101.128/25',
                    'our_inet': '192.168.101.128/24',
                    'our_mac': '08:00:2b:01:02:04',
                }
                db_utils.insert_record(cur, test_table_name, record)
                self.recs.append(record)

            # refresh planner stats so row-count-based test logic is accurate
            cur.execute("""ANALYZE {}""".format(canon_table_name))
def setUp(self):
    """Create two joined test tables plus a view over them, with one row each.

    Uses the shared db_utils env-var check (this replaces a hand-rolled
    check that compared with `== None` and left an unused `creds` dict).
    rec_1 goes into table 1; its generated serial id becomes the foreign
    key for rec_2 in table 2, so the view join yields one row.
    """
    db_utils.ensure_environment_variables_set()
    db_utils.ensure_db()
    self.maxDiff = None

    with db_utils.get_test_connection() as conn:
        conn.autocommit = True
        with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
            for table in [test_table_name_1, test_table_name_2]:
                # cursor.execute() returns None; the EXISTS flag comes from fetchone()
                cur.execute(
                    """SELECT EXISTS (
                           SELECT 1
                           FROM information_schema.tables
                           WHERE table_schema = %s
                           AND table_name = %s)""",
                    [test_schema_name, table])
                old_table = cur.fetchone()[0]
                if old_table:
                    # CASCADE also removes the dependent view
                    cur.execute("DROP TABLE {} CASCADE".format(
                        canonicalized_table_name(test_schema_name, table, cur)))

            cur.execute("""DROP VIEW IF EXISTS {} """.format(quote_ident(test_view, cur)))

            cur.execute("""CREATE TABLE {} (id SERIAL PRIMARY KEY, name VARCHAR, size VARCHAR) """.format(
                canonicalized_table_name(test_schema_name, test_table_name_1, cur)))
            cur.execute("""CREATE TABLE {} (fk_id bigint, age integer) """.format(
                canonicalized_table_name(test_schema_name, test_table_name_2, cur)))
            cur.execute("""CREATE VIEW {} AS (SELECT * FROM {} join {} on {}.id = {}.fk_id )""".format(
                quote_ident(test_view, cur),
                canonicalized_table_name(test_schema_name, test_table_name_1, cur),
                canonicalized_table_name(test_schema_name, test_table_name_2, cur),
                canonicalized_table_name(test_schema_name, test_table_name_1, cur),
                canonicalized_table_name(test_schema_name, test_table_name_2, cur)))

            self.rec_1 = {'name': 'fred', 'size': 'big'}
            insert_record(cur, test_table_name_1, self.rec_1)

            # fetch the serial id just generated so rec_2 can reference it
            cur.execute("SELECT id FROM {}".format(
                canonicalized_table_name(test_schema_name, test_table_name_1, cur)))
            fk_id = cur.fetchone()[0]

            self.rec_2 = {'fk_id': fk_id, 'age': 99}
            insert_record(cur, test_table_name_2, self.rec_2)
def setUp(self):
    """Set up replication fixtures in two databases: 'dev' and 'postgres'.

    Creates the stitch_dev slot and a cows table (one row) in 'dev', then
    the stitch_postgres slot and a chickens table (one row) in 'postgres'.
    Uses the shared db_utils env-var check (this replaces a hand-rolled
    check that compared with `== None` and left an unused `creds` dict).
    """
    db_utils.ensure_environment_variables_set()
    db_utils.ensure_db('dev')
    db_utils.ensure_db('postgres')
    self.maxDiff = None

    with db_utils.get_test_connection('dev') as conn:
        conn.autocommit = True
        with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
            cur.execute(""" SELECT EXISTS (SELECT 1
                                           FROM  pg_replication_slots
                                           WHERE slot_name = 'stitch_dev') """)
            old_slot = cur.fetchone()[0]

            # replication-slot management needs a separate replication connection
            with db_utils.get_test_connection('dev', True) as conn2:
                with conn2.cursor() as cur2:
                    if old_slot:
                        cur2.drop_replication_slot("stitch_dev")
                    cur2.create_replication_slot('stitch_dev', output_plugin='wal2json')

            # cursor.execute() returns None; the EXISTS flag comes from fetchone()
            cur.execute(
                """SELECT EXISTS (
                       SELECT 1
                       FROM information_schema.tables
                       WHERE table_schema = %s
                       AND table_name = %s);""",
                [test_schema_name, test_table_name_cows])
            old_table = cur.fetchone()[0]

            if old_table:
                cur.execute("DROP TABLE {}".format(
                    canonicalized_table_name(test_schema_name, test_table_name_cows, cur)))

            # create dev_cows
            cur = conn.cursor()
            create_table_sql = """
            CREATE TABLE {} (id SERIAL PRIMARY KEY, cow_age integer, cow_name varchar)
            """.format(canonicalized_table_name(test_schema_name, test_table_name_cows, cur))
            cur.execute(create_table_sql)

            # insert a cow
            self.cows_rec_1 = {'cow_name': "anne_cow", 'cow_age': 30}
            insert_record(cur, test_table_name_cows, self.cows_rec_1)

    with db_utils.get_test_connection('postgres') as conn:
        conn.autocommit = True
        with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
            cur.execute(""" SELECT EXISTS (SELECT 1
                                           FROM  pg_replication_slots
                                           WHERE slot_name = 'stitch_postgres') """)
            old_slot = cur.fetchone()[0]

            with db_utils.get_test_connection('postgres', True) as conn2:
                with conn2.cursor() as cur2:
                    if old_slot:
                        cur2.drop_replication_slot("stitch_postgres")
                    cur2.create_replication_slot('stitch_postgres', output_plugin='wal2json')

            cur.execute(
                """SELECT EXISTS (
                       SELECT 1
                       FROM information_schema.tables
                       WHERE table_schema = %s
                       AND table_name = %s);""",
                [test_schema_name, test_table_name_chickens])
            old_table = cur.fetchone()[0]

            if old_table:
                cur.execute("DROP TABLE {}".format(
                    canonicalized_table_name(test_schema_name, test_table_name_chickens, cur)))

            # create postgres_chickens
            create_table_sql = """
            CREATE TABLE {} (id SERIAL PRIMARY KEY, chicken_age integer, chicken_name varchar)
            """.format(canonicalized_table_name(test_schema_name, test_table_name_chickens, cur))
            cur.execute(create_table_sql)

            # insert a chicken
            self.chickens_rec_1 = {'chicken_name': "alfred_chicken", 'chicken_age': 4}
            insert_record(cur, test_table_name_chickens, self.chickens_rec_1)
def setUp(self):
    """Create a fresh wide-datatypes test table in the 'dev' database and seed
    three fixture rows.

    Steps:
      1. drop any leftover test table from a previous run,
      2. ensure the hstore and citext extensions and the ALIGNMENT enum exist,
      3. create the table covering every supported Postgres type,
      4. insert rec_1/rec_2 (normal values) and rec_3 (NaN / -inf extremes).

    Requires the TAP_POSTGRES_* environment variables to be set; raises
    Exception with an actionable message if any are missing.
    """
    db_utils.ensure_db("dev")
    self.maxDiff = None

    # Fail fast if the connection env vars are absent.
    # (PEP 8: compare to None with `is`, not `==`.)
    missing_envs = [x for x in [os.getenv('TAP_POSTGRES_HOST'),
                                os.getenv('TAP_POSTGRES_USER'),
                                os.getenv('TAP_POSTGRES_PASSWORD'),
                                os.getenv('TAP_POSTGRES_PORT'),
                                os.getenv('TAP_POSTGRES_DBNAME')] if x is None]
    if len(missing_envs) != 0:
        #pylint: disable=line-too-long
        raise Exception("set TAP_POSTGRES_HOST, TAP_POSTGRES_DBNAME, TAP_POSTGRES_USER, TAP_POSTGRES_PASSWORD, TAP_POSTGRES_PORT")

    with db_utils.get_test_connection('dev') as conn:
        conn.autocommit = True
        with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
            # Drop the table if a previous run left it behind.
            # NOTE: cursor.execute() always returns None; the EXISTS result
            # comes from fetchone(), so no assignment on the execute call.
            cur.execute(
                """SELECT EXISTS ( SELECT 1 FROM information_schema.tables WHERE table_schema = %s AND table_name = %s);""",
                [test_schema_name, test_table_name])
            old_table = cur.fetchone()[0]
            if old_table:
                cur.execute("DROP TABLE {}".format(
                    canonicalized_table_name(test_schema_name, test_table_name, cur)))

            cur = conn.cursor()

            # Install the hstore extension if it is not already present.
            cur.execute(""" SELECT installed_version FROM pg_available_extensions WHERE name = 'hstore' """)
            if cur.fetchone()[0] is None:
                cur.execute(""" CREATE EXTENSION hstore; """)

            # citext is idempotent via IF NOT EXISTS; the enum is rebuilt fresh.
            cur.execute(""" CREATE EXTENSION IF NOT EXISTS citext WITH SCHEMA public;""")
            cur.execute(""" DROP TYPE IF EXISTS ALIGNMENT CASCADE """)
            cur.execute(""" CREATE TYPE ALIGNMENT AS ENUM ('good', 'bad', 'ugly') """)

            # One column per supported Postgres datatype; the NUMERIC precision
            # and scale are parameterized by the module-level constants.
            create_table_sql = """ CREATE TABLE {} (id SERIAL PRIMARY KEY,
                our_varchar VARCHAR,
                our_varchar_10 VARCHAR(10),
                our_text TEXT,
                our_integer INTEGER,
                our_smallint SMALLINT,
                our_bigint BIGINT,
                our_decimal NUMERIC({},{}),
                "OUR TS" TIMESTAMP WITHOUT TIME ZONE,
                "OUR TS TZ" TIMESTAMP WITH TIME ZONE,
                "OUR TIME" TIME WITHOUT TIME ZONE,
                "OUR TIME TZ" TIME WITH TIME ZONE,
                "OUR DATE" DATE,
                our_double DOUBLE PRECISION,
                our_real REAL,
                our_boolean BOOLEAN,
                our_bit BIT(1),
                our_json JSON,
                our_jsonb JSONB,
                our_uuid UUID,
                our_store HSTORE,
                our_citext CITEXT,
                our_inet inet,
                our_cidr cidr,
                our_mac macaddr,
                our_alignment_enum ALIGNMENT,
                our_money money) """.format(
                    canonicalized_table_name(test_schema_name, test_table_name, cur),
                    NUMERIC_PRECISION,
                    NUMERIC_SCALE)
            cur.execute(create_table_sql)

            #insert fixture data 1
            our_ts = datetime.datetime(1997, 2, 2, 2, 2, 2, 722184)
            nyc_tz = pytz.timezone('America/New_York')
            our_ts_tz = nyc_tz.localize(our_ts)
            our_time = datetime.time(12, 11, 10)
            our_time_tz = our_time.isoformat() + "-04:00"
            our_date = datetime.date(1998, 3, 4)
            my_uuid = str(uuid.uuid1())

            self.rec_1 = {'our_varchar': "our_varchar",
                          'our_varchar_10': "varchar_10",
                          'our_text': "some text",
                          'our_integer': 44100,
                          'our_smallint': 1,
                          'our_bigint': 1000000,
                          'our_decimal': decimal.Decimal('.01'),
                          quote_ident('OUR TS', cur): our_ts,
                          quote_ident('OUR TS TZ', cur): our_ts_tz,
                          quote_ident('OUR TIME', cur): our_time,
                          quote_ident('OUR TIME TZ', cur): our_time_tz,
                          quote_ident('OUR DATE', cur): our_date,
                          'our_double': decimal.Decimal('1.1'),
                          'our_real': 1.2,
                          'our_boolean': True,
                          'our_bit': '0',
                          'our_json': json.dumps({'secret': 55}),
                          'our_jsonb': json.dumps({'burgers': 'good'}),
                          'our_uuid': my_uuid,
                          'our_store': 'size=>"small",name=>"betty"',
                          'our_citext': 'maGICKal 4',
                          'our_cidr': '192.168.100.128/25',
                          'our_inet': '192.168.100.128/24',
                          'our_mac': '08:00:2b:01:02:03',
                          'our_alignment_enum': 'good',
                          'our_money': '100.1122',
                          }
            insert_record(cur, test_table_name, self.rec_1)

            #insert fixture data 2
            our_ts = datetime.datetime(1987, 3, 3, 3, 3, 3, 733184)
            nyc_tz = pytz.timezone('America/New_York')
            our_ts_tz = nyc_tz.localize(our_ts)
            our_time = datetime.time(10, 9, 8)
            our_time_tz = our_time.isoformat() + "-04:00"
            our_date = datetime.date(1964, 7, 1)
            my_uuid = str(uuid.uuid1())

            self.rec_2 = {'our_varchar': "our_varchar 2",
                          'our_varchar_10': "varchar_10",
                          'our_text': "some text 2",
                          'our_integer': 44101,
                          'our_smallint': 2,
                          'our_bigint': 1000001,
                          'our_decimal': decimal.Decimal('.02'),
                          quote_ident('OUR TS', cur): our_ts,
                          quote_ident('OUR TS TZ', cur): our_ts_tz,
                          quote_ident('OUR TIME', cur): our_time,
                          quote_ident('OUR TIME TZ', cur): our_time_tz,
                          quote_ident('OUR DATE', cur): our_date,
                          'our_double': decimal.Decimal('1.1'),
                          'our_real': decimal.Decimal('1.2'),
                          'our_boolean': True,
                          'our_bit': '1',
                          'our_json': json.dumps(["nymn 77"]),
                          'our_jsonb': json.dumps({'burgers': 'good++'}),
                          'our_uuid': my_uuid,
                          'our_store': 'dances=>"floor",name=>"betty"',
                          'our_citext': 'maGICKal 2',
                          'our_cidr': '192.168.101.128/25',
                          'our_inet': '192.168.101.128/24',
                          'our_mac': '08:00:2b:01:02:04',
                          'our_money': None
                          }
            insert_record(cur, test_table_name, self.rec_2)

            # Fixture 3 exercises non-finite numeric extremes.
            self.rec_3 = {'our_decimal': decimal.Decimal('NaN'),
                          'our_double': float('nan'),
                          'our_real': float('-inf')
                          }
            insert_record(cur, test_table_name, self.rec_3)
def setUp(self):
    """Create a fresh wide-datatypes test table in the 'dev' database and seed
    three rows, recording both what was inserted and what the tap is expected
    to emit for each row.

    Populates, in insertion order:
      * ``self.inserted_records`` -- the raw values handed to the database,
      * ``self.expected_records`` -- the corresponding records the tap should
        produce (index i of each list describes the same row).

    Fix: record 3's expected ``our_jsonb`` now references
    ``inserted_records[2]`` (its own row) instead of ``inserted_records[1]``;
    the two values are identical today, but each expected record should be
    self-consistent with the row it describes.
    """
    db_utils.ensure_environment_variables_set()
    db_utils.ensure_db('dev')
    self.maxDiff = None

    with db_utils.get_test_connection('dev') as conn:
        conn.autocommit = True
        with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
            # Drops any leftover table and returns a cursor ready for DDL.
            cur = db_utils.ensure_fresh_table(conn, cur, test_schema_name, test_table_name)

            # One column per supported Postgres datatype.
            create_table_sql = """ CREATE TABLE {} (id SERIAL PRIMARY KEY,
                our_varchar VARCHAR,
                our_varchar_10 VARCHAR(10),
                our_text TEXT,
                our_integer INTEGER,
                our_smallint SMALLINT,
                our_bigint BIGINT,
                our_decimal NUMERIC(12,2),
                "OUR TS" TIMESTAMP WITHOUT TIME ZONE,
                "OUR TS TZ" TIMESTAMP WITH TIME ZONE,
                "OUR TIME" TIME WITHOUT TIME ZONE,
                "OUR TIME TZ" TIME WITH TIME ZONE,
                "OUR DATE" DATE,
                our_double DOUBLE PRECISION,
                our_real REAL,
                our_boolean BOOLEAN,
                our_bit BIT(1),
                our_json JSON,
                our_jsonb JSONB,
                our_uuid UUID,
                our_store HSTORE,
                our_citext CITEXT,
                our_inet inet,
                our_cidr cidr,
                our_mac macaddr,
                our_money money) """.format(
                    db_utils.canonicalized_table_name(cur, test_schema_name, test_table_name))
            cur.execute(create_table_sql)

            # insert fixture data and track expected records
            self.inserted_records = []
            self.expected_records = []

            nyc_tz = pytz.timezone('America/New_York')
            our_time_offset = "-04:00"

            # record 1
            our_ts = datetime.datetime(1977, 3, 3, 3, 3, 3, 733184)
            our_ts_tz = nyc_tz.localize(our_ts)
            our_time = datetime.time(10, 9, 8)
            our_time_tz = our_time.isoformat() + our_time_offset
            our_date = datetime.date(1964, 7, 1)
            my_uuid = str(uuid.uuid1())
            self.inserted_records.append({
                'our_varchar': "our_varchar 2",
                'our_varchar_10': "varchar_10",
                'our_text': "some text 2",
                'our_integer': 44101,
                'our_smallint': 2,
                'our_bigint': 1000001,
                'our_decimal': decimal.Decimal('9876543210.02'),
                quote_ident('OUR TS', cur): our_ts,
                quote_ident('OUR TS TZ', cur): our_ts_tz,
                quote_ident('OUR TIME', cur): our_time,
                quote_ident('OUR TIME TZ', cur): our_time_tz,
                quote_ident('OUR DATE', cur): our_date,
                'our_double': decimal.Decimal('1.1'),
                'our_real': decimal.Decimal('1.2'),
                'our_boolean': True,
                'our_bit': '1',
                'our_json': json.dumps({'nymn': 77}),
                'our_jsonb': json.dumps({'burgers': 'good++'}),
                'our_uuid': my_uuid,
                'our_citext': 'cyclops 2',
                'our_store': 'dances=>"floor",name=>"betty"',
                'our_cidr': '192.168.101.128/25',
                'our_inet': '192.168.101.128/24',
                'our_mac': '08:00:2b:01:02:04',
            })
            # our_money is not inserted for record 1, so it is expected NULL.
            self.expected_records.append({
                'our_decimal': decimal.Decimal('9876543210.02'),
                'OUR TIME': str(our_time),
                'our_text': 'some text 2',
                'our_bit': True,
                'our_integer': 44101,
                'our_double': decimal.Decimal('1.1'),
                'id': 1,
                'our_json': '{"nymn": 77}',
                'our_boolean': True,
                'our_jsonb': '{"burgers": "good++"}',
                'our_bigint': 1000001,
                'OUR TIME TZ': str(our_time_tz),
                'our_store': {
                    "name": "betty",
                    "dances": "floor"
                },
                'OUR TS TZ': self.expected_ts_tz(our_ts_tz),
                'our_smallint': 2,
                'OUR DATE': '1964-07-01T00:00:00+00:00',
                'our_varchar': 'our_varchar 2',
                'OUR TS': self.expected_ts(our_ts),
                'our_uuid': self.inserted_records[0]['our_uuid'],
                'our_real': decimal.Decimal('1.2'),
                'our_varchar_10': 'varchar_10',
                'our_citext': self.inserted_records[0]['our_citext'],
                'our_inet': self.inserted_records[0]['our_inet'],
                'our_cidr': self.inserted_records[0]['our_cidr'],
                'our_mac': self.inserted_records[0]['our_mac'],
                'our_money': None
            })

            # record 2
            our_ts = datetime.datetime(1987, 2, 2, 2, 2, 2, 722184)
            our_ts_tz = nyc_tz.localize(our_ts)
            our_time = datetime.time(12, 11, 10)
            our_time_tz = our_time.isoformat() + our_time_offset
            our_date = datetime.date(1998, 3, 4)
            my_uuid = str(uuid.uuid1())
            self.inserted_records.append({
                'our_varchar': "our_varchar",
                'our_varchar_10': "varchar_10",
                'our_text': "some text",
                'our_integer': 44100,
                'our_smallint': 1,
                'our_bigint': 1000000,
                'our_decimal': decimal.Decimal('1234567890.01'),
                quote_ident('OUR TS', cur): our_ts,
                quote_ident('OUR TS TZ', cur): our_ts_tz,
                quote_ident('OUR TIME', cur): our_time,
                quote_ident('OUR TIME TZ', cur): our_time_tz,
                quote_ident('OUR DATE', cur): our_date,
                'our_double': decimal.Decimal('1.1'),
                'our_real': decimal.Decimal('1.2'),
                'our_boolean': True,
                'our_bit': '0',
                'our_json': json.dumps({'secret': 55}),
                'our_jsonb': json.dumps(6777777),
                'our_uuid': my_uuid,
                'our_store': 'size=>"small",name=>"betty"',
                'our_citext': 'cyclops 1',
                'our_cidr': '192.168.100.128/25',
                'our_inet': '192.168.100.128/24',
                'our_mac': '08:00:2b:01:02:03',
                'our_money': '$1,445.5678'
            })
            # money rounds to cents on the way in: $1,445.5678 -> $1,445.57.
            self.expected_records.append({
                'our_decimal': decimal.Decimal('1234567890.01'),
                'our_text': 'some text',
                'our_bit': False,
                'our_integer': 44100,
                'our_double': decimal.Decimal('1.1'),
                'id': 2,
                'our_json': '{"secret": 55}',
                'our_boolean': True,
                'our_jsonb': self.inserted_records[1]['our_jsonb'],
                'our_bigint': 1000000,
                'OUR TS': self.expected_ts(our_ts),
                'OUR TS TZ': self.expected_ts_tz(our_ts_tz),
                'OUR TIME': str(our_time),
                'OUR TIME TZ': str(our_time_tz),
                'our_store': {
                    "name": "betty",
                    "size": "small"
                },
                'our_smallint': 1,
                'OUR DATE': '1998-03-04T00:00:00+00:00',
                'our_varchar': 'our_varchar',
                'our_uuid': self.inserted_records[1]['our_uuid'],
                'our_real': decimal.Decimal('1.2'),
                'our_varchar_10': 'varchar_10',
                'our_citext': self.inserted_records[1]['our_citext'],
                'our_inet': self.inserted_records[1]['our_inet'],
                'our_cidr': self.inserted_records[1]['our_cidr'],
                'our_mac': self.inserted_records[1]['our_mac'],
                'our_money': '$1,445.57'
            })

            # record 3
            our_ts = datetime.datetime(1997, 2, 2, 2, 2, 2, 722184)
            our_ts_tz = nyc_tz.localize(our_ts)
            our_time = datetime.time(12, 11, 10)
            our_time_tz = our_time.isoformat() + our_time_offset
            our_date = datetime.date(1998, 3, 4)
            my_uuid = str(uuid.uuid1())
            self.inserted_records.append({
                'our_varchar': "our_varchar",
                'our_varchar_10': "varchar_10",
                'our_text': "some text",
                'our_integer': 44100,
                'our_smallint': 1,
                'our_bigint': 1000000,
                'our_decimal': decimal.Decimal('1234567890.01'),
                quote_ident('OUR TS', cur): our_ts,
                quote_ident('OUR TS TZ', cur): our_ts_tz,
                quote_ident('OUR TIME', cur): our_time,
                quote_ident('OUR TIME TZ', cur): our_time_tz,
                quote_ident('OUR DATE', cur): our_date,
                # our_double deliberately passed as a string literal here,
                # unlike records 1 and 2 which pass decimal.Decimal.
                'our_double': '1.1',
                'our_real': decimal.Decimal('1.2'),
                'our_boolean': True,
                'our_bit': '0',
                'our_json': json.dumps({'secret': 55}),
                'our_jsonb': json.dumps(6777777),
                'our_uuid': my_uuid,
                'our_store': 'size=>"small",name=>"betty"',
                'our_citext': 'cyclops 1',
                'our_cidr': '192.168.100.128/25',
                'our_inet': '192.168.100.128/24',
                'our_mac': '08:00:2b:01:02:03',
                'our_money': '$1,445.5678'
            })
            self.expected_records.append({
                'our_decimal': decimal.Decimal('1234567890.01'),
                'our_text': 'some text',
                'our_bit': False,
                'our_integer': 44100,
                'our_double': decimal.Decimal('1.1'),
                'id': 3,
                'our_json': '{"secret": 55}',
                'our_boolean': True,
                # Reference this record's own inserted value (was [1]; same
                # value, but [2] is the row this expected record describes).
                'our_jsonb': self.inserted_records[2]['our_jsonb'],
                'our_bigint': 1000000,
                'OUR TS': self.expected_ts(our_ts),
                'OUR TS TZ': self.expected_ts_tz(our_ts_tz),
                'OUR TIME': str(our_time),
                'OUR TIME TZ': str(our_time_tz),
                'our_store': {
                    "name": "betty",
                    "size": "small"
                },
                'our_smallint': 1,
                'OUR DATE': '1998-03-04T00:00:00+00:00',
                'our_varchar': 'our_varchar',
                'our_uuid': self.inserted_records[2]['our_uuid'],
                'our_real': decimal.Decimal('1.2'),
                'our_varchar_10': 'varchar_10',
                'our_citext': self.inserted_records[2]['our_citext'],
                'our_inet': self.inserted_records[2]['our_inet'],
                'our_cidr': self.inserted_records[2]['our_cidr'],
                'our_mac': self.inserted_records[2]['our_mac'],
                'our_money': '$1,445.57'
            })

            for rec in self.inserted_records:
                db_utils.insert_record(cur, test_table_name, rec)