def test_create_indexes():
    collection_name = 'a_inserts'
    tables = collection_tables(collection_name).tables

    # table 1
    sqltable1 = tables[collection_name]
    indexes1 = generate_create_index_statement(sqltable1, '', 'PREFIX_',
                                               INDEX_ID_IDXS)
    print indexes1
    expect = 'CREATE INDEX "ia_PREFIX_a_inserts" ON "PREFIX_a_inserts" ("id_oid");'
    assert indexes1 == expect

    # table 2
    sqltable2 = tables[collection_name[:-1] + '_comments']
    indexes2 = generate_create_index_statement(sqltable2, '', 'PREFIX_',
                                               INDEX_ID_IDXS)
    print indexes2
    expect = 'CREATE INDEX "ia_PREFIX_a_insert_comments" ON "PREFIX_a_insert_comments" ("a_inserts_id_oid", "idx");'
    assert indexes2 == expect

    # table 3
    sqltable3 = tables[collection_name[:-1] + '_comment_items']
    indexes3 = generate_create_index_statement(sqltable3, '', 'PREFIX_',
                                               INDEX_ID_IDXS)
    print indexes3
    expect = 'CREATE INDEX "ia_PREFIX_a_insert_comment_items" ON "PREFIX_a_insert_comment_items" ("a_inserts_comments_idx", "a_inserts_id_oid", "idx");'
    assert indexes3 == expect

    # table 4
    sqltable4 = tables[collection_name[:-1] + '_comment_slugs']
    indexes4 = generate_create_index_statement(sqltable4, '', 'PREFIX_',
                                               INDEX_ID_IDXS)
    print indexes4
    expect = 'CREATE INDEX "ia_PREFIX_a_insert_comment_slugs" ON "PREFIX_a_insert_comment_slugs" ("a_inserts_comments_idx", "a_inserts_id_oid", "idx");'
    assert indexes4 == expect
def test_create_tables1():
    collection_name = 'a_inserts'
    tables = collection_tables(collection_name).tables
    assert tables.keys() == ['a_insert_comment_items',
                             'a_inserts',
                             'a_insert_comments',
                             'a_insert_comment_slugs']

    # root table, with a date prefix prepended to the table name
    sqltable1 = tables[collection_name]
    create1 = generate_create_table_statement(sqltable1, "", "9999_12_31_")
    query1 = 'CREATE TABLE IF NOT EXISTS "9999_12_31_a_inserts" ("body" TEXT, "created_at" TIMESTAMP WITH TIME ZONE, "id_bsontype" INTEGER, "id_oid" TEXT, "title" TEXT, "updated_at" TIMESTAMP WITH TIME ZONE, "user_id" TEXT);'
    assert query1 == create1

    # nested table: comments
    sqltable2 = tables[collection_name[:-1] + '_comments']
    create2 = generate_create_table_statement(sqltable2, "", "")
    query2 = 'CREATE TABLE IF NOT EXISTS "a_insert_comments" ("a_inserts_id_oid" TEXT, "body" TEXT, "created_at" TIMESTAMP WITH TIME ZONE, "id_bsontype" INTEGER, "id_oid" TEXT, "updated_at" TIMESTAMP WITH TIME ZONE, "idx" BIGINT);'
    assert query2 == create2

    # nested table: comment items
    sqltable3 = tables[collection_name[:-1] + '_comment_items']
    create3 = generate_create_table_statement(sqltable3, "", "")
    query3 = 'CREATE TABLE IF NOT EXISTS "a_insert_comment_items" ("a_inserts_id_oid" TEXT, "data" TEXT, "a_inserts_comments_idx" BIGINT, "idx" BIGINT);'
    assert query3 == create3

    # nested table: comment slugs
    sqltable4 = tables[collection_name[:-1] + '_comment_slugs']
    create4 = generate_create_table_statement(sqltable4, "", "")
    query4 = 'CREATE TABLE IF NOT EXISTS "a_insert_comment_slugs" ("a_inserts_id_oid" TEXT, "slugs" INTEGER, "a_inserts_comments_idx" BIGINT, "idx" BIGINT);'
    assert query4 == create4
def test_insert1():
    collection_name = 'a_inserts'
    tables = collection_tables(collection_name).tables
    assert tables.keys() == ['a_insert_comment_items',
                             'a_inserts',
                             'a_insert_comments',
                             'a_insert_comment_slugs']

    # root table: the schema name and table-name prefix end up in the query
    sqltable1 = tables[collection_name]
    inserts1 = generate_insert_queries(sqltable1, "schema_name", "prefix_")
    tz = inserts1[1][0][1].tzinfo
    expect(inserts1,
           'INSERT INTO schema_name."prefix_a_inserts" '
           '("body", "created_at", "id_bsontype", "id_oid", "title", "updated_at", "user_id") '
           'VALUES(%s, %s, %s, %s, %s, %s, %s);',
           [(u'body3"\tbody2\nbody1',
             d('2016-02-08T19:45:32.501Z', tz), 7,
             '56b8f05cf9fcee1b00000010', u'title3\uff08:',
             d('2016-02-08T19:45:32.501Z', tz),
             u'56b8d7caf9fcee1b00000001')])

    # nested table: comments; the auto-increment "idx" column continues
    # from the values passed in initial_indexes
    sqltable2 = tables[collection_name[:-1] + '_comments']
    initial_indexes = {'a_inserts': 50, 'a_inserts_comments': 100}
    expect(generate_insert_queries(sqltable2, "", "", initial_indexes),
           'INSERT INTO "a_insert_comments" '
           '("a_inserts_id_oid", "body", "created_at", "id_bsontype", "id_oid", "updated_at", "idx") '
           'VALUES(%s, %s, %s, %s, %s, %s, %s);',
           [('56b8f05cf9fcee1b00000010', None,
             d('2016-02-08T19:45:32.501Z', tz), 7,
             '56b8f05cf9fcee1b00000110',
             d('2016-02-08T19:45:32.501Z', tz),
             initial_indexes['a_inserts_comments'] + 1),
            ('56b8f05cf9fcee1b00000010', u'body2',
             d('2016-02-08T19:45:33.501Z', tz), 7,
             '56b8f05cf9fcee1b00000011',
             d('2016-02-08T19:45:33.501Z', tz),
             initial_indexes['a_inserts_comments'] + 2)])

    # nested table: comment items
    sqltable3 = tables[collection_name[:-1] + '_comment_items']
    expect(generate_insert_queries(sqltable3, "", ""),
           'INSERT INTO "a_insert_comment_items" '
           '("a_inserts_id_oid", "data", "a_inserts_comments_idx", "idx") '
           'VALUES(%s, %s, %s, %s);',
           [('56b8f05cf9fcee1b00000010', u'1', 1, 1),
            ('56b8f05cf9fcee1b00000010', u'2', 2, 1)])

    # nested table: comment slugs
    sqltable4 = tables[collection_name[:-1] + '_comment_slugs']
    expect(generate_insert_queries(sqltable4, "", ""),
           'INSERT INTO "a_insert_comment_slugs" '
           '("a_inserts_id_oid", "slugs", "a_inserts_comments_idx", "idx") '
           'VALUES(%s, %s, %s, %s);',
           [('56b8f05cf9fcee1b00000010', 22, 1, 1)])
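# The insert-query tests above use two helpers that are defined elsewhere in
# the test suite: expect() and d(). The sketch below is only an assumption
# inferred from how they are called (generate_insert_queries() returns a
# (query, list-of-parameter-tuples) pair); it is not the project's actual
# implementation.
from datetime import datetime


def expect(actual, query, params):
    # hypothetical helper: compare the generated (query, params) pair with
    # the expected query string and parameter tuples
    assert actual[0] == query
    assert actual[1] == params


def d(iso_string, tz):
    # hypothetical helper: parse an ISO-8601 timestamp such as
    # '2016-02-08T19:45:32.501Z' and attach the tzinfo taken from the
    # generated data, so the datetime comparisons are timezone-aware
    parsed = datetime.strptime(iso_string, '%Y-%m-%dT%H:%M:%S.%fZ')
    return parsed.replace(tzinfo=tz)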
def test_all():
    # integration test: create every table and replay the generated insert
    # queries against a live PostgreSQL instance
    connstr = os.environ['TEST_PSQLCONN']
    dbreq = PsqlRequests(psycopg2.connect(connstr))
    tables = collection_tables("a_inserts").tables
    for table in tables:
        create_table = generate_create_table_statement(tables[table], "", "")
        print create_table
        dbreq.cursor.execute(create_table)
        indexes = dbreq.get_table_max_indexes(tables[table], "")
        inserts = generate_insert_queries(tables[table], "", "",
                                          initial_indexes=indexes)
        # inserts is a (query, list of parameter tuples) pair
        for params in inserts[1]:
            dbreq.cursor.execute(inserts[0], params)
        dbreq.cursor.execute('COMMIT')
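# test_all() is an integration test: it needs a reachable PostgreSQL instance
# whose libpq connection string is passed in through the TEST_PSQLCONN
# environment variable. An example invocation (the DSN values below are
# placeholders, not part of the original suite):
#
#   export TEST_PSQLCONN="dbname=test user=postgres password=postgres host=localhost port=5432"
#   py.test -k test_all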
def test_csv_error():
    # without encoding/escaping, non-ASCII unicode values are expected to
    # make the CSV writer raise UnicodeEncodeError
    table_name = collection_name = 'a_inserts'
    tables = collection_tables(collection_name).tables
    csvs = {}
    CsvStruct = collections.namedtuple('CsvStruct', ['output', 'writer'])
    table = tables[table_name]
    output = BytesIO()
    csvs[table_name] = CsvStruct(output=output,
                                 writer=CsvWriter(output, False))
    rows = table_rows_list(table, NO_ENCODE_NO_ESCAPE, NULLVAL)
    ok = False
    try:
        csvs[table_name].writer.write_csv(rows)
    except UnicodeEncodeError:
        ok = True
    assert ok
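# A standalone illustration (not part of the original suite) of the failure
# mode test_csv_error() relies on, assuming CsvWriter is built on top of the
# stdlib csv module: in Python 2, csv.writer handles byte strings only, so an
# unencoded unicode value containing non-ASCII characters raises
# UnicodeEncodeError when it is written.
def _csv_unicode_failure_demo():
    import csv

    buf = BytesIO()
    writer = csv.writer(buf)
    try:
        writer.writerow([u'title3\uff08'])  # non-ASCII unicode cell
        return False
    except UnicodeEncodeError:
        return True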
def test_csv1():
    collection_name = 'a_inserts'
    tables = collection_tables(collection_name).tables
    csvs = {}
    CsvStruct = collections.namedtuple('CsvStruct', ['output', 'writer'])

    # write every table into its own in-memory CSV buffer
    for table_name, table in tables.iteritems():
        output = BytesIO()
        if table_name not in csvs.keys():
            csvs[table_name] = CsvStruct(output=output,
                                         writer=CsvWriter(output, False))
        rows = table_rows_list(table, ENCODE_ESCAPE, NULLVAL)
        csvs[table_name].writer.write_csv(rows)
        print "output", output.getvalue()
        csvs[table_name].output.seek(0)

    # root table: the CSV record must match the in-memory row
    table1_name = collection_name
    tz = tables[table1_name].sql_columns['created_at'].values[0].tzinfo
    table1_data_row_0 = row_by_idx(tables[table1_name], 0)
    print "table1_data_row_0", table1_data_row_0
    csv_reader1 = CsvReader(csvs[table1_name].output, NULLVAL)
    table1_csv_row_0 = csv_reader1.read_record()
    no_more_records = csv_reader1.read_record()
    csv_reader1.close()
    # only one record is expected in table: a_inserts
    print "table1_csv_row_0", table1_csv_row_0
    print "null record", no_more_records
    assert no_more_records is None
    assert table1_data_row_0[0] == table1_csv_row_0[0]
    assert table1_data_row_0[6] == table1_csv_row_0[6]
    assert len(table1_data_row_0) == len(table1_csv_row_0)

    # nested comments table: NULL values round-trip through the CSV
    table2_name = collection_name[:-1] + '_comments'
    table2_data_row_0 = row_by_idx(tables[table2_name], 0)
    table2_data = csvs[table2_name].output.getvalue()
    print "table2_data_row_0", table2_data_row_0
    csv_reader2 = CsvReader(csvs[table2_name].output, NULLVAL)
    table2_csv_row_0 = csv_reader2.read_record()
    print "table2_csv_row_0", table2_csv_row_0
    # Null is expected.
    assert table2_csv_row_0[1] is None
    assert table2_data_row_0[1] == table2_csv_row_0[1]
    assert table2_csv_row_0[4] == '56b8f05cf9fcee1b00000110'
    assert len(table2_data_row_0) == len(table2_csv_row_0)

    for table_name in tables:
        csvs[table_name].writer.close()
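# The CSV buffers produced in test_csv1() are intended for bulk loading into
# PostgreSQL. A minimal sketch of that step using psycopg2's COPY support;
# the helper name is hypothetical and the COPY options are an assumption
# (NULLVAL is assumed to be the null-marker string used by CsvWriter), not
# the project's actual loading code.
def _copy_csv_into_postgres(csv_output, table_name):
    conn = psycopg2.connect(os.environ['TEST_PSQLCONN'])
    cursor = conn.cursor()
    csv_output.seek(0)
    # COPY ... FROM STDIN WITH (FORMAT csv, ...) understands quoted fields
    # and maps the null marker back to SQL NULL
    cursor.copy_expert(
        'COPY "%s" FROM STDIN WITH (FORMAT csv, NULL \'%s\')'
        % (table_name, NULLVAL), csv_output)
    conn.commit()
    cursor.close()
    conn.close()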