def test_insert_sql(self):
    """insert_sql renders a complete INSERT statement for a simple row."""
    expected = ("INSERT INTO basic_table (id, date_field) "
                "VALUES ('12345', '2010-01-01');")
    # Pin the field order (id before date_field) so the generated SQL
    # is deterministic regardless of dict iteration order.
    row = collections.OrderedDict(
        sorted({'id': '12345', 'date_field': '2010-01-01'}.items(),
               reverse=True))
    self.assertEqual(expected, sqlgen.insert_sql(self.basic_table, row))
def import_row_to_table(c, table, row):
    """Insert one CSV row into *table* via DB cursor *c*.

    Maps each importable CSV field to its destination column, resolving
    foreign keys through lookup_foreign_key and cleaning other values
    with sqlgen.pre_process_value. If the table requires unique rows and
    an identical row already exists, the insert is skipped.
    """
    # Set, not list: membership is tested once per CSV field below.
    fields_to_import = {col.csv_name for col in table.columns.values()}
    row_data = {}
    # Sort for a deterministic column order in the generated SQL.
    for csv_field, value in sorted(row.items()):
        if csv_field not in fields_to_import:
            continue
        col = csv_util.get_destination_col(table, csv_field)
        if is_foreign_key(col, table):
            value = lookup_foreign_key(c, table, col, csv_field, value)
        else:
            value = sqlgen.pre_process_value(col, value)
        row_data[col.name] = value
    # Skip duplicates when the table enforces row uniqueness.
    if table.unique_rows and row_exists(c, table, row_data):
        return
    # NOTE(review): insert_sql builds the statement as a plain string.
    # If the CSV input is untrusted, escaping must happen inside
    # pre_process_value — verify, or move to parameterized queries.
    c.execute(sqlgen.insert_sql(table, row_data))