def select_by_columns(class_, cursor, where_column_value_pairs,
                      columns_list=None, order_by=None, group_by=None,
                      limit=None, **kwargs):
  """Fetch rows matching the given (column, value) pairs.

  Args:
    class_: DB class; must define columns_list and table_name.
    cursor: database cursor used to run the query.
    where_column_value_pairs: (column, value) pairs for the WHERE clause.
    columns_list: columns to select; defaults to class_.columns_list.
    order_by, group_by, limit: optional query modifiers, forwarded to
      sql_builder.select_by_columns_query along with **kwargs.

  Returns:
    List of sql_builder.DBRow objects, one per fetched row.

  Raises:
    dbexceptions.ProgrammingError: if class_ does not define columns_list.
  """
  if class_.columns_list is None:
    raise dbexceptions.ProgrammingError(
        "DB class should define columns_list")

  cols = class_.columns_list if columns_list is None else columns_list
  query, bind_vars = sql_builder.select_by_columns_query(
      cols, class_.table_name, where_column_value_pairs,
      order_by=order_by, group_by=group_by, limit=limit, **kwargs)
  cursor.execute(query, bind_vars)
  return [sql_builder.DBRow(cols, row) for row in cursor.fetchall()]
def select_by_ids(class_, cursor, where_column_value_pairs,
                  columns_list=None, order_by=None, group_by=None,
                  limit=None, **kwargs):
  """This method is used to perform in-clause queries.

  Such queries can cause vtgate to scatter over multiple shards.
  This uses execute_entity_ids method of vtgate cursor and the entity
  column and the associated entity_keyspace_id_map is computed based
  on the routing used - sharding_key or entity_id_map.

  Args:
    class_: DB class; must define columns_list, table_name and
      sharding_key helpers.
    cursor: vtgate cursor with routing information attached.
    where_column_value_pairs: (column, value) pairs; the values drive
      the in-clause.
    columns_list: columns to select; defaults to class_.columns_list.
    order_by, group_by, limit: optional query modifiers, forwarded to
      sql_builder.select_by_columns_query along with **kwargs.

  Returns:
    List of sql_builder.DBRow objects, one per fetched row.

  Raises:
    dbexceptions.ProgrammingError: if class_ does not define
      columns_list, or if the cursor's routing defines neither a
      sharding_key nor an entity_id_sharding_key_map.
  """
  if class_.columns_list is None:
    raise dbexceptions.ProgrammingError(
        "DB class should define columns_list")
  if columns_list is None:
    columns_list = class_.columns_list
  query, bind_vars = sql_builder.select_by_columns_query(
      columns_list, class_.table_name, where_column_value_pairs,
      order_by=order_by, group_by=group_by, limit=limit, **kwargs)

  entity_col_name = None
  entity_id_keyspace_id_map = {}
  if cursor.routing.sharding_key is not None:
    # If the in-clause is based on sharding key.
    entity_col_name = class_.sharding_key_column_name
    if db_object._is_iterable_container(cursor.routing.sharding_key):
      for sk in list(cursor.routing.sharding_key):
        entity_id_keyspace_id_map[sk] = pack_keyspace_id(
            class_.sharding_key_to_keyspace_id(sk))
    else:
      sk = cursor.routing.sharding_key
      entity_id_keyspace_id_map[sk] = pack_keyspace_id(
          class_.sharding_key_to_keyspace_id(sk))
  elif cursor.routing.entity_id_sharding_key_map is not None:
    # If the in-clause is based on entity column.
    # cursor.routing.entity_column_name is set while creating shard routing.
    entity_col_name = cursor.routing.entity_column_name
    for en_id, sk in cursor.routing.entity_id_sharding_key_map.iteritems():
      entity_id_keyspace_id_map[en_id] = pack_keyspace_id(
          class_.sharding_key_to_keyspace_id(sk))
  else:
    # BUG FIX: the exception was previously constructed but never raised,
    # silently falling through with an empty entity map.
    raise dbexceptions.ProgrammingError("Invalid routing method used.")

  cursor.execute_entity_ids(query, bind_vars, entity_id_keyspace_id_map,
                            entity_col_name)
  rows = cursor.fetchall()
  return [sql_builder.DBRow(columns_list, row) for row in rows]
def execute_batch_read(cursor, query_list, bind_vars_list):
  """Method for executing select queries in batch.

  Args:
    cursor: original cursor - that is converted to read-only
      BatchVTGateCursor.
    query_list: query_list.
    bind_vars_list: bind variables list.

  Returns:
    Result of the form [[q1row1, q1row2,...], [q2row1, ...],..]

  Raises:
    dbexceptions.ProgrammingError when dmls are issued to read batch cursor.
  """
  if not isinstance(cursor, vtgate_cursor.VTGateCursor):
    raise dbexceptions.ProgrammingError(
        "cursor is not of the type VTGateCursor.")

  batch_cursor = create_batch_cursor_from_cursor(cursor)
  for query, bind_vars in zip(query_list, bind_vars_list):
    # A read batch cursor must never be handed a DML.
    if is_dml(query):
      raise dbexceptions.ProgrammingError("Dml %s for read batch cursor." % query)
    batch_cursor.execute(query, bind_vars)
  batch_cursor.flush()

  # Each rowset is of the type (results, rowcount, lastrowid, fields).
  result = []
  for rowset in batch_cursor.rowsets:
    rowset_results, _, _, rowset_fields = rowset
    field_names = [field[0] for field in rowset_fields]
    result.append(
        [sql_builder.DBRow(field_names, row) for row in rowset_results])
  return result
def test_init_override(self):
  """Keyword args override positional column values and may add new columns."""
  row = sql_builder.DBRow(
      ['col_a', 'col_b'], ['val_1', 'val_2'], col_b=22, col_c=33)
  # col_a keeps its positional value, col_b is overridden, col_c is new.
  self.assertEqual(row.col_a, 'val_1')
  self.assertEqual(row.col_b, 22)
  self.assertEqual(row.col_c, 33)
  expected_repr = "{ 'col_a': 'val_1', 'col_b': 22, 'col_c': 33}"
  self.assertEqual(repr(row), expected_repr)
def select_by_columns(class_, cursor, where_column_value_pairs,
                      columns_list=None, order_by=None, group_by=None,
                      limit=None):
  """Select rows via the class's own create_select_query helper.

  Args:
    class_: DB class providing columns_list and create_select_query.
    cursor: database cursor used to run the query.
    where_column_value_pairs: (column, value) pairs for the WHERE clause.
    columns_list: columns to select; defaults to class_.columns_list.
    order_by, group_by, limit: optional query modifiers.

  Returns:
    List of sql_builder.DBRow objects, one per fetched row.
  """
  cols = class_.columns_list if columns_list is None else columns_list
  query, bind_vars = class_.create_select_query(
      where_column_value_pairs, columns_list=cols, order_by=order_by,
      group_by=group_by, limit=limit)
  cursor.execute(query, bind_vars)
  return [sql_builder.DBRow(cols, row) for row in cursor.fetchall()]
def _stream_fetch(class_, cursor, query, bind_vars, fetch_size=100):
  """Generator that streams rows for a query in fetch_size batches.

  Args:
    class_: DB class; class_.columns_list names the yielded row columns.
    cursor: cursor converted to a stream cursor for this query.
    query: SQL text to execute.
    bind_vars: bind variables for the query.
    fetch_size: number of rows fetched per fetchmany call.

  Yields:
    sql_builder.DBRow objects, one per streamed row.
  """
  stream_cursor = create_stream_cursor_from_cursor(cursor)
  stream_cursor.execute(query, bind_vars)
  # BUG FIX: close() used to run only when the generator was fully
  # exhausted; an exception mid-stream or an abandoned generator leaked
  # the stream cursor. try/finally also runs on generator close()/GC.
  try:
    while True:
      rows = stream_cursor.fetchmany(size=fetch_size)
      # NOTE: fetchmany returns an empty list when there are no more items.
      # But an empty generator is still "true", so we have to count if we
      # actually returned anything.
      count = 0
      for row in rows:
        count += 1
        yield sql_builder.DBRow(class_.columns_list, row)
      if count == 0:
        break
  finally:
    stream_cursor.close()
def test_init_no_override(self):
  """Without keyword overrides, attributes mirror the positional values."""
  row = sql_builder.DBRow(['col_a', 'col_b'], ['val_1', 'val_2'])
  self.assertEqual(row.col_a, 'val_1')
  self.assertEqual(row.col_b, 'val_2')
  expected_repr = "{ 'col_a': 'val_1', 'col_b': 'val_2'}"
  self.assertEqual(repr(row), expected_repr)