def populate_data(count: int, db_cursor: pyodbc.Cursor):
    """Insert `count` generated user rows through `db_cursor`.

    Each row bound to SQL_INSERT_DATA is (1-based id, fake name, fake city).

    Arguments:
        count {int} -- Number of rows to generate and insert.
        db_cursor {pyodbc.Cursor} -- Cursor used to execute the inserts.
    """
    fake = Faker()
    for i in range(count):
        # PEP 8 (E731): a def/inline tuple beats a lambda bound to a name;
        # fake.name()/fake.city() are the direct forms of fake.format('name'/'city').
        db_cursor.execute(SQL_INSERT_DATA, (i + 1, fake.name(), fake.city()))
def add_dart_plate_if_doesnt_exist(cursor: pyodbc.Cursor, plate_barcode: str, biomek_labclass: str) -> str:
    """Adds a plate to DART if it does not already exist. Returns the state of the plate.

    Arguments:
        cursor {pyodbc.Cursor} -- The cursor with which to execute queries.
        plate_barcode {str} -- The barcode of the plate to add.
        biomek_labclass {str} -- The biomek labware class of the plate.

    Returns:
        str -- The state of the plate in DART.

    Raises:
        DartStateError -- If the new plate's state cannot be set to pending, or an
            existing plate has no state property at all.
    """
    state = get_dart_plate_state(cursor, plate_barcode)
    if state == DART_STATE_NO_PLATE:
        # Plates are registered with 96 wells.
        cursor.execute(SQL_DART_ADD_PLATE, (plate_barcode, biomek_labclass, 96))
        if set_dart_plate_state_pending(cursor, plate_barcode):
            state = DART_STATE_PENDING
        else:
            raise DartStateError(
                f"Unable to set the state of a DART plate {plate_barcode} to {DART_STATE_PENDING}"
            )
    elif state == DART_STATE_NO_PROP:
        raise DartStateError(f"DART plate {plate_barcode} should have a state")
    return state
def update_rows(
    self,
    *,
    cur: pyodbc.Cursor,
    table: domain_table.Table,
    rows: domain_rows.Rows,
    batch_size: int,
    column_names: typing.Optional[typing.Set[str]] = None,
) -> None:
    """Update *rows* in *table*, issuing one executemany per batch.

    Arguments:
        cur -- cursor used to execute the UPDATE statements.
        table -- target table; its primary-key columns drive the WHERE clause.
        rows -- the rows to update.
        batch_size -- maximum number of rows per executemany call.
        column_names -- columns to update; defaults to all of the table's columns.
    """
    if column_names is None:
        column_names = table.column_names
    # The statement and parameter ordering depend only on the table/columns,
    # not on the batch, so build them once instead of per batch as before
    # (assumes self._sql_adapter.update_rows is a pure generator — it takes
    # no row data, only names).
    sql = self._sql_adapter.update_rows(
        schema_name=table.schema_name,
        table_name=table.table_name,
        pk_cols=set(table.primary_key.columns),
        column_names=column_names,
        parameter_placeholder=parameter_placeholder,
    )
    pk_cols = sorted(set(table.primary_key.columns))
    non_pk_cols = sorted(col for col in column_names if col not in table.primary_key.columns)
    # SET parameters come first, then the primary-key values for the WHERE clause.
    param_order = non_pk_cols + pk_cols
    for batch in rows.batches(batch_size):
        ordered_params = [tuple(row[k] for k in param_order) for row in batch.as_dicts()]
        cur.executemany(sql, ordered_params)
def readSQLScriptFromFile(scriptPath: str, cur: "db.Cursor") -> list:
    """Execute a GO-delimited SQL script and collect each batch's result set.

    Accumulates lines until a line equal to "GO", executes the accumulated
    batch, and appends a DataFrame for every batch that produces rows.
    Lines containing PRINT are echoed locally instead of being accumulated.

    Arguments:
        scriptPath -- path to the SQL script file.
        cur -- database cursor used to execute each batch.

    Returns:
        list -- one pandas DataFrame per batch that returned rows.

    NOTE(review): a trailing batch without a final GO line is never executed —
    behaviour preserved from the original; confirm scripts always end with GO.
    """
    result = []
    # `with` closes the file; the original's explicit inp.close() was redundant.
    with open(scriptPath, 'r') as inp:
        i = 0
        sqlQuery = ''
        for line in inp:
            if line == 'GO\n':
                i = i + 1
                print(f'Query #{i}')
                cur.execute(sqlQuery)
                try:
                    result.append(
                        pd.DataFrame.from_records(
                            cur.fetchall(),
                            columns=[x[0] for x in cur.description]))
                except Exception:
                    # Batches such as DDL yield no result set; skip them.
                    print('no results')
                sqlQuery = ''
            elif 'PRINT' in line:
                # Echo the quoted PRINT payload locally.
                disp = line.split("'")[1]
                print(f'{disp}')
            else:
                sqlQuery = sqlQuery + line
    return result
def get_chores_helper(cursor: pyodbc.Cursor, account_id: str) -> dict:
    """Fetch the chores for a parent account and shape them into a response.

    Helper for GetChoresParent; aborts with a 500 if the query fails.
    """
    try:
        cursor.execute(queries.get_chores_by_parent(), account_id)
    except Exception as err:
        abort(500, str(err))
    rows = cursor.fetchall()
    return GetChoresResponseModel(rows).get_response()
def select_movie_with_like(cursor: "pyodbc.Cursor", slug: str):
    """Print every row of dbo.testTable whose title contains *slug*.

    Arguments:
        cursor -- cursor used to run the query.
        slug -- substring to match against the title column.
    """
    query = "SELECT * FROM [dbo].[testTable] WHERE title like ?;"
    # The parameters argument must be a 1-tuple: the original `(f"%{slug}%")`
    # is just a parenthesized string (missing trailing comma).
    cursor.execute(query, (f"%{slug}%",))
    movies = cursor.fetchall()
    for movie in movies:
        print(movie)
def test_change_tracking_after_update(pg_cursor: pyodbc.Cursor) -> None:
    """Two successive UPDATEs of one row should leave exactly one live history row."""
    src_db_adapter = adapter.PostgresAdapter()
    dest_db_adapter = adapter.PostgresAdapter()

    def sync_and_check() -> None:
        # One full sync pass followed by the history-table consistency check.
        service.update_history_table(
            src_cur=pg_cursor,
            dest_cur=pg_cursor,
            src_db_adapter=src_db_adapter,
            dest_db_adapter=dest_db_adapter,
            src_schema_name="sales",
            src_table_name="customer",
            compare_cols=None,  # compare all columns
            recreate=False,
        )
        check_history_table_in_sync(cur=pg_cursor)

    pg_cursor.execute("""
        UPDATE sales.customer
        SET customer_last_name = 'Smithers'
        WHERE customer_first_name = 'Steve'
    """)
    pg_cursor.commit()
    new_name = pg_cursor.execute("""
        SELECT c.customer_last_name
        FROM sales.customer AS c
        WHERE c.customer_first_name = 'Steve'
    """).fetchval()
    assert new_name == "Smithers"

    sync_and_check()

    pg_cursor.execute("""
        UPDATE sales.customer
        SET customer_last_name = 'Smalls'
        WHERE customer_first_name = 'Steve'
    """)
    pg_cursor.commit()

    sync_and_check()

    live_rows = pg_cursor.execute("""
        SELECT h.customer_last_name
        FROM sales.customer_history AS h
        WHERE h.customer_first_name = 'Steve'
          AND h.valid_to = '9999-12-31'
    """).fetchall()
    assert live_rows is not None
    assert len(live_rows) == 1
    assert live_rows[0].customer_last_name == "Smalls"
def select_all_movie_year(cursor: "pyodbc.Cursor", year: int):
    """Print every row of dbo.testTable whose years column exceeds *year*.

    Arguments:
        cursor -- cursor used to run the query.
        year -- lower bound (exclusive) for the years column.
    """
    # Build the query
    query = "SELECT * FROM [dbo].[testTable] WHERE years > ?;"
    # Execute it — the parameters argument must be a 1-tuple; the original
    # `(year)` was just a parenthesized int (missing trailing comma).
    cursor.execute(query, (year,))
    # Fetch all matching rows
    movies = cursor.fetchall()
    # Print them
    for movie in movies:
        print(movie)
def _execute(self, cursor: pyodbc.Cursor, query: str, parameters: tuple) -> None:
    """Run *query* with *parameters*, reconnecting or rolling back on failure."""
    try:
        cursor.execute(query, parameters)
    except pyodbc.OperationalError as op_err:
        # Operational failure (e.g. dropped connection): re-establish the
        # connection, then let the caller see the original error.
        self._connect()
        raise op_err
    except Exception as err:
        # Any other failure: undo the partial work before propagating.
        self._con.rollback()
        raise err
def display_users(db_cursor: pyodbc.Cursor):
    """Print every row of the users table as an aligned three-column listing."""
    db_cursor.execute('SELECT * FROM users')
    users = db_cursor.fetchall()
    layout = '{:<5} {:<20} {:<10}'
    print(layout.format('ID', 'NAME', 'CITY'))
    print('-' * 32)
    for user in users:
        print(layout.format(user.id, user.name, user.city))
def create_table(self, *, cur: pyodbc.Cursor, table: domain_table.Table) -> bool:
    """Create *table* unless it already exists; return True only when created."""
    if self.table_exists(cur=cur, table_name=table.table_name, schema_name=table.schema_name):
        return False
    cur.execute(self._sql_adapter.table_definition(table))
    logger.info(f"{table.schema_name}.{table.table_name} was created.")
    return True
def transfer_data(source_db_cursor: pyodbc.Cursor, dest_db_cursor: pyodbc.Cursor, dest_db_conn: pyodbc.Connection):
    """Copy all rows of the users table from the source DB into the destination DB."""
    print('Extracting users data from source database.')
    source_db_cursor.execute('SELECT * FROM users')
    users = source_db_cursor.fetchall()
    print('Transferring users data to destination database.')
    for user in users:
        dest_db_cursor.execute(SQL_INSERT_DATA, (user.id, user.name, user.city))
    print(f'{len(users)} rows transferred\n')
    # Commit once, after all inserts succeeded.
    dest_db_conn.commit()
def set_dart_well_properties(cursor: pyodbc.Cursor, plate_barcode: str, well_props: Dict[str, str], well_index: int) -> None:
    """Add or update DART well properties via the stored procedure, one call per property.

    Arguments:
        cursor {pyodbc.Cursor} -- The cursor with which to execute queries.
        plate_barcode {str} -- The barcode of the plate whose well properties to update.
        well_props {Dict[str, str]} -- The names and values of the well properties to update.
        well_index {int} -- The index of the well to update.
    """
    for name, value in well_props.items():
        cursor.execute(SQL_DART_SET_WELL_PROPERTY, (plate_barcode, name, value, well_index))
def get_parent_id_for_child(cursor: pyodbc.Cursor, child_account_id: str) -> str:
    """Look up the parent account id linked to *child_account_id*.

    Aborts with 400 on a query failure and 404 when no child record exists.
    """
    try:
        cursor.execute(
            queries.get_child_by_account_id(("ParentGoogleAccountId",)),
            child_account_id,
        )
    except Exception as err:
        abort(400, str(err))
    record = cursor.fetchone()
    if not record:
        abort(404, "Child Account ID not found")
    return record.ParentGoogleAccountId
def get_dart_plate_state(cursor: pyodbc.Cursor, plate_barcode: str) -> str:
    """Fetch the current DART state of a plate.

    Arguments:
        cursor {pyodbc.Cursor} -- The cursor with which to execute queries.
        plate_barcode {str} -- The barcode of the plate whose state to fetch.

    Returns:
        str -- The state of the plate in DART.
    """
    cursor.execute(SQL_DART_GET_PLATE_PROPERTY, (plate_barcode, DART_STATE))
    return str(cursor.fetchval())
def _executeUpdate(self, cursor: odbc.Cursor) -> None:
    """Persist all properties flagged as updated back to this row.

    Builds `UPDATE <table> SET a=?,b=? WHERE pk=?` from the updated
    properties and executes it; does nothing when no property changed.

    Arguments:
        cursor -- cursor used to execute the UPDATE.
    """
    changed = [prop for prop in self.properties if prop.updated]
    if not changed:
        # Nothing to persist.
        return
    # join() replaces the original manual first-element comma bookkeeping;
    # the resulting SET clause is identical ("a=?,b=?" — no spaces).
    prop_sets = ",".join("{0}=?".format(prop.name) for prop in changed)
    prop_values = [prop.value for prop in changed]
    # The primary key binds the trailing `pk=?` placeholder.
    prop_values.append(self.primaryKey)
    cursor.execute(
        "UPDATE {0} SET {1} WHERE pk=?".format(self._tableName, prop_sets),
        prop_values)
def tables(self, *, cur: "pyodbc.Cursor") -> typing.Set[str]:
    """Return the names of all tables visible via `SHOW TABLES`.

    Arguments:
        cur -- cursor used to run the statement.

    Returns:
        Set of table names; empty set when no tables exist.
    """
    sql = "SHOW TABLES"
    # pyodbc's C-implemented Cursor.execute takes the statement positionally;
    # the original keyword form `execute(sql=sql)` raises TypeError.
    rows = cur.execute(sql).fetchall()
    if rows:
        return {row[0] for row in rows}
    return set()
def test_postgres_adapter_fast_row_count(pg_cursor: pyodbc.Cursor) -> None:
    """fast_row_count should report 9 rows for sales.customer.

    The planner/stats snapshot is printed first to aid debugging when the
    estimate-based fast path disagrees with the real count.
    """
    stats_sql = """
        SELECT pc.reltuples
        , tbl.n_live_tup
        , tbl.n_dead_tup
        , tbl.n_mod_since_analyze
        , tbl.last_analyze
        , tbl.last_autoanalyze
        , tbl.last_vacuum
        , tbl.last_autovacuum
        , CASE WHEN tbl.last_analyze IS NULL AND tbl.last_autoanalyze IS NULL THEN 0 ELSE 1 END AS has_stats
        FROM pg_catalog.pg_class AS pc
        JOIN pg_stat_all_tables tbl ON pc.oid = tbl.relid
        WHERE pc.relname = 'customer'
        AND tbl.schemaname = 'sales'
    """
    for stats_row in pg_cursor.execute(stats_sql).fetchall():
        print(stats_row)

    db_adapter = adapter.PostgresAdapter()
    row_total = db_adapter.fast_row_count(cur=pg_cursor, schema_name="sales", table_name="customer")
    assert row_total == 9
def table_exists(
    self,
    *,
    cur: pyodbc.Cursor,
    table_name: str,
    schema_name: typing.Optional[str] = None,
) -> bool:
    """Report whether schema_name.table_name exists.

    The generated SQL is expected to return exactly 0 (absent) or 1 (present);
    any other value indicates a bug in the SQL adapter.

    Raises:
        domain.exceptions.SchemaIsRequired -- when schema_name is None.
        domain.exceptions.InvalidSqlGenerated -- when the query yields a value
            other than 0 or 1.
    """
    if schema_name is None:
        raise domain.exceptions.SchemaIsRequired(
            "A schema is required for PostgresPyodbcDbAdapter's table_exists method"
        )
    sql = self.__sql_adapter.table_exists(schema_name=schema_name, table_name=table_name)
    result = cur.execute(sql).fetchval()
    # The original `if result: ... elif result == 0 ...` branch was unreachable:
    # 0 is falsy, so a zero result always fell through to the final return.
    if not result:
        return False
    if result == 1:
        return True
    raise domain.exceptions.InvalidSqlGenerated(
        sql=sql,
        message=f"table_exists should return 0 for False, or 1 for True, but it returned {result!r}.",
    )
def delete_rows(
    self,
    *,
    cur: pyodbc.Cursor,
    table: domain_table.Table,
    rows: domain_rows.Rows,
    batch_size: int,
) -> None:
    """Delete *rows* from *table*, issuing one executemany per batch of *batch_size*."""
    # The DELETE statement depends only on the table metadata, so it is built once.
    delete_sql = self._sql_adapter.delete_rows(
        schema_name=table.schema_name,
        table_name=table.table_name,
        pk_cols=set(table.primary_key.columns),
        parameter_placeholder=parameter_placeholder,
        row_cols=rows.column_names,
    )
    for chunk in rows.batches(batch_size):
        cur.executemany(delete_sql, chunk.as_tuples(sort_columns=False))
def drop_table(
    self,
    *,
    cur: "pyodbc.Cursor",
    table_name: str,
    schema_name: typing.Optional[str] = None,
) -> bool:
    """Drop schema_name.table_name when it exists.

    Returns:
        bool -- True when the table existed and was dropped, False otherwise.
    """
    if not self.table_exists(cur=cur, table_name=table_name, schema_name=schema_name):
        return False
    sql = self._sql_adapter.drop_table(schema_name=schema_name, table_name=table_name)
    # pyodbc's Cursor.execute accepts no keyword arguments; the original
    # `cur.execute(sql=sql, params=None)` raises TypeError at runtime.
    cur.execute(sql)
    logger.info(f"{schema_name}.{table_name} was dropped.")
    return True
def set_dart_plate_state_pending(cursor: pyodbc.Cursor, plate_barcode: str) -> bool:
    """Mark a DART plate's state as pending.

    Arguments:
        cursor {pyodbc.Cursor} -- The cursor with which to execute queries.
        plate_barcode {str} -- The barcode of the plate whose state to set.

    Returns:
        bool -- True when DART reports success, False otherwise.
    """
    cursor.execute(
        SQL_DART_SET_PLATE_PROPERTY,
        (plate_barcode, DART_STATE, DART_STATE_PENDING),
    )
    # The stored procedure reports an error code; coerce to int before comparing.
    return int(cursor.fetchval()) == DART_SET_PROP_STATUS_SUCCESS
def setup_table(cur: pyodbc.Cursor, data: List):
    """Create the users table and load *data* into it, committing after each phase."""
    print('Create a new table for users.')
    cur.execute(SQL_CREATE_TABLE)
    cur.commit()
    print('Populate users data.')
    for record in data:
        cur.execute(SQL_INSERT_DATA, record)
    cur.commit()
def add_rows(
    self,
    *,
    cur: pyodbc.Cursor,
    schema_name: typing.Optional[str],
    table_name: str,
    rows: domain_rows.Rows,
    batch_size: int,
) -> None:
    """Insert *rows* into schema_name.table_name, one executemany per batch."""
    for chunk in rows.batches(batch_size):
        # The INSERT statement is regenerated per batch — presumably so the
        # placeholders can reflect the batch being sent; confirm against the
        # sql adapter before hoisting it out of the loop.
        insert_sql = self._sql_adapter.add_rows(
            schema_name=schema_name,
            table_name=table_name,
            parameter_placeholder=parameter_placeholder,
            rows=chunk,
        )
        cur.executemany(insert_sql, chunk.as_tuples())
def row_count(
    self,
    *,
    cur: pyodbc.Cursor,
    table_name: str,
    schema_name: typing.Optional[str] = None,
) -> int:
    """Return the number of rows in schema_name.table_name."""
    count_sql = self._sql_adapter.row_count(schema_name=schema_name, table_name=table_name)
    total = cur.execute(count_sql).fetchval()
    return total
def fetch_rows(
    *,
    cur: pyodbc.Cursor,
    sql: str,
    params: typing.Optional[typing.List[typing.Tuple[typing.Any, ...]]] = None,
) -> domain_rows.Rows:
    """Execute *sql* (optionally parameterized) and wrap the fetched rows.

    Arguments:
        cur -- cursor used to execute the statement.
        sql -- statement to run; standardized before execution.
        params -- optional list of parameter tuples; more than one tuple
            triggers executemany, exactly one triggers a single execute.

    Returns:
        domain_rows.Rows when rows were returned; implicitly None otherwise
        (preserved from the original — callers appear to rely on truthiness,
        despite the non-Optional annotation).
    """
    std_sql = sql_formatter.standardize_sql(sql)
    logger.debug(f"FETCH:\n\t{std_sql}\n\tparams={params}")
    if params is None:
        cur.execute(std_sql)
    elif len(params) > 1:
        # pyodbc's Cursor.executemany returns None, so the original
        # `result = cur.executemany(...)` followed by result.fetchall()
        # raised AttributeError; fetch through the cursor itself instead.
        cur.executemany(std_sql, params)
    else:
        cur.execute(std_sql, params[0])
    column_names = [description[0] for description in cur.description]
    if rows := cur.fetchall():
        return domain_rows.Rows(column_names=column_names, rows=[tuple(row) for row in rows])
def check_customer2_table_in_sync(cur: pyodbc.Cursor) -> None:
    """Assert sales.customer is non-empty and sales.customer2 holds identical rows."""
    assert cur.execute("SELECT COUNT(*) FROM sales.customer").fetchval() > 0

    def table_snapshot(sql: str) -> set:
        # Capture a table as a set of ((column, value), ...) tuples so the
        # comparison is order-independent.
        fetched = cur.execute(sql).fetchall()
        return {tuple(zip((col[0] for col in cur.description), row)) for row in fetched}

    customer_values = table_snapshot("SELECT * FROM sales.customer")
    customer2_values = table_snapshot("SELECT * FROM sales.customer2")
    assert (
        customer2_values == customer_values
    ), f"\ncustomer:\n{sorted(customer_values)}\n\ncustomer2:\n{sorted(customer2_values)}"
def table_exists(
    self,
    *,
    cur: pyodbc.Cursor,
    table_name: str,
    schema_name: typing.Optional[str] = None,
) -> bool:
    """Report whether schema_name.table_name exists, per the SQL adapter's probe query."""
    probe_sql = self._sql_adapter.table_exists(schema_name=schema_name, table_name=table_name)
    return bool(cur.execute(probe_sql).fetchval())
def get_strategy_traders_id(cursor: "pyodbc.Cursor", strategy_id) -> list:
    """Fetch the trader ids belonging to *strategy_id* from TraderDbo.

    (Originally noted: mainly used to obtain the Init Capital information
    from pm_strategy_info.)

    Arguments:
        cursor -- cursor used to run the query.
        strategy_id -- strategy identifier to filter on.

    Returns:
        list -- the [Id] values of the matching trader rows.
    """
    # Parameterized query: the original interpolated strategy_id directly
    # into the SQL with an f-string, which allowed SQL injection.
    sql = '''
    SELECT [Id]
    FROM [Platinum.PM].[dbo].[TraderDbo]
    WHERE strategyid = ?
    '''
    return [row[0] for row in cursor.execute(sql, (strategy_id,)).fetchall()]
def save_cursor_result_to_df(cursor: pyodbc.Cursor) -> pd.DataFrame:
    """Materialize an executed query cursor into a pandas DataFrame.

    :param cursor: query cursor
    :type cursor: pyodbc.Cursor
    :return: result Pandas DataFrame
    :rtype: pd.DataFrame
    """
    columns = [desc[0] for desc in cursor.description]
    frame = pd.DataFrame.from_records(cursor.fetchall(), columns=columns)
    LOG.debug('saved cursor to DataFrame')
    return frame