def _execute_sql_for_fetch(self, sql: str, cursor: jaydebeapi.Cursor): try: logging.debug(f"EXECUTING '{sql}'") cursor.execute(sql) logging.debug(f"SQL EXECUTED'") except Exception as e: logging.error(e)
def get_data(self, cursor: "Cursor" = None, return_type=tuple, include_none=False,
             max_rows: int = 0, array_size: int = 1000):
    """
    An iterator using fetchmany to keep the memory usage reasonable.

    @param cursor: Cursor to query, use current if not specified
    @param return_type: return type of rows. May be list, tuple (default), dict, or OrderedDict
    @param include_none: bool return None values in dictionaries, if True. Defaults to False
    @param max_rows: int maximum number of rows to return before closing the cursor. Negative or zero implies all rows
    @param array_size: int - the buffer size
    @return: iterator
    """
    # Sanitize tuning parameters rather than failing on bad input.
    if (not isinstance(array_size, int)) or array_size < 1:
        array_size = 1
    if (not isinstance(max_rows, int)) or max_rows < 0:
        max_rows = 0
    batch_nr = 0
    row_count = 0
    transformer = DataTransformer(cursor, return_type=return_type,
                                  upper_case=self.upper_case,
                                  include_none=include_none)
    while True:
        batch_nr += 1
        try:
            results = cursor.fetchmany(array_size)
        except Error as error:
            # Report on stderr, then surface the failure to the caller.
            print('Fetch error in batch %d of size %d.' % (batch_nr, array_size),
                  file=sys.stderr)
            error_msg = str(error)
            print(error_msg, file=sys.stderr)
            raise SQLExcecuteException(
                'Failed to fetch data in batch %d: %s' % (batch_nr, error_msg))
        if len(results) == 0:
            # No more rows: release the cursor and finish the generator.
            self.close(cursor)
            return
        for result in results:
            row_count += 1
            yield transformer(result)
            if (max_rows > 0) and (row_count >= max_rows):
                # BUG FIX: the original used 'break' here, which only left the
                # inner row loop; the 'while True' then called fetchmany() on
                # the cursor that had just been closed. 'return' stops the
                # generator cleanly after exactly max_rows rows.
                self.close(cursor)
                return
def last_generated_id(cursor: jaydebeapi.Cursor) -> int:
    """
    Return the auto-generated id of the most recent INSERT on this session.

    @param cursor: cursor on the connection whose LAST_INSERT_ID is wanted
    @return: the generated id as a Python int
    """
    cursor.execute("SELECT LAST_INSERT_ID()")
    row = cursor.fetchone()
    # Round-trip through str() — presumably to bridge the JDBC numeric type
    # returned by the driver before converting to int.
    value = row[0]
    return int(str(value))
def reset_generated_id(cursor: jaydebeapi.Cursor) -> None:
    """
    Clear the session's LAST_INSERT_ID by selecting LAST_INSERT_ID(NULL).

    @param cursor: cursor on the connection whose generated id is reset
    @return: None
    """
    statement = 'SELECT LAST_INSERT_ID(NULL)'
    cursor.execute(statement)