def execute(self, connection: ServerConnection):
    """
    Execute the query using the given connection

    :param connection: The connection object to use when executing the query
    :raises RuntimeError: If the query was already executed
    """
    if self._execution_state is ExecutionState.EXECUTED:
        raise RuntimeError('Cannot execute a query multiple times')

    self._execution_state = ExecutionState.EXECUTING

    # Save the current autocommit setting so it can be restored afterwards
    current_auto_commit_status = connection.autocommit

    # Run each batch sequentially
    try:
        # When EXPLAIN ANALYZE is used we have to disable autocommit
        if self._disable_auto_commit:
            connection.autocommit = False

        for batch_index, batch in enumerate(self._batches):
            self._current_batch_index = batch_index
            if self.is_canceled:
                break
            batch.execute(connection)
    finally:
        # Autocommit can only be restored while the connection is still open
        if connection.open:
            connection.autocommit = current_auto_commit_status
        self._execution_state = ExecutionState.EXECUTED
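The try/finally around the batch loop saves and restores the connection's autocommit flag so that an EXPLAIN ANALYZE run, which needs a transaction it can roll back, does not leave the connection in a changed state. A minimal sketch of the same pattern, assuming a plain psycopg2 connection in place of ServerConnection and an illustrative connection string:

# Illustrative only: the autocommit save/restore pattern used above,
# shown against a raw psycopg2 connection rather than ServerConnection.
import psycopg2

conn = psycopg2.connect("dbname=test")  # assumed connection string
conn.autocommit = True

saved_autocommit = conn.autocommit
try:
    conn.autocommit = False              # e.g. while running EXPLAIN ANALYZE batches
    with conn.cursor() as cur:
        cur.execute("EXPLAIN ANALYZE SELECT 1")
        print(cur.fetchall())
    conn.rollback()                      # discard any side effects of the analyzed statement
finally:
    if not conn.closed:                  # only touch autocommit on an open connection
        conn.autocommit = saved_autocommit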
def _do_commit(self, connection: ServerConnection, success: Callable, failure: Callable):
    try:
        edit_operations = self._session_cache.values()

        if any(edit_operations):
            with connection.cursor() as cursor:
                for operation in edit_operations:
                    # If a new row was added and then deleted without committing,
                    # there is nothing to send to the server; just drop it from the cache
                    if isinstance(operation, RowDelete) and operation.row_id >= len(self._result_set.rows):
                        continue

                    script: EditScript = operation.get_script()
                    cursor.execute(cursor.mogrify(script.query_template, script.query_paramters))
                    operation.apply_changes(cursor)

            self._session_cache.clear()
            self._last_row_id = len(self._result_set.rows) - 1

        success()
    except Exception as error:
        failure(str(error))
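Each pending edit is sent as a parameterized statement: cursor.mogrify binds the parameters into the query template, and the bound statement is then executed. A minimal sketch of that pattern with psycopg2 directly; the connection string, table, and values are hypothetical:

# Illustrative only: binding parameters with mogrify before execute,
# as the edit-commit loop above does for each EditScript.
import psycopg2

conn = psycopg2.connect("dbname=test")  # assumed connection string
with conn.cursor() as cur:
    template = "UPDATE public.employees SET name = %s WHERE id = %s"  # hypothetical table
    params = ("Alice", 42)
    bound_query = cur.mogrify(template, params)  # query text with the parameters inlined
    cur.execute(bound_query)
    print(cur.rowcount)                          # number of rows the edit touched
conn.commit()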
def execute(self, conn: ServerConnection) -> None:
    """
    Execute the batch using a cursor retrieved from the given connection

    :raises DatabaseError: if an error is encountered while running the batch's query
    """
    self._execution_start_time = datetime.now()

    if self._batch_events and self._batch_events._on_execution_started:
        self._batch_events._on_execution_started(self)

    cursor = self.get_cursor(conn)

    try:
        cursor.execute(self.batch_text)

        # Commit the transaction if autocommit is True
        if conn.autocommit:
            conn.commit()

        self.after_execute(cursor)
    except conn.database_error as error:
        self._has_error = True
        raise error
    finally:
        # When execute fails for a named cursor, the cursor is never activated on the
        # server, and closing it would also fail. Only close the cursor if it actually
        # ran (rowcount is set once a statement has executed).
        if cursor and cursor.rowcount is not None and cursor.rowcount != -1:
            cursor.close()

        self._has_executed = True
        self._execution_end_time = datetime.now()

        # TODO: PyMySQL doesn't support notices from a connection
        if conn._provider_name == PG_PROVIDER_NAME:
            self._notices = cursor.connection.notices
            cursor.connection.notices = []

        if self._batch_events and self._batch_events._on_execution_completed:
            self._batch_events._on_execution_completed(self)
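On PostgreSQL, server notices raised while the batch runs accumulate on connection.notices; they are captured here and then cleared so the next batch only reports its own. A minimal sketch of that behavior against a raw psycopg2 connection (connection string assumed):

# Illustrative only: collecting and resetting server notices, assuming psycopg2,
# where notices accumulate on connection.notices as plain strings.
import psycopg2

conn = psycopg2.connect("dbname=test")  # assumed connection string
conn.autocommit = True
with conn.cursor() as cur:
    cur.execute("DO $$ BEGIN RAISE NOTICE 'hello from the server'; END $$")

notices = list(conn.notices)   # e.g. ['NOTICE:  hello from the server\n']
conn.notices = []              # reset so the next batch only sees its own notices
print(notices)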
def get_cursor(self, connection: ServerConnection):
    cursor_name = str(uuid.uuid4())

    # Named cursors can only be created inside a transaction. Our connection runs with
    # autocommit enabled, so there is no surrounding transaction; declaring the cursor
    # WITH HOLD (withhold=True) lets it exist outside one. The cursor is local to this
    # batch and is closed explicitly, so it does not leak.
    return connection.cursor(name=cursor_name, withhold=True)
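withhold=True corresponds to PostgreSQL's DECLARE ... WITH HOLD, which is what allows a server-side (named) cursor to be used when the connection is in autocommit mode and no explicit transaction is open. A minimal sketch, assuming psycopg2 and an illustrative connection string:

# Illustrative only: a server-side (named) cursor declared WITH HOLD,
# streaming rows from the server instead of loading them all at once.
import uuid
import psycopg2

conn = psycopg2.connect("dbname=test")  # assumed connection string
conn.autocommit = True

cur = conn.cursor(name=str(uuid.uuid4()), withhold=True)
try:
    cur.execute("SELECT generate_series(1, 1000000)")
    cur.itersize = 2000            # rows fetched from the server per round trip
    for (value,) in cur:
        if value >= 5:
            break
finally:
    cur.close()                    # named cursors must be closed explicitly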
def get_cursor(self, connection: ServerConnection):
    return connection.cursor()