def execute(
    self,
    operation,
    parameters=None,
    work_group=None,
    s3_staging_dir=None,
    cache_size=0,
):
    """Run *operation* on Athena and block until the query completes.

    On success an ``AthenaResultSet`` is attached to the cursor; on any
    other terminal state an ``OperationalError`` is raised.  Returns
    ``self`` so fetch calls can be chained.
    """
    self._reset_state()
    self._query_id = self._execute(
        operation,
        parameters=parameters,
        work_group=work_group,
        s3_staging_dir=s3_staging_dir,
        cache_size=cache_size,
    )
    execution = self._poll(self._query_id)
    # Guard clause: fail fast on any non-successful terminal state.
    if execution.state != AthenaQueryExecution.STATE_SUCCEEDED:
        raise OperationalError(execution.state_change_reason)
    self._result_set = AthenaResultSet(
        self._connection,
        self._converter,
        execution,
        self.arraysize,
        self._retry_config,
    )
    return self
class Cursor(BaseCursor, CursorIterator, WithResultSet):
    """Synchronous DB-API cursor that polls Athena for query completion.

    Fetch methods delegate to the current ``AthenaResultSet``; they raise
    ``ProgrammingError`` when no query has produced a result set yet.
    """

    def __init__(self, connection, s3_staging_dir, schema_name, poll_interval,
                 encryption_option, kms_key, converter, formatter,
                 retry_exceptions, retry_attempt, retry_multiplier,
                 retry_max_delay, retry_exponential_base, **kwargs):
        # Pure pass-through to the base cursor; all state lives there.
        super(Cursor, self).__init__(
            connection, s3_staging_dir, schema_name, poll_interval,
            encryption_option, kms_key, converter, formatter,
            retry_exceptions, retry_attempt, retry_multiplier,
            retry_max_delay, retry_exponential_base, **kwargs)

    @property
    def rownumber(self):
        # None until a query has produced a result set.
        if self._result_set:
            return self._result_set.rownumber
        return None

    def close(self):
        result_set = self._result_set
        if result_set and not result_set.is_closed:
            result_set.close()

    @synchronized
    def execute(self, operation, parameters=None):
        """Execute *operation*, wait for it, and return ``self``."""
        self._reset_state()
        self._query_id = self._execute(operation, parameters)
        query_execution = self._poll(self._query_id)
        if query_execution.state != AthenaQueryExecution.STATE_SUCCEEDED:
            raise OperationalError(query_execution.state_change_reason)
        self._result_set = AthenaResultSet(
            self._connection, self._converter, query_execution,
            self.arraysize, self.retry_exceptions, self.retry_attempt,
            self.retry_multiplier, self.retry_max_delay,
            self.retry_exponential_base)
        return self

    def executemany(self, operation, seq_of_parameters):
        # Batch execution is not supported by this cursor variant.
        raise NotSupportedError

    @synchronized
    def cancel(self):
        if not self._query_id:
            raise ProgrammingError('QueryExecutionId is none or empty.')
        self._cancel(self._query_id)

    @synchronized
    def fetchone(self):
        if not self.has_result_set:
            raise ProgrammingError('No result set.')
        return self._result_set.fetchone()

    @synchronized
    def fetchmany(self, size=None):
        if not self.has_result_set:
            raise ProgrammingError('No result set.')
        return self._result_set.fetchmany(size)

    @synchronized
    def fetchall(self):
        if not self.has_result_set:
            raise ProgrammingError('No result set.')
        return self._result_set.fetchall()
def execute(self, operation, parameters=None):
    """Execute *operation*, wait for completion, and build the result set.

    Raises ``OperationalError`` when the query ends in any state other
    than SUCCEEDED.  Returns ``self`` so fetch calls can be chained —
    added for consistency with the other ``execute`` variants in this
    file, which all return the cursor (backward-compatible: previous
    callers ignored the implicit ``None``).
    """
    self._reset_state()
    self._query_id = self._execute(operation, parameters)
    query_execution = self._poll(self._query_id)
    if query_execution.state == AthenaQueryExecution.STATE_SUCCEEDED:
        self._result_set = AthenaResultSet(
            self._connection, self._converter, query_execution,
            self.arraysize, self.retry_exceptions, self.retry_attempt,
            self.retry_multiplier, self.retry_max_delay,
            self.retry_exponential_base)
    else:
        raise OperationalError(query_execution.state_change_reason)
    return self
def execute(self, operation, parameters=None):
    """Run the query, attach an ``AthenaResultSet`` on success, return self."""
    self._reset_state()
    self._query_id = self._execute(operation, parameters)
    execution = self._poll(self._query_id)
    # Any terminal state other than SUCCEEDED is surfaced as an error.
    if execution.state != AthenaQueryExecution.STATE_SUCCEEDED:
        raise OperationalError(execution.state_change_reason)
    self._result_set = AthenaResultSet(
        self._connection,
        self._converter,
        execution,
        self.arraysize,
        self._retry_config,
    )
    return self
def _collect_result_set(self, query_id):
    """Poll until *query_id* reaches a terminal state and wrap it.

    Returns an ``AthenaResultSet`` built from the finished execution.
    """
    execution = self._poll(query_id)
    return AthenaResultSet(
        self._connection,
        self._converter,
        execution,
        self._arraysize,
        self.retry_exceptions,
        self.retry_attempt,
        self.retry_multiplier,
        self.retry_max_delay,
        self.retry_exponential_base,
    )
def _collect_result_set(self, query_id):
    """Wait for *query_id* to finish and return its ``AthenaResultSet``."""
    execution = self._poll(query_id)
    # Keyword arguments kept as in the original call site.
    return AthenaResultSet(
        connection=self._connection,
        converter=self._converter,
        query_execution=execution,
        arraysize=self._arraysize,
        retry_config=self._retry_config,
    )
class Cursor(BaseCursor, CursorIterator):
    """DB-API cursor exposing Athena query metadata via properties.

    Every metadata property returns ``None`` until a query has produced a
    result set; fetch methods raise ``ProgrammingError`` in that case.
    """

    def __init__(self, client, s3_staging_dir, schema_name, poll_interval,
                 encryption_option, kms_key, converter, formatter,
                 retry_exceptions, retry_attempt, retry_multiplier,
                 retry_max_delay, retry_exponential_base):
        super(Cursor, self).__init__(
            client, s3_staging_dir, schema_name, poll_interval,
            encryption_option, kms_key, converter, formatter,
            retry_exceptions, retry_attempt, retry_multiplier,
            retry_max_delay, retry_exponential_base)
        # Per-query state, cleared by _reset_state() before each execute.
        self._query_id = None
        self._result_set = None

    @property
    def rownumber(self):
        return self._result_set.rownumber if self._result_set else None

    @property
    def has_result_set(self):
        return self._result_set is not None

    @property
    def description(self):
        return self._result_set.description if self.has_result_set else None

    @property
    def query_id(self):
        return self._query_id

    @property
    def query(self):
        return self._result_set.query if self.has_result_set else None

    @property
    def state(self):
        return self._result_set.state if self.has_result_set else None

    @property
    def state_change_reason(self):
        return (self._result_set.state_change_reason
                if self.has_result_set else None)

    @property
    def completion_date_time(self):
        return (self._result_set.completion_date_time
                if self.has_result_set else None)

    @property
    def submission_date_time(self):
        return (self._result_set.submission_date_time
                if self.has_result_set else None)

    @property
    def data_scanned_in_bytes(self):
        return (self._result_set.data_scanned_in_bytes
                if self.has_result_set else None)

    @property
    def execution_time_in_millis(self):
        return (self._result_set.execution_time_in_millis
                if self.has_result_set else None)

    @property
    def output_location(self):
        return (self._result_set.output_location
                if self.has_result_set else None)

    def close(self):
        result_set = self._result_set
        if result_set and not result_set.is_closed:
            result_set.close()

    def _reset_state(self):
        """Drop all per-query state, closing any open result set."""
        self._description = None
        self._query_id = None
        if self._result_set and not self._result_set.is_closed:
            self._result_set.close()
        self._result_set = None

    @synchronized
    def execute(self, operation, parameters=None):
        """Execute *operation* and wait until Athena reports completion."""
        self._reset_state()
        self._query_id = self._execute(operation, parameters)
        query_execution = self._poll(self._query_id)
        if query_execution.state == AthenaQueryExecution.STATE_SUCCEEDED:
            self._result_set = AthenaResultSet(
                self._connection, self._converter, query_execution,
                self.arraysize, self.retry_exceptions, self.retry_attempt,
                self.retry_multiplier, self.retry_max_delay,
                self.retry_exponential_base)
        else:
            raise OperationalError(query_execution.state_change_reason)

    def executemany(self, operation, seq_of_parameters):
        # Batch execution is not supported by this cursor variant.
        raise NotSupportedError

    @synchronized
    def cancel(self):
        if not self._query_id:
            raise ProgrammingError('QueryExecutionId is none or empty.')
        self._cancel(self._query_id)

    @synchronized
    def fetchone(self):
        if not self.has_result_set:
            raise ProgrammingError('No result set.')
        return self._result_set.fetchone()

    @synchronized
    def fetchmany(self, size=None):
        if not self.has_result_set:
            raise ProgrammingError('No result set.')
        return self._result_set.fetchmany(size)

    @synchronized
    def fetchall(self):
        if not self.has_result_set:
            raise ProgrammingError('No result set.')
        return self._result_set.fetchall()
class Cursor(BaseCursor, CursorIterator, WithResultSet):
    """Synchronous cursor supporting work groups and query-result caching.

    ``execute`` blocks until the query reaches a terminal state and
    returns ``self`` so fetch calls can be chained.
    """

    def __init__(self, connection, s3_staging_dir, schema_name, work_group,
                 poll_interval, encryption_option, kms_key, converter,
                 formatter, retry_config, **kwargs):
        # Pure pass-through; all connection state lives in the base classes.
        super(Cursor, self).__init__(
            connection=connection,
            s3_staging_dir=s3_staging_dir,
            schema_name=schema_name,
            work_group=work_group,
            poll_interval=poll_interval,
            encryption_option=encryption_option,
            kms_key=kms_key,
            converter=converter,
            formatter=formatter,
            retry_config=retry_config,
            **kwargs)

    @property
    def rownumber(self):
        # None until a query has produced a result set.
        if self._result_set:
            return self._result_set.rownumber
        return None

    def close(self):
        result_set = self._result_set
        if result_set and not result_set.is_closed:
            result_set.close()

    @synchronized
    def execute(self, operation, parameters=None, work_group=None,
                s3_staging_dir=None, cache_size=0):
        """Run *operation*; raise ``OperationalError`` unless it succeeds."""
        self._reset_state()
        self._query_id = self._execute(
            operation,
            parameters=parameters,
            work_group=work_group,
            s3_staging_dir=s3_staging_dir,
            cache_size=cache_size)
        query_execution = self._poll(self._query_id)
        if query_execution.state != AthenaQueryExecution.STATE_SUCCEEDED:
            raise OperationalError(query_execution.state_change_reason)
        self._result_set = AthenaResultSet(
            self._connection, self._converter, query_execution,
            self.arraysize, self._retry_config)
        return self

    def executemany(self, operation, seq_of_parameters):
        """Execute *operation* once per parameter set, sequentially."""
        for parameters in seq_of_parameters:
            self.execute(operation, parameters)
        # Operations that have result sets are not allowed with executemany.
        self._reset_state()

    @synchronized
    def cancel(self):
        if not self._query_id:
            raise ProgrammingError('QueryExecutionId is none or empty.')
        self._cancel(self._query_id)

    @synchronized
    def fetchone(self):
        if not self.has_result_set:
            raise ProgrammingError('No result set.')
        return self._result_set.fetchone()

    @synchronized
    def fetchmany(self, size=None):
        if not self.has_result_set:
            raise ProgrammingError('No result set.')
        return self._result_set.fetchmany(size)

    @synchronized
    def fetchall(self):
        if not self.has_result_set:
            raise ProgrammingError('No result set.')
        return self._result_set.fetchall()