def test_execute_query2(self):
    m_execute = Mock()
    expected = []
    returns = [None]

    def foo(*args):
        r = returns.pop(0)
        return r

    m_fetchone = Mock(side_effect=foo)
    m_cursor = Mock()
    m_cursor.execute = m_execute
    m_cursor.fetchone = m_fetchone
    conn = Mock()
    conn.cursor.return_value = m_cursor
    zipped = zip(
        dbapi2_util.execute_query_iter(conn, "select * from somewhere"),
        expected
    )
    for x, y in zipped:
        eq_(x, y)
    eq_(conn.cursor.call_count, 1)
    eq_(m_cursor.execute.call_count, 1)
    m_cursor.execute.assert_called_once_with("select * from somewhere", None)
def test_execute_query1(self):
    m_execute = Mock()
    expected = [(17, 22), (19, 24)]
    returns = [(17, 22), (19, 24), None]

    def foo(*args):
        r = returns.pop(0)
        return r

    m_fetchone = Mock(side_effect=foo)
    m_cursor = MagicMock()
    m_cursor.execute = m_execute
    m_cursor.fetchone = m_fetchone
    conn = MagicMock()
    conn.cursor.return_value.__enter__.return_value = m_cursor
    zipped = zip(
        dbapi2_util.execute_query_iter(conn, "select * from somewhere"),
        expected
    )
    for x, y in zipped:
        assert x == y
    assert conn.cursor.call_count == 1
    assert m_cursor.execute.call_count == 1
    m_cursor.execute.assert_called_once_with("select * from somewhere", None)
def __iter__(self):
    with self.database(name='crontabber-get-apps') as connection:
        for each in execute_query_iter(
            connection,
            "SELECT app_name FROM crontabber"
        ):
            yield each[0]
def test_execute_query1(self):
    m_execute = Mock()
    expected = [(17, 22), (19, 24)]
    returns = [(17, 22), (19, 24), None]

    def foo(*args):
        r = returns.pop(0)
        return r

    m_fetchone = Mock(side_effect=foo)
    m_cursor = MagicMock()
    m_cursor.execute = m_execute
    m_cursor.fetchone = m_fetchone
    conn = MagicMock()
    conn.cursor.return_value.__enter__.return_value = m_cursor
    zipped = zip(
        dbapi2_util.execute_query_iter(
            conn, "select * from somewhere"
        ),
        expected
    )
    for x, y in zipped:
        eq_(x, y)
    eq_(conn.cursor.call_count, 1)
    eq_(m_cursor.execute.call_count, 1)
    m_cursor.execute.assert_called_once_with("select * from somewhere", None)
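The tests above pin down the contract of dbapi2_util.execute_query_iter: it asks the connection for a cursor, calls execute(sql, None) exactly once, and yields rows from fetchone() until it returns None. A minimal sketch consistent with that mocked behaviour follows; the additional_values parameter name is an assumption (the tests only show execute() receiving a trailing None), and the context-manager cursor handling implied by the MagicMock variants is omitted for brevity.

def execute_query_iter(connection, sql, additional_values=None):
    # Sketch only: `additional_values` is an assumed name for the optional
    # bind parameters that the tests show being passed through as None.
    cursor = connection.cursor()
    cursor.execute(sql, additional_values)
    while True:
        row = cursor.fetchone()
        if row is None:
            break
        yield row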
def run(self, connection):
    for (crash_id,) in execute_query_iter(connection, _reprocessing_sql):
        self.queuing_connection_factory.save_raw_crash(
            DotDict({'legacy_processing': 0}),
            [],
            crash_id
        )
def run(self, connection):
    # Each row from execute_query_iter is a one-element tuple, so unpack the
    # crash_id before handing it to the queue.
    for (crash_id,) in execute_query_iter(connection, _reprocessing_sql):
        self.queue.save_raw_crash(
            {'legacy_processing': True},
            [],
            crash_id
        )
def new_crashes(self):
    self.config.logger.debug("starting new_crashes")
    with self.config.database_class(self.config)() as conn:
        self.quit_check()
        yield_did_not_happen = True
        for a_crash_id in execute_query_iter(conn, self.config.sql):
            self.quit_check()
            yield a_crash_id[0]
            yield_did_not_happen = False
        if yield_did_not_happen:
            yield None
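A hypothetical consumer of new_crashes(): because the method yields a None sentinel when the query returns no rows, callers have to tolerate a None crash id. The names crash_source and process_crash below are illustrative only.

# Hypothetical caller; crash_source and process_crash are not from the source.
for crash_id in crash_source.new_crashes():
    if crash_id is None:
        break  # the sentinel: the query produced no work this pass
    process_crash(crash_id)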
def run(self, connection):
    select_sql = """
        SELECT crash_id FROM reprocessing_jobs LIMIT 10000
    """
    crash_ids = []
    for (crash_id,) in execute_query_iter(connection, select_sql):
        crash_ids.append(crash_id)

    delete_sql = """
        DELETE from reprocessing_jobs WHERE crash_id = %(crash_id)s
    """
    for crash_id in crash_ids:
        self.queuing_connection_factory.save_raw_crash(
            DotDict({"legacy_processing": 0}),
            [],
            crash_id
        )
        execute_no_results(connection, delete_sql, {"crash_id": crash_id})
        connection.commit()
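execute_no_results appears alongside execute_query_iter in the run() method above; a plausible minimal form under the same DB-API 2.0 assumptions (again, the additional_values parameter name is assumed) simply runs the statement for its side effects and leaves committing to the caller.

def execute_no_results(connection, sql, additional_values=None):
    # Sketch only: execute the statement for its side effects; the caller
    # commits, as run() above does after each delete.
    cursor = connection.cursor()
    cursor.execute(sql, additional_values)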
def test_new_crashes(self):
    config = self.get_standard_config()
    db_sampling = DBSamplingCrashSource(config)
    m_execute = mock.Mock()
    expected = sequencer(
        '114559a5-d8e6-428c-8b88-1c1f22120314',
        'c44245f4-c93b-49b8-86a2-c15dc3a695cb'
    )
    db_sampling.new_crashes = mock.Mock(side_effect=expected)
    m_cursor = mock.Mock()
    m_cursor.execute = m_execute
    m_cursor.fetchone = db_sampling.new_crashes
    conn = mock.Mock()
    conn.cursor.return_value = m_cursor
    r = dbapi2_util.execute_query_iter(conn, config.sql)
    eq_(r.next().next(), '114559a5-d8e6-428c-8b88-1c1f22120314')
    eq_(conn.cursor.call_count, 1)
    eq_(m_cursor.execute.call_count, 1)
    m_cursor.execute.assert_called_once_with(config.sql, None)
def copy(self, connection):
    sql = """SELECT
        app_name, next_run, first_run, last_run, last_success,
        depends_on, error_count, last_error, ongoing
    FROM crontabber
    """
    columns = (
        'app_name', 'next_run', 'first_run', 'last_run', 'last_success',
        'depends_on', 'error_count', 'last_error', 'ongoing'
    )
    all = {}
    for record in execute_query_iter(connection, sql):
        row = dict(zip(columns, record))
        all[row.pop('app_name')] = row
    return all
def copy(self, connection):
    sql = """SELECT
        app_name, next_run, first_run, last_run, last_success,
        depends_on, error_count, last_error
    FROM crontabber
    """
    columns = (
        'app_name', 'next_run', 'first_run', 'last_run',
        'last_success', 'depends_on', 'error_count', 'last_error'
    )
    all = {}
    for record in execute_query_iter(connection, sql):
        row = dict(zip(columns, record))
        row['last_error'] = json.loads(row['last_error'])
        all[row.pop('app_name')] = row
    return all
def test_new_crashes(self):
    config = self.get_standard_config()
    db_sampling = DBSamplingCrashSource(config)
    m_execute = mock.MagicMock()
    sequence = [
        '114559a5-d8e6-428c-8b88-1c1f22120314',
        'c44245f4-c93b-49b8-86a2-c15dc3a695cb'
    ]
    expected = sequencer(*tuple(sequence))
    conn = mock.MagicMock()
    conn.cursor.return_value.__enter__().fetchone = expected
    for a_row in dbapi2_util.execute_query_iter(conn, config.sql):
        eq_(a_row, sequence.pop())
    conn.cursor.assert_called_once_with()
    conn.cursor.return_value.__enter__.return_value.execute \
        .assert_called_once_with(config.sql, None)
def get_all_crash_ids(self):
    connection = self.source.database.connection()
    sql = 'select uuid from raw_crashes;'
    return execute_query_iter(connection, sql)
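Because get_all_crash_ids returns the generator directly, callers receive whole DB-API row tuples rather than bare uuids; a hypothetical caller would unpack each one-element row. The name store below is illustrative only.

# Hypothetical usage; `store` stands in for whatever owns get_all_crash_ids().
for (crash_id,) in store.get_all_crash_ids():
    print(crash_id)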