def _sync_pending_rows(self, session, exit_after_run):
    while True:
        LOG.debug("Thread walking database")
        row = db.get_oldest_pending_db_row_with_lock(session)
        if not row:
            LOG.debug("No rows to sync")
            break

        # Validate the operation
        valid = dependency_validations.validate(session, row)
        if not valid:
            LOG.info(_LI("%(operation)s %(type)s %(uuid)s is not a "
                         "valid operation yet, skipping for now"),
                     {'operation': row.operation,
                      'type': row.object_type,
                      'uuid': row.object_uuid})

            # Set row back to pending.
            db.update_db_row_state(session, row, odl_const.PENDING)
            if exit_after_run:
                break
            continue

        LOG.info(_LI("Syncing %(operation)s %(type)s %(uuid)s"),
                 {'operation': row.operation,
                  'type': row.object_type,
                  'uuid': row.object_uuid})

        # Sync this row to ODL
        method, urlpath, to_send = self._json_data(row)

        try:
            self.client.sendjson(method, urlpath, to_send)
            db.update_db_row_state(session, row, odl_const.COMPLETED)
        except exceptions.ConnectionError:
            # Don't raise the retry count, just log an error
            LOG.error(_LE("Cannot connect to the OpenDaylight Controller"))
            # Set row back to pending
            db.update_db_row_state(session, row, odl_const.PENDING)
            # Break out of the loop and retry with the next
            # timer interval
            break
        except Exception as e:
            LOG.error(_LE("Error syncing %(type)s %(operation)s,"
                          " id %(uuid)s Error: %(error)s"),
                      {'type': row.object_type,
                       'uuid': row.object_uuid,
                       'operation': row.operation,
                       'error': e.message})
            db.update_pending_db_row_retry(session, row,
                                           self._row_retry_count)
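For context, `_json_data(row)` is expected to turn a journal row into the REST call tuple consumed above. The following is a minimal sketch of such a helper, not the driver's actual implementation: the `row.data` attribute, the URL pluralization, and the payload wrapping are assumptions for illustration.

# Illustrative only: one plausible shape of the _json_data helper called
# above. Assumes the journal row carries its resource payload in row.data.
def _json_data(self, row):
    object_type = row.object_type
    data = row.data  # assumed attribute holding the resource dict

    if row.operation == odl_const.ODL_CREATE:
        # e.g. POST /networks with {"network": {...}}
        return 'post', object_type + 's', {object_type: data}
    elif row.operation == odl_const.ODL_UPDATE:
        # e.g. PUT /networks/<uuid>
        return ('put', object_type + 's/' + row.object_uuid,
                {object_type: data})
    else:
        # e.g. DELETE /networks/<uuid>, no body
        return 'delete', object_type + 's/' + row.object_uuid, None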
def test_register_validator(self):
    mock_session = mock.Mock()
    mock_validator = mock.Mock(return_value=False)
    mock_row = mock.Mock()
    mock_row.object_type = self._RESOURCE_DUMMY
    dependency_validations.register_validator(self._RESOURCE_DUMMY,
                                              mock_validator)
    valid = dependency_validations.validate(mock_session, mock_row)
    mock_validator.assert_called_once_with(mock_session, mock_row)
    self.assertFalse(valid)
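The test above pins down the registry contract: a validator registered for a resource type is invoked as `validator(session, row)` and its boolean result is what `dependency_validations.validate` returns. A production validator could look like the sketch below; the `ODL_SUBNET`/`ODL_NETWORK` constants and the `row.data['network_id']` lookup are assumptions for illustration only.

# Illustrative validator sketch: a subnet operation is only valid once no
# earlier journal row for its parent network is still queued. A real
# validator would also filter on the row state, which is omitted here.
def _validate_subnet_operation(session, row):
    network_id = row.data['network_id']  # assumed payload layout
    for other in db.get_all_db_rows(session):
        if (other.seqnum < row.seqnum and
                other.object_type == odl_const.ODL_NETWORK and
                other.object_uuid == network_id):
            return False
    return True

# Registered once at import time, mirroring what the test exercises.
dependency_validations.register_validator(odl_const.ODL_SUBNET,
                                          _validate_subnet_operation)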
def test_dependency(self):
    db.create_pending_row(self.db_session, self.first_type, self.first_id,
                          self.first_operation,
                          get_data(self.first_type, self.first_operation))
    db.create_pending_row(self.db_session, self.second_type,
                          self.second_id, self.second_operation,
                          get_data(self.second_type, self.second_operation))
    for idx, row in enumerate(
            sorted(db.get_all_db_rows(self.db_session),
                   key=lambda x: x.seqnum)):
        if self.expected[idx] is not None:
            self.assertEqual(
                self.expected[idx],
                dependency_validations.validate(self.db_session, row))
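`test_dependency` reads per-scenario attributes (`first_type`, `first_operation`, `expected`, and so on) that are injected by a scenario runner rather than defined on the class. One hypothetical scenario entry is sketched below; the constants, UUIDs, and expected values are illustrative and not taken from the actual test matrix.

# Hypothetical scenario definition feeding test_dependency. expected[i] is
# the validation result asserted for the i-th journal row, or None to skip.
scenarios = [
    ('subnet_create_after_network_create',
     {'first_type': odl_const.ODL_NETWORK,
      'first_id': 'net-uuid',
      'first_operation': odl_const.ODL_CREATE,
      'second_type': odl_const.ODL_SUBNET,
      'second_id': 'subnet-uuid',
      'second_operation': odl_const.ODL_CREATE,
      'expected': (True, False)}),
]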
def _sync_pending_entries(self, session, exit_after_run):
    LOG.debug("Start processing journal entries")
    entry = db.get_oldest_pending_db_row_with_lock(session)
    if entry is None:
        LOG.debug("No journal entries to process")
        return

    while entry is not None:
        log_dict = {'op': entry.operation, 'type': entry.object_type,
                    'id': entry.object_uuid}

        valid = dependency_validations.validate(session, entry)
        if not valid:
            db.update_db_row_state(session, entry, odl_const.PENDING)
            LOG.info("Skipping %(op)s %(type)s %(id)s due to "
                     "unprocessed dependencies.", log_dict)

            if exit_after_run:
                break
            continue

        LOG.info("Processing - %(op)s %(type)s %(id)s", log_dict)
        method, urlpath, to_send = self._json_data(entry)

        try:
            self.client.sendjson(method, urlpath, to_send)
            db.update_db_row_state(session, entry, odl_const.COMPLETED)
        except exceptions.ConnectionError:
            # Don't raise the retry count, just log an error & break
            db.update_db_row_state(session, entry, odl_const.PENDING)
            LOG.error("Cannot connect to the OpenDaylight Controller,"
                      " will not process additional entries")
            break
        except Exception as e:
            log_dict['error'] = e.message
            LOG.error("Error while processing %(op)s %(type)s %(id)s;"
                      " Error: %(error)s", log_dict)
            db.update_pending_db_row_retry(session, entry,
                                           self._max_retry_count)

        entry = db.get_oldest_pending_db_row_with_lock(session)

    LOG.debug("Finished processing journal entries")
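To show where this fits, here is a minimal sketch of how a journal thread might drive `_sync_pending_entries`. The session factory, the `self.event` wake-up handle, and the public method names are assumptions for illustration, not the driver's actual plumbing.

# Illustrative wrapper only: assumed wiring around _sync_pending_entries.
def sync_pending_entries(self, exit_after_run=False):
    session = neutron_db_api.get_session()  # assumed session factory
    self._sync_pending_entries(session, exit_after_run)

def run_sync_thread(self, exit_after_run=False):
    while True:
        try:
            self.event.wait()   # assumed threading.Event, set on new entries
            self.event.clear()
            self.sync_pending_entries(exit_after_run)
        except Exception:
            # Keep the thread alive; the entry stays pending and will be
            # retried on the next wake-up.
            LOG.exception("Unexpected exception while processing journal")
        if exit_after_run:
            break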