Example #1
    def sync_pending_entries(self):
        LOG.debug("Start processing journal entries")
        context = nl_context.get_admin_context()
        entry = db.get_oldest_pending_db_row_with_lock(context)
        if entry is None:
            LOG.debug("No journal entries to process")
            return

        while entry is not None:
            stop_processing = self._sync_entry(context, entry)
            if stop_processing:
                break

            entry = db.get_oldest_pending_db_row_with_lock(context)
        LOG.debug("Finished processing journal entries")
Example #2
    def sync_pending_entries(self):
        LOG.debug("Start processing journal entries")
        context = nl_context.get_admin_context()
        entry = db.get_oldest_pending_db_row_with_lock(context.session)
        if entry is None:
            LOG.debug("No journal entries to process")
            return

        while entry is not None:
            stop_processing = self._sync_entry(context, entry)
            if stop_processing:
                break

            entry = db.get_oldest_pending_db_row_with_lock(context.session)
        LOG.debug("Finished processing journal entries")
Example #3
 def test_get_oldest_pending_row_returns_parent_when_dep_pending(self):
     db.create_pending_row(self.db_context, *self.UPDATE_ROW)
     parent_row = db.get_all_db_rows(self.db_context)[0]
     db.create_pending_row(self.db_context, *self.UPDATE_ROW,
                           depending_on=[parent_row])
     row = db.get_oldest_pending_db_row_with_lock(self.db_context)
     self.assertEqual(parent_row, row)
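The examples on this page all exercise the same contract: the call returns the oldest PENDING journal entry whose dependencies have already finished, and flips its state to PROCESSING so no other worker can claim it (Examples #41 and #43 assert exactly this). Below is a minimal, self-contained sketch of that select-and-claim behaviour in plain Python; the JournalRow class and its field names are assumptions made for illustration, not the actual networking-odl schema or implementation.

# Illustrative sketch only -- not the networking-odl implementation.
from dataclasses import dataclass, field
from datetime import datetime
from typing import List, Optional

PENDING, PROCESSING, COMPLETED = 'pending', 'processing', 'completed'

@dataclass
class JournalRow:                       # assumed stand-in for the journal model
    seqnum: int
    state: str = PENDING
    last_retried: datetime = field(default_factory=datetime.utcnow)
    depends_on: List['JournalRow'] = field(default_factory=list)

def get_oldest_pending_row(rows: List[JournalRow]) -> Optional[JournalRow]:
    # Only PENDING rows whose dependencies have completed are candidates,
    # which is why the dependency tests expect the parent row back.
    candidates = [r for r in rows
                  if r.state == PENDING and
                  all(dep.state == COMPLETED for dep in r.depends_on)]
    if not candidates:
        return None
    # Oldest first -- the ordering tests above and below rely on last_retried.
    row = min(candidates, key=lambda r: (r.last_retried, r.seqnum))
    row.state = PROCESSING              # claim the row (done atomically in the real DB)
    return row

# Usage mirroring the dependency test above:
parent = JournalRow(seqnum=1)
child = JournalRow(seqnum=2, depends_on=[parent])
assert get_oldest_pending_row([parent, child]) is parent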
Example #4
 def test_subnet_allocation_pools(self):
     context = self._get_mock_operation_context(odl_const.ODL_SUBNET)
     alloc_pool = context.current['allocation_pools']
     self._call_operation_object(odl_const.ODL_UPDATE,
                                 odl_const.ODL_SUBNET)
     row = db.get_oldest_pending_db_row_with_lock(self.db_context)
     self.assertEqual(alloc_pool, row.data['allocation_pools'])
Example #5
 def _test_qos_policy(self, operation):
     self._call_operation_object(operation=operation,
                                 object_type='policy_precommit')
     qos_data = self._get_mock_qos_operation_data()
     row = db.get_oldest_pending_db_row_with_lock(self.db_context)
     self.assertEqual(operation, row['operation'])
     self.assertEqual(qos_data.to_dict()['id'], row['object_uuid'])
Example #6
    def sync_pending_row(self, exit_after_run=False):
        # Block until all pending rows are processed
        session = neutron_db_api.get_session()
        while not self.event.is_set():
            self.event.wait()
            # Clear the event and go back to waiting after
            # the sync block exits
            self.event.clear()
            while True:
                LOG.debug("Thread walking database")
                row = db.get_oldest_pending_db_row_with_lock(session)
                if not row:
                    LOG.debug("No rows to sync")
                    break

                # Validate the operation
                validate_func = db.VALIDATION_MAP[row.object_type]
                valid = validate_func(session, row.object_uuid, row.operation, row.data)
                if not valid:
                    LOG.info(
                        _LI("%(operation)s %(type)s %(uuid)s is not a " "valid operation yet, skipping for now"),
                        {"operation": row.operation, "type": row.object_type, "uuid": row.object_uuid},
                    )
                    continue

                LOG.info(
                    _LI("Syncing %(operation)s %(type)s %(uuid)s"),
                    {"operation": row.operation, "type": row.object_type, "uuid": row.object_uuid},
                )

                # Add code to sync this to ODL
                method, urlpath, to_send = self._json_data(row)

                try:
                    self.client.sendjson(method, urlpath, to_send)
                    db.update_processing_db_row_passed(session, row)
                except exceptions.ConnectionError as e:
                    # Don't raise the retry count, just log an error
                    LOG.error(_LE("Cannot connect to the Opendaylight " "Controller"))
                    # Set row back to pending
                    db.update_db_row_pending(session, row)
                    # Break out of the loop and retry with the next
                    # timer interval
                    break
                except Exception as e:
                    LOG.error(
                        _LE("Error syncing %(type)s %(operation)s," " id %(uuid)s Error: %(error)s"),
                        {
                            "type": row.object_type,
                            "uuid": row.object_uuid,
                            "operation": row.operation,
                            "error": e.message,
                        },
                    )
                    db.update_pending_db_row_retry(session, row, self._row_retry_count)
            LOG.debug("Clearing sync thread event")
            if exit_after_run:
                # Permanently waiting thread model breaks unit tests
                # Adding this arg to exit here only for unit tests
                break
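Example #6 shows the consumer side of an event-driven journal thread: it blocks on self.event, drains the pending rows once woken, clears the event and goes back to waiting. A minimal sketch of how such a worker is usually wired to its producers follows; the class and method names here are assumptions for illustration, not the driver's real API.

# Hedged sketch -- assumed names, not the actual OpenDaylight journal thread.
import threading

class SyncWorker(object):
    def __init__(self, drain_pending_rows):
        # drain_pending_rows() plays the role of the inner while-True loop
        # above that repeatedly calls get_oldest_pending_db_row_with_lock().
        self.event = threading.Event()
        self._drain = drain_pending_rows
        self._thread = threading.Thread(target=self._run, daemon=True)
        self._thread.start()

    def _run(self):
        while True:
            self.event.wait()    # block until a producer journals a row
            self.event.clear()   # re-arm before draining
            self._drain()

    def notify(self):
        # Producers call this after a row is journalled (e.g. after
        # db.create_pending_row(...) commits), mirroring the
        # set_sync_event() hooks mocked in some of the tests below.
        self.event.set()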
Example #8
    def _sync_pending_entries(self, session, exit_after_run):
        LOG.debug("Start processing journal entries")
        entry = db.get_oldest_pending_db_row_with_lock(session)
        if entry is None:
            LOG.debug("No journal entries to process")
            return

        while entry is not None:
            log_dict = {
                'op': entry.operation,
                'type': entry.object_type,
                'id': entry.object_uuid
            }

            valid = dependency_validations.validate(session, entry)
            if not valid:
                db.update_db_row_state(session, entry, odl_const.PENDING)
                LOG.info(
                    "Skipping %(op)s %(type)s %(id)s due to "
                    "unprocessed dependencies.", log_dict)

                if exit_after_run:
                    break
                continue

            LOG.info("Processing - %(op)s %(type)s %(id)s", log_dict)
            method, urlpath, to_send = self._json_data(entry)

            try:
                self.client.sendjson(method, urlpath, to_send)
                db.update_db_row_state(session, entry, odl_const.COMPLETED)
            except exceptions.ConnectionError as e:
                # Don't raise the retry count, just log an error & break
                db.update_db_row_state(session, entry, odl_const.PENDING)
                LOG.error("Cannot connect to the OpenDaylight Controller,"
                          " will not process additional entries")
                break
            except Exception as e:
                log_dict['error'] = e.message
                LOG.error(
                    "Error while processing %(op)s %(type)s %(id)s;"
                    " Error: %(error)s", log_dict)
                db.update_pending_db_row_retry(session, entry,
                                               self._max_retry_count)

            entry = db.get_oldest_pending_db_row_with_lock(session)
        LOG.debug("Finished processing journal entries")
Example #9
    def _sync_pending_rows(self, session, exit_after_run):
        while True:
            LOG.debug("Thread walking database")
            row = db.get_oldest_pending_db_row_with_lock(session)
            if not row:
                LOG.debug("No rows to sync")
                break

            # Validate the operation
            validate_func = (
                dependency_validations.VALIDATION_MAP[row.object_type])
            valid = validate_func(session, row)
            if not valid:
                LOG.info(
                    _LI("%(operation)s %(type)s %(uuid)s is not a "
                        "valid operation yet, skipping for now"), {
                            'operation': row.operation,
                            'type': row.object_type,
                            'uuid': row.object_uuid
                        })

                # Set row back to pending.
                db.update_db_row_state(session, row, odl_const.PENDING)
                if exit_after_run:
                    break
                continue

            LOG.info(
                _LI("Syncing %(operation)s %(type)s %(uuid)s"), {
                    'operation': row.operation,
                    'type': row.object_type,
                    'uuid': row.object_uuid
                })

            # Add code to sync this to ODL
            method, urlpath, to_send = self._json_data(row)

            try:
                self.client.sendjson(method, urlpath, to_send)
                db.update_db_row_state(session, row, odl_const.COMPLETED)
            except exceptions.ConnectionError as e:
                # Don't raise the retry count, just log an error
                LOG.error(_LE("Cannot connect to the Opendaylight Controller"))
                # Set row back to pending
                db.update_db_row_state(session, row, odl_const.PENDING)
                # Break out of the loop and retry with the next
                # timer interval
                break
            except Exception as e:
                LOG.error(
                    _LE("Error syncing %(type)s %(operation)s,"
                        " id %(uuid)s Error: %(error)s"), {
                            'type': row.object_type,
                            'uuid': row.object_uuid,
                            'operation': row.operation,
                            'error': e.message
                        })
                db.update_pending_db_row_retry(session, row,
                                               self._row_retry_count)
Example #10
    def _test_get_oldest_pending_row_none(self, state):
        db.create_pending_row(self.db_session, *self.UPDATE_ROW)
        row = db.get_all_db_rows(self.db_session)[0]
        row.state = state
        self._update_row(row)

        row = db.get_oldest_pending_db_row_with_lock(self.db_session)
        self.assertIsNone(row)
Example #11
    def _test_get_oldest_pending_row_none(self, state):
        db.create_pending_row(self.db_context, *self.UPDATE_ROW)
        row = db.get_all_db_rows(self.db_context)[0]
        row.state = state
        self._update_row(row)

        row = db.get_oldest_pending_db_row_with_lock(self.db_context)
        self.assertIsNone(row)
Example #12
 def test_get_oldest_pending_row_returns_parent_when_dep_pending(self):
     db.create_pending_row(self.db_session, *self.UPDATE_ROW)
     parent_row = db.get_all_db_rows(self.db_session)[0]
     db.create_pending_row(self.db_session,
                           *self.UPDATE_ROW,
                           depending_on=[parent_row])
     row = db.get_oldest_pending_db_row_with_lock(self.db_session)
     self.assertEqual(parent_row, row)
Example #13
    def _test_operation_object(self, operation, object_type):
        self._call_operation_object(operation, object_type)

        context = self._get_mock_operation_context(object_type)
        row = db.get_oldest_pending_db_row_with_lock(self.db_session)
        self.assertEqual(operation, row['operation'])
        self.assertEqual(object_type, row['object_type'])
        self.assertEqual(context.current['id'], row['object_uuid'])
Example #14
 def _assert_op(self, operation, object_type, data, precommit=True):
     row = db.get_oldest_pending_db_row_with_lock(self.db_session)
     if precommit:
         self.assertEqual(operation, row['operation'])
         self.assertEqual(object_type, row['object_type'])
         self.assertEqual(data['id'], row['object_uuid'])
     else:
         self.assertIsNone(row)
Example #15
    def _test_operation_object(self, operation, object_type):
        self._call_operation_object(operation, object_type)

        context = self._get_mock_operation_context(object_type)
        row = db.get_oldest_pending_db_row_with_lock(self.db_session)
        self.assertEqual(operation, row['operation'])
        self.assertEqual(object_type, row['object_type'])
        self.assertEqual(context.current['id'], row['object_uuid'])
Example #16
    def test_get_oldest_pending_row_order(self):
        db.create_pending_row(self.db_context, *self.UPDATE_ROW)
        older_row = db.get_all_db_rows(self.db_context)[0]
        older_row.last_retried -= timedelta(minutes=1)
        self._update_row(older_row)

        db.create_pending_row(self.db_context, *self.UPDATE_ROW)
        row = db.get_oldest_pending_db_row_with_lock(self.db_context)
        self.assertEqual(older_row, row)
Example #17
    def test_get_oldest_pending_row_order(self):
        db.create_pending_row(self.db_session, *self.UPDATE_ROW)
        older_row = db.get_all_db_rows(self.db_session)[0]
        older_row.last_retried -= timedelta(minutes=1)
        self._update_row(older_row)

        db.create_pending_row(self.db_session, *self.UPDATE_ROW)
        row = db.get_oldest_pending_db_row_with_lock(self.db_session)
        self.assertEqual(older_row, row)
Example #18
 def _assert_op(self, operation, object_type, data, precommit=True):
     row = db.get_oldest_pending_db_row_with_lock(self.db_context)
     if precommit:
         self.db_context.session.flush()
         self.assertEqual(operation, row['operation'])
         self.assertEqual(object_type, row['object_type'])
         self.assertEqual(data['id'], row['object_uuid'])
     else:
         self.assertIsNone(row)
Example #19
    def _test_event(self, operation, timing):
        self._call_operation_object(operation, timing)
        row = db.get_oldest_pending_db_row_with_lock(self.db_session)

        if timing == 'precommit':
            self.assertEqual(operation, row['operation'])
            self.assertEqual(odl_const.ODL_SFC_FLOW_CLASSIFIER,
                             row['object_type'])
        elif timing == 'after':
            self.assertIsNone(row)
Example #20
    def _test_event(self, operation, timing, resource_str, object_type):
        context = self._get_mock_operation_context(object_type)
        self._call_operation_object(operation, timing, resource_str, context)
        row = db.get_oldest_pending_db_row_with_lock(self.db_session)

        if timing == 'precommit':
            self.assertEqual(operation, row['operation'])
            self.assertEqual(object_type, row['object_type'])
        elif timing == 'after':
            self.assertIsNone(row)
Example #21
 def base_test_operation(self, obj_driver, obj_type, operation, op_const,
                         mock_set_sync_event, mock_successful_completion):
     obj = self._get_faked_model(obj_type)
     getattr(obj_driver, operation)(self.db_context, obj)
     row = db.get_oldest_pending_db_row_with_lock(self.db_context.session)
     self.assertEqual(operation, row['operation'])
     if obj_type != odl_const.ODL_MEMBER:
         self.assertEqual(("lbaas/%s" % obj_type), row['object_type'])
     else:
         self.assertEqual(journal.MAKE_URL[obj_type](row),
                          ("lbaas/pools/%s/member" % obj.pool.id))
Example #22
    def test_get_oldest_pending_row_when_deadlock(self):
        db.create_pending_row(self.db_session, *self.UPDATE_ROW)
        update_mock = mock.MagicMock(side_effect=(DBDeadlock, mock.DEFAULT))

        # Mocking is mandatory to achieve a deadlock regardless of the DB
        # backend being used when running the tests
        with mock.patch.object(db, 'update_db_row_state', new=update_mock):
            row = db.get_oldest_pending_db_row_with_lock(self.db_session)
            self.assertIsNotNone(row)

        self.assertEqual(2, update_mock.call_count)
Example #23
    def test_get_oldest_pending_row_when_deadlock(self):
        db.create_pending_row(self.db_session, *self.UPDATE_ROW)
        update_mock = mock.MagicMock(side_effect=(DBDeadlock, mock.DEFAULT))

        # Mocking is mandatory to achieve a deadlock regardless of the DB
        # backend being used when running the tests
        with mock.patch.object(db, 'update_db_row_state', new=update_mock):
            row = db.get_oldest_pending_db_row_with_lock(self.db_session)
            self.assertIsNotNone(row)

        self.assertEqual(2, update_mock.call_count)
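The two deadlock tests above mock db.update_db_row_state so that the first call raises DBDeadlock, then assert that a row is still returned and the update is attempted twice: a deadlock while claiming the row is retried rather than propagated. A minimal sketch of that retry idea is shown below; the helper name and retry limit are made up for illustration (oslo.db also ships decorators such as oslo_db.api.wrap_db_retry for the same purpose), and this is not the library's actual code.

# Hedged sketch of retry-on-deadlock; illustrative only.
from oslo_db.exception import DBDeadlock

def claim_with_deadlock_retry(claim_once, max_attempts=3):
    """Call claim_once() and retry when the database reports a deadlock."""
    for attempt in range(1, max_attempts + 1):
        try:
            return claim_once()
        except DBDeadlock:
            if attempt == max_attempts:
                raise
            # Lost a row-lock race against another worker; try again.
            continue

# e.g. claim_with_deadlock_retry(
#          lambda: db.get_oldest_pending_db_row_with_lock(session))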
Example #24
 def base_test_operation(self, obj_driver, obj_type, operation, op_const,
                         mock_set_sync_event, mock_successful_completion):
     obj = self._get_faked_model(obj_type)
     getattr(obj_driver, operation)(self.db_context, obj)
     row = db.get_oldest_pending_db_row_with_lock(self.db_context)
     self.assertEqual(operation, row['operation'])
     if obj_type != odl_const.ODL_MEMBER:
         self.assertEqual(("lbaas/%s" % obj_type), row['object_type'])
     else:
         self.assertEqual(journal.MAKE_URL[obj_type](row),
                          ("lbaas/pools/%s/member" % obj.pool.id))
Example #25
    def _test_event(self, operation, timing):
        self._call_operation_object(operation, timing)
        fake_payload = self._fake_trunk_payload()
        row = db.get_oldest_pending_db_row_with_lock(self.db_session)

        if timing == 'precommit':
            self.assertEqual(operation, row['operation'])
            self.assertEqual(odl_const.ODL_TRUNK, row['object_type'])
            self.assertEqual(fake_payload.trunk_id, row['object_uuid'])
        elif timing == 'after':
            self.assertIsNone(row)
Example #26
    def _test_get_oldest_pending_row_with_dep(self, dep_state):
        db.create_pending_row(self.db_context, *self.UPDATE_ROW)
        parent_row = db.get_all_db_rows(self.db_context)[0]
        db.update_db_row_state(self.db_context, parent_row, dep_state)
        db.create_pending_row(self.db_context, *self.UPDATE_ROW,
                              depending_on=[parent_row])
        row = db.get_oldest_pending_db_row_with_lock(self.db_context)
        if row is not None:
            self.assertNotEqual(parent_row.seqnum, row.seqnum)

        return row
Example #27
    def _test_get_oldest_pending_row_with_dep(self, dep_state):
        db.create_pending_row(self.db_session, *self.UPDATE_ROW)
        parent_row = db.get_all_db_rows(self.db_session)[0]
        db.update_db_row_state(self.db_session, parent_row, dep_state)
        db.create_pending_row(self.db_session,
                              *self.UPDATE_ROW,
                              depending_on=[parent_row])
        row = db.get_oldest_pending_db_row_with_lock(self.db_session)
        if row is not None:
            self.assertNotEqual(parent_row.seqnum, row.seqnum)

        return row
Example #28
 def _test_fip_operation(self, event, operation, fip, ops=True):
     method = getattr(self.flavor_driver,
                      '_floatingip_%s_%s' % (operation, event))
     method(odl_const.ODL_FLOATINGIP, mock.ANY, mock.ANY, **fip)
     row = db.get_oldest_pending_db_row_with_lock(self.db_context)
     if ops:
         if operation != odl_const.ODL_DELETE:
             self.assertEqual(fip['floatingip'], row.data)
         self.assertEqual(odl_const.ODL_FLOATINGIP, row.object_type)
         self.assertEqual(fip['floatingip_id'], row.object_uuid)
     else:
         self.assertIsNone(row)
Example #29
 def _test_fip_operation(self, event, operation, fip, ops=True):
     method = getattr(self.flavor_driver,
                      '_floatingip_%s_%s' % (operation, event))
     method(odl_const.ODL_FLOATINGIP, mock.ANY, mock.ANY, **fip)
     row = db.get_oldest_pending_db_row_with_lock(self.db_context)
     if ops:
         if operation != odl_const.ODL_DELETE:
             self.assertEqual(fip['floatingip'], row.data)
         self.assertEqual(odl_const.ODL_FLOATINGIP, row.object_type)
         self.assertEqual(fip['floatingip_id'], row.object_uuid)
     else:
         self.assertIsNone(row)
Example #30
    def _sync_pending_rows(self, session, exit_after_run):
        while True:
            LOG.debug("Thread walking database")
            row = db.get_oldest_pending_db_row_with_lock(session)
            if not row:
                LOG.debug("No rows to sync")
                break

            # Validate the operation
            validate_func = (dependency_validations.
                             VALIDATION_MAP[row.object_type])
            valid = validate_func(session, row)
            if not valid:
                LOG.info(_LI("%(operation)s %(type)s %(uuid)s is not a "
                             "valid operation yet, skipping for now"),
                         {'operation': row.operation,
                          'type': row.object_type,
                          'uuid': row.object_uuid})

                # Set row back to pending.
                db.update_db_row_state(session, row, odl_const.PENDING)
                if exit_after_run:
                    break
                continue

            LOG.info(_LI("Syncing %(operation)s %(type)s %(uuid)s"),
                     {'operation': row.operation, 'type': row.object_type,
                      'uuid': row.object_uuid})

            # Add code to sync this to ODL
            method, urlpath, to_send = self._json_data(row)

            try:
                self.client.sendjson(method, urlpath, to_send)
                db.update_db_row_state(session, row, odl_const.COMPLETED)
            except exceptions.ConnectionError as e:
                # Don't raise the retry count, just log an error
                LOG.error(_LE("Cannot connect to the Opendaylight Controller"))
                # Set row back to pending
                db.update_db_row_state(session, row, odl_const.PENDING)
                # Break out of the loop and retry with the next
                # timer interval
                break
            except Exception as e:
                LOG.error(_LE("Error syncing %(type)s %(operation)s,"
                              " id %(uuid)s Error: %(error)s"),
                          {'type': row.object_type,
                           'uuid': row.object_uuid,
                           'operation': row.operation,
                           'error': e.message})
                db.update_pending_db_row_retry(session, row,
                                               self._row_retry_count)
Example #31
    def _test_event(self, operation, timing):
        with db_api.CONTEXT_WRITER.using(self.db_context):
            self._call_operation_object(operation, timing)
            if timing == 'precommit':
                self.db_context.session.flush()
            row = db.get_oldest_pending_db_row_with_lock(self.db_context)

            if timing == 'precommit':
                self.assertEqual(operation, row['operation'])
                self.assertEqual(odl_const.ODL_SFC_FLOW_CLASSIFIER,
                                 row['object_type'])
            elif timing == 'after':
                self.assertIsNone(row)
Example #32
    def _test_event(self, operation, timing):
        with db_api.CONTEXT_WRITER.using(self.db_context):
            self._call_operation_object(operation, timing)
            if timing == 'precommit':
                self.db_context.session.flush()
            row = db.get_oldest_pending_db_row_with_lock(self.db_context)

            if timing == 'precommit':
                self.assertEqual(operation, row['operation'])
                self.assertEqual(
                    odl_const.ODL_SFC_FLOW_CLASSIFIER, row['object_type'])
            elif timing == 'after':
                self.assertIsNone(row)
Example #33
 def _test_router_operation(self, event, operation, router, ops=True):
     method = getattr(self.flavor_driver,
                      '_router_%s_%s' % (operation, event))
     method(odl_const.ODL_ROUTER, mock.ANY, mock.ANY, **router)
     row = db.get_oldest_pending_db_row_with_lock(self.db_context)
     if ops:
         if operation in ['del', odl_const.ODL_DELETE]:
             self.assertEqual(router['router_id'], row.object_uuid)
         else:
             self.assertEqual(router['router'], row.data)
         self.assertEqual(_operation_map[operation], row.operation)
     else:
         self.assertIsNone(row)
Example #35
    def _test_event(self, operation, timing):
        with db_api.CONTEXT_WRITER.using(self.db_context):
            fake_payload = self._fake_trunk_payload()
            self._call_operation_object(operation, timing, fake_payload)
            if timing == 'precommit':
                self.db_context.session.flush()

            row = db.get_oldest_pending_db_row_with_lock(self.db_context)

            if timing == 'precommit':
                self.assertEqual(operation, row['operation'])
                self.assertEqual(odl_const.ODL_TRUNK, row['object_type'])
                self.assertEqual(fake_payload.trunk_id, row['object_uuid'])
            elif timing == 'after':
                self.assertIsNone(row)
Example #37
    def _test_event(self, operation, timing, resource_str,
                    object_type):
        with db_api.CONTEXT_WRITER.using(self.db_context):
            context = self._get_mock_operation_context(object_type)
            self._call_operation_object(operation, timing,
                                        resource_str, context)
            if timing == 'precommit':
                self.db_context.session.flush()
            row = db.get_oldest_pending_db_row_with_lock(self.db_context)

            if timing == 'precommit':
                self.assertEqual(operation, row['operation'])
                self.assertEqual(object_type, row['object_type'])
            elif timing == 'after':
                self.assertIsNone(row)
Example #38
    def _test_router_operation(self, event, operation, router, ops=True):
        method = getattr(self.flavor_driver,
                         '_router_%s_%s' % (operation, event))
        if event == 'precommit':
            method(odl_const.ODL_ROUTER, mock.ANY, mock.ANY, **router)
        else:
            payload = events.DBEventPayload(
                router.get('context'), states=(router.get('router_db'),),
                request_body=router.get(resources.ROUTER),
                resource_id=router.get(resources.ROUTER).get('id'))

            method(odl_const.ODL_ROUTER, mock.ANY, mock.ANY, payload=payload)
        row = db.get_oldest_pending_db_row_with_lock(self.db_context)
        if ops:
            if operation in ['del', odl_const.ODL_DELETE]:
                self.assertEqual(router['router_id'], row.object_uuid)
            else:
                self.assertEqual(router['router'], row.data)
            self.assertEqual(_operation_map[operation], row.operation)
        else:
            self.assertIsNone(row)
Example #39
 def test_get_oldest_pending_row_none_when_no_rows(self):
     row = db.get_oldest_pending_db_row_with_lock(self.db_context)
     self.assertIsNone(row)
Example #40
    def sync_pending_row(self, exit_after_run=False):
        # Block until all pending rows are processed
        session = neutron_db_api.get_session()
        while not self.event.is_set():
            self.event.wait()
            # Clear the event and go back to waiting after
            # the sync block exits
            self.event.clear()
            while True:
                LOG.debug("Thread walking database")
                row = db.get_oldest_pending_db_row_with_lock(session)
                if not row:
                    LOG.debug("No rows to sync")
                    break

                # Validate the operation
                validate_func = db.VALIDATION_MAP[row.object_type]
                valid = validate_func(session, row.object_uuid, row.operation,
                                      row.data)
                if not valid:
                    LOG.info(
                        _LI("%(operation)s %(type)s %(uuid)s is not a "
                            "valid operation yet, skipping for now"), {
                                'operation': row.operation,
                                'type': row.object_type,
                                'uuid': row.object_uuid
                            })
                    continue

                LOG.info(
                    _LI("Syncing %(operation)s %(type)s %(uuid)s"), {
                        'operation': row.operation,
                        'type': row.object_type,
                        'uuid': row.object_uuid
                    })

                # Add code to sync this to ODL
                method, urlpath, to_send = self._json_data(row)

                try:
                    self.client.sendjson(method, urlpath, to_send)
                    db.update_processing_db_row_passed(session, row)
                except exceptions.ConnectionError as e:
                    # Don't raise the retry count, just log an error
                    LOG.error(
                        _LE("Cannot connect to the Opendaylight "
                            "Controller"))
                    # Set row back to pending
                    db.update_db_row_pending(session, row)
                    # Break out of the loop and retry with the next
                    # timer interval
                    break
                except Exception as e:
                    LOG.error(
                        _LE("Error syncing %(type)s %(operation)s,"
                            " id %(uuid)s Error: %(error)s"), {
                                'type': row.object_type,
                                'uuid': row.object_uuid,
                                'operation': row.operation,
                                'error': e.message
                            })
                    db.update_pending_db_row_retry(session, row,
                                                   self._row_retry_count)
            LOG.debug("Clearing sync thread event")
            if exit_after_run:
                # Permanently waiting thread model breaks unit tests
                # Adding this arg to exit here only for unit tests
                break
Example #41
 def test_get_oldest_pending_row(self):
     db.create_pending_row(self.db_context, *self.UPDATE_ROW)
     row = db.get_oldest_pending_db_row_with_lock(self.db_context)
     self.assertIsNotNone(row)
     self.assertEqual(odl_const.PROCESSING, row.state)
Example #42
 def test_get_oldest_pending_row_none_when_no_rows(self):
     row = db.get_oldest_pending_db_row_with_lock(self.db_session)
     self.assertIsNone(row)
Example #43
 def test_get_oldest_pending_row(self):
     db.create_pending_row(self.db_session, *self.UPDATE_ROW)
     row = db.get_oldest_pending_db_row_with_lock(self.db_session)
     self.assertIsNotNone(row)
     self.assertEqual(odl_const.PROCESSING, row.state)