Example 1
def _sqlite_dupe_key_error(integrity_error, match, engine_name, is_disconnect):
    """Filter for SQLite duplicate key errors.

    SQLite unique-constraint violation messages vary by version:

    pre-3.7.16:
    1 column - (IntegrityError) column c1 is not unique
    N columns - (IntegrityError) column c1, c2, ..., N are not unique

    3.7.16+:
    1 column - (IntegrityError) UNIQUE constraint failed: tbl.k1
    N columns - (IntegrityError) UNIQUE constraint failed: tbl.k1, tbl.k2

    3.8.2+:
    (IntegrityError) PRIMARY KEY must be unique

    :raises exception.DBDuplicateEntry: always, wrapping *integrity_error*
    """
    # NOTE(ochuprykov): The last-chance filter has no 'columns' group in its
    # regex; match.group() then raises IndexError, in which case we hand an
    # empty column list to exception.DBDuplicateEntry.
    try:
        matched = match.group('columns')
    except IndexError:
        column_names = []
    else:
        # Strip any "table." qualifier, keeping only the column names.
        column_names = [part.split('.')[-1]
                        for part in matched.strip().split(", ")]

    raise exception.DBDuplicateEntry(column_names, integrity_error)
Example 2
    def create(env_template_params, tenant_id):
        """Create an environment template owned by the given tenant.

        :param env_template_params: Dict of template attributes,
            e.g. {'name': 'temp-name'}; mutated in place to receive the
            generated 'id' and the 'tenant_id'
        :param tenant_id: Tenant Id
        :return: Created Template
        :raises db_exc.DBDuplicateEntry: if a template with the same
            name already exists
        """
        env_template_params['id'] = uuidutils.generate_uuid()
        env_template_params['tenant_id'] = tenant_id

        template = models.EnvironmentTemplate()
        template.update(env_template_params)

        session = db_session.get_session()
        with session.begin():
            try:
                session.add(template)
            except db_exc.DBDuplicateEntry:
                msg = _('Environment template specified name already exists')
                LOG.error(msg)
                raise db_exc.DBDuplicateEntry(explanation=msg)

        # The whole parameter dict is stored as the template description.
        template.update({'description': env_template_params})
        template.save(session)

        return template
Example 3
    def _test_duplicate_error_parsing_mysql(self, key):
        """Verify a DBDuplicateEntry on *key* surfaces as a 409 Conflict."""
        ctx = context.RequestContext(
            user_id='fake', project_id='fake')

        request = webob.Request.blank(
            '/resource_providers',
            method='POST',
            content_type='application/json')
        request.body = b'{"name": "foobar"}'
        request.environ['placement.context'] = ctx

        mv = microversion_parse.parse_version_string('1.15')
        mv.max_version = microversion_parse.parse_version_string('9.99')
        mv.min_version = microversion_parse.parse_version_string('1.0')
        request.environ['placement.microversion'] = mv

        # Make the provider-create step fail exactly like a MySQL
        # unique-constraint violation on the given key.
        create_patch = mock.patch(
            'placement.objects.resource_provider.ResourceProvider.create',
            side_effect=db_exc.DBDuplicateEntry(columns=[key]))
        with create_patch:
            response = request.get_response(
                resource_provider.create_resource_provider)

        self.assertEqual('409 Conflict', response.status)
        self.assertIn(
            'Conflicting resource provider name: foobar already exists.',
            response.text)
Example 4
 def test_retry_failsover(self, mock_ens_agg, mock_time):
     """The aggregate-ensuring retry loop gives up after 10 retries.

     Once the retry budget is exhausted the DBDuplicateEntry must
     propagate to the caller.
     """
     provider = self._create_provider('rp1')
     mock_ens_agg.side_effect = db_exc.DBDuplicateEntry()
     self.assertRaises(
         db_exc.DBDuplicateEntry, provider.set_aggregates,
         [uuidsentinel.agg])
     # One initial attempt plus ten retries.
     self.assertEqual(11, mock_ens_agg.call_count)
Example 5
    def test_create_or_update_agent_concurrent_insert(self):
        """A concurrent duplicate insert triggers exactly one retry."""
        # Simulate another worker winning the race on the first insert:
        # the unique-constraint violation must be followed by a retry
        # that succeeds.
        with mock.patch('sqlalchemy.orm.Session.add') as add_mock:
            add_mock.side_effect = [exc.DBDuplicateEntry(), None]
            self.plugin.create_or_update_agent(self.context, self.agent_status)
            self.assertEqual(add_mock.call_count, 2,
                             "Agent entry creation hasn't been retried")
Example 6
    def test_retry_happens(self, mock_ens_agg, mock_time):
        """A single DBDuplicateEntry while ensuring aggregates is retried."""
        provider = self._create_provider('rp1')
        agg_id = self.create_aggregate(uuidsentinel.agg)

        # First attempt collides, second succeeds.
        mock_ens_agg.side_effect = [db_exc.DBDuplicateEntry(), agg_id]
        provider.set_aggregates([uuidsentinel.agg])

        self.assertEqual([uuidsentinel.agg], provider.get_aggregates())
        self.assertEqual(2, mock_ens_agg.call_count)
Example 7
def _default_dupe_key_error(integrity_error, match, engine_name,
                            is_disconnect):
    """Filter for MySQL or Postgresql duplicate key error.

    note(boris-42): In current versions of DB backends unique constraint
    violation messages follow the structure:

    postgres:
    1 column - (IntegrityError) duplicate key value violates unique
               constraint "users_c1_key"
    N columns - (IntegrityError) duplicate key value violates unique
               constraint "name_of_our_constraint"

    mysql since 8.0.19:
    1 column - (IntegrityError) (1062, "Duplicate entry 'value_of_c1' for key
               'table_name.c1'")
    N columns - (IntegrityError) (1062, "Duplicate entry 'values joined
               with -' for key 'table_name.name_of_our_constraint'")

    mysql+mysqldb:
    1 column - (IntegrityError) (1062, "Duplicate entry 'value_of_c1' for key
               'c1'")
    N columns - (IntegrityError) (1062, "Duplicate entry 'values joined
               with -' for key 'name_of_our_constraint'")

    mysql+mysqlconnector:
    1 column - (IntegrityError) 1062 (23000): Duplicate entry 'value_of_c1' for
               key 'c1'
    N columns - (IntegrityError) 1062 (23000): Duplicate entry 'values
               joined with -' for key 'name_of_our_constraint'

    :raises exception.DBDuplicateEntry: always, with whatever column
        names could be recovered from the constraint/key name
    """

    columns = match.group('columns')

    # note(vsergeyev): UniqueConstraint name convention: "uniq_t0c10c2"
    #                  where `t` it is table name and columns `c1`, `c2`
    #                  are in UniqueConstraint.
    uniqbase = "uniq_"
    if not columns.startswith(uniqbase):
        if engine_name == "postgresql":
            # Constraint name looks like "users_c1_key": strip the table
            # prefix and the trailing "_key" suffix to recover the column.
            columns = [columns[columns.index("_") + 1:columns.rindex("_")]]
        elif (engine_name == "mysql") and \
             (uniqbase in str(columns.split("0")[:1])):
            # MySQL 8.0.19+ prefixes the key name with the table name
            # (e.g. "tbl.uniq_t0c10c2"): the "uniq_" marker lands in the
            # first "0"-separated segment, so drop that segment and keep
            # the remaining "0"-separated column names.
            columns = columns.split("0")[1:]
        else:
            # Unknown naming convention - report the raw key name as-is.
            columns = [columns]
    else:
        # Our own "uniq_t0c10c2" convention: drop the prefix and the table
        # name segment, leaving just the column names.
        columns = columns[len(uniqbase):].split("0")[1:]

    # Only some regexes capture the duplicated value; absent group -> None.
    value = match.groupdict().get('value')

    raise exception.DBDuplicateEntry(columns, integrity_error, value)
Example 8
 def _create_in_db(context, updates):
     """Create a RESOURCE_CLASS node from *updates* and return it.

     :param context: context whose ``tx`` attribute runs graph queries
     :param updates: dict with at least a "name" key; optional
         "created_at"/"updated_at" entries override the DB-side
         timestamp() default
     :raises db_exc.DBDuplicateEntry: if the create fails with a
         ClientError (presumably a uniqueness violation - confirm)
     """
     # Defaults are the literal text "timestamp()" so the database itself
     # evaluates the timestamp; caller-supplied values are interpolated
     # verbatim and unquoted, so they must already be valid query literals.
     created_at = updates.get("created_at", "timestamp()")
     updated_at = updates.get("updated_at", "timestamp()")
     # NOTE(review): %-interpolation builds the query from raw input - a
     # name containing a quote breaks the query (injection risk). Consider
     # the driver's parameterized-query support for the "name" value.
     query = """
             CREATE (rc:RESOURCE_CLASS {name: '%s', created_at: %s,
                 updated_at: %s})
             RETURN rc
     """ % (updates["name"], created_at, updated_at)
     try:
         result = context.tx.run(query).data()
     except db.ClientError:
         # Translate the driver error into oslo.db's exception for callers.
         raise db_exc.DBDuplicateEntry()
     return result[0]["rc"]
Example 9
    def test_discover_services_duplicate(self, mock_srv, mock_hm_create,
                                         mock_hm_get, mock_cm):
        """Duplicate host-mapping creation raises HostMappingExists."""
        mock_cm.return_value = [objects.CellMapping(name='foo', uuid=uuids.cm)]
        mock_srv.return_value = [objects.Service(host='bar')]
        # The mapping lookup misses, but creating it then collides with a
        # concurrent writer.
        mock_hm_get.side_effect = exception.HostMappingNotFound(name='bar')
        mock_hm_create.side_effect = db_exc.DBDuplicateEntry()

        ctxt = context.get_admin_context()
        caught = self.assertRaises(exception.HostMappingExists,
                                   host_mapping.discover_hosts,
                                   ctxt,
                                   by_service=True)
        self.assertIn("Host 'bar' mapping already exists", str(caught))
Example 10
    def test_create_or_update_agent_concurrent_insert(self):
        """A concurrent duplicate insert is retried once and succeeds."""
        # Emulate a unique-constraint violation caused by a concurrent
        # insert and ensure another attempt is made on failure.
        mock.patch('neutron.objects.base.NeutronDbObject.modify_fields_from_db'
                   ).start()
        mock.patch.object(self.context.session, 'expunge').start()

        create_patch = mock.patch('neutron.objects.db.api.create_object')
        with create_patch as add_mock:
            add_mock.side_effect = [exc.DBDuplicateEntry(), mock.Mock()]
            self.plugin.create_or_update_agent(self.context, self.agent_status)
            self.assertEqual(add_mock.call_count, 2,
                             "Agent entry creation hasn't been retried")
Example 11
def _db2_dupe_key_error(integrity_error, match, engine_name, is_disconnect):
    """Filter for DB2 duplicate key errors.

    N columns - (IntegrityError) SQL0803N  One or more values in the INSERT
                statement, UPDATE statement, or foreign key update caused by a
                DELETE statement are not valid because the primary key, unique
                constraint or unique index identified by "2" constrains table
                "NOVA.KEY_PAIRS" from having duplicate values for the index
                key.

    :raises exception.DBDuplicateEntry: always, wrapping *integrity_error*
        with an empty column list
    """

    # NOTE(mriedem): The ibm_db_sa integrity error message doesn't provide the
    # columns so we have to omit that from the DBDuplicateEntry error.
    raise exception.DBDuplicateEntry([], integrity_error)
Example 12
 def _create_ha_network_tenant_binding(self, context, tenant_id,
                                       network_id):
     """Create the per-tenant HA network object, guarding against races.

     L3HARouterNetwork has no constraint limiting a tenant to a single
     network, so after creating ours we re-count and fail if a
     concurrent worker created one as well.
     """
     net = l3_hamode.L3HARouterNetwork(
         context, project_id=tenant_id, network_id=network_id)
     net.create()
     tenant_net_count = l3_hamode.L3HARouterNetwork.count(
         context, project_id=tenant_id)
     if tenant_net_count > 1:
         # Throw an error so our network is deleted and the process is
         # started over, where the existing network will be selected.
         raise db_exc.DBDuplicateEntry(columns=['tenant_id'])
     return net
Example 13
 def _create_ha_network_tenant_binding(self, context, tenant_id,
                                       network_id):
     """Create an L3HARouterNetwork row binding the tenant to the network.

     :raises db_exc.DBDuplicateEntry: if a concurrent worker created
         another HA network for the same tenant
     """
     # Commit the new row first so a racing worker's query can see it.
     with context.session.begin():
         ha_network = L3HARouterNetwork(tenant_id=tenant_id,
                                        network_id=network_id)
         context.session.add(ha_network)
     # We need to check if someone else just inserted at exactly the
     # same time as us because there is no constraint in L3HARouterNetwork
     # that prevents multiple networks per tenant.
     with context.session.begin(subtransactions=True):
         items = (context.session.query(L3HARouterNetwork).filter_by(
             tenant_id=tenant_id).all())
         if len(items) > 1:
             # We need to throw an error so our network is deleted
             # and the process is started over, where the existing
             # network will be selected.
             raise db_exc.DBDuplicateEntry(columns=['tenant_id'])
     return ha_network
Example 14
 def _create_in_db(context, updates):
     """Create a TRAIT node from *updates* and return its properties.

     :param context: context whose ``tx`` attribute runs graph queries
     :param updates: dict of trait properties; "created_at"/"updated_at"
         default to the DB-side timestamp() when absent
     :raises db_exc.DBDuplicateEntry: if the create fails with a
         ClientError (presumably a uniqueness violation - confirm)
     """
     upd_list = []
     for key, val in updates.items():
         # String values are quoted; everything else (numbers, DB-side
         # expressions such as timestamp()) is emitted verbatim.
         if isinstance(val, six.string_types):
             upd_list.append("%s: '%s'" % (key, val))
         else:
             upd_list.append("%s: %s" % (key, val))
     if "created_at" not in updates:
         upd_list.append("created_at: timestamp()")
     if "updated_at" not in updates:
         upd_list.append("updated_at: timestamp()")
     upd_clause = ", ".join(upd_list)
     # NOTE(review): values are interpolated unescaped - a string value
     # containing a quote breaks the query (injection risk). Consider the
     # driver's parameterized-query support.
     query = """
     CREATE (trait:TRAIT {%s})
     RETURN trait
     """ % upd_clause
     try:
         result = context.tx.run(query).data()
     except db.ClientError as e:
         # Bug fix: the exception was previously passed positionally,
         # which bound it to DBDuplicateEntry's `columns` parameter
         # instead of the wrapped inner exception.
         raise db_exc.DBDuplicateEntry(inner_exception=e)
     return db.pythonize(result[0]["trait"])
Example 15
 def create_obj_side_effect(obj_cls, context, values, populate_id=True):
     """Raise DBDuplicateEntry on the first call, succeed afterwards.

     Tracks invocations via the enclosing ``counter`` dict.
     """
     first_call = counter['value'] < 1
     if first_call:
         counter['value'] += 1
         raise exc.DBDuplicateEntry()
     obj_cls.id = uuidutils.generate_uuid()
Example 16
 def _create_ha_network(*args, **kwargs):
     """Create the network, then fail as if another worker beat us to it."""
     # Record the really-created network in the enclosing list, then raise
     # to simulate a concurrent worker having created one before us.
     created_nets.append(orig_create(*args, **kwargs))
     raise db_exc.DBDuplicateEntry(columns=['tenant_id'])