Code Example #1
File: exc_filters.py Project: varunarya10/oslo.db
def _sqlite_dupe_key_error(integrity_error, match, engine_name, is_disconnect):
    """Filter for SQLite duplicate key error.

    note(boris-42): In current versions of DB backends unique constraint
    violation messages follow the structure:

    sqlite:
    1 column - (IntegrityError) column c1 is not unique
    N columns - (IntegrityError) column c1, c2, ..., N are not unique

    sqlite since 3.7.16:
    1 column - (IntegrityError) UNIQUE constraint failed: tbl.k1
    N columns - (IntegrityError) UNIQUE constraint failed: tbl.k1, tbl.k2

    sqlite since 3.8.2:
    (IntegrityError) PRIMARY KEY must be unique

    """
    columns = []
    # NOTE(ochuprykov): We can get here by last filter in which there are no
    #                   groups. Trying to access the substring that matched by
    #                   the group will lead to IndexError. In this case just
    #                   pass empty list to exception.DBDuplicateEntry
    try:
        columns = match.group('columns')
        columns = [c.split('.')[-1] for c in columns.strip().split(", ")]
    except IndexError:
        pass

    raise exception.DBDuplicateEntry(columns, integrity_error)
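
For reference, here is a minimal sketch of patterns that would produce the match object this filter expects, modeled on the messages listed in the docstring; the patterns actually registered in oslo.db's exc_filters may differ in detail.

import re

# Simplified, assumed patterns modeled on the messages in the docstring above.
_SQLITE_DUP_PATTERNS = [
    re.compile(r"^.*column (?P<columns>.+?) (?:is|are) not unique$"),
    re.compile(r"^.*UNIQUE constraint failed: (?P<columns>.+)$"),
    re.compile(r"^.*PRIMARY KEY must be unique$"),  # no 'columns' group
]


def parse_sqlite_duplicate_columns(message):
    """Mimic the column extraction done by _sqlite_dupe_key_error."""
    for pattern in _SQLITE_DUP_PATTERNS:
        match = pattern.match(message)
        if not match:
            continue
        try:
            columns = match.group('columns')
            return [c.split('.')[-1] for c in columns.strip().split(", ")]
        except IndexError:
            # Matched the catch-all pattern that has no 'columns' group.
            return []
    return None


# "(IntegrityError) UNIQUE constraint failed: tbl.k1, tbl.k2" -> ['k1', 'k2']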
Code Example #2
File: test_ovs_db.py Project: yangxu2014/neutron
    def test_add_tunnel_endpoint_handle_duplicate_error(self):
        with mock.patch.object(session.Session, 'query') as query_mock:
            error = db_exc.DBDuplicateEntry(['id'])
            query_mock.side_effect = error

            with testtools.ExpectedException(n_exc.NeutronException):
                ovs_db_v2.add_tunnel_endpoint('10.0.0.1', 5)
            self.assertEqual(query_mock.call_count, 5)
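
The behaviour asserted here is a bounded retry on DBDuplicateEntry. A minimal sketch of that pattern, assuming the oslo_db namespace; add_with_retry and create_fn are illustrative names, not Neutron's add_tunnel_endpoint implementation.

from oslo_db import exception as db_exc


def add_with_retry(create_fn, max_retries=5):
    # Illustrative retry-on-duplicate loop.
    for _ in range(max_retries):
        try:
            return create_fn()
        except db_exc.DBDuplicateEntry:
            # Another worker inserted the same row concurrently; try again.
            continue
    # Translate the exhausted retries into an application-level error
    # (the test above expects a NeutronException subclass here).
    raise RuntimeError("gave up after %d duplicate-entry failures" % max_retries)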
Code Example #3
    def test_create_or_update_agent_concurrent_insert(self):
        # NOTE(rpodolyaka): emulate violation of the unique constraint caused
        #                   by a concurrent insert. Ensure we make another
        #                   attempt on fail
        with mock.patch('sqlalchemy.orm.Session.add') as add_mock:
            add_mock.side_effect = [exc.DBDuplicateEntry(), None]

            self.plugin.create_or_update_agent(self.context, self.agent_status)

            self.assertEqual(add_mock.call_count, 2,
                             "Agent entry creation hasn't been retried")
Code Example #4
def _db2_dupe_key_error(integrity_error, match, engine_name, is_disconnect):
    """Filter for DB2 duplicate key errors.

    N columns - (IntegrityError) SQL0803N  One or more values in the INSERT
                statement, UPDATE statement, or foreign key update caused by a
                DELETE statement are not valid because the primary key, unique
                constraint or unique index identified by "2" constrains table
                "NOVA.KEY_PAIRS" from having duplicate values for the index
                key.

    """

    # NOTE(mriedem): The ibm_db_sa integrity error message doesn't provide the
    # columns so we have to omit that from the DBDuplicateEntry error.
    raise exception.DBDuplicateEntry([], integrity_error)
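
Because this filter passes an empty columns list, callers that inspect DBDuplicateEntry need a generic fallback. A minimal sketch, assuming the exception exposes the columns it was constructed with; handle_duplicate and create_fn are illustrative names.

from oslo_db import exception as db_exc


def handle_duplicate(create_fn):
    # create_fn is a hypothetical callable that performs the insert.
    try:
        return create_fn()
    except db_exc.DBDuplicateEntry as e:
        if e.columns:
            # Most backends name the offending columns.
            return "duplicate value for columns: %s" % ", ".join(e.columns)
        # DB2 (and the catch-all SQLite filter) pass an empty list, so
        # fall back to a generic message.
        return "duplicate entry"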
Code Example #5
def _default_dupe_key_error(integrity_error, match, engine_name,
                            is_disconnect):
    """Filter for MySQL or Postgresql duplicate key error.

    note(boris-42): In current versions of DB backends unique constraint
    violation messages follow the structure:

    postgres:
    1 column - (IntegrityError) duplicate key value violates unique
               constraint "users_c1_key"
    N columns - (IntegrityError) duplicate key value violates unique
               constraint "name_of_our_constraint"

    mysql+mysqldb:
    1 column - (IntegrityError) (1062, "Duplicate entry 'value_of_c1' for key
               'c1'")
    N columns - (IntegrityError) (1062, "Duplicate entry 'values joined
               with -' for key 'name_of_our_constraint'")

    mysql+mysqlconnector:
    1 column - (IntegrityError) 1062 (23000): Duplicate entry 'value_of_c1' for
               key 'c1'
    N columns - (IntegrityError) 1062 (23000): Duplicate entry 'values
               joined with -' for key 'name_of_our_constraint'

    """

    columns = match.group('columns')

    # note(vsergeyev): UniqueConstraint name convention: "uniq_t0c10c2"
    #                  where `t` is the table name and `c1`, `c2` are the
    #                  columns in the UniqueConstraint.
    uniqbase = "uniq_"
    if not columns.startswith(uniqbase):
        if engine_name == "postgresql":
            columns = [columns[columns.index("_") + 1:columns.rindex("_")]]
        else:
            columns = [columns]
    else:
        columns = columns[len(uniqbase):].split("0")[1:]

    value = match.groupdict().get('value')

    raise exception.DBDuplicateEntry(columns, integrity_error, value)
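
The constraint-name handling above can be checked with a small standalone sketch; the inputs are assumed examples that follow the naming convention described in the comment.

def parse_constraint_name(columns, engine_name):
    # Same branch logic as above, extracted for illustration.
    uniqbase = "uniq_"
    if not columns.startswith(uniqbase):
        if engine_name == "postgresql":
            # e.g. "users_c1_key" -> ["c1"]
            return [columns[columns.index("_") + 1:columns.rindex("_")]]
        # MySQL reports the key/constraint name as-is.
        return [columns]
    # e.g. "uniq_user0name0tenant_id" -> ["name", "tenant_id"]
    return columns[len(uniqbase):].split("0")[1:]


assert parse_constraint_name("uniq_user0name0tenant_id", "mysql") == ["name", "tenant_id"]
assert parse_constraint_name("users_c1_key", "postgresql") == ["c1"]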
Code Example #6
def _sqlite_dupe_key_error(integrity_error, match, engine_name, is_disconnect):
    """Filter for SQLite duplicate key error.

    note(boris-42): In current versions of DB backends unique constraint
    violation messages follow the structure:

    sqlite:
    1 column - (IntegrityError) column c1 is not unique
    N columns - (IntegrityError) column c1, c2, ..., N are not unique

    sqlite since 3.7.16:
    1 column - (IntegrityError) UNIQUE constraint failed: tbl.k1
    N columns - (IntegrityError) UNIQUE constraint failed: tbl.k1, tbl.k2

    """
    columns = match.group('columns')
    columns = [c.split('.')[-1] for c in columns.strip().split(", ")]
    raise exception.DBDuplicateEntry(columns, integrity_error)
Code Example #7
def _raise_if_duplicate_entry_error(integrity_error, engine_name):
    """Raise exception if two entries are duplicated.

    In this function will be raised DBDuplicateEntry exception if integrity
    error wrap unique constraint violation.
    """
    def get_columns_from_uniq_cons_or_name(columns):
        # note(vsergeyev): UniqueConstraint name convention: "uniq_t0c10c2"
        #                  where `t` is the table name and `c1`, `c2` are the
        #                  columns in the UniqueConstraint.
        uniqbase = "uniq_"
        if not columns.startswith(uniqbase):
            if engine_name == "postgresql":
                return [columns[columns.index("_") + 1:columns.rindex("_")]]
            return [columns]
        return columns[len(uniqbase):].split("0")[1:]

    if engine_name not in ("ibm_db_sa", "mysql", "sqlite", "postgresql"):
        return

    # FIXME(johannes): The usage of the .message attribute has been
    # deprecated since Python 2.6. However, the exceptions raised by
    # SQLAlchemy can differ when using unicode() and accessing .message.
    # An audit across all three supported engines will be necessary to
    # ensure there are no regressions.
    for pattern in _DUP_KEY_RE_DB[engine_name]:
        match = pattern.match(integrity_error.message)
        if match:
            break
    else:
        return

    # NOTE(mriedem): The ibm_db_sa integrity error message doesn't provide the
    # columns so we have to omit that from the DBDuplicateEntry error.
    columns = ''

    if engine_name != 'ibm_db_sa':
        columns = match.group(1)

    if engine_name == "sqlite":
        columns = [c.split('.')[-1] for c in columns.strip().split(", ")]
    else:
        columns = get_columns_from_uniq_cons_or_name(columns)
    raise exception.DBDuplicateEntry(columns, integrity_error)
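
A plausible shape for _DUP_KEY_RE_DB, inferred from how the function above iterates over it and reads group(1); the real patterns in oslo.db differ and cover more message variants.

import re

_DUP_KEY_RE_DB = {
    "sqlite": [
        re.compile(r"^.*column (.+?) (?:is|are) not unique$"),
        re.compile(r"^.*UNIQUE constraint failed: (.+)$"),
    ],
    "postgresql": [
        re.compile(r'^.*duplicate key value violates unique constraint "(.+)"'),
    ],
    "mysql": [
        re.compile(r"^.*Duplicate entry '.*' for key '(.+?)'"),
    ],
    # ibm_db_sa messages are matched but no column group is extracted.
    "ibm_db_sa": [
        re.compile(r"^.*SQL0803N"),
    ],
}

m = _DUP_KEY_RE_DB["postgresql"][0].match(
    '(IntegrityError) duplicate key value violates unique constraint "users_c1_key"')
assert m and m.group(1) == "users_c1_key"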