Example #1
class IPv6Address(types.TypeDecorator):
	"""
	Hybrid IPv6 address.
	"""
	impl = types.BINARY(16)

	def load_dialect_impl(self, dialect):
		if _is_pgsql(dialect):
			return postgresql.INET()
		return self.impl

	@property
	def python_type(self):
		return ipaddr.IPv6Address

	def process_bind_param(self, value, dialect):
		if value is None:
			return None
		if _is_pgsql(dialect):
			return str(value)
		return value.packed

	def process_result_value(self, value, dialect):
		if value is None:
			return None
		return ipaddr.IPv6Address(value)
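
A minimal usage sketch for the type above, assuming the _is_pgsql helper and the ipaddr module used by the snippet are importable; the Host model and table name are hypothetical illustrations, not part of the original project:

# Hypothetical usage sketch: map a column with the hybrid IPv6Address type above.
from sqlalchemy import Column, Integer
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Host(Base):
    __tablename__ = 'hosts'

    id = Column(Integer, primary_key=True)
    # Stored as INET on PostgreSQL and as BINARY(16) on other backends.
    addr = Column(IPv6Address, nullable=False)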
Example #2
 def load_dialect_impl(self, dialect):
     if dialect.name == 'postgresql' and self.native:
         # Use the native UUID type.
         return dialect.type_descriptor(postgresql.UUID())
     else:
         # Fall back to a plain BINARY(16).
         kind = types.BINARY(16)
         return dialect.type_descriptor(kind)
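
A hedged sketch of what this dialect switch yields at DDL time, assuming the method belongs to a TypeDecorator such as the UUIDType shown in later examples; the printed type names are indicative only:

# Hypothetical check of the per-dialect column type chosen by such a decorator.
from sqlalchemy.dialects import postgresql, sqlite
from sqlalchemy_utils import UUIDType  # the full class appears in a later example

uuid_type = UUIDType()
print(uuid_type.compile(dialect=postgresql.dialect()))  # e.g. UUID
print(uuid_type.compile(dialect=sqlite.dialect()))      # e.g. BINARY(16)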
Example #3
    def test_binary(self):
        testobj1 = pickleable.Foo('im foo 1')
        testobj2 = pickleable.Foo('im foo 2')
        testobj3 = pickleable.Foo('im foo 3')
        stream1 = self.load_stream('binary_data_one.dat')
        stream2 = self.load_stream('binary_data_two.dat')
        binary_table.insert().execute(
            primary_id=1,
            misc='binary_data_one.dat',
            data=stream1,
            data_image=stream1,
            data_slice=stream1[0:100],
            pickled=testobj1,
            mypickle=testobj3,
        )
        binary_table.insert().execute(
            primary_id=2,
            misc='binary_data_two.dat',
            data=stream2,
            data_image=stream2,
            data_slice=stream2[0:99],
            pickled=testobj2,
        )

        # TODO: pyodbc does not seem to accept "None" for a VARBINARY
        # column (data=None). error:  [Microsoft][ODBC SQL Server
        # Driver][SQL Server]Implicit conversion from data type varchar
        # to varbinary is not allowed. Use the CONVERT function to run
        # this query. (257) binary_table.insert().execute(primary_id=3,
        # misc='binary_data_two.dat', data=None, data_image=None,
        # data_slice=stream2[0:99], pickled=None)

        binary_table.insert().execute(
            primary_id=3,
            misc='binary_data_two.dat', data_image=None,
            data_slice=stream2[0:99], pickled=None)
        for stmt in \
            binary_table.select(order_by=binary_table.c.primary_id), \
                text(
                    'select * from binary_table order by '
                    'binary_table.primary_id',
                    typemap=dict(
                        data=mssql.MSVarBinary(8000),
                        data_image=mssql.MSImage,
                        data_slice=types.BINARY(100), pickled=PickleType,
                        mypickle=MyPickleType),
                    bind=testing.db):
            l = stmt.execute().fetchall()
            eq_(list(stream1), list(l[0]['data']))
            paddedstream = list(stream1[0:100])
            paddedstream.extend(['\x00'] * (100 - len(paddedstream)))
            eq_(paddedstream, list(l[0]['data_slice']))
            eq_(list(stream2), list(l[1]['data']))
            eq_(list(stream2), list(l[1]['data_image']))
            eq_(testobj1, l[0]['pickled'])
            eq_(testobj2, l[1]['pickled'])
            eq_(testobj3.moredata, l[0]['mypickle'].moredata)
            eq_(l[0]['mypickle'].stuff, 'this is the right stuff')
Example #4
    def _test_binary(self, deprecate_large_types):
        testobj1 = pickleable.Foo('im foo 1')
        testobj2 = pickleable.Foo('im foo 2')
        testobj3 = pickleable.Foo('im foo 3')
        stream1 = self._load_stream('binary_data_one.dat')
        stream2 = self._load_stream('binary_data_two.dat')
        engine = engines.testing_engine(
            options={"deprecate_large_types": deprecate_large_types})

        binary_table = self._fixture(engine)

        with engine.connect() as conn:
            conn.execute(
                binary_table.insert(),
                primary_id=1,
                misc='binary_data_one.dat',
                data=stream1,
                data_image=stream1,
                data_slice=stream1[0:100],
                pickled=testobj1,
                mypickle=testobj3,
            )
            conn.execute(
                binary_table.insert(),
                primary_id=2,
                misc='binary_data_two.dat',
                data=stream2,
                data_image=stream2,
                data_slice=stream2[0:99],
                pickled=testobj2,
            )

        for stmt in \
            binary_table.select(order_by=binary_table.c.primary_id), \
                text(
                    'select * from binary_table order by '
                    'binary_table.primary_id',
                    typemap=dict(
                        data=mssql.MSVarBinary(8000),
                        data_image=mssql.MSImage,
                        data_slice=types.BINARY(100), pickled=PickleType,
                        mypickle=MyPickleType),
                    bind=testing.db):
            with engine.connect() as conn:
                result = conn.execute(stmt).fetchall()
            eq_(list(stream1), list(result[0]['data']))
            paddedstream = list(stream1[0:100])
            paddedstream.extend(['\x00'] * (100 - len(paddedstream)))
            eq_(paddedstream, list(result[0]['data_slice']))
            eq_(list(stream2), list(result[1]['data']))
            eq_(list(stream2), list(result[1]['data_image']))
            eq_(testobj1, result[0]['pickled'])
            eq_(testobj2, result[1]['pickled'])
            eq_(testobj3.moredata, result[0]['mypickle'].moredata)
            eq_(result[0]['mypickle'].stuff, 'this is the right stuff')
Example #5
    def test_basic_reflection(self):
        meta = self.metadata

        users = Table(
            "engine_users",
            meta,
            Column("user_id", types.INT, primary_key=True),
            Column("user_name", types.VARCHAR(20), nullable=False),
            Column("test1", types.CHAR(5), nullable=False),
            Column("test2", types.Float(5), nullable=False),
            Column("test2.5", types.Float(), nullable=False),
            Column("test3", types.Text()),
            Column("test4", types.Numeric, nullable=False),
            Column("test4.5", types.Numeric(10, 2), nullable=False),
            Column("test5", types.DateTime),
            Column(
                "parent_user_id",
                types.Integer,
                ForeignKey("engine_users.user_id"),
            ),
            Column("test6", types.DateTime, nullable=False),
            Column("test7", types.Text()),
            Column("test8", types.LargeBinary()),
            Column("test_passivedefault2", types.Integer, server_default="5"),
            Column("test9", types.BINARY(100)),
            Column("test_numeric", types.Numeric()),
        )

        addresses = Table(
            "engine_email_addresses",
            meta,
            Column("address_id", types.Integer, primary_key=True),
            Column("remote_user_id", types.Integer,
                   ForeignKey(users.c.user_id)),
            Column("email_address", types.String(20)),
        )
        meta.create_all()

        meta2 = MetaData()
        reflected_users = Table("engine_users",
                                meta2,
                                autoload=True,
                                autoload_with=testing.db)
        reflected_addresses = Table(
            "engine_email_addresses",
            meta2,
            autoload=True,
            autoload_with=testing.db,
        )
        self.assert_tables_equal(users, reflected_users)
        self.assert_tables_equal(addresses, reflected_addresses)
Example #6
 def _fixture(self, engine):
     self.binary_table = binary_table = Table(
         'binary_table',
         MetaData(),
         Column('primary_id', Integer, Sequence('binary_id_seq',
                optional=True), primary_key=True),
         Column('data', mssql.MSVarBinary(8000)),
         Column('data_image', mssql.MSImage),
         Column('data_slice', types.BINARY(100)),
         Column('misc', String(30)),
         Column('pickled', PickleType),
         Column('mypickle', MyPickleType),
     )
     binary_table.create(engine)
     return binary_table
Example #7
class SwitchesState(Model):

    __tablename__ = 'switches'

    id = Column(types.Integer(), primary_key=True)
    key = Column(types.BINARY(length=16), nullable=False, unique=True)
    one = Column(types.Boolean(), nullable=False)
    two = Column(types.Boolean(), nullable=False)
    touched = Column(types.TIMESTAMP(timezone=True),
                     nullable=False,
                     server_default=sql.func.now(),
                     onupdate=sql.func.now())

    def __init__(self):
        self.key = uuid4().bytes
        self.one = False
        self.two = False
Example #8
    def test_binary(self):
        testobj1 = pickleable.Foo('im foo 1')
        testobj2 = pickleable.Foo('im foo 2')
        testobj3 = pickleable.Foo('im foo 3')
        stream1 = self.load_stream('binary_data_one.dat')
        stream2 = self.load_stream('binary_data_two.dat')
        binary_table.insert().execute(
            primary_id=1,
            misc='binary_data_one.dat',
            data=stream1,
            data_image=stream1,
            data_slice=stream1[0:100],
            pickled=testobj1,
            mypickle=testobj3,
        )
        binary_table.insert().execute(
            primary_id=2,
            misc='binary_data_two.dat',
            data=stream2,
            data_image=stream2,
            data_slice=stream2[0:99],
            pickled=testobj2,
        )

        for stmt in \
            binary_table.select(order_by=binary_table.c.primary_id), \
                text(
                    'select * from binary_table order by '
                    'binary_table.primary_id',
                    typemap=dict(
                        data=mssql.MSVarBinary(8000),
                        data_image=mssql.MSImage,
                        data_slice=types.BINARY(100), pickled=PickleType,
                        mypickle=MyPickleType),
                    bind=testing.db):
            l = stmt.execute().fetchall()
            eq_(list(stream1), list(l[0]['data']))
            paddedstream = list(stream1[0:100])
            paddedstream.extend(['\x00'] * (100 - len(paddedstream)))
            eq_(paddedstream, list(l[0]['data_slice']))
            eq_(list(stream2), list(l[1]['data']))
            eq_(list(stream2), list(l[1]['data_image']))
            eq_(testobj1, l[0]['pickled'])
            eq_(testobj2, l[1]['pickled'])
            eq_(testobj3.moredata, l[0]['mypickle'].moredata)
            eq_(l[0]['mypickle'].stuff, 'this is the right stuff')
Example #9
    def test_basic_reflection(self):
        meta = self.metadata

        users = Table(
            'engine_users',
            meta,
            Column('user_id', types.INT, primary_key=True),
            Column('user_name', types.VARCHAR(20), nullable=False),
            Column('test1', types.CHAR(5), nullable=False),
            Column('test2', types.Float(5), nullable=False),
            Column('test3', types.Text()),
            Column('test4', types.Numeric, nullable=False),
            Column('test5', types.DateTime),
            Column('parent_user_id', types.Integer,
                   ForeignKey('engine_users.user_id')),
            Column('test6', types.DateTime, nullable=False),
            Column('test7', types.Text()),
            Column('test8', types.LargeBinary()),
            Column('test_passivedefault2', types.Integer, server_default='5'),
            Column('test9', types.BINARY(100)),
            Column('test_numeric', types.Numeric()),
        )

        addresses = Table(
            'engine_email_addresses',
            meta,
            Column('address_id', types.Integer, primary_key=True),
            Column('remote_user_id', types.Integer,
                   ForeignKey(users.c.user_id)),
            Column('email_address', types.String(20)),
        )
        meta.create_all()

        meta2 = MetaData()
        reflected_users = Table('engine_users',
                                meta2,
                                autoload=True,
                                autoload_with=testing.db)
        reflected_addresses = Table('engine_email_addresses',
                                    meta2,
                                    autoload=True,
                                    autoload_with=testing.db)
        self.assert_tables_equal(users, reflected_users)
        self.assert_tables_equal(addresses, reflected_addresses)
Example #10
    def _test_binary_none(self, deprecate_large_types):
        engine = engines.testing_engine(
            options={"deprecate_large_types": deprecate_large_types})

        binary_table = self._fixture(engine)

        stream2 = self._load_stream('binary_data_two.dat')

        with engine.connect() as conn:
            conn.execute(
                binary_table.insert(),
                primary_id=3,
                misc='binary_data_two.dat', data_image=None,
                data_slice=stream2[0:99], pickled=None)
            for stmt in \
                binary_table.select(), \
                    text(
                        'select * from binary_table',
                        typemap=dict(
                            data=mssql.MSVarBinary(8000),
                            data_image=mssql.MSImage,
                            data_slice=types.BINARY(100),
                            pickled=PickleType,
                            mypickle=MyPickleType),
                        bind=testing.db):
                row = conn.execute(stmt).first()
                eq_(
                    row['pickled'], None
                )
                eq_(
                    row['data_image'], None
                )

                # the type we used here is 100 bytes
                # so we will get 100 bytes zero-padded
                paddedstream = list(stream2[0:99])
                if util.py3k:
                    paddedstream.extend([0] * (100 - len(paddedstream)))
                else:
                    paddedstream.extend(['\x00'] * (100 - len(paddedstream)))
                eq_(
                    list(row['data_slice']), paddedstream
                )
Example #11
class Ip(UserDefinedType):
    # http://docs.sqlalchemy.org/en/latest/_modules/examples/postgis/postgis.html
    # http://docs.sqlalchemy.org/en/latest/core/custom_types.html#creating-new-types
    # http://sqlalchemy-utils.readthedocs.io/en/latest/_modules/sqlalchemy_utils/types/uuid.html
    # https://github.com/zzzeek/sqlalchemy/blob/master/lib/sqlalchemy/sql/sqltypes.py#L852

    impl = types.BINARY(16)

    def __init__(self, version=4):
        self.version = version

    def get_col_spec(self, **kw):
        return "IP"

    def bind_processor(self, dialect):

        DBAPIBinary = dialect.dbapi.Binary

        def process(value):
            if self.version == 6:
                value = socket.inet_pton(socket.AF_INET6, value)
            else:
                value = socket.inet_pton(socket.AF_INET, value)

            return DBAPIBinary(value)

        return process

    def result_processor(self, dialect, coltype):
        def process(value):
            # inet_ntop needs the address family that matches the packed value.
            family = socket.AF_INET6 if self.version == 6 else socket.AF_INET
            return socket.inet_ntop(family, bytes(value))

        return process

    @property
    def python_type(self):
        return self.impl.type.python_type
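
A short, hedged sketch of attaching the Ip type above to a table; the table and column names are invented for illustration, and the target database must actually accept the IP column type emitted by get_col_spec():

from sqlalchemy import Column, Integer, MetaData, Table

metadata = MetaData()

# Hypothetical table using the Ip UserDefinedType defined above.
hosts = Table(
    'hosts', metadata,
    Column('id', Integer, primary_key=True),
    # Values are packed with socket.inet_pton on bind and unpacked on fetch.
    Column('address', Ip(version=6)),
)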
Example #12
 },
 {
     'name': 'datetime',
     'type': types.DATETIME(),
     'nullable': True,
     'default': None
 },
 {
     'name': 'time',
     'type': types.TIME(),
     'nullable': True,
     'default': None
 },
 {
     'name': 'bytes',
     'type': types.BINARY(),
     'nullable': True,
     'default': None
 },
 {
     'name': 'record.name',
     'type': types.String(),
     'nullable': True,
     'default': None
 },
 {
     'name': 'record.age',
     'type': types.Integer(),
     'nullable': True,
     'default': None
 },
Example #13
    datetime.datetime(2013, 10, 10, 11, 27, 16),
    datetime.time(11, 27, 16),
    'test_bytes'
]

SAMPLE_COLUMNS = [
    {'name': 'integer', 'type': types.Integer(), 'nullable': True, 'default': None},
    {'name': 'timestamp', 'type': types.TIMESTAMP(), 'nullable': True, 'default': None},
    {'name': 'string', 'type': types.String(), 'nullable': True, 'default': None},
    {'name': 'float', 'type': types.Float(), 'nullable': True, 'default': None},
    {'name': 'numeric', 'type': types.DECIMAL(), 'nullable': True, 'default': None},
    {'name': 'boolean', 'type': types.Boolean(), 'nullable': True, 'default': None},
    {'name': 'date', 'type': types.DATE(), 'nullable': True, 'default': None},
    {'name': 'datetime', 'type': types.DATETIME(), 'nullable': True, 'default': None},
    {'name': 'time', 'type': types.TIME(), 'nullable': True, 'default': None},
    {'name': 'bytes', 'type': types.BINARY(), 'nullable': True, 'default': None},
    {'name': 'record', 'type': types.JSON(), 'nullable': True, 'default': None},
    {'name': 'record.name', 'type': types.String(), 'nullable': True, 'default': None},
    {'name': 'record.age', 'type': types.Integer(), 'nullable': True, 'default': None},
    {'name': 'nested_record', 'type': types.JSON(), 'nullable': True, 'default': None},
    {'name': 'nested_record.record', 'type': types.JSON(), 'nullable': True, 'default': None},
    {'name': 'nested_record.record.name', 'type': types.String(), 'nullable': True, 'default': None},
    {'name': 'nested_record.record.age', 'type': types.Integer(), 'nullable': True, 'default': None},
    {'name': 'array', 'type': types.ARRAY(types.Integer()), 'nullable': True, 'default': None},
]


@pytest.fixture(scope='session')
def engine():
    engine = create_engine('bigquery://', echo=True)
    return engine
Example #14
 def load_dialect_impl(self, dialect):
     if dialect.name == 'postgresql':
         return dialect.type_descriptor(UUID())
     else:
         return dialect.type_descriptor(types.BINARY(16))
Example #15
class UUIDType(ScalarCoercible, types.TypeDecorator):
    """
    Stores a UUID in the database natively when it can and falls back to
    a BINARY(16) or a CHAR(32) when it can't.

    ::

        from sqlalchemy_utils import UUIDType
        import uuid

        class User(Base):
            __tablename__ = 'user'

            # Pass `binary=False` to fallback to CHAR instead of BINARY
            id = sa.Column(UUIDType(binary=False), primary_key=True, default=uuid.uuid4)
    """
    impl = types.BINARY(16)

    python_type = uuid.UUID

    def __init__(self, binary=True, native=True):
        """
        :param binary: Whether to use a BINARY(16) or CHAR(32) fallback.
        """
        self.binary = binary
        self.native = native

    def __repr__(self):
        return util.generic_repr(self)

    def load_dialect_impl(self, dialect):
        if self.native and dialect.name in ('postgresql', 'cockroachdb'):
            # Use the native UUID type.
            return dialect.type_descriptor(postgresql.UUID())

        if dialect.name == 'mssql' and self.native:
            # Use the native UNIQUEIDENTIFIER type.
            return dialect.type_descriptor(mssql.UNIQUEIDENTIFIER())

        else:
            # Fallback to either a BINARY or a CHAR.
            kind = self.impl if self.binary else types.CHAR(32)
            return dialect.type_descriptor(kind)

    @staticmethod
    def _coerce(value):
        if value and not isinstance(value, uuid.UUID):
            try:
                value = uuid.UUID(value)

            except (TypeError, ValueError):
                value = uuid.UUID(bytes=value)

        return value

    def process_literal_param(self, value, dialect):
        return "'{}'".format(value) if value else value

    def process_bind_param(self, value, dialect):
        if value is None:
            return value

        if not isinstance(value, uuid.UUID):
            value = self._coerce(value)

        if self.native and dialect.name in ('postgresql', 'mssql',
                                            'cockroachdb'):
            return str(value)

        return value.bytes if self.binary else value.hex

    def process_result_value(self, value, dialect):
        if value is None:
            return value

        if self.native and dialect.name in ('postgresql', 'mssql',
                                            'cockroachdb'):
            if isinstance(value, uuid.UUID):
                # Some drivers convert PostgreSQL's uuid values to
                # Python's uuid.UUID objects by themselves
                return value
            return uuid.UUID(value)

        return uuid.UUID(bytes=value) if self.binary else uuid.UUID(value)
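
A brief, hedged illustration of the _coerce helper above; the UUID literal is made up and the check only relies on uuid.UUID comparing equal across dashed and undashed hex forms:

import uuid

# Hypothetical example: string input is coerced to uuid.UUID before binding.
assert UUIDType._coerce('3e8cf19a4f0e4a29b8b5a2a3c6d9e111') == \
    uuid.UUID('3e8cf19a-4f0e-4a29-b8b5-a2a3c6d9e111')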
Example #16
class BinaryTest(fixtures.TestBase):
    __only_on__ = "mssql"
    __requires__ = ("non_broken_binary", )
    __backend__ = True

    @testing.combinations(
        (
            mssql.MSVarBinary(800),
            b("some normal data"),
            None,
            True,
            None,
            False,
        ),
        (
            mssql.VARBINARY("max"),
            "binary_data_one.dat",
            None,
            False,
            None,
            False,
        ),
        (
            mssql.VARBINARY("max"),
            "binary_data_one.dat",
            None,
            True,
            None,
            False,
        ),
        (
            sqltypes.LargeBinary,
            "binary_data_one.dat",
            None,
            False,
            None,
            False,
        ),
        (sqltypes.LargeBinary, "binary_data_one.dat", None, True, None, False),
        (mssql.MSImage, "binary_data_one.dat", None, True, None, False),
        (PickleType, pickleable.Foo("im foo 1"), None, True, None, False),
        (
            MyPickleType,
            pickleable.Foo("im foo 1"),
            pickleable.Foo("im foo 1", stuff="BINDim stuffRESULT"),
            True,
            None,
            False,
        ),
        (types.BINARY(100), "binary_data_one.dat", None, True, 100, False),
        (types.VARBINARY(100), "binary_data_one.dat", None, True, 100, False),
        (mssql.VARBINARY(100), "binary_data_one.dat", None, True, 100, False),
        (types.BINARY(100), "binary_data_two.dat", None, True, 99, True),
        (types.VARBINARY(100), "binary_data_two.dat", None, True, 99, False),
        (mssql.VARBINARY(100), "binary_data_two.dat", None, True, 99, False),
        argnames="type_, data, expected, deprecate_large_types, "
        "slice_, zeropad",
    )
    def test_round_trip(
        self,
        metadata,
        type_,
        data,
        expected,
        deprecate_large_types,
        slice_,
        zeropad,
    ):
        if (testing.db.dialect.deprecate_large_types
                is not deprecate_large_types):
            engine = engines.testing_engine(
                options={"deprecate_large_types": deprecate_large_types})
        else:
            engine = testing.db

        binary_table = Table(
            "binary_table",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("data", type_),
        )
        binary_table.create(engine)

        if isinstance(data, str) and (data == "binary_data_one.dat"
                                      or data == "binary_data_two.dat"):
            data = self._load_stream(data)

        if slice_ is not None:
            data = data[0:slice_]

        if expected is None:
            if zeropad:
                expected = data[0:slice_] + b"\x00"
            else:
                expected = data

        with engine.begin() as conn:
            conn.execute(binary_table.insert(), dict(data=data))

            eq_(conn.scalar(select(binary_table.c.data)), expected)

            eq_(
                conn.scalar(
                    text("select data from binary_table").columns(
                        binary_table.c.data)),
                expected,
            )

            conn.execute(binary_table.delete())

            conn.execute(binary_table.insert(), dict(data=None))
            eq_(conn.scalar(select(binary_table.c.data)), None)

            eq_(
                conn.scalar(
                    text("select data from binary_table").columns(
                        binary_table.c.data)),
                None,
            )

    def _load_stream(self, name, len_=3000):
        fp = open(os.path.join(os.path.dirname(__file__), "..", "..", name),
                  "rb")
        stream = fp.read(len_)
        fp.close()
        return stream
Example #17
    datetime.datetime(2013, 10, 10, 11, 27, 16),
    datetime.time(11, 27, 16),
    "test_bytes",
]

SAMPLE_COLUMNS = [
    {"name": "integer", "type": types.Integer(), "nullable": True, "default": None},
    {"name": "timestamp", "type": types.TIMESTAMP(), "nullable": True, "default": None},
    {"name": "string", "type": types.String(), "nullable": True, "default": None},
    {"name": "float", "type": types.Float(), "nullable": True, "default": None},
    {"name": "numeric", "type": types.Numeric(), "nullable": True, "default": None},
    {"name": "boolean", "type": types.Boolean(), "nullable": True, "default": None},
    {"name": "date", "type": types.DATE(), "nullable": True, "default": None},
    {"name": "datetime", "type": types.DATETIME(), "nullable": True, "default": None},
    {"name": "time", "type": types.TIME(), "nullable": True, "default": None},
    {"name": "bytes", "type": types.BINARY(), "nullable": True, "default": None},
    {
        "name": "record",
        "type": types.JSON(),
        "nullable": True,
        "default": None,
        "comment": "In Standard SQL this data type is a STRUCT<name STRING, age INT64>.",
    },
    {"name": "record.name", "type": types.String(), "nullable": True, "default": None},
    {"name": "record.age", "type": types.Integer(), "nullable": True, "default": None},
    {"name": "nested_record", "type": types.JSON(), "nullable": True, "default": None},
    {
        "name": "nested_record.record",
        "type": types.JSON(),
        "nullable": True,
        "default": None,
Example #18
 },
 {
     "name": "datetime",
     "type": types.DATETIME(),
     "nullable": True,
     "default": None
 },
 {
     "name": "time",
     "type": types.TIME(),
     "nullable": True,
     "default": None
 },
 {
     "name": "bytes",
     "type": types.BINARY(),
     "nullable": True,
     "default": None
 },
 {
     "name": "record",
     "type": sqlalchemy_bigquery.STRUCT(name=types.String, age=types.Integer),
     "nullable": True,
     "default": None,
     "comment": "In Standard SQL this data type is a STRUCT<name STRING, age INT64>.",
 },
Example #19
class PasswordType(types.TypeDecorator):

    impl = types.BINARY(60)
    python_type = Password

    def __init__(self, *args, **kwargs):
        # Pop the custom option before delegating so the BINARY(60) impl
        # never receives an unexpected keyword argument.
        self.encryption_rounds = kwargs.pop('encryption_rounds', None)
        super().__init__(*args, **kwargs)

    def process_literal_param(self, value, dialect):
        return self.process_value(value)

    def process_bind_param(self, value, dialect):
        return self.process_value(value)

    def process_value(self, value):
        if isinstance(value, Password):
            if value.secret is not None:
                value.hash = self._hash(value.secret)
                value.secret = None
            return value.hash

        if isinstance(value, str):
            return self._hash(value)

    def process_result_value(self, value, dialect):
        if value is not None:
            return Password(value)

    def load_dialect_impl(self, dialect):
        if dialect.name == 'postgresql':
            # Use a BYTEA type for postgresql.
            impl = postgresql.BYTEA(60)
        elif dialect.name == 'oracle':
            # Use a RAW type for oracle.
            impl = oracle.RAW(60)
        elif dialect.name == 'sqlite':
            # Use a BLOB type for sqlite
            impl = sqlite.BLOB(60)
        elif dialect.name == 'mysql':
            # Use a BINARY type for mysql.
            impl = mysql.BINARY(60)
        else:
            impl = types.VARBINARY(60)
        return dialect.type_descriptor(impl)

    def _hash(self, value) -> bytes:
        return bcrypt.generate_password_hash(value, self.encryption_rounds)

    def coercion_listener(self, target, value, oldvalue, initiator):
        if value is None:
            return

        if not isinstance(value, Password):
            value = self._hash(value)
            return Password(value)
        else:
            if value.secret is not None:
                value.hash = self._hash(value.secret)
                value.secret = None

        return value

    @property
    def python_type(self):
        return self.impl.type.python_type
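
A hedged usage sketch for the PasswordType above; the User model is hypothetical and assumes the Password wrapper and the Flask-Bcrypt style bcrypt helper referenced in the snippet are available:

import sqlalchemy as sa
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'

    id = sa.Column(sa.Integer, primary_key=True)
    # Plaintext assignments are hashed by process_bind_param before storage;
    # load_dialect_impl picks BYTEA, RAW, BLOB, BINARY or VARBINARY per dialect.
    password = sa.Column(PasswordType(encryption_rounds=12), nullable=False)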
Example #20
class UUIDType(types.TypeDecorator, ScalarCoercible):
    """
    Stores a UUID in the database natively when it can and falls back to
    a BINARY(16) or a CHAR(32) when it can't.

    ::

        from libs.DatabaseDatatypes import UUIDType
        import uuid

        class User(Base):
            __tablename__ = 'user'

            # Pass `binary=False` to fallback to CHAR instead of BINARY
            id = sa.Column(UUIDType(binary=False), primary_key=True)
    """
    impl = types.BINARY(16)

    python_type = uuid.UUID

    def __init__(self, binary=True, native=True):
        """
        :param binary: Whether to use a BINARY(16) or CHAR(32) fallback.
        """
        self.binary = binary
        self.native = native

    def load_dialect_impl(self, dialect):
        if dialect.name == 'postgresql' and self.native:
            # Use the native UUID type.
            return dialect.type_descriptor(postgresql.UUID())

        else:
            # Fallback to either a BINARY or a CHAR.
            kind = self.impl if self.binary else types.CHAR(32)
            return dialect.type_descriptor(kind)

    @staticmethod
    def _coerce(value):
        if value and not isinstance(value, uuid.UUID):
            try:
                value = uuid.UUID(bytes=urlsafe_b64decode(str(value)))
            except (TypeError, ValueError):
                logging.exception("Could not coerce as urlsafe base64")
                value = uuid.UUID(value)
        return value

    def process_bind_param(self, value, dialect):
        if value is None:
            return value

        if not isinstance(value, uuid.UUID):
            value = self._coerce(value)

        if self.native and dialect.name == 'postgresql':
            return str(value)

        return value.bytes if self.binary else value.hex

    def process_result_value(self, value, dialect):
        if value is None:
            return value

        if self.native and dialect.name == 'postgresql':
            if isinstance(value, uuid.UUID):
                # Some drivers convert PostgreSQL's uuid values to
                # Python's uuid.UUID objects by themselves
                return urlsafe_b64encode(value.bytes)
            return urlsafe_b64encode(uuid.UUID(value).bytes)

        _value = uuid.UUID(bytes=value) if self.binary else uuid.UUID(value)
        return urlsafe_b64encode(_value.bytes)
Example #21
class UUIDType(types.TypeDecorator, ScalarCoercible):
    """
    Stores a UUID in the database natively when it can and falls back to
    a BINARY(16) or a CHAR(32) when it can't.

    ::

        from sqlalchemy_utils import UUIDType
        import uuid

        class User(Base):
            __tablename__ = 'user'

            # Pass `binary=False` to fallback to CHAR instead of BINARY
            id = sa.Column(UUIDType(binary=False), primary_key=True)
    """
    impl = types.BINARY(16)

    python_type = uuid.UUID

    def __init__(self, binary=True):
        """
        :param binary: Whether to use a BINARY(16) or CHAR(32) fallback.
        """
        self.binary = binary

    def load_dialect_impl(self, dialect):
        if dialect.name == 'postgresql':
            # Use the native UUID type.
            return dialect.type_descriptor(postgresql.UUID())

        else:
            # Fallback to either a BINARY or a CHAR.
            kind = self.impl if self.binary else types.CHAR(32)
            return dialect.type_descriptor(kind)

    @staticmethod
    def _coerce(value):
        if value and not isinstance(value, uuid.UUID):
            try:
                value = uuid.UUID(value)

            except (TypeError, ValueError):
                value = uuid.UUID(bytes=value)

        return value

    def process_bind_param(self, value, dialect):
        if value is None:
            return value

        if not isinstance(value, uuid.UUID):
            value = self._coerce(value)

        if dialect.name == 'postgresql':
            return str(value)

        return value.bytes if self.binary else value.hex

    def process_result_value(self, value, dialect):
        if value is None:
            return value

        if dialect.name == 'postgresql':
            return uuid.UUID(value)

        return uuid.UUID(bytes=value) if self.binary else uuid.UUID(value)