def define_tables(cls, metadata):
    """Define a 'foo' table whose 'data' column stores dicts as JSON text,
    with in-place mutation tracking supplied by the MutableDict fixture.
    """
    import json

    class JSONEncodedDict(TypeDecorator):
        # Dicts are persisted as JSON strings in a VARCHAR(50) column.
        impl = VARCHAR(50)

        def process_bind_param(self, value, dialect):
            # Outgoing: dict -> JSON text; None passes through unchanged.
            if value is not None:
                value = json.dumps(value)
            return value

        def process_result_value(self, value, dialect):
            # Incoming: JSON text -> dict; None passes through unchanged.
            if value is not None:
                value = json.loads(value)
            return value

    MutableDict = cls._type_fixture()
    MutableDict.associate_with(JSONEncodedDict)

    Table(
        'foo',
        metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('data', JSONEncodedDict),
        Column('unrelated_data', String(50)),
    )
def define_tables(cls, metadata):
    """Create the 'foo' table used by the tests.

    The 'data' column round-trips dicts through JSON text and is wired to
    the MutableDict type fixture so in-place changes are detected.
    """
    import json

    class JSONEncodedDict(TypeDecorator):
        # Backing storage is a plain VARCHAR(50); values are JSON-encoded.
        impl = VARCHAR(50)

        def process_bind_param(self, value, dialect):
            # Encode on the way to the database (None stays None).
            if value is not None:
                value = json.dumps(value)
            return value

        def process_result_value(self, value, dialect):
            # Decode on the way back from the database (None stays None).
            if value is not None:
                value = json.loads(value)
            return value

    MutableDict = cls._type_fixture()
    MutableDict.associate_with(JSONEncodedDict)

    Table(
        'foo',
        metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('data', JSONEncodedDict),
        Column('unrelated_data', String(50)),
    )
def associate_with(sqltype):
    """Enable MutableDict change tracking for *sqltype*.

    Fix: the original body ignored the ``sqltype`` argument and always
    associated the tracker with the hard-coded ``Json`` type, so calling
    this with any other type silently did the wrong thing.

    :param sqltype: the SQLAlchemy type (class) to track mutations for
    """
    # TODO(leizhang) When we removed sqlalchemy 0.7 dependence
    # we can import MutableDict directly and remove ./mutable.py
    try:
        # Prefer the upstream implementation when available.
        from sqlalchemy.ext.mutable import MutableDict as sa_MutableDict
        sa_MutableDict.associate_with(sqltype)
    except ImportError:
        # Fall back to the bundled copy shipped for sqlalchemy 0.7.
        from heat.db.sqlalchemy.mutable import MutableDict
        MutableDict.associate_with(sqltype)
def define_tables(cls, metadata):
    """Build the 'foo' table with two pickled columns ('skip', 'data')
    whose dict values are mutation-tracked via the MutableDict fixture.
    """
    MutableDict = cls._type_fixture()
    MutableDict.associate_with(PickleType)

    Table(
        'foo',
        metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('skip', PickleType),
        Column('data', PickleType),
        Column('unrelated_data', String(50)),
    )
def define_tables(cls, metadata):
    """Define the test table: PickleType columns with MutableDict tracking."""
    # Associating with PickleType makes every PickleType column on the
    # table mutation-aware, including both 'skip' and 'data'.
    MutableDict = cls._type_fixture()
    MutableDict.associate_with(PickleType)

    Table(
        'foo',
        metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('skip', PickleType),
        Column('data', PickleType),
        Column('unrelated_data', String(50)),
    )
def define_tables(cls, metadata):
    """Set up the 'foo' table; PickleType values gain in-place change
    tracking through the MutableDict type fixture.
    """
    MutableDict = cls._type_fixture()
    MutableDict.associate_with(PickleType)

    Table(
        "foo",
        metadata,
        Column("id", Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column("skip", PickleType),
        Column("data", PickleType),
        Column("unrelated_data", String(50)),
    )
def define_tables(cls, metadata):
    """Declare the 'foo' table for these tests.

    Two PickleType columns are present; associating MutableDict with
    PickleType turns both into mutation-tracked dict columns.
    """
    MutableDict = cls._type_fixture()
    MutableDict.associate_with(PickleType)

    Table(
        "foo",
        metadata,
        Column("id", Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column("skip", PickleType),
        Column("data", PickleType),
        Column("unrelated_data", String(50)),
    )
return json.loads(value) class AstonFrameBinary(TypeDecorator): impl = LargeBinary def process_bind_param(self, value, dialect): if value is not None: return value.compress() def process_result_value(self, value, dialect): if value is not None: return decompress(value) MutableDict.associate_with(JSONDict) Base = declarative_base() def initialize_sql(engine): DBSession = scoped_session(sessionmaker(expire_on_commit=False)) DBSession.configure(bind=engine) Base.metadata.bind = engine Base.metadata.create_all(engine) return DBSession def quick_sqlite(filename): from sqlalchemy import create_engine
_null = None _type = object def process_bind_param(self, value, dialect): return json.dumps(value) def process_literal_param(self, value, dialect): return value def process_result_value(self, value, dialect): try: value = json.loads(value) except (ValueError, TypeError): value = self._null return value class List(Json): _null = [] _type = list class Dict(Json): _null = {} _type = dict MutableDict.associate_with(Dict) Base = declarative_base()
import sqlalchemy_utils
import sqlalchemy_utils.types.json
from sqlalchemy_json import NestedMutable as _NestedMutable


class MutableJson(sqlalchemy.types.JSON):
    """JSON type for SQLAlchemy with change tracking at top level."""


class NestedMutableJson(sqlalchemy.types.JSON):
    """JSON type for SQLAlchemy with nested change tracking."""


# Register mutation tracking for both JSON type variants.
# NOTE(review): _MutableDict is presumably imported earlier in this
# module — confirm against the surrounding file.
_MutableDict.associate_with(MutableJson)
_NestedMutable.associate_with(NestedMutableJson)

# Monkey patch sqlalchemy_utils serializer. Note that sqlalchemy_json inherits
# from the data type from sqlalchemy_utils - so setting serializer there takes
# care of both instances.
#
# TODO:
# 1. This patching should maybe be done differently -- maybe override the
#    JsonType-method for serializing to the db?
# 2. We might want to implement our own Json serializer to deal with custom
#    data types. The flask one simply extends the stdlib json module with
#    support for serializing datetime.datatime and uuid.UUID
# 3. Note that JsonType only use the json-implementation for serializing to
#    json-strings for all db-engines *but* postgres -- if using postgres, json
#    serialization is delegated to the engine, so we'll have to make sure to
impl = HSTORE def process_bind_param(self, value, dialect): if not isinstance(value, dict): return value ret = {k: json.dumps(v) for k, v in value.items()} return ret def process_result_value(self, value, dialect): if not value: return MutableDict() ret = MutableDict({k: json.loads(v) for k, v in value.items()}) return ret MutableDict.associate_with(JSONValuesColumn) class IntegerEnumColumn(types.TypeDecorator): impl = types.INTEGER def __init__(self, enum_values): super(IntegerEnumColumn, self).__init__() self.enum_values = enum_values self.reverse_enum_values = reverse_dict(enum_values) def process_bind_param(self, value, dialect): return self.reverse_enum_values.get(value, value) def process_result_value(self, value, dialect): return self.enum_values.get(value, value)
@property def path(self): return Path(self._path) @path.setter def path(self, value): self._path = str(value.resolve()) self._name = value.name @property def name(self): return self._name MutableList.associate_with(JList) MutableDict.associate_with(JDict) def configure_logger(rundir, name): """ Create the project-specific logger. DEBUG and up is saved to the log, INFO and up appears in the console. :param rundir: Path, to create log sub-path in :param name: str, name for logfile :return: logger object """ from pathlib import Path import logging logfile = Path(rundir) / f'processor_logs/{name}.log' logger = logging.getLogger(name)
return cls(value) return super(cls).coerce(key, value) class NestedMutable(Mutable): """SQLAlchemy `mutable` extension with nested change tracking.""" @classmethod def coerce(cls, key, value): """Convert plain dictionary to NestedMutable.""" if value is None: return value if isinstance(value, cls): return value if isinstance(value, dict): return NestedMutableDict.coerce(key, value) if isinstance(value, list): return NestedMutableList.coerce(key, value) return super(cls).coerce(key, value) class MutableJson(JSONType): """JSON type for SQLAlchemy with change tracking at top level.""" class NestedMutableJson(JSONType): """JSON type for SQLAlchemy with nested change tracking.""" MutableDict.associate_with(MutableJson) NestedMutable.associate_with(NestedMutableJson)
return value # This is a bit "magic", but simplifies the interaction between this # validation and SQLAlchemy hybrid properties. If the attribute being # set starts with an underscore, assume that it's due to being set up # as a hybrid property, and remove the underscore prefix when looking # for a field to validate against. if attribute.startswith('_'): attribute = attribute[1:] field = self.schema.fields.get(attribute) if not field or field.dump_only: return value result = self.schema.load({attribute: value}) return result.data[attribute] DatabaseModel = declarative_base( # pylint: disable=invalid-name cls=DatabaseModelBase, name='DatabaseModel', metadata=MetaData(naming_convention=NAMING_CONVENTION), ) # attach the listener for SQLAlchemy ORM attribute "set" events to all models event.listen(DatabaseModel, 'attribute_instrument', attach_set_listener) # associate JSONB columns with MutableDict so value changes are detected MutableDict.associate_with(JSONB)
class Json(types.TypeDecorator): impl = types.Text def process_bind_param(self, value, dialect): return dumps(value) def process_result_value(self, value, dialect): return loads(value) # TODO(leizhang) When we removed sqlalchemy 0.7 dependence # we can import MutableDict directly and remove ./mutable.py try: from sqlalchemy.ext.mutable import MutableDict as sa_MutableDict sa_MutableDict.associate_with(Json) except ImportError: from heat.db.sqlalchemy.mutable import MutableDict MutableDict.associate_with(Json) class HeatBase(object): """Base class for Heat Models.""" __table_args__ = {'mysql_engine': 'InnoDB'} __table_initialized__ = False created_at = sqlalchemy.Column(sqlalchemy.DateTime, default=timeutils.utcnow) updated_at = sqlalchemy.Column(sqlalchemy.DateTime, onupdate=timeutils.utcnow) def save(self, session=None):
return dialect.type_descriptor(mysql.LONGTEXT()) else: return self.impl def process_bind_param(self, value, dialect): return dumps(value) def process_result_value(self, value, dialect): return loads(value) # TODO(leizhang) When we removed sqlalchemy 0.7 dependence # we can import MutableDict directly and remove ./mutable.py try: from sqlalchemy.ext.mutable import MutableDict as sa_MutableDict sa_MutableDict.associate_with(Json) except ImportError: from dragon.db.sqlalchemy.mutable import MutableDict MutableDict.associate_with(Json) class DragonBase(object): """Base class for Heat Models.""" __table_args__ = {'mysql_engine': 'InnoDB'} __table_initialized__ = False created_at = sqlalchemy.Column(sqlalchemy.DateTime, default=timeutils.utcnow) updated_at = sqlalchemy.Column(sqlalchemy.DateTime, onupdate=timeutils.utcnow) def save(self, session=None):