def _join_args(x, y):
    if x is None:
        return y
    if y is None:
        return x

    xa, xk = sanitize_args(x)
    ya, yk = sanitize_args(y)

    xk = dict(xk)
    xk.update(yk)

    return xa + ya, xk

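# Hedged illustration (not part of the original module): each sqla_*_args
# spec is normalized by sanitize_args() into an (args_tuple, kwargs_dict)
# pair; _join_args concatenates the positional parts and merges the keyword
# parts, letting the right-hand spec win on key collisions.
def _join_args_example():
    left = (('a', 'b'), {'x': 1})
    right = (('c',), {'x': 2, 'y': 3})

    merged = (left[0] + right[0], dict(left[1], **right[1]))
    assert merged == (('a', 'b', 'c'), {'x': 2, 'y': 3})
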
def _add_simple_type(cls, props, table, subname, subcls, sqla_type):
    col_args, col_kwargs = sanitize_args(subcls.Attributes.sqla_column_args)
    _sp_attrs_to_sqla_constraints(cls, subcls, col_kwargs)

    mp = getattr(subcls.Attributes, 'mapper_property', None)

    if 'name' in col_kwargs:
        colname = col_kwargs.pop('name')
    else:
        colname = subname

    if not subcls.Attributes.exc_db:
        if colname in table.c:
            col = table.c[colname]
        else:
            col = Column(colname, sqla_type, *col_args, **col_kwargs)
            table.append_column(col)
            _gen_index_info(table, col, subname, subcls)

        if not subcls.Attributes.exc_mapper:
            props[subname] = col

    elif mp is not None:
        props[subname] = mp

def _get_col_o2o(parent, subname, subcls, fk_col_name, deferrable=None,
                                initially=None, ondelete=None, onupdate=None):
    """Gets key and child type and returns a column that points to the primary
    key of the child.
    """

    assert subcls.Attributes.table_name is not None, \
                                             "%r has no table name." % subcls

    col_args, col_kwargs = sanitize_args(subcls.Attributes.sqla_column_args)
    _sp_attrs_to_sqla_constraints(parent, subcls, col_kwargs)

    # get pkeys from child class
    pk_column, = get_pk_columns(subcls)  # FIXME: Support multi-col keys

    pk_key, pk_spyne_type = pk_column
    pk_sqla_type = _get_sqlalchemy_type(pk_spyne_type)

    # generate a fk to it from the current object (cls)
    if fk_col_name is None:
        fk_col_name = subname + "_" + pk_key

    assert fk_col_name != subname, \
        "The column name for the foreign key must be different from the " \
        "column name for the object itself."

    fk = ForeignKey(
        '%s.%s' % (subcls.Attributes.table_name, pk_key),
        use_alter=True,
        name='%s_%s_fkey' % (subcls.Attributes.table_name, fk_col_name),
        deferrable=deferrable, initially=initially,
        ondelete=ondelete, onupdate=onupdate)

    return Column(fk_col_name, pk_sqla_type, fk, *col_args, **col_kwargs)

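# Hedged sketch in plain SQLAlchemy (not Spyne) of what _get_col_o2o builds:
# for a child mapped to table 'address' with an integer primary key 'id' and
# subname 'address', the parent ends up with a foreign-key column roughly
# equivalent to the one below. Table and column names are illustrative.
def _o2o_column_example():
    from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table

    metadata = MetaData()
    Table('address', metadata, Column('id', Integer, primary_key=True))

    fk = ForeignKey('address.id', use_alter=True,
                    name='address_address_id_fkey')
    parent = Table('parent', metadata,
                   Column('id', Integer, primary_key=True),
                   Column('address_id', Integer, fk))
    return parent
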
def _get_col_o2m(cls, fk_col_name):
    """Gets the parent class and returns a column that points to the primary
    key of the parent.

    Funky implementation. Yes.
    """

    assert cls.Attributes.table_name is not None, "%r has no table name." % cls
    col_args, col_kwargs = sanitize_args(cls.Attributes.sqla_column_args)

    # get pkeys from current class
    pk_column, = get_pk_columns(cls)  # FIXME: Support multi-col keys

    pk_key, pk_spyne_type = pk_column
    pk_sqla_type = get_sqlalchemy_type(pk_spyne_type)

    # generate a fk from child to the current class
    if fk_col_name is None:
        fk_col_name = '_'.join([cls.Attributes.table_name, pk_key])

    # we jump through all these hoops because we must instantiate the Column
    # only after we're sure that it doesn't already exist and also because
    # tinkering with functors is always fun :)
    yield [(fk_col_name, pk_sqla_type)]

    col = Column(fk_col_name, pk_sqla_type,
                 ForeignKey('%s.%s' % (cls.Attributes.table_name, pk_key)),
                 *col_args, **col_kwargs)

    yield col

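# Hedged sketch (illustrative names, not from the module) of the two-step
# generator protocol that callers of _get_col_o2m rely on: the first next()
# call yields only the (name, type) info so the caller can check for an
# existing column, and the second next() call instantiates the Column.
def _o2m_generator_example():
    from sqlalchemy import Column, Integer

    def make_col_lazily(name, sqla_type):
        yield [(name, sqla_type)]       # step 1: just the metadata
        yield Column(name, sqla_type)   # step 2: the actual Column

    gen = make_col_lazily('parent_id', Integer)
    (col_name, col_type), = next(gen)   # peek at name/type first
    col = next(gen)                     # instantiate only if needed
    return col_name, col_type, col
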
def _add_complex_type(cls, props, table, subname, subcls):
    if issubclass(subcls, File):
        return _add_file_type(cls, props, table, subname, subcls)

    storage = getattr(subcls.Attributes, 'store_as', None)
    col_args, col_kwargs = sanitize_args(subcls.Attributes.sqla_column_args)
    _sp_attrs_to_sqla_constraints(cls, subcls, col_kwargs)

    if isinstance(storage, c_table):
        return _add_complex_type_as_table(cls, props, table, subname, subcls,
                                          storage, col_args, col_kwargs)
    if isinstance(storage, c_xml):
        return _add_complex_type_as_xml(cls, props, table, subname, subcls,
                                        storage, col_args, col_kwargs)
    if isinstance(storage, c_json):
        return _add_complex_type_as_json(cls, props, table, subname, subcls,
                                         storage, col_args, col_kwargs, 'json')
    if isinstance(storage, c_jsonb):
        return _add_complex_type_as_json(cls, props, table, subname, subcls,
                                         storage, col_args, col_kwargs, 'jsonb')
    if isinstance(storage, c_msgpack):
        raise NotImplementedError(c_msgpack)

    if storage is None:
        return

    raise ValueError(storage)

def _add_complex_type(cls, props, table, k, v):
    if issubclass(v, File):
        return _add_file_type(cls, props, table, k, v)

    p = getattr(v.Attributes, 'store_as', None)
    col_args, col_kwargs = sanitize_args(v.Attributes.sqla_column_args)
    _sp_attrs_to_sqla_constraints(cls, v, col_kwargs)

    if isinstance(p, c_table):
        return _add_complex_type_as_table(cls, props, table, k, v,
                                          p, col_args, col_kwargs)
    elif isinstance(p, c_xml):
        return _add_complex_type_as_xml(cls, props, table, k, v,
                                        p, col_args, col_kwargs)
    elif isinstance(p, c_json):
        return _add_complex_type_as_json(cls, props, table, k, v,
                                         p, col_args, col_kwargs)
    elif isinstance(p, c_msgpack):
        raise NotImplementedError(c_msgpack)
    elif p is None:
        return

    raise ValueError(p)

def _get_col_o2o(parent, k, v, fk_col_name, deferrable=None, initially=None):
    """Gets key and child type and returns a column that points to the primary
    key of the child.
    """

    assert v.Attributes.table_name is not None, "%r has no table name." % v

    col_args, col_kwargs = sanitize_args(v.Attributes.sqla_column_args)
    _sp_attrs_to_sqla_constraints(parent, v, col_kwargs)

    # get pkeys from child class
    pk_column, = get_pk_columns(v)  # FIXME: Support multi-col keys

    pk_key, pk_spyne_type = pk_column
    pk_sqla_type = _get_sqlalchemy_type(pk_spyne_type)

    # generate a fk to it from the current object (cls)
    if fk_col_name is None:
        fk_col_name = k + "_" + pk_key

    fk = ForeignKey('%s.%s' % (v.Attributes.table_name, pk_key),
                    use_alter=True,
                    name='%s_%s_fkey' % (v.Attributes.table_name, fk_col_name),
                    deferrable=deferrable, initially=initially)

    return Column(fk_col_name, pk_sqla_type, fk, *col_args, **col_kwargs)

def _add_file_type(cls, props, table, subname, subcls):
    storage = getattr(subcls.Attributes, 'store_as', None)
    col_args, col_kwargs = sanitize_args(subcls.Attributes.sqla_column_args)
    _sp_attrs_to_sqla_constraints(cls, subcls, col_kwargs)

    if isinstance(storage, HybridFileStore):
        if subname in table.c:
            col = table.c[subname]

        else:
            assert isabs(storage.store)
            # FIXME: Add support for storage markers from spyne.model.complex
            if storage.db_format == 'json':
                t = PGFileJson(storage.store, storage.type)

            elif storage.db_format == 'jsonb':
                t = PGFileJson(storage.store, storage.type, dbt='jsonb')

            else:
                raise NotImplementedError(storage.db_format)

            col = Column(subname, t, **col_kwargs)

        props[subname] = col
        if not subname in table.c:
            table.append_column(col)

    else:
        raise NotImplementedError(storage)

def _get_col_o2o(parent, k, v, fk_col_name):
    """Gets key and child type and returns a column that points to the primary
    key of the child.
    """

    assert v.Attributes.table_name is not None, "%r has no table name." % v

    col_args, col_kwargs = sanitize_args(v.Attributes.sqla_column_args)
    _sp_attrs_to_sqla_constraints(parent, v, col_kwargs)

    # get pkeys from child class
    pk_column, = get_pk_columns(v)  # FIXME: Support multi-col keys

    pk_key, pk_spyne_type = pk_column
    pk_sqla_type = get_sqlalchemy_type(pk_spyne_type)

    # generate a fk to it from the current object (cls)
    if fk_col_name is None:
        fk_col_name = k + "_" + pk_key

    fk = ForeignKey('%s.%s' % (v.Attributes.table_name, pk_key),
                    use_alter=True,
                    name='%s_%s_fkey' % (v.Attributes.table_name, fk_col_name))

    return Column(fk_col_name, pk_sqla_type, fk, *col_args, **col_kwargs)

def _gen_mapper(cls, props, table, cls_bases):
    """Generate SQLAlchemy mapper from Spyne definition data.

    :param cls: La Class.
    :param props: a dict.
    :param table: a Table instance. Not a `_FakeTable` or anything.
    :param cls_bases: Sequence of class bases.
    """

    inheritance, base_class, base_mapper, inc = _check_inheritance(cls,
                                                                   cls_bases)
    mapper_args, mapper_kwargs = sanitize_args(cls.Attributes.sqla_mapper_args)

    _props = mapper_kwargs.get('properties', None)
    if _props is None:
        mapper_kwargs['properties'] = props
    else:
        props.update(_props)
        mapper_kwargs['properties'] = props

    _inc = mapper_kwargs.get('include_properties', None)
    if _inc is None:
        mapper_kwargs['include_properties'] = inc + list(props.keys())

    po = mapper_kwargs.get('polymorphic_on', None)
    if po is not None:
        if not isinstance(po, Column):
            mapper_kwargs['polymorphic_on'] = table.c[po]
        else:
            logger.warning("Deleted invalid 'polymorphic_on' value %r for %r.",
                           po, cls)
            del mapper_kwargs['polymorphic_on']

    if base_mapper is not None:
        mapper_kwargs['inherits'] = base_mapper

    if inheritance is not _SINGLE:
        mapper_args = (table,) + mapper_args

    cls_mapper = mapper(cls, *mapper_args, **mapper_kwargs)

    def on_load(target, context):
        d = target.__dict__

        for k, v in cls.get_flat_type_info(cls).items():
            if not k in d:
                if isclass(v) and issubclass(v, ComplexModelBase):
                    pass
                else:
                    d[k] = None

    event.listen(cls, 'load', on_load)

    return cls_mapper

def gen_spyne_info(cls):
    table = cls.Attributes.sqla_table
    _type_info = cls._type_info

    for c in table.c:
        _type_info[c.name] = get_spyne_type(c)

    # Map the table to the object
    mapper_args, mapper_kwargs = sanitize_args(cls.Attributes.sqla_mapper_args)
    cls_mapper = mapper(cls, table, *mapper_args, **mapper_kwargs)

    cls.Attributes.table_name = cls.__tablename__ = table.name
    cls.Attributes.sqla_mapper = cls.__mapper__ = cls_mapper

def _gen_mapper(cls, props, table, cls_bases):
    """Generate SQLAlchemy mapper from Spyne definition data.

    :param cls: La Class.
    :param props: a dict.
    :param table: a Table instance. Not a `_FakeTable` or anything.
    :param cls_bases: Sequence of class bases.
    """

    inheritance, base_class, base_mapper, inc = _check_inheritance(cls,
                                                                   cls_bases)
    mapper_args, mapper_kwargs = sanitize_args(cls.Attributes.sqla_mapper_args)

    _props = mapper_kwargs.get('properties', None)
    if _props is None:
        mapper_kwargs['properties'] = props
    else:
        props.update(_props)
        mapper_kwargs['properties'] = props

    _inc = mapper_kwargs.get('include_properties', None)
    if _inc is None:
        mapper_kwargs['include_properties'] = inc + list(props.keys())

    po = mapper_kwargs.get('polymorphic_on', None)
    if po is not None:
        if not isinstance(po, Column):
            mapper_kwargs['polymorphic_on'] = table.c[po]
        else:
            logger.warning("Deleted invalid 'polymorphic_on' value %r for %r.",
                           po, cls)
            del mapper_kwargs['polymorphic_on']

    if base_mapper is not None:
        mapper_kwargs['inherits'] = base_mapper

    if inheritance is not _SINGLE:
        mapper_args = (table,) + mapper_args

    cls_mapper = mapper(cls, *mapper_args, **mapper_kwargs)

    def on_load(target, context):
        d = target.__dict__

        for k, v in cls.get_flat_type_info(cls).items():
            if not k in d:
                if isclass(v) and issubclass(v, ComplexModelBase):
                    pass
                else:
                    d[k] = None

    event.listen(cls, 'load', on_load)

    return cls_mapper

def _convert_fake_table(cls, table):
    metadata = cls.Attributes.sqla_metadata
    table_name = cls.Attributes.table_name

    _table = table
    table_args, table_kwargs = sanitize_args(cls.Attributes.sqla_table_args)
    table = Table(table_name, metadata,
                  *(tuple(table.columns) + table_args), **table_kwargs)

    for index_args, index_kwargs in _table.indexes:
        Index(*index_args, **index_kwargs)

    return table

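# Hedged sketch of the "fake table" idea in plain SQLAlchemy: columns (and
# index specs) are collected first, and the real Table is constructed only
# once the name, metadata and table_args are all known. Names below are
# illustrative.
def _convert_fake_table_example():
    from sqlalchemy import (Column, Integer, MetaData, Table, Unicode,
                            UniqueConstraint)

    pending_columns = [Column('id', Integer, primary_key=True),
                       Column('name', Unicode(64))]
    table_args = (UniqueConstraint('name'),)

    metadata = MetaData()
    return Table('some_object', metadata,
                 *(tuple(pending_columns) + table_args))
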
def _add_simple_type(cls, props, table, k, v, sqla_type):
    col_args, col_kwargs = sanitize_args(v.Attributes.sqla_column_args)
    _sp_attrs_to_sqla_constraints(cls, v, col_kwargs)

    if k in table.c:
        col = table.c[k]
    else:
        col = Column(k, sqla_type, *col_args, **col_kwargs)
        table.append_column(col)
        _gen_index_info(table, col, k, v)

    if not v.Attributes.exc_mapper:
        props[k] = col

def _gen_mapper(cls, props, table, cls_bases):
    """
    :param cls: La Class.
    :param props: a dict.
    :param table: a Table instance. Not a _FakeTable.
    :param cls_bases: Class bases.
    """

    inheritance, base_class, base_mapper, inc = _check_inheritance(cls,
                                                                   cls_bases)
    mapper_args, mapper_kwargs = sanitize_args(cls.Attributes.sqla_mapper_args)

    _props = mapper_kwargs.get('properties', None)
    if _props is None:
        mapper_kwargs['properties'] = props
    else:
        props.update(_props)
        mapper_kwargs['properties'] = props

    _inc = mapper_kwargs.get('include_properties', None)
    if _inc is None:
        mapper_kwargs['include_properties'] = inc + props.keys()

    po = mapper_kwargs.get('polymorphic_on', None)
    if po is not None:
        if not isinstance(po, Column):
            mapper_kwargs['polymorphic_on'] = table.c[po]
        else:
            del mapper_kwargs['polymorphic_on']

    if base_mapper is not None:
        mapper_kwargs['inherits'] = base_mapper

    if inheritance is not _SINGLE:
        mapper_args = (table,) + mapper_args

    cls_mapper = mapper(cls, *mapper_args, **mapper_kwargs)

    def on_load(target, context):
        d = target.__dict__

        for k, v in cls.get_flat_type_info(cls).items():
            if not k in d:
                if isclass(v) and issubclass(v, ComplexModelBase):
                    pass
                else:
                    d[k] = None

    event.listen(cls, 'load', on_load)

    return cls_mapper

def gen_spyne_info(cls):
    table = cls.Attributes.sqla_table
    _type_info = cls._type_info
    mapper_args, mapper_kwargs = sanitize_args(cls.Attributes.sqla_mapper_args)

    if len(_type_info) == 0:
        for c in table.c:
            _type_info[c.name] = get_spyne_type(c)
    else:
        mapper_kwargs['include_properties'] = _type_info.keys()

    # Map the table to the object
    cls_mapper = own_mapper(cls)(cls, table, *mapper_args, **mapper_kwargs)

    cls.Attributes.table_name = cls.__tablename__ = table.name
    cls.Attributes.sqla_mapper = cls.__mapper__ = cls_mapper

def _get_col_o2o(k, v, fk_col_name):
    """Gets key and child type and returns a column that points to the primary
    key of the child.
    """

    assert v.Attributes.table_name is not None, "%r has no table name." % v

    col_args, col_kwargs = sanitize_args(v.Attributes.sqla_column_args)

    # get pkeys from child class
    pk_column, = get_pk_columns(v)  # FIXME: Support multi-col keys

    pk_key, pk_spyne_type = pk_column
    pk_sqla_type = get_sqlalchemy_type(pk_spyne_type)

    # generate a fk to it from the current object (cls)
    if fk_col_name is None:
        fk_col_name = k + "_" + pk_key

    fk = ForeignKey('%s.%s' % (v.Attributes.table_name, pk_key))

    return Column(fk_col_name, pk_sqla_type, fk, *col_args, **col_kwargs)

def _get_col_o2m(cls, fk_col_name):
    """Gets the parent class and returns a column that points to the primary
    key of the parent.
    """

    assert cls.Attributes.table_name is not None, "%r has no table name." % cls

    col_args, col_kwargs = sanitize_args(cls.Attributes.sqla_column_args)

    # get pkeys from current class
    pk_column, = get_pk_columns(cls)  # FIXME: Support multi-col keys

    pk_key, pk_spyne_type = pk_column
    pk_sqla_type = get_sqlalchemy_type(pk_spyne_type)

    # generate a fk from child to the current class
    if fk_col_name is None:
        fk_col_name = '_'.join([cls.Attributes.table_name, pk_key])

    col = Column(fk_col_name, pk_sqla_type,
                 ForeignKey('%s.%s' % (cls.Attributes.table_name, pk_key)),
                 *col_args, **col_kwargs)

    return col

def _add_file_type(cls, props, table, k, v):
    p = getattr(v.Attributes, 'store_as', None)
    col_args, col_kwargs = sanitize_args(v.Attributes.sqla_column_args)
    _sp_attrs_to_sqla_constraints(cls, v, col_kwargs)

    if isinstance(p, HybridFileStore):
        if k in table.c:
            col = table.c[k]

        else:
            assert isabs(p.store)
            # FIXME: Add support for storage markers from spyne.model.complex
            if p.db_format == 'json':
                t = PGFileJson(p.store)
            else:
                raise NotImplementedError(p.db_format)

            col = Column(k, t, *col_args, **col_kwargs)

        props[k] = col
        if not k in table.c:
            table.append_column(col)

    else:
        raise NotImplementedError(p)

def gen_sqla_info(cls, cls_bases=()):
    """Return SQLAlchemy table object corresponding to the passed Spyne object.
    Also maps given class to the returned table.
    """

    metadata = cls.Attributes.sqla_metadata
    table_name = cls.Attributes.table_name

    inc = []  # include_properties

    # check inheritance
    inheritance = None
    base_class = getattr(cls, '__extends__', None)
    if base_class is None:
        for b in cls_bases:
            if getattr(b, '_type_info', None) is not None and b.__mixin__:
                base_class = b

    if base_class is not None:
        base_table_name = base_class.Attributes.table_name
        if base_table_name is not None:
            if base_table_name == table_name:
                inheritance = _SINGLE
            else:
                inheritance = _JOINED
                raise NotImplementedError("Joined table inheritance is not yet "
                                          "implemented.")

            inc_prop = base_class.Attributes.sqla_mapper.include_properties
            if inc_prop is not None:
                inc.extend(inc_prop)

            exc_prop = base_class.Attributes.sqla_mapper.exclude_properties
            if exc_prop is not None:
                inc = [_p for _p in inc if not _p in exc_prop]

    # check whether the object already has a table
    table = None
    if table_name in metadata.tables:
        table = metadata.tables[table_name]
    else:
        # We need FakeTable because table_args can contain all sorts of stuff
        # that can require a fully-constructed table, and we don't have that
        # information here yet.
        table = _FakeTable()

    # check whether the base classes are already mapped
    base_mapper = None
    if base_class is not None:
        base_mapper = base_class.Attributes.sqla_mapper

    if base_mapper is None:
        for b in cls_bases:
            bm = _mapper_registry.get(b, None)
            if bm is not None:
                assert base_mapper is None, "There can be only one base mapper."
                base_mapper = bm
                inheritance = _SINGLE

    props = {}

    # For each Spyne field
    for k, v in cls._type_info.items():
        if v.Attributes.exc_table:
            continue

        col_args, col_kwargs = sanitize_args(v.Attributes.sqla_column_args)
        _sp_attrs_to_sqla_constraints(cls, v, col_kwargs)

        t = get_sqlalchemy_type(v)

        if t is None:
            p = getattr(v.Attributes, 'store_as', None)
            if p is not None and issubclass(v, Array) and isinstance(p, c_table):
                child_cust, = v._type_info.values()
                if child_cust.__orig__ is not None:
                    child = child_cust.__orig__
                else:
                    child = child_cust

                if p.multi != False:  # many to many
                    col_own, col_child = _get_cols_m2m(cls, k, v,
                                                       p.left, p.right)
                    p.left = col_own.key
                    p.right = col_child.key

                    if p.multi == True:
                        rel_table_name = '_'.join([cls.Attributes.table_name, k])
                    else:
                        rel_table_name = p.multi

                    # FIXME: Handle the case where the table already exists.
                    rel_t = Table(rel_table_name, metadata,
                                  *(col_own, col_child))

                    props[k] = relationship(child, secondary=rel_t,
                                            backref=p.backref)

                elif issubclass(child, SimpleModel):  # one to many simple type
                    # get left (fk) column info
                    _gen_col = _get_col_o2m(cls, p.left)
                    col_info = _gen_col.next()  # gets the column name
                    # FIXME: Add support for multi-column primary keys.
                    p.left, child_left_col_type = col_info[0]
                    child_left_col_name = p.left

                    # get right (data) column info
                    child_right_col_type = get_sqlalchemy_type(child_cust)
                    child_right_col_name = p.right  # this is the data column
                    if child_right_col_name is None:
                        child_right_col_name = k

                    # get table name
                    child_table_name = child_cust.Attributes.table_name
                    if child_table_name is None:
                        child_table_name = '_'.join([table_name, k])

                    if child_table_name in metadata.tables:
                        # table exists, get relevant info
                        child_t = metadata.tables[child_table_name]
                        assert child_right_col_type is \
                                  child_t.c[child_right_col_name].type.__class__
                        assert child_left_col_type is \
                                  child_t.c[child_left_col_name].type.__class__

                        child_right_col = child_t.c[child_right_col_name]
                        child_left_col = child_t.c[child_left_col_name]

                    else:
                        # table does not exist, generate table
                        child_right_col = Column(child_right_col_name,
                                                 child_right_col_type)
                        _sp_attrs_to_sqla_constraints(cls, child_cust,
                                                      col=child_right_col)

                        child_left_col = _gen_col.next()
                        _sp_attrs_to_sqla_constraints(cls, child_cust,
                                                      col=child_left_col)

                        child_t = Table(child_table_name, metadata,
                            Column('id', sqlalchemy.Integer, primary_key=True),
                            child_left_col, child_right_col)

                    # generate temporary class for association proxy
                    # (generates camelcase class name)
                    cls_name = ''.join(x.capitalize() or '_'
                                       for x in child_table_name.split('_'))

                    def _i(self, *args):
                        setattr(self, child_right_col_name, args[0])

                    cls_ = type("_" + cls_name, (object,), {'__init__': _i})
                    own_mapper(cls_)(cls_, child_t)
                    props["_" + k] = relationship(cls_)

                    # generate association proxy
                    setattr(cls, k,
                            association_proxy("_" + k, child_right_col_name))

                else:  # one to many complex type
                    _gen_col = _get_col_o2m(cls, p.right)
                    col_info = _gen_col.next()  # gets the column name
                    # FIXME: Add support for multi-column primary keys.
                    p.right, col_type = col_info[0]

                    assert p.left is None, \
                        "'left' is ignored in one-to-many relationships " \
                        "with complex types (because they already have a " \
                        "table). You probably meant to use 'right'."

                    child_t = child.__table__
                    if p.right in child_t.c:
                        # FIXME: This branch MUST be tested.
                        assert col_type is child_t.c[p.right].type.__class__

                        # if the column is there, the decision about whether
                        # it should be in child's mapper should also have been
                        # made.
                        #
                        # so, not adding the child column to the child mapper
                        # here.
                        col = child_t.c[p.right]

                    else:
                        col = _gen_col.next()
                        _sp_attrs_to_sqla_constraints(cls, child_cust, col=col)
                        child_t.append_column(col)
                        child.__mapper__.add_property(col.name, col)

                    props[k] = relationship(child, foreign_keys=[col],
                                            backref=p.backref)

            elif p is not None and issubclass(v, ComplexModelBase):
                # v has the Attribute values we need whereas real_v is what the
                # user instantiates (thus what sqlalchemy needs)
                if v.__orig__ is None:  # vanilla class
                    real_v = v
                else:  # customized class
                    real_v = v.__orig__

                if isinstance(p, c_table):
                    assert not getattr(p, 'multi', False), (
                        'Storing a single element-type using a '
                        'relation table is pointless.')

                    assert p.right is None, "'right' is ignored in a " \
                                            "one-to-one relationship"

                    col = _get_col_o2o(cls, k, v, p.left)
                    rel = relationship(real_v, uselist=False,
                                       foreign_keys=[col], backref=p.backref)

                    p.left = col.key
                    props[k] = rel

                elif isinstance(p, c_xml):
                    if k in table.c:
                        col = table.c[k]
                    else:
                        col = Column(k, PGObjectXml(v, p.root_tag, p.no_ns),
                                     *col_args, **col_kwargs)

                elif isinstance(p, c_json):
                    if k in table.c:
                        col = table.c[k]
                    else:
                        col = Column(k, PGObjectJson(v,
                                            ignore_wrappers=p.ignore_wrappers,
                                            complex_as=p.complex_as),
                                     *col_args, **col_kwargs)

                elif isinstance(p, c_msgpack):
                    raise NotImplementedError()

                else:
                    raise ValueError(p)

                props[col.name] = col
                if not k in table.c:
                    table.append_column(col)

            else:
                logger.debug("Skipping %s.%s.%s: %r, store_as: %r" % (
                             cls.get_namespace(), cls.get_type_name(), k, v, p))

        else:
            unique = v.Attributes.unique
            index = v.Attributes.index
            if unique and not index:
                index = True

            try:
                index_name, index_method = v.Attributes.index
            except (TypeError, ValueError):
                index_name = "%s_%s%s" % (table_name, k,
                                          '_unique' if unique else '')
                index_method = v.Attributes.index

            if k in table.c:
                col = table.c[k]
            else:
                col = Column(k, t, *col_args, **col_kwargs)
                table.append_column(col)

                if index in (False, None):
                    pass
                else:
                    if index == True:
                        index_args = (index_name, col), dict(unique=unique)
                    else:
                        index_args = (index_name, col), dict(unique=unique,
                                                postgresql_using=index_method)

                    if isinstance(table, _FakeTable):
                        table.indexes.append(index_args)
                    else:
                        Index(*index_args[0], **index_args[1])

            if not v.Attributes.exc_mapper:
                props[k] = col

    if isinstance(table, _FakeTable):
        _table = table
        table_args, table_kwargs = sanitize_args(cls.Attributes.sqla_table_args)
        table = Table(table_name, metadata,
                      *(tuple(table.columns) + table_args), **table_kwargs)

        for index_args, index_kwargs in _table.indexes:
            Index(*index_args, **index_kwargs)
        del _table

    # Map the table to the object
    mapper_args, mapper_kwargs = sanitize_args(cls.Attributes.sqla_mapper_args)

    _props = mapper_kwargs.get('properties', None)
    if _props is None:
        mapper_kwargs['properties'] = props
    else:
        props.update(_props)
        mapper_kwargs['properties'] = props

    _inc = mapper_kwargs.get('include_properties', None)
    if _inc is None:
        mapper_kwargs['include_properties'] = inc + props.keys()

    po = mapper_kwargs.get('polymorphic_on', None)
    if po is not None:
        if not isinstance(po, Column):
            mapper_kwargs['polymorphic_on'] = table.c[po]
        else:
            del mapper_kwargs['polymorphic_on']

    if base_mapper is not None:
        mapper_kwargs['inherits'] = base_mapper

    if inheritance is not _SINGLE:
        mapper_args = (table,) + mapper_args

    cls_mapper = mapper(cls, *mapper_args, **mapper_kwargs)

    def my_load_listener(target, context):
        d = target.__dict__

        for k, v in cls.get_flat_type_info(cls).items():
            if not k in d:
                if isclass(v) and issubclass(v, ComplexModelBase):
                    pass
                else:
                    d[k] = None

    event.listen(cls, 'load', my_load_listener)

    cls.__tablename__ = cls.Attributes.table_name
    cls.Attributes.sqla_mapper = cls.__mapper__ = cls_mapper
    cls.Attributes.sqla_table = cls.__table__ = table

    return table

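# Hedged usage sketch (assumed Spyne API; import paths and the pk=True
# shortcut may differ between versions, and none of this is part of the
# module above): a class deriving from TTableModel carries sqla_metadata and
# a table name in its Attributes, and gen_sqla_info is what turns it into a
# mapped SQLAlchemy Table -- normally invoked implicitly at class-definition
# time.
def _gen_sqla_info_example():
    from sqlalchemy import MetaData, create_engine
    from spyne.model.complex import TTableModel
    from spyne.model.primitive import Integer32, Unicode

    TableModel = TTableModel(MetaData())

    class SomeUser(TableModel):
        __tablename__ = 'some_user'

        id = Integer32(pk=True)
        name = Unicode(64)

    engine = create_engine('sqlite://')
    TableModel.Attributes.sqla_metadata.create_all(engine)
    return SomeUser.__table__  # the Table produced by gen_sqla_info
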
def gen_sqla_info(cls, cls_bases=()):
    """Return SQLAlchemy table object corresponding to the passed Spyne object.
    Also maps given class to the returned table.
    """

    metadata = cls.Attributes.sqla_metadata
    table_name = cls.Attributes.table_name

    inc = []  # include_properties

    # check inheritance
    inheritance = None
    base_class = getattr(cls, '__extends__', None)
    if base_class is None:
        for b in cls_bases:
            if getattr(b, '_type_info', None) is not None and b.__mixin__:
                base_class = b

    if base_class is not None:
        base_table_name = base_class.Attributes.table_name
        if base_table_name is not None:
            if base_table_name == table_name:
                inheritance = _SINGLE
            else:
                inheritance = _JOINED
                raise NotImplementedError("Joined table inheritance is not yet "
                                          "implemented.")

            inc_prop = base_class.Attributes.sqla_mapper.include_properties
            if inc_prop is not None:
                inc.extend(inc_prop)

            exc_prop = base_class.Attributes.sqla_mapper.exclude_properties
            if exc_prop is not None:
                inc = [_p for _p in inc if not _p in exc_prop]

    # check whether the object already has a table
    table = None
    if table_name in metadata.tables:
        table = metadata.tables[table_name]
    else:
        # We need FakeTable because table_args can contain all sorts of stuff
        # that can require a fully-constructed table, and we don't have that
        # information here yet.
        table = _FakeTable()

    # check whether the base classes are already mapped
    base_mapper = None
    if base_class is not None:
        base_mapper = base_class.Attributes.sqla_mapper

    if base_mapper is None:
        for b in cls_bases:
            bm = _mapper_registry.get(b, None)
            if bm is not None:
                assert base_mapper is None, "There can be only one base mapper."
                base_mapper = bm
                inheritance = _SINGLE

    props = {}

    # For each Spyne field
    for k, v in cls._type_info.items():
        if v.Attributes.exc_table:
            continue

        col_args, col_kwargs = sanitize_args(v.Attributes.sqla_column_args)
        _sp_attrs_to_sqla_constraints(cls, v, col_kwargs)

        t = get_sqlalchemy_type(v)

        if t is None:
            p = getattr(v.Attributes, 'store_as', None)
            if p is not None and issubclass(v, Array) and isinstance(p, c_table):
                child_cust, = v._type_info.values()
                if child_cust.__orig__ is not None:
                    child = child_cust.__orig__
                else:
                    child = child_cust

                if p.multi != False:  # many to many
                    col_own, col_child = _get_cols_m2m(cls, k, v,
                                                       p.left, p.right)
                    p.left = col_own.key
                    p.right = col_child.key

                    if p.multi == True:
                        rel_table_name = '_'.join([cls.Attributes.table_name, k])
                    else:
                        rel_table_name = p.multi

                    # FIXME: Handle the case where the table already exists.
                    rel_t = Table(rel_table_name, metadata,
                                  *(col_own, col_child))

                    props[k] = relationship(child, secondary=rel_t,
                                            backref=p.backref)

                elif issubclass(child, SimpleModel):  # one to many simple type
                    # get left (fk) column info
                    _gen_col = _get_col_o2m(cls, p.left)
                    col_info = _gen_col.next()  # gets the column name
                    # FIXME: Add support for multi-column primary keys.
                    p.left, child_left_col_type = col_info[0]
                    child_left_col_name = p.left

                    # get right (data) column info
                    child_right_col_type = get_sqlalchemy_type(child_cust)
                    child_right_col_name = p.right  # this is the data column
                    if child_right_col_name is None:
                        child_right_col_name = k

                    # get table name
                    child_table_name = child_cust.Attributes.table_name
                    if child_table_name is None:
                        child_table_name = '_'.join([table_name, k])

                    if child_table_name in metadata.tables:
                        # table exists, get relevant info
                        child_t = metadata.tables[child_table_name]
                        assert child_right_col_type is \
                                  child_t.c[child_right_col_name].type.__class__
                        assert child_left_col_type is \
                                  child_t.c[child_left_col_name].type.__class__

                        child_right_col = child_t.c[child_right_col_name]
                        child_left_col = child_t.c[child_left_col_name]

                    else:
                        # table does not exist, generate table
                        child_right_col = Column(child_right_col_name,
                                                 child_right_col_type)
                        _sp_attrs_to_sqla_constraints(cls, child_cust,
                                                      col=child_right_col)

                        child_left_col = _gen_col.next()
                        _sp_attrs_to_sqla_constraints(cls, child_cust,
                                                      col=child_left_col)

                        child_t = Table(child_table_name, metadata,
                            Column('id', sqlalchemy.Integer, primary_key=True),
                            child_left_col, child_right_col)

                    # generate temporary class for association proxy
                    # (generates camelcase class name)
                    cls_name = ''.join(x.capitalize() or '_'
                                       for x in child_table_name.split('_'))

                    def _i(self, *args):
                        setattr(self, child_right_col_name, args[0])

                    cls_ = type("_" + cls_name, (object,), {'__init__': _i})
                    own_mapper(cls_)(cls_, child_t)
                    props["_" + k] = relationship(cls_)

                    # generate association proxy
                    setattr(cls, k,
                            association_proxy("_" + k, child_right_col_name))

                else:  # one to many complex type
                    _gen_col = _get_col_o2m(cls, p.right)
                    col_info = _gen_col.next()  # gets the column name
                    # FIXME: Add support for multi-column primary keys.
                    p.right, col_type = col_info[0]

                    assert p.left is None, \
                        "'left' is ignored in one-to-many relationships " \
                        "with complex types (because they already have a " \
                        "table). You probably meant to use 'right'."

                    child_t = child.__table__
                    if p.right in child_t.c:
                        # FIXME: This branch MUST be tested.
                        assert col_type is child_t.c[p.right].type.__class__

                        # if the column is there, the decision about whether
                        # it should be in child's mapper should also have been
                        # made.
                        #
                        # so, not adding the child column to the child mapper
                        # here.
                        col = child_t.c[p.right]

                    else:
                        col = _gen_col.next()
                        _sp_attrs_to_sqla_constraints(cls, child_cust, col=col)
                        child_t.append_column(col)
                        child.__mapper__.add_property(col.name, col)

                    props[k] = relationship(child, foreign_keys=[col],
                                            backref=p.backref)

            elif p is not None and issubclass(v, ComplexModelBase):
                # v has the Attribute values we need whereas real_v is what the
                # user instantiates (thus what sqlalchemy needs)
                if v.__orig__ is None:  # vanilla class
                    real_v = v
                else:  # customized class
                    real_v = v.__orig__

                if isinstance(p, c_table):
                    assert not getattr(p, 'multi', False), (
                        'Storing a single element-type using a '
                        'relation table is pointless.')

                    assert p.right is None, "'right' is ignored in a " \
                                            "one-to-one relationship"

                    col = _get_col_o2o(cls, k, v, p.left)
                    rel = relationship(real_v, uselist=False,
                                       foreign_keys=[col], backref=p.backref)

                    p.left = col.key
                    props[k] = rel

                    _gen_index_info(table, table_name, col, k, v)

                elif isinstance(p, c_xml):
                    if k in table.c:
                        col = table.c[k]
                    else:
                        col = Column(k, PGObjectXml(v, p.root_tag, p.no_ns),
                                     *col_args, **col_kwargs)

                elif isinstance(p, c_json):
                    if k in table.c:
                        col = table.c[k]
                    else:
                        col = Column(k, PGObjectJson(v,
                                            ignore_wrappers=p.ignore_wrappers,
                                            complex_as=p.complex_as),
                                     *col_args, **col_kwargs)

                elif isinstance(p, c_msgpack):
                    raise NotImplementedError()

                else:
                    raise ValueError(p)

                props[col.name] = col
                if not k in table.c:
                    table.append_column(col)

            else:
                logger.debug("Skipping %s.%s.%s: %r, store_as: %r" % (
                             cls.get_namespace(), cls.get_type_name(), k, v, p))

        else:
            if k in table.c:
                col = table.c[k]
            else:
                col = Column(k, t, *col_args, **col_kwargs)
                table.append_column(col)
                _gen_index_info(table, table_name, col, k, v)

            if not v.Attributes.exc_mapper:
                props[k] = col

    if isinstance(table, _FakeTable):
        _table = table
        table_args, table_kwargs = sanitize_args(cls.Attributes.sqla_table_args)
        table = Table(table_name, metadata,
                      *(tuple(table.columns) + table_args), **table_kwargs)

        for index_args, index_kwargs in _table.indexes:
            Index(*index_args, **index_kwargs)
        del _table

    # Map the table to the object
    mapper_args, mapper_kwargs = sanitize_args(cls.Attributes.sqla_mapper_args)

    _props = mapper_kwargs.get('properties', None)
    if _props is None:
        mapper_kwargs['properties'] = props
    else:
        props.update(_props)
        mapper_kwargs['properties'] = props

    _inc = mapper_kwargs.get('include_properties', None)
    if _inc is None:
        mapper_kwargs['include_properties'] = inc + props.keys()

    po = mapper_kwargs.get('polymorphic_on', None)
    if po is not None:
        if not isinstance(po, Column):
            mapper_kwargs['polymorphic_on'] = table.c[po]
        else:
            del mapper_kwargs['polymorphic_on']

    if base_mapper is not None:
        mapper_kwargs['inherits'] = base_mapper

    if inheritance is not _SINGLE:
        mapper_args = (table,) + mapper_args

    cls_mapper = mapper(cls, *mapper_args, **mapper_kwargs)

    def my_load_listener(target, context):
        d = target.__dict__

        for k, v in cls.get_flat_type_info(cls).items():
            if not k in d:
                if isclass(v) and issubclass(v, ComplexModelBase):
                    pass
                else:
                    d[k] = None

    event.listen(cls, 'load', my_load_listener)

    cls.__tablename__ = cls.Attributes.table_name
    cls.Attributes.sqla_mapper = cls.__mapper__ = cls_mapper
    cls.Attributes.sqla_table = cls.__table__ = table

    return table

def gen_sqla_info(cls, cls_bases=()):
    """Return SQLAlchemy table object corresponding to the passed Spyne object.
    Also maps given class to the returned table.
    """

    metadata = cls.Attributes.sqla_metadata
    table_name = cls.Attributes.table_name

    inc = []  # include_properties

    # check inheritance
    inheritance = None
    base_class = getattr(cls, '__extends__', None)
    if base_class is None:
        for b in cls_bases:
            if getattr(b, '_type_info', None) is not None and b.__mixin__:
                base_class = b

    else:
        base_table_name = base_class.Attributes.table_name
        if base_table_name is not None:
            if base_table_name == table_name:
                inheritance = _SINGLE
            else:
                inheritance = _JOINED
                raise NotImplementedError("Joined table inheritance is not yet "
                                          "implemented.")

            inc_prop = base_class.Attributes.sqla_mapper.include_properties
            if inc_prop is not None:
                inc.extend(inc_prop)

            exc_prop = base_class.Attributes.sqla_mapper.exclude_properties
            if exc_prop is not None:
                inc = [_p for _p in inc if not _p in exc_prop]

    # check whether the object is already mapped
    table = None
    if table_name in metadata.tables:
        if inheritance is None:
            return metadata.tables[table_name]
        else:
            table = base_class.Attributes.sqla_table
    else:
        # We need FakeTable because table_args can contain all sorts of stuff
        # that can require a fully-constructed table, and we don't have that
        # information here yet.
        table = _FakeTable()

    props = {}

    # For each Spyne field
    for k, v in cls._type_info.items():
        if v.Attributes.exc_table:
            continue

        col_args, col_kwargs = sanitize_args(v.Attributes.sqla_column_args)
        _sp_attrs_to_sqla_constraints(cls, v, col_kwargs)

        t = get_sqlalchemy_type(v)

        if t is None:
            p = getattr(v.Attributes, 'store_as', None)
            if p is not None and issubclass(v, Array) and isinstance(p, c_table):
                child_cust, = v._type_info.values()
                if child_cust.__orig__ is not None:
                    child = child_cust.__orig__
                else:
                    child = child_cust

                if p.multi != False:  # many to many
                    col_own, col_child = _get_cols_m2m(cls, k, v,
                                                       p.left, p.right)
                    p.left = col_own.key
                    p.right = col_child.key

                    if p.multi == True:
                        rel_table_name = '_'.join([cls.Attributes.table_name, k])
                    else:
                        rel_table_name = p.multi

                    # FIXME: Handle the case where the table already exists.
                    rel_t = Table(rel_table_name, metadata,
                                  *(col_own, col_child))

                    props[k] = relationship(child, secondary=rel_t,
                                            backref=p.backref)

                else:  # one to many
                    assert p.left is None, "'left' is ignored in one-to-many " \
                                           "relationships. You probably meant " \
                                           "to use 'right'."

                    child_t = child.__table__
                    _gen_col = _get_col_o2m(cls, p.right)
                    col_info = _gen_col.next()  # gets the column name
                    # FIXME: Add support for multi-column primary keys.
                    p.right, col_type = col_info[0]

                    if p.right in child_t.c:
                        # FIXME: This branch MUST be tested.
                        assert col_type == child_t.c[p.right].type

                        # if the column is there, the decision about whether
                        # it should be in child's mapper should also have been
                        # made.
                        #
                        # so, not adding the child column to the child mapper
                        # here.

                    else:
                        col = _gen_col.next()
                        _sp_attrs_to_sqla_constraints(cls, child_cust, col=col)
                        child_t.append_column(col)
                        child.__mapper__.add_property(col.name, col)

                    props[k] = relationship(child)

            elif p is not None and issubclass(v, ComplexModelBase):
                # v has the Attribute values we need whereas real_v is what the
                # user instantiates (thus what sqlalchemy needs)
                if v.__orig__ is None:  # vanilla class
                    real_v = v
                else:  # customized class
                    real_v = v.__orig__

                if isinstance(p, c_table):
                    assert not getattr(p, 'multi', False), (
                        'Storing a single element-type using a '
                        'relation table is pointless.')

                    assert p.right is None, "'right' is ignored in a " \
                                            "one-to-one relationship"

                    col = _get_col_o2o(cls, k, v, p.left)
                    rel = relationship(real_v, uselist=False)

                    p.left = col.key
                    props[k] = rel

                elif isinstance(p, c_xml):
                    if k in table.c:
                        col = table.c[k]
                    else:
                        col = Column(k, PGObjectXml(v, p.root_tag, p.no_ns),
                                     *col_args, **col_kwargs)

                elif isinstance(p, c_json):
                    if k in table.c:
                        col = table.c[k]
                    else:
                        col = Column(k, PGObjectJson(v, p.skip_depth),
                                     *col_args, **col_kwargs)

                elif isinstance(p, c_msgpack):
                    raise NotImplementedError()

                else:
                    raise ValueError(p)

                props[col.name] = col
                if not k in table.c:
                    table.append_column(col)

            else:
                logger.debug("Skipping %s.%s.%s: %r, store_as: %r" % (
                             cls.get_namespace(), cls.get_type_name(), k, v, p))

        else:
            unique = v.Attributes.unique
            index = v.Attributes.index
            if unique and not index:
                index = True

            try:
                index_name, index_method = v.Attributes.index
            except (TypeError, ValueError):
                index_name = "%s_%s%s" % (table_name, k,
                                          '_unique' if unique else '')
                index_method = v.Attributes.index

            if k in table.c:
                col = table.c[k]
            else:
                col = Column(k, t, *col_args, **col_kwargs)
                table.append_column(col)

                if index in (False, None):
                    pass
                else:
                    if index == True:
                        index_args = (index_name, col), dict(unique=unique)
                    else:
                        index_args = (index_name, col), dict(unique=unique,
                                                postgresql_using=index_method)

                    if isinstance(table, _FakeTable):
                        table.indexes.append(index_args)
                    else:
                        Index(*index_args[0], **index_args[1])

            if not v.Attributes.exc_mapper:
                props[k] = col

    if isinstance(table, _FakeTable):
        _table = table
        table_args, table_kwargs = sanitize_args(cls.Attributes.sqla_table_args)
        table = Table(table_name, metadata,
                      *(tuple(table.columns) + table_args), **table_kwargs)

        for index_args, index_kwargs in _table.indexes:
            Index(*index_args, **index_kwargs)
        del _table

    # Map the table to the object
    mapper_args, mapper_kwargs = sanitize_args(cls.Attributes.sqla_mapper_args)

    _props = mapper_kwargs.get('properties', None)
    if _props is None:
        mapper_kwargs['properties'] = props
    else:
        props.update(_props)
        mapper_kwargs['properties'] = props

    _inc = mapper_kwargs.get('include_properties', None)
    if _inc is None:
        mapper_kwargs['include_properties'] = inc + props.keys()

    po = mapper_kwargs.get('polymorphic_on', None)
    if po is not None:
        if not isinstance(po, Column):
            mapper_kwargs['polymorphic_on'] = table.c[po]
        else:
            del mapper_kwargs['polymorphic_on']

    if inheritance is not None:
        mapper_kwargs['inherits'] = base_class.Attributes.sqla_mapper

    if inheritance is not _SINGLE:
        mapper_args = (table,) + mapper_args

    cls_mapper = mapper(cls, *mapper_args, **mapper_kwargs)

    cls.__tablename__ = cls.Attributes.table_name
    cls.Attributes.sqla_mapper = cls.__mapper__ = cls_mapper
    cls.Attributes.sqla_table = cls.__table__ = table

    return table

def _add_complex_type(cls, props, table, k, v):
    p = getattr(v.Attributes, 'store_as', None)
    table_name = cls.Attributes.table_name

    col_args, col_kwargs = sanitize_args(v.Attributes.sqla_column_args)
    _sp_attrs_to_sqla_constraints(cls, v, col_kwargs)

    if isinstance(p, c_table):
        if _is_array(v):
            child_cust = v
            if issubclass(v, Array):
                child_cust, = v._type_info.values()

            child = child_cust
            if child_cust.__orig__ is not None:
                child = child_cust.__orig__

            if p.multi != False:  # many to many
                _gen_array_m2m(cls, props, k, child, p)

            elif issubclass(child, SimpleModel):  # one to many simple type
                _gen_array_simple(cls, props, k, child_cust, p)

            else:  # one to many complex type
                _gen_array_o2m(cls, props, k, child, child_cust, p)

        else:
            # v has the Attribute values we need whereas real_v is what the
            # user instantiates (thus what sqlalchemy needs)
            if v.__orig__ is None:  # vanilla class
                real_v = v
            else:  # customized class
                real_v = v.__orig__

            assert not getattr(p, 'multi', False), (
                'Storing a single element-type using a '
                'relation table is pointless.')

            assert p.right is None, "'right' is ignored in a one-to-one " \
                                    "relationship"

            col = _get_col_o2o(cls, k, v, p.left)
            p.left = col.name

            if col.name in table.c:
                col = table.c[col.name]
            else:
                table.append_column(col)

            rel = relationship(real_v, uselist=False, cascade=p.cascade,
                        foreign_keys=[col], backref=p.backref, lazy=p.lazy)

            _gen_index_info(table, table_name, col, k, v)

            props[k] = rel
            props[col.name] = col

    elif isinstance(p, c_xml):
        if k in table.c:
            col = table.c[k]
        else:
            t = PGObjectXml(v, p.root_tag, p.no_ns)
            col = Column(k, t, *col_args, **col_kwargs)

        props[k] = col
        if not k in table.c:
            table.append_column(col)

    elif isinstance(p, c_json):
        if k in table.c:
            col = table.c[k]
        else:
            t = PGObjectJson(v, ignore_wrappers=p.ignore_wrappers,
                                complex_as=p.complex_as)
            col = Column(k, t, *col_args, **col_kwargs)

        props[k] = col
        if not k in table.c:
            table.append_column(col)

    elif isinstance(p, c_msgpack):
        raise NotImplementedError(c_msgpack)

    elif p is None:
        pass

    else:
        raise ValueError(p)

def _add_complex_type(cls, props, table, k, v):
    if issubclass(v, File):
        return _add_file_type(cls, props, table, k, v)

    p = getattr(v.Attributes, 'store_as', None)
    col_args, col_kwargs = sanitize_args(v.Attributes.sqla_column_args)
    _sp_attrs_to_sqla_constraints(cls, v, col_kwargs)

    if isinstance(p, c_table):
        if _is_array(v):
            child_cust = v
            if issubclass(v, Array):
                child_cust, = v._type_info.values()

            child = child_cust
            if child_cust.__orig__ is not None:
                child = child_cust.__orig__

            if p.multi != False:  # many to many
                _gen_array_m2m(cls, props, k, child, p)

            elif issubclass(child, SimpleModel):  # one to many simple type
                _gen_array_simple(cls, props, k, child_cust, p)

            else:  # one to many complex type
                _gen_array_o2m(cls, props, k, child, child_cust, p)

        else:
            # v has the Attribute values we need whereas real_v is what the
            # user instantiates (thus what sqlalchemy needs)
            if v.__orig__ is None:  # vanilla class
                real_v = v
            else:  # customized class
                real_v = v.__orig__

            assert not getattr(p, 'multi', False), (
                'Storing a single element-type using a '
                'relation table is pointless.')

            assert p.right is None, "'right' is ignored in a one-to-one " \
                                    "relationship"

            col = _get_col_o2o(cls, k, v, p.left)
            p.left = col.name

            if col.name in table.c:
                col = table.c[col.name]
                if col_kwargs.get('nullable') is False:
                    col.nullable = False
            else:
                table.append_column(col)

            rel = relationship(real_v, uselist=False, cascade=p.cascade,
                               foreign_keys=[col],
                               back_populates=p.back_populates,
                               backref=p.backref, lazy=p.lazy)

            _gen_index_info(table, col, k, v)

            props[k] = rel
            props[col.name] = col

    elif isinstance(p, c_xml):
        if k in table.c:
            col = table.c[k]
        else:
            t = PGObjectXml(v, p.root_tag, p.no_ns, p.pretty_print)
            col = Column(k, t, *col_args, **col_kwargs)

        props[k] = col
        if not k in table.c:
            table.append_column(col)

    elif isinstance(p, c_json):
        if k in table.c:
            col = table.c[k]
        else:
            t = PGObjectJson(v, ignore_wrappers=p.ignore_wrappers,
                                complex_as=p.complex_as)
            col = Column(k, t, *col_args, **col_kwargs)

        props[k] = col
        if not k in table.c:
            table.append_column(col)

    elif isinstance(p, c_msgpack):
        raise NotImplementedError(c_msgpack)

    elif p is None:
        pass

    else:
        raise ValueError(p)

def gen_sqla_info(cls, cls_bases=()):
    """Return SQLAlchemy table object corresponding to the passed Spyne object.
    Also maps given class to the returned table.
    """

    metadata = cls.Attributes.sqla_metadata
    table_name = cls.Attributes.table_name

    inc = []  # include_properties

    # check inheritance
    inheritance = None
    base_class = getattr(cls, '__extends__', None)
    if base_class is None:
        for b in cls_bases:
            if getattr(b, '_type_info', None) is not None and b.__mixin__:
                base_class = b

    if base_class is not None:
        base_table_name = base_class.Attributes.table_name
        if base_table_name is not None:
            if base_table_name == table_name:
                inheritance = _SINGLE
            else:
                inheritance = _JOINED
                raise NotImplementedError("Joined table inheritance is not yet "
                                          "implemented.")

            inc_prop = base_class.Attributes.sqla_mapper.include_properties
            if inc_prop is not None:
                inc.extend(inc_prop)

            exc_prop = base_class.Attributes.sqla_mapper.exclude_properties
            if exc_prop is not None:
                inc = [_p for _p in inc if not _p in exc_prop]

    # check whether the object already has a table
    table = None
    if table_name in metadata.tables:
        table = metadata.tables[table_name]
    else:
        # We need FakeTable because table_args can contain all sorts of stuff
        # that can require a fully-constructed table, and we don't have that
        # information here yet.
        table = _FakeTable()

    # check whether the base classes are already mapped
    base_mapper = None
    if base_class is not None:
        base_mapper = base_class.Attributes.sqla_mapper

    if base_mapper is None:
        for b in cls_bases:
            bm = _mapper_registry.get(b, None)
            if bm is not None:
                assert base_mapper is None, "There can be only one base mapper."
                base_mapper = bm
                inheritance = _SINGLE

    props = {}

    # For each Spyne field
    for k, v in cls._type_info.items():
        if v.Attributes.exc_table:
            continue

        col_args, col_kwargs = sanitize_args(v.Attributes.sqla_column_args)
        _sp_attrs_to_sqla_constraints(cls, v, col_kwargs)

        t = get_sqlalchemy_type(v)

        if t is None:
            p = getattr(v.Attributes, 'store_as', None)
            if p is not None and issubclass(v, Array) and isinstance(p, c_table):
                child_cust, = v._type_info.values()
                if child_cust.__orig__ is not None:
                    child = child_cust.__orig__
                else:
                    child = child_cust

                if p.multi != False:  # many to many
                    col_own, col_child = _get_cols_m2m(cls, k, v,
                                                       p.left, p.right)
                    p.left = col_own.key
                    p.right = col_child.key

                    if p.multi == True:
                        rel_table_name = '_'.join([cls.Attributes.table_name, k])
                    else:
                        rel_table_name = p.multi

                    # FIXME: Handle the case where the table already exists.
                    rel_t = Table(rel_table_name, metadata,
                                  *(col_own, col_child))

                    props[k] = relationship(child, secondary=rel_t,
                                            backref=p.backref)

                else:  # one to many
                    assert p.left is None, "'left' is ignored in one-to-many " \
                                           "relationships. You probably meant " \
                                           "to use 'right'."

                    child_t = child.__table__
                    _gen_col = _get_col_o2m(cls, p.right)
                    col_info = _gen_col.next()  # gets the column name
                    # FIXME: Add support for multi-column primary keys.
                    p.right, col_type = col_info[0]

                    if p.right in child_t.c:
                        # FIXME: This branch MUST be tested.
                        assert col_type == child_t.c[p.right].type

                        # if the column is there, the decision about whether
                        # it should be in child's mapper should also have been
                        # made.
                        #
                        # so, not adding the child column to the child mapper
                        # here.

                    else:
                        col = _gen_col.next()
                        _sp_attrs_to_sqla_constraints(cls, child_cust, col=col)
                        child_t.append_column(col)
                        child.__mapper__.add_property(col.name, col)

                    props[k] = relationship(child)

            elif p is not None and issubclass(v, ComplexModelBase):
                # v has the Attribute values we need whereas real_v is what the
                # user instantiates (thus what sqlalchemy needs)
                if v.__orig__ is None:  # vanilla class
                    real_v = v
                else:  # customized class
                    real_v = v.__orig__

                if isinstance(p, c_table):
                    assert not getattr(p, 'multi', False), (
                        'Storing a single element-type using a '
                        'relation table is pointless.')

                    assert p.right is None, "'right' is ignored in a " \
                                            "one-to-one relationship"

                    col = _get_col_o2o(cls, k, v, p.left)
                    rel = relationship(real_v, uselist=False)

                    p.left = col.key
                    props[k] = rel

                elif isinstance(p, c_xml):
                    if k in table.c:
                        col = table.c[k]
                    else:
                        col = Column(k, PGObjectXml(v, p.root_tag, p.no_ns),
                                     *col_args, **col_kwargs)

                elif isinstance(p, c_json):
                    if k in table.c:
                        col = table.c[k]
                    else:
                        col = Column(k, PGObjectJson(v, p.skip_depth),
                                     *col_args, **col_kwargs)

                elif isinstance(p, c_msgpack):
                    raise NotImplementedError()

                else:
                    raise ValueError(p)

                props[col.name] = col
                if not k in table.c:
                    table.append_column(col)

            else:
                logger.debug("Skipping %s.%s.%s: %r, store_as: %r" % (
                             cls.get_namespace(), cls.get_type_name(), k, v, p))

        else:
            unique = v.Attributes.unique
            index = v.Attributes.index
            if unique and not index:
                index = True

            try:
                index_name, index_method = v.Attributes.index
            except (TypeError, ValueError):
                index_name = "%s_%s%s" % (table_name, k,
                                          '_unique' if unique else '')
                index_method = v.Attributes.index

            if k in table.c:
                col = table.c[k]
            else:
                col = Column(k, t, *col_args, **col_kwargs)
                table.append_column(col)

                if index in (False, None):
                    pass
                else:
                    if index == True:
                        index_args = (index_name, col), dict(unique=unique)
                    else:
                        index_args = (index_name, col), dict(unique=unique,
                                                postgresql_using=index_method)

                    if isinstance(table, _FakeTable):
                        table.indexes.append(index_args)
                    else:
                        Index(*index_args[0], **index_args[1])

            if not v.Attributes.exc_mapper:
                props[k] = col

    if isinstance(table, _FakeTable):
        _table = table
        table_args, table_kwargs = sanitize_args(cls.Attributes.sqla_table_args)
        table = Table(table_name, metadata,
                      *(tuple(table.columns) + table_args), **table_kwargs)

        for index_args, index_kwargs in _table.indexes:
            Index(*index_args, **index_kwargs)
        del _table

    # Map the table to the object
    mapper_args, mapper_kwargs = sanitize_args(cls.Attributes.sqla_mapper_args)

    _props = mapper_kwargs.get('properties', None)
    if _props is None:
        mapper_kwargs['properties'] = props
    else:
        props.update(_props)
        mapper_kwargs['properties'] = props

    _inc = mapper_kwargs.get('include_properties', None)
    if _inc is None:
        mapper_kwargs['include_properties'] = inc + props.keys()

    po = mapper_kwargs.get('polymorphic_on', None)
    if po is not None:
        if not isinstance(po, Column):
            mapper_kwargs['polymorphic_on'] = table.c[po]
        else:
            del mapper_kwargs['polymorphic_on']

    if base_mapper is not None:
        mapper_kwargs['inherits'] = base_mapper

    if inheritance is not _SINGLE:
        mapper_args = (table,) + mapper_args

    cls_mapper = mapper(cls, *mapper_args, **mapper_kwargs)

    cls.__tablename__ = cls.Attributes.table_name
    cls.Attributes.sqla_mapper = cls.__mapper__ = cls_mapper
    cls.Attributes.sqla_table = cls.__table__ = table

    return table

def gen_sqla_info(cls, cls_bases=()):
    """Return SQLAlchemy table object corresponding to the passed Spyne object.
    Also maps given class to the returned table.
    """

    metadata = cls.Attributes.sqla_metadata
    table_name = cls.Attributes.table_name

    inc = []  # include_properties

    # check inheritance
    inheritance = None
    base_class = getattr(cls, '__extends__', None)
    if base_class is None:
        for b in cls_bases:
            if getattr(b, '_type_info', None) is not None and b.__mixin__:
                base_class = b

    if base_class is not None:
        base_table_name = base_class.Attributes.table_name
        if base_table_name is not None:
            if base_table_name == table_name:
                inheritance = _SINGLE
            else:
                inheritance = _JOINED
                raise NotImplementedError("Joined table inheritance is not yet "
                                          "implemented.")

            inc_prop = base_class.Attributes.sqla_mapper.include_properties
            if inc_prop is not None:
                inc.extend(inc_prop)

    # check whether the object is already mapped
    table = None
    if table_name in metadata.tables:
        if inheritance is None:
            return metadata.tables[table_name]
        else:
            table = base_class.Attributes.sqla_table
    else:
        # We need FakeTable because table_args can contain all sorts of stuff
        # that can require a fully-constructed table, and we don't have that
        # information here yet.
        table = _FakeTable()

    props = {}

    # For each Spyne field
    for k, v in cls._type_info.items():
        if v.Attributes.exc_table:
            continue

        col_args, col_kwargs = sanitize_args(v.Attributes.sqla_column_args)
        if v.Attributes.nullable == False:
            col_kwargs['nullable'] = False

        if k in table.c:
            continue

        t = get_sqlalchemy_type(v)

        if t is None:
            p = getattr(v.Attributes, 'store_as', None)
            if p is not None and issubclass(v, Array) and isinstance(p, c_table):
                child, = v._type_info.values()
                if child.__orig__ is not None:
                    child = child.__orig__

                if p.multi != False:  # many to many
                    col_own, col_child = _get_cols_m2m(cls, k, v,
                                                       p.left, p.right)

                    if p.multi == True:
                        rel_table_name = '_'.join([cls.Attributes.table_name, k])
                    else:
                        rel_table_name = p.multi

                    # FIXME: Handle the case where the table already exists.
                    rel_t = Table(rel_table_name, metadata,
                                  *(col_own, col_child))

                    props[k] = relationship(child, secondary=rel_t)

                else:  # one to many
                    assert p.left is None, "'left' is ignored."

                    col = _get_col_o2m(cls, p.right)
                    child.__table__.append_column(col)
                    child.__mapper__.add_property(col.name, col)

                    props[k] = relationship(child)

            elif p is not None and issubclass(v, ComplexModelBase):
                # v has the Attribute values we need whereas real_v is what the
                # user instantiates (thus what sqlalchemy needs)
                if v.__orig__ is None:  # vanilla class
                    real_v = v
                else:  # customized class
                    real_v = v.__orig__

                if isinstance(p, c_table):
                    if getattr(p, 'multi', False):
                        raise Exception('Storing a single element-type using a '
                                        'relation table is pointless.')

                    assert p.right is None, "'right' is ignored"

                    col = _get_col_o2o(k, v, p.left)
                    rel = relationship(real_v, uselist=False)

                    props[k] = rel

                elif isinstance(p, c_xml):
                    col = Column(k, PGObjectXml(v, p.root_tag, p.no_ns),
                                 *col_args, **col_kwargs)

                elif isinstance(p, c_json):
                    col = Column(k, PGObjectJson(v, p.skip_depth),
                                 *col_args, **col_kwargs)

                elif isinstance(p, c_msgpack):
                    raise NotImplementedError()

                else:
                    raise ValueError(p)

                props[col.name] = col
                table.append_column(col)

            else:
                logger.debug("Skipping %s.%s.%s: %r, store_as: %r" % (
                             cls.get_namespace(), cls.get_type_name(), k, v, p))

        else:
            col = Column(k, t, *col_args, **col_kwargs)
            table.append_column(col)

            if not v.Attributes.exc_mapper:
                props[k] = col

    if isinstance(table, _FakeTable):
        table_args, table_kwargs = sanitize_args(cls.Attributes.sqla_table_args)
        table = Table(table_name, metadata,
                      *(tuple(table.columns) + table_args), **table_kwargs)

    # Map the table to the object
    mapper_args, mapper_kwargs = sanitize_args(cls.Attributes.sqla_mapper_args)

    _props = mapper_kwargs.get('properties', None)
    if _props is None:
        mapper_kwargs['properties'] = props
    else:
        props.update(_props)
        mapper_kwargs['properties'] = props

    _inc = mapper_kwargs.get('include_properties', None)
    if _inc is None:
        mapper_kwargs['include_properties'] = inc + props.keys()

    po = mapper_kwargs.get('polymorphic_on', None)
    if po is not None:
        if not isinstance(po, Column):
            mapper_kwargs['polymorphic_on'] = table.c[po]
        else:
            del mapper_kwargs['polymorphic_on']

    if inheritance is not None:
        mapper_kwargs['inherits'] = base_class.Attributes.sqla_mapper

    if inheritance is not _SINGLE:
        mapper_args = (table,) + mapper_args

    cls_mapper = mapper(cls, *mapper_args, **mapper_kwargs)

    cls.__tablename__ = cls.Attributes.table_name
    cls.Attributes.sqla_mapper = cls.__mapper__ = cls_mapper
    cls.Attributes.sqla_table = cls.__table__ = table

    return table
