def upgrade(migrate_engine):
    """Widen base_transfer_action.info to a 4 GiB (2**32 - 1 byte) binary column."""
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine
    # Reflect the existing table so ALTER targets the live schema.
    table = sqlalchemy.Table('base_transfer_action', meta, autoload=True)
    table.c.info.alter(type=types.Binary(4294967295))
def get_columns(self, engine, table_name, schema=None, **kw):
    """Return SQLAlchemy-style column descriptions for *table_name*.

    Each backend column (name, type-name) pair is mapped onto the dict
    shape SQLAlchemy's reflection API expects.  Backend types with no
    entry in the type mapping fall back to a generic ``types.Binary()``.
    The *schema* argument is accepted for interface compatibility but
    not consulted by the underlying driver call.
    """
    # Fix: the original leaked the connection returned by engine.connect();
    # use a context manager so it is released once the metadata is fetched.
    with engine.connect() as conn:
        cols = conn.connection.get_columns(table_name)

    def _coldef(name, type_name):
        # nullable/default are hard-coded: the driver metadata consulted
        # here does not report them.
        return {
            "name": name,
            "type": BQDialect.__TYPE_MAPPINGS.get(type_name, types.Binary()),
            "nullable": True,
            "default": None,
        }

    return [_coldef(*col) for col in cols]
class IP(types.TypeDecorator):
    """Implements an IP as an integer.

    Stored form is a 16-byte binary value.  Textual addresses are packed
    via ``inet.pton``; already-packed 4-byte values are widened with
    ``inet.IPV4PREFIX`` (presumably the IPv4-mapped IPv6 prefix -- confirm
    against the inet module).
    """

    impl = types.Binary(length=16)

    def __init__(self):
        pass

    def process_bind_param(self, value, engine):
        """Convert *value* to its packed 16-byte form for storage.

        Raises ValueError when the value is neither a parseable textual
        address nor a 4/16-byte packed one.
        """
        try:
            return inet.pton(value)
        # Fix: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.  Narrowed to Exception; fallback behavior kept.
        except Exception:
            # Not textual -- maybe it is already packed binary.
            l = len(value)
            if l == 4:
                return inet.IPV4PREFIX + value
            if l == 16:
                return value
            # ValueError (subclass of Exception) keeps existing handlers working.
            raise ValueError('Invalid IP:(' + value + ')')

    def process_result_value(self, value, engine):
        """Unpack the stored binary value back into a textual address."""
        return inet.ntop(value)

    def is_mutable(self):
        return False
def test_should_unknown_sqlalchemy_field_raise_exception():
    """An unmapped SQLAlchemy column type must be rejected with a clear error."""
    expected = "Don't know how to convert the SQLAlchemy field"
    with pytest.raises(Exception, match=expected):
        get_field(types.Binary())
class Assay(Base, AssayMixIn):
    """ORM model for a single assay run (one FSA source file)."""

    __tablename__ = 'assays'

    id = Column(types.Integer, primary_key=True)
    # Source file name; unique together with panel_id and sample_id
    # (see __table_args__ below).
    filename = Column(types.String(128), nullable=False, index=True)
    runtime = Column(types.DateTime, nullable=False)
    # rss/dp/score default to -1; presumably a "not yet computed" marker --
    # TODO confirm, inferred from the defaults only.
    rss = Column(types.Float, nullable=False, default=-1)
    dp = Column(types.Float, nullable=False, default=-1)
    score = Column(types.Float, nullable=False, default=-1)
    # Deferred: only loaded when accessed.
    z = deferred(Column(NPArray))
    ladder_peaks = Column(types.Integer, nullable=False, default=-1)
    size_standard = Column(types.String(32), nullable=False, default='')

    sample_id = Column(types.Integer,
                       ForeignKey('samples.id', ondelete='CASCADE'),
                       nullable=False)
    sample = relationship(Sample, uselist=False,
                          backref=backref('assays', lazy='dynamic'))

    panel_id = Column(types.Integer, ForeignKey('panels.id'), nullable=False)
    panel = relationship(Panel, uselist=False)

    # use_alter defers this FK so the circular assays<->channels dependency
    # can be created.
    ladder_id = Column(types.Integer,
                       ForeignKey('channels.id', use_alter=True,
                                  name='ladderchannel_fk'),
                       nullable=True)

    #channels = relationship('Channel', primaryjoin = "Assay.id == Channel.id", lazy='dynamic',
    #        post_update = True,
    #        backref = backref('assay', uselist=False))

    ladder = relationship('Channel', uselist=False,
                          primaryjoin="Assay.ladder_id == Channel.id")

    status = Column(types.String(32), nullable=False)
    # The following text columns are deferred (loaded on access).
    method = deferred(Column(types.String(16), nullable=False))
    report = deferred(Column(types.String(512), nullable=False, default=''))
    remark = deferred(Column(types.String(1024), nullable=False, default=''))
    exclude = deferred(Column(types.String(128), nullable=False, default=''))

    raw_data = deferred(Column(types.Binary(), nullable=False))
    """ raw data for this assay (FSA file content) """

    __table_args__ = (UniqueConstraint('filename', 'panel_id', 'sample_id'), )

    def new_channel(self, raw_data, data, dye, wavelen, status, median, mean,
                    max_height, min_height, std_dev,
                    initial_marker=None, initial_panel=None):
        """ create new channel and added to this assay """
        # NOTE(review): the raw_data parameter is never used below -- the
        # Channel is constructed with raw_data=data.  This looks like a bug;
        # confirm against callers before changing behavior.
        if not initial_marker:
            initial_marker = Marker.search('undefined',
                                           session=object_session(self))
        if not initial_panel:
            initial_panel = Panel.search('undefined',
                                         session=object_session(self))
        channel = Channel(raw_data=data, data=data, dye=dye, wavelen=wavelen,
                          status=status, median=median, mean=mean,
                          max_height=max_height, min_height=min_height,
                          std_dev=std_dev)
        channel.assay = self
        channel.marker = initial_marker
        channel.panel = initial_panel
        return channel

    def get_ladder(self):
        """ get ladder channel """
        # NOTE(review): assert is stripped under python -O; an explicit raise
        # would be safer for this invariant.
        assert self.ladder_id, "ERR/PROG - pls make sure ladder_id is not null!"
        session = object_session(self)
        return Channel.get(self.ladder_id, session)
# Session factory: sessions survive commit (expire_on_commit=False) and are
# tied into Zope transactions via the extension.
sm = sessionmaker(autoflush=True, bind=engine,
                  expire_on_commit=False,
                  extension=ZopeTransactionExtension())
meta.engine = engine
# Thread-local scoped session built on the factory above.
meta.Session = scoped_session(sm)

users_table = Table("users", metadata,
    Column('id', types.Integer, primary_key=True),
    Column('username', types.Unicode(50)),
    # ElGamal user's public key.
    Column('pubkey', types.Text),
    # Used in the login/authenticate challenge
    Column('logging_token', types.Binary(35)),
    # Time until the token is valid.
    Column('logging_timeout', types.DateTime),
    # This stamp is used to wipe users which haven't 'setup'
    # their account before this date/time
    Column('waiting_setup', types.DateTime, nullable=True),
    Column('created_time', types.DateTime, default=datetime.now),
    # Admin flag, allows to add users, and grant access.
    Column('is_admin', types.Boolean, default=False)
)

# NOTE(review): this Table definition is cut off at the end of the visible
# chunk -- the remaining columns and closing parenthesis lie beyond it.
usergroups_table = Table('users_groups', metadata,
    Column('id', types.Integer, primary_key=True),
    Column('user_id', types.Integer, ForeignKey('users.id')),
# "person" rows extend "node" rows: id is both primary key and FK to node.id
# (joined-table inheritance layout).
person = schema.Table(
    'person', meta.metadata,
    schema.Column('id', types.Integer,
                  schema.ForeignKey('node.id'), primary_key=True),
    schema.Column('user_id', types.Integer,
                  schema.ForeignKey('user.id'), nullable=True),
    schema.Column('event_id', types.Integer,
                  schema.ForeignKey('event.id'), nullable=False),
    schema.Column('fullname', types.Unicode(50), nullable=False),
    schema.Column('avatar', types.Binary()),
    schema.Column('details', types.Unicode(500)),
    # NOTE(review): default=datetime.now() and onupdate=datetime.now() CALL
    # now() once at import time, so every row gets the module-load timestamp.
    # Probably meant the callable datetime.now (no parentheses). Flagging
    # only -- this is a comment-only pass.
    schema.Column('created', types.DateTime(), default=datetime.now()),
    schema.Column('updated', types.DateTime(), onupdate=datetime.now()))

# NOTE(review): this Table definition is cut off at the end of the visible
# chunk -- the remaining columns and closing parenthesis lie beyond it.
article = schema.Table(
    'article', meta.metadata,
    schema.Column('id', types.Integer,
                  schema.ForeignKey('node.id'), primary_key=True),
    schema.Column('title', types.Unicode(50), nullable=False),
    schema.Column('path', types.Unicode(), nullable=False, unique=True),
    schema.Column('label', types.Unicode(50)),
    schema.Column('content', types.Unicode(), nullable=False),
    schema.Column('published', types.DateTime),