class Workspace(DeclarativeBase):
    """ORM model for a workspace holding content revisions, roles and a calendar."""

    __tablename__ = 'workspaces'

    workspace_id = Column(Integer, Sequence('seq__workspaces__workspace_id'),
                          autoincrement=True, primary_key=True)
    label = Column(Unicode(1024), unique=False, nullable=False, default='')
    description = Column(Text(), unique=False, nullable=False, default='')
    calendar_enabled = Column(Boolean, unique=False, nullable=False, default=False)
    # Default value datetime.utcnow, see:
    # http://stackoverflow.com/a/13370382/801924 (or http://pastebin.com/VLyWktUn)
    created = Column(DateTime, unique=False, nullable=False, default=datetime.utcnow)
    # Default value datetime.utcnow, see:
    # http://stackoverflow.com/a/13370382/801924 (or http://pastebin.com/VLyWktUn)
    updated = Column(DateTime, unique=False, nullable=False, default=datetime.utcnow)
    is_deleted = Column(Boolean, unique=False, nullable=False, default=False)

    revisions = relationship("ContentRevisionRO")

    @hybrid_property
    def contents(self) -> ['Content']:
        """Return the unique parent contents of this workspace's revisions."""
        # TODO BS 20161209: this ``revision.node.workspace`` makes a lot
        # of SQL queries!
        seen = []
        for rev in self.revisions:
            node = rev.node
            if node.workspace == self and node not in seen:
                seen.append(node)
        return seen

    @property
    def calendar_url(self) -> str:
        # TODO - 20160531 - Bastien: cyclic import if imported at top of file
        from tracim.lib.calendar import CalendarManager
        return CalendarManager(None).get_workspace_calendar_url(self.workspace_id)

    def get_user_role(self, user: User) -> int:
        """Return the role of *user* in this workspace, or NOT_APPLICABLE."""
        for role in user.roles:
            if role.workspace.workspace_id == self.workspace_id:
                return role.role
        return UserRoleInWorkspace.NOT_APPLICABLE

    def get_label(self):
        """ this method is for interoperability with Content class"""
        return self.label

    def get_allowed_content_types(self):
        # @see Content.get_allowed_content_types()
        return [ContentType('folder')]

    def get_valid_children(
            self,
            content_types: list = None,
            show_deleted: bool = False,
            show_archived: bool = False,
    ):
        """Yield direct, visible children, optionally filtered by type."""
        for child in self.contents:
            # We search only direct children.
            if child.parent:
                continue
            if child.is_deleted and not show_deleted:
                continue
            if child.is_archived and not show_archived:
                continue
            if not content_types or child.type in content_types:
                yield child
class FacebookConfig(Base):
    """Key/value configuration entries for the Facebook integration."""

    __tablename__ = 'FBK_CONFIG'

    id = Column(NUMBER, Sequence('FBK_CONFIG_SEQ'), primary_key=True, name='ID')
    key = Column(VARCHAR(100), name='KEY')
    value = Column(VARCHAR(200), name='VALUE')
    description = Column(VARCHAR(200), name='DESCRIPTION')
class Alumno(Base := connector.Manager.Base if False else connector.Manager.Base):
    """A student record, stored in the ``users`` table."""

    __tablename__ = 'users'

    id = Column(Integer, Sequence('codigo_alumno'), primary_key=True)
    nombre = Column(String(20))     # first name
    apellido = Column(String(30))   # last name
    carrera = Column(String(50))    # degree program
def test_create_drop_ddl(self):
    """Verify the DDL emitted for CREATE/DROP SEQUENCE across option combos."""
    create_cases = [
        (Sequence("foo_seq"),
         "CREATE SEQUENCE foo_seq START WITH 1"),
        (Sequence("foo_seq", start=5),
         "CREATE SEQUENCE foo_seq START WITH 5"),
        (Sequence("foo_seq", increment=2),
         "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 1"),
        (Sequence("foo_seq", increment=2, start=5),
         "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 5"),
        (Sequence("foo_seq", increment=2, start=0, minvalue=0),
         "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 0 MINVALUE 0"),
        (Sequence("foo_seq", increment=2, start=1, maxvalue=5),
         "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 1 MAXVALUE 5"),
        (Sequence("foo_seq", increment=2, start=1, nomaxvalue=True),
         "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 1 NO MAXVALUE"),
        (Sequence("foo_seq", increment=2, start=0, nominvalue=True),
         "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 0 NO MINVALUE"),
        (Sequence("foo_seq", start=1, maxvalue=10, cycle=True),
         "CREATE SEQUENCE foo_seq START WITH 1 MAXVALUE 10 CYCLE"),
        (Sequence("foo_seq", cache=1000, order=True),
         "CREATE SEQUENCE foo_seq START WITH 1 CACHE 1000 ORDER"),
        (Sequence("foo_seq", order=True),
         "CREATE SEQUENCE foo_seq START WITH 1 ORDER"),
    ]
    for seq, expected_sql in create_cases:
        self.assert_compile(CreateSequence(seq), expected_sql)
    self.assert_compile(DropSequence(Sequence("foo_seq")),
                        "DROP SEQUENCE foo_seq")
def setup_class(cls):
    """Create, once per class, the sequence shared by these tests."""
    cls.seq = Sequence("my_sequence")
    cls.seq.create(testing.db)
def test_execute(self, connection):
    """Executing a Sequence directly returns its next value."""
    seq = Sequence("my_sequence")
    self._assert_seq_result(connection.execute(seq))
def test_execute_optional_next_value(self, connection):
    """func.next_value() works via connection.scalar() for an
    optional sequence."""
    seq = Sequence("my_sequence", optional=True)
    self._assert_seq_result(connection.scalar(seq.next_value()))
class PaidServiceType(Base):
    """
    Paid service type object.
    """
    __tablename__ = 'paid_types'
    __table_args__ = (
        Comment('Paid service types'),
        Index('paid_types_i_qp_type', 'qp_type'),
        Index('paid_types_u_name', 'name', unique=True),
        {
            'mysql_engine': 'InnoDB',
            'mysql_charset': 'utf8',
            'info': {
                'cap_menu': 'BASE_PAIDSERVICES',
                'cap_read': 'PAIDSERVICES_LIST',
                'cap_create': 'PAIDSERVICETYPES_CREATE',
                'cap_edit': 'PAIDSERVICETYPES_EDIT',
                'cap_delete': 'PAIDSERVICETYPES_DELETE',
                'menu_main': True,
                'menu_name': _('Paid Services'),
                'show_in_menu': 'modules',
                'default_sort': ({'property': 'name', 'direction': 'ASC'},),
                'grid_view': ('paidid', 'name', 'isum', 'qsum', 'qp_amount',
                              'qp_unit', 'qp_type', 'sp_amount'),
                'grid_hidden': ('paidid', 'sp_amount'),
                'form_view': ('name', 'isum', 'qsum', 'qp_amount', 'qp_unit',
                              'qp_type', 'qp_order', 'sp_amount', 'cb_before',
                              'cb_success', 'cb_failure', 'cb_ratemod', 'descr'),
                'easy_search': ('name', 'descr'),
                'extra_data': ('formatted_initial_sum', 'formatted_quota_sum'),
                'detail_pane': ('netprofile_core.views', 'dpane_simple'),
                'create_wizard': SimpleWizard(title=_('Add new type'))
            }
        }
    )
    id = Column('paidid', UInt32(), Sequence('paid_types_paidid_seq'),
                Comment('Paid service ID'), primary_key=True, nullable=False,
                info={'header_string': _('ID')})
    name = Column(Unicode(255), Comment('Paid service name'), nullable=False,
                  info={'header_string': _('Name'),
                        'column_flex': 3})
    initial_sum = Column('isum', Money(), Comment('Initial payment sum'),
                         nullable=False, default=0, server_default=text('0'),
                         info={'header_string': _('Initial Payment'),
                               'column_flex': 1,
                               'column_xtype': 'templatecolumn',
                               'template': '{formatted_initial_sum}'})
    quota_sum = Column('qsum', Money(), Comment('Quota sum'),
                       nullable=False, default=0, server_default=text('0'),
                       info={'header_string': _('Quota Payment'),
                             'column_flex': 1,
                             'column_xtype': 'templatecolumn',
                             'template': '{formatted_quota_sum}'})
    quota_period_type = Column('qp_type', PaidServiceQPType.db_type(),
                               Comment('Quota period type'), nullable=False,
                               default=PaidServiceQPType.independent,
                               server_default=PaidServiceQPType.independent,
                               info={'header_string': _('Type')})
    pay_order = Column('qp_order', UInt8(),
                       Comment('Pay order for linked services'),
                       nullable=False, default=0, server_default=text('0'),
                       info={'header_string': _('Pay Order')})
    skipped_periods = Column('sp_amount', UInt16(),
                             Comment('Number of skipped periods'),
                             nullable=False, default=0,
                             server_default=text('0'),
                             info={'header_string': _('Skipped Periods')})
    quota_period_amount = Column('qp_amount', UInt16(),
                                 Comment('Quota period amount'),
                                 nullable=False, default=1,
                                 server_default=text('1'),
                                 info={'header_string': _('Quota Period Amount')})
    quota_period_unit = Column('qp_unit', QuotaPeriodUnit.db_type(),
                               Comment('Quota period unit'), nullable=False,
                               default=QuotaPeriodUnit.calendar_month,
                               server_default=QuotaPeriodUnit.calendar_month,
                               info={'header_string': _('Quota Period Unit')})
    cb_before = Column(ASCIIString(255), Comment('Callback before charging'),
                       nullable=True, default=None,
                       server_default=text('NULL'),
                       info={'header_string': _('Callback Before Charging')})
    cb_success = Column(ASCIIString(255), Comment('Callback on success'),
                        nullable=True, default=None,
                        server_default=text('NULL'),
                        info={'header_string': _('Callback on Success')})
    cb_failure = Column(ASCIIString(255), Comment('Callback on failure'),
                        nullable=True, default=None,
                        server_default=text('NULL'),
                        info={'header_string': _('Callback on Failure')})
    cb_ratemod = Column(ASCIIString(255), Comment('Callback on linked rate'),
                        nullable=True, default=None,
                        server_default=text('NULL'),
                        info={'header_string': _('Callback on Linked Rate')})
    description = Column('descr', UnicodeText(),
                         Comment('Paid service description'),
                         nullable=True, default=None,
                         server_default=text('NULL'),
                         info={'header_string': _('Description')})

    def __str__(self):
        return '%s' % (str(self.name),)

    def formatted_initial_sum(self, req):
        """Return the initial sum formatted as money for the request locale."""
        return money_format(req, self.initial_sum)

    def formatted_quota_sum(self, req):
        """Return the quota sum formatted as money for the request locale."""
        return money_format(req, self.quota_sum)
def test_next_value_type(self):
    """next_value() inherits the sequence's explicit data_type."""
    sequence = Sequence("my_sequence", data_type=BigInteger)
    assert isinstance(sequence.next_value().type, BigInteger)
class Borrower(Base):
    """A loan applicant record: personal, residence, employment, spouse,
    loan and reference details."""

    __tablename__ = 'tblborrowers'

    id = Column(Integer, Sequence('borrowers_id_seq'), primary_key=True)
    appid = Column(String)
    ApplicationDate = Column(Date)

    # Personal details
    FirstName = Column(String)
    LastName = Column(String)
    MiddleName = Column(String)
    Alias = Column(String)
    Sex = Column(String)
    MaritalStatus = Column(String)
    DOB = Column(Date)

    # Residence and contact
    Address = Column(String)
    Length_reside = Column(String)
    PrevAddr = Column(String)
    PrevAddr2 = Column(String)
    PrevAddr3 = Column(String)
    Length_reside_prev = Column(String)
    HomeTel = Column(String)
    WorkTel = Column(String)
    OwnHome = Column(String)
    Rent = Column(String)
    Amt_Rent = Column(String)
    Other = Column(String)
    Morgage_monthly = Column(String)

    # Employment (current and previous)
    occupation = Column(String)
    nameofemployer = Column(String)
    EmpAddress1 = Column(String)
    EmpAddress2 = Column(String)
    EmpAddress3 = Column(String)
    EmpStartDate = Column(Date)
    Department = Column(String)
    PrevEmpAddress1 = Column(String)
    PrevEmpAddress2 = Column(String)
    PrevEmpAddress3 = Column(String)
    PrevEmpStartDate = Column(Date)
    Extension = Column(String)
    PostionHeld = Column(String)
    PaymentPeriod = Column(String)
    PreviousEmployer = Column(String)
    otherIncomeSource = Column(String)

    # Spouse and dependents
    nameofspouse = Column(String)
    addressofspouse = Column(String)
    spouse_employment = Column(String)
    spouse_emp_address = Column(String)
    spouse_emp_tel = Column(String)
    SpouseAlias = Column(String)
    SpouseTRN = Column(String)
    SpouseDOB = Column(Date)
    Dependent = Column(String)

    # Loan request details
    LoanAmtNeed = Column(String)
    PurposeOfLoan = Column(String)
    DurationOfLoan = Column(String)
    ReturningCustomer = Column(String)
    YesAmount = Column(String)
    OutstandingDebtName = Column(String)
    OutstandingDebtAmount = Column(String)
    SalaryAmount = Column(String)
    ProcessDate = Column(Date)

    # Personal references 1-4
    Name1 = Column(String)
    Address1 = Column(String)
    WorkNum1 = Column(String)
    HomeNum1 = Column(String)
    Relations1 = Column(String)
    Name2 = Column(String)
    Address2 = Column(String)
    WorkNum2 = Column(String)
    HomeNum2 = Column(String)
    Relations2 = Column(String)
    Employer2 = Column(String)
    Name3 = Column(String)
    Address3 = Column(String)
    WorkNum3 = Column(String)
    HomeNum3 = Column(String)
    Relation3 = Column(String)
    Employer3 = Column(String)
    Name4 = Column(String)
    Address4 = Column(String)
    WorkNum4 = Column(String)
    HomeNum4 = Column(String)
    Relations4 = Column(String)
    Employer4 = Column(String)
    Employer1 = Column(String)

    # Miscellaneous / audit fields
    Picture = Column(String)
    incomeid = Column(String)
    filename = Column(String)
    Comment = Column(String)
    TRN = Column(String)
    digicell = Column(String)
    digicel2 = Column(String)
    branch = Column(String)
    branchname = Column(String)
    stamp = Column(String)
    validateby = Column(String)
    validatedate = Column(Date)
class PaidService(Base):
    """
    Paid service mapping object.

    Links an entity (and optionally an access entity / host) to a paid
    service type, charged against a stash.
    """
    __tablename__ = 'paid_def'
    __table_args__ = (
        Comment('Paid service mappings'),
        Index('paid_def_i_entityid', 'entityid'),
        Index('paid_def_i_aeid', 'aeid'),
        Index('paid_def_i_hostid', 'hostid'),
        Index('paid_def_i_stashid', 'stashid'),
        Index('paid_def_i_paidid', 'paidid'),
        Index('paid_def_i_active', 'active'),
        Index('paid_def_i_qpend', 'qpend'),
        Trigger('before', 'insert', 't_paid_def_bi'),
        Trigger('before', 'update', 't_paid_def_bu'),
        Trigger('after', 'insert', 't_paid_def_ai'),
        Trigger('after', 'update', 't_paid_def_au'),
        Trigger('after', 'delete', 't_paid_def_ad'),
        {
            'mysql_engine': 'InnoDB',
            'mysql_charset': 'utf8',
            'info': {
                'cap_menu': 'BASE_PAIDSERVICES',
                'cap_read': 'PAIDSERVICES_LIST',
                'cap_create': 'PAIDSERVICES_CREATE',
                'cap_edit': 'PAIDSERVICES_EDIT',
                'cap_delete': 'PAIDSERVICES_DELETE',
                'default_sort': ({'property': 'qpend', 'direction': 'DESC'},),
                'grid_view': ('epid', 'entity', 'stash', 'type', 'active', 'qpend'),
                'grid_hidden': ('epid',),
                'form_view': (),
                'detail_pane': ('netprofile_core.views', 'dpane_simple')
            }
        }
    )
    id = Column('epid', UInt32(), Sequence('paid_def_epid_seq'),
                Comment('Paid service mapping ID'),
                primary_key=True, nullable=False,
                info={'header_string': _('ID')})
    entity_id = Column('entityid', UInt32(), Comment('Entity ID'),
                       ForeignKey('entities_def.entityid',
                                  name='paid_def_fk_entityid',
                                  onupdate='CASCADE', ondelete='CASCADE'),
                       nullable=False,
                       info={'header_string': _('Entity'),
                             'filter_type': 'none',
                             'column_flex': 2})
    access_entity_id = Column('aeid', UInt32(), Comment('Access entity ID'),
                              ForeignKey('entities_access.entityid',
                                         name='paid_def_fk_aeid',
                                         onupdate='CASCADE',
                                         ondelete='CASCADE'),
                              nullable=True, default=None,
                              server_default=text('NULL'),
                              info={'header_string': _('Access Entity'),
                                    'filter_type': 'none',
                                    'column_flex': 2})
    host_id = Column('hostid', UInt32(), Comment('Host ID'),
                     ForeignKey('hosts_def.hostid', name='paid_def_fk_hostid',
                                onupdate='CASCADE', ondelete='CASCADE'),
                     nullable=True, default=None, server_default=text('NULL'),
                     info={'header_string': _('Host'),
                           'filter_type': 'none',
                           'column_flex': 2})
    stash_id = Column('stashid', UInt32(),
                      ForeignKey('stashes_def.stashid',
                                 name='paid_def_fk_stashid',
                                 onupdate='CASCADE', ondelete='CASCADE'),
                      Comment('Used stash ID'), nullable=False,
                      info={'header_string': _('Stash'),
                            'column_flex': 3})
    # FIX: header_string was a copy-paste of the stash column's _('Stash');
    # this column references the paid service *type* (see Comment('Type ID')).
    paid_id = Column('paidid', UInt32(),
                     ForeignKey('paid_types.paidid',
                                name='paid_def_fk_paidid',
                                onupdate='CASCADE'),
                     Comment('Type ID'), nullable=False,
                     info={'header_string': _('Type'),
                           'column_flex': 3})
    active = Column(NPBoolean(), Comment('Is service active'),
                    nullable=False, default=True, server_default=npbool(True),
                    info={'header_string': _('Active')})
    quota_period_end = Column('qpend', TIMESTAMP(),
                              Comment('End of quota period'),
                              nullable=True, default=None,
                              server_default=FetchedValue(),
                              info={'header_string': _('Ends')})
    description = Column('descr', UnicodeText(), Comment('Description'),
                         nullable=True, default=None,
                         server_default=text('NULL'),
                         info={'header_string': _('Description')})
    type = relationship('PaidServiceType', innerjoin=True, lazy='joined',
                        backref='paid_services')
    entity = relationship('Entity', foreign_keys=entity_id, innerjoin=True,
                          backref=backref('paid_services',
                                          cascade='all, delete-orphan',
                                          passive_deletes=True))
    access_entity = relationship('AccessEntity',
                                 foreign_keys=access_entity_id,
                                 backref=backref('paid_services_access',
                                                 cascade='all, delete-orphan',
                                                 passive_deletes=True))
    host = relationship('Host',
                        backref=backref('paid_services',
                                        cascade='all, delete-orphan',
                                        passive_deletes=True))
    stash = relationship('Stash', innerjoin=True,
                         backref=backref('paid_services',
                                         cascade='all, delete-orphan',
                                         passive_deletes=True))

    def __str__(self):
        return '%s: %s' % (str(self.stash), str(self.type))
class CrawlerLOG(Base):
    """A single crawler log entry."""

    __tablename__ = 'log'

    id = Column(Integer, Sequence('log_id_seq'), primary_key=True)
    level = Column(Integer)     # severity level of the entry
    text = Column(Text)         # log message body
    create_at = Column(DateTime(timezone=True), default=func.now())
class Keyword(Base):
    """A crawl keyword."""

    __tablename__ = 'keyword'

    id = Column(Integer, Sequence('keyword_id_seq'), primary_key=True)
    text = Column(String(256))  # the keyword itself
    create_at = Column(DateTime(timezone=True), default=func.now())
def test_fetchid_trigger(self):
    """
    Verify identity return value on inserting to a trigger table.

    MSSQL's OUTPUT INSERTED clause does not work for the case of a
    table having an identity (autoincrement) primary key column, and
    which also has a trigger configured to fire upon each insert and
    subsequently perform an insert into a different table.

    SQLALchemy's MSSQL dialect by default will attempt to use an
    OUTPUT_INSERTED clause, which in this case will raise the
    following error:

    ProgrammingError: (ProgrammingError) ('42000', 334, "[Microsoft]
    [SQL Server Native Client 10.0][SQL Server]The target table 't1'
    of the DML statement cannot have any enabled triggers if the
    statement contains an OUTPUT clause without INTO clause.", 7748)
    'INSERT INTO t1 (descr) OUTPUT inserted.id VALUES (?)' ('hello',)

    This test verifies a workaround, which is to rely on the older
    SCOPE_IDENTITY() call, which still works for this scenario. To
    enable the workaround, the Table must be instantiated with the
    init parameter 'implicit_returning = False'.
    """
    # todo: this same test needs to be tried in a multithreaded context
    # with multiple threads inserting to the same table.
    # todo: check whether this error also occurs with clients other
    # than the SQL Server Native Client. Maybe an assert_raises
    # test should be written.
    meta = MetaData(testing.db)
    table_one = Table(
        't1', meta,
        Column('id', Integer, Sequence('fred', 100, 1), primary_key=True),
        Column('descr', String(200)),
        # the following flag will prevent the
        # MSSQLCompiler.returning_clause from getting called,
        # though the ExecutionContext will still have a
        # _select_lastrowid, so the SELECT SCOPE_IDENTITY() will
        # hopefully be called instead.
        implicit_returning=False)
    table_two = Table(
        't2', meta,
        Column('id', Integer, Sequence('fred', 200, 1), primary_key=True),
        Column('descr', String(200)))
    meta.create_all()
    conn = testing.db.connect()
    conn.execute("""create trigger paj on t1 for insert as insert into t2 (descr) select descr from inserted""")
    try:
        txn = conn.begin()
        result = conn.execute(table_two.insert(), descr='hello')
        self.assert_(result.inserted_primary_key == [200])
        result = conn.execute(table_one.insert(), descr='hello')
        self.assert_(result.inserted_primary_key == [100])
    finally:
        txn.commit()
        conn.execute("""drop trigger paj""")
        meta.drop_all()
def test_func_explicit(self):
    """next_value() can be evaluated via testing.db.scalar()."""
    seq = Sequence("my_sequence")
    self._assert_seq_result(testing.db.scalar(seq.next_value()))
class User(db.Model):
    """A user/role record from the ``utilisateurs.t_roles`` table."""

    __tablename__ = 't_roles'
    __table_args__ = {'schema': 'utilisateurs'}

    TABLE_ID = Sequence(
        't_roles_id_seq',
        schema="utilisateurs",
    )
    groupe = db.Column(db.Boolean)
    id_role = db.Column(
        db.Integer,
        TABLE_ID,
        primary_key=True,
    )
    # TODO: make that unique ?
    identifiant = db.Column(db.Unicode)
    nom_role = db.Column(db.Unicode)
    prenom_role = db.Column(db.Unicode)
    desc_role = db.Column(db.Unicode)
    _password = db.Column('pass', db.Unicode)
    _password_plus = db.Column('pass_plus', db.Unicode)
    email = db.Column(db.Unicode)
    id_organisme = db.Column(db.Integer)
    organisme = db.Column(db.Unicode)
    id_unite = db.Column(db.Integer)
    remarques = db.Column(db.Unicode)
    pn = db.Column(db.Boolean)
    session_appli = db.Column(db.Unicode)
    date_insert = db.Column(db.DateTime)
    date_update = db.Column(db.DateTime)
    # applications_droits = db.relationship('AppUser', lazy='joined')

    @property
    def password(self):
        """Return the stored digest matching the configured PASS_METHOD."""
        if current_app.config['PASS_METHOD'] == 'md5':
            return self._password
        if current_app.config['PASS_METHOD'] == 'hash':
            return self._password_plus
        # NOTE(review): bare ``raise`` with no active exception produces a
        # RuntimeError at runtime; kept as-is to preserve behavior, but an
        # explicit exception would be clearer — confirm with callers.
        raise

    # TODO: change password digest algorithm for something stronger such
    # as bcrypt. This need to be done at usershub level first.
    @password.setter
    def password(self, pwd):
        self._password = hashlib.md5(pwd.encode('utf8')).hexdigest()

    check_password = fn_check_password

    def to_json(self):
        """Serialize the user and its application rights to a dict."""
        applications = []
        for app_data in self.applications_droits:
            applications.append({
                'id': app_data.application_id,
                'nom': app_data.application.nom_application,
                'niveau': app_data.id_droit_max,
            })
        return {
            'id': self.id_role,
            'login': self.identifiant,
            'email': self.email,
            'applications': applications,
        }

    def __repr__(self):
        return "<User '{!r}' id='{}'>".format(self.identifiant, self.id_role)

    def __str__(self):
        return self.identifiant or ''
def test_func_implicit_connectionless_scalar(self):
    """func.next_value().scalar() works with connectionless execution."""
    seq = Sequence("my_sequence", metadata=MetaData(testing.db))
    self._assert_seq_result(seq.next_value().scalar())
class Knowledgebase(Base):
    """A knowledgebase entry identified by name."""

    __tablename__ = 'knowledgebase'

    id = Column(Integer, Sequence('wids'), primary_key=True)
    name = Column(String, nullable=False)
def test_execute_optional(self, connection):
    """The dialect executes a Sequence and returns nextval regardless
    of the "optional" flag."""
    seq = Sequence("my_sequence", optional=True)
    self._assert_seq_result(connection.execute(seq))
class KnowledgebaseFeature(Base):
    """A typed feature attached to a knowledgebase gene."""

    __tablename__ = 'knowledgebase_feature'

    id = Column(Integer, Sequence('wids'), primary_key=True)
    feature_type = Column(String, nullable=False)
    feature = Column(String, nullable=False)
    knowledgebase_gene_id = Column(
        Integer,
        ForeignKey(KnowledgebaseGene.id, ondelete='CASCADE'),
        nullable=False)
def test_func_embedded_select(self, connection):
    """next_value() can be used as a column expression in a select."""
    seq = Sequence("my_sequence")
    self._assert_seq_result(connection.scalar(select([seq.next_value()])))
class Factura(Base):
    """An invoice (factura) with its products, client and credit note."""

    __tablename__ = 'facturas'

    id = Column(Integer, Sequence('factura_id_seq'), primary_key=True)
    nota_de_credito = relationship('NotaDeCredito', uselist=False,
                                   backref='factura')
    cliente_id = Column(Integer, ForeignKey('clientes.id'))
    zoho_id = Column(Integer)
    print_date = Column(DateTime)
    descuento = Column(Integer)
    productos = relationship('Producto')
    cliente = relationship('Cliente')
    created_at = Column(Date, default=datetime.now)
    updated_at = Column(Date, default=datetime.now, onupdate=datetime.now)

    def __init__(self, cliente):
        self.cliente_id = cliente.id
        self.descuento = 0

    @classmethod
    def with_productos(self, cliente, productos):
        # NOTE(review): this classmethod assigns attributes on the class
        # itself and returns the class (not an instance); behavior is
        # preserved here, but the intent should be confirmed with callers.
        self.cliente_id = cliente.id
        built = [
            Producto(self.id, item['nombre'], item['cantidad'],
                     item['tasa'], item['precio'])
            for item in productos
        ]
        return (self, built)

    def codigo(self):
        """Return the printable invoice code."""
        return "@COD:%06d" % self.id

    def get_descuento(self):
        """Return the printable discount code."""
        return "p-%04d" % self.descuento

    def create_productos_from_dict(self, productos):
        """Build Producto objects for this invoice from a list of dicts."""
        return [
            Producto(self.id, item['nombre'], item['cantidad'],
                     item['tasa'], item['precio'])
            for item in productos
        ]

    def get_productos_str(self):
        return "\n".join(str(item) for item in self.productos)

    def get_total(self):
        return sum(item.precio for item in self.productos)

    def has_nota_de_credito(self):
        return True if self.nota_de_credito else False

    def to_json(self):
        return {
            'cliente': self.cliente.empresa,
            'productos': [item.nombre for item in self.productos],
            'total': float(self.get_total()),
            'nota_de_credito': self.has_nota_de_credito(),
        }

    def __str__(self):
        parts = [
            str(self.cliente),
            self.codigo(),
            self.get_productos_str(),
            self.get_descuento(),
            "101\r\n",
        ]
        return "\n".join(parts)

    def para_nota(self):
        """Return the invoice text used for a credit note (no codes)."""
        return "\n".join(
            [str(self.cliente), self.get_productos_str(), "101\r\n"])
class SequenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
    """Backend tests for standalone and table-linked sequences."""

    __requires__ = ("sequences",)
    __backend__ = True

    @testing.combinations(
        (Sequence("foo_seq"),),
        (Sequence("foo_seq", start=8),),
        (Sequence("foo_seq", increment=5),),
    )
    def test_start_increment(self, seq):
        seq.create(testing.db)
        try:
            with testing.db.connect() as conn:
                values = [conn.execute(seq) for i in range(3)]
                start = seq.start or testing.db.dialect.default_sequence_base
                inc = seq.increment or 1
                eq_(values, list(range(start, start + inc * 3, inc)))
        finally:
            seq.drop(testing.db)

    def _has_sequence(self, connection, name):
        return testing.db.dialect.has_sequence(connection, name)

    def test_nextval_unsupported(self):
        """test next_value() used on non-sequence platform raises
        NotImplementedError."""
        seq = Sequence("my_seq")
        dialect = sqlite.dialect()
        assert_raises_message(
            NotImplementedError,
            "Dialect 'sqlite' does not support sequence increments.",
            seq.next_value().compile,
            dialect=dialect,
        )

    def test_checkfirst_sequence(self, connection):
        seq = Sequence("my_sequence")
        seq.create(connection, checkfirst=False)
        assert self._has_sequence(connection, "my_sequence")
        seq.create(connection, checkfirst=True)
        seq.drop(connection, checkfirst=False)
        assert not self._has_sequence(connection, "my_sequence")
        seq.drop(connection, checkfirst=True)

    def test_checkfirst_metadata(self, connection):
        meta = MetaData()
        Sequence("my_sequence", metadata=meta)
        meta.create_all(connection, checkfirst=False)
        assert self._has_sequence(connection, "my_sequence")
        meta.create_all(connection, checkfirst=True)
        meta.drop_all(connection, checkfirst=False)
        assert not self._has_sequence(connection, "my_sequence")
        meta.drop_all(connection, checkfirst=True)

    def test_checkfirst_table(self, connection):
        meta = MetaData()
        seq = Sequence("my_sequence")
        tbl = Table("t", meta, Column("c", Integer, seq, primary_key=True))
        tbl.create(connection, checkfirst=False)
        assert self._has_sequence(connection, "my_sequence")
        tbl.create(connection, checkfirst=True)
        tbl.drop(connection, checkfirst=False)
        assert not self._has_sequence(connection, "my_sequence")
        tbl.drop(connection, checkfirst=True)

    @testing.provide_metadata
    def test_table_overrides_metadata_create(self, connection):
        metadata = self.metadata
        Sequence("s1", metadata=metadata)
        s2 = Sequence("s2", metadata=metadata)
        s3 = Sequence("s3")
        t = Table("t", metadata, Column("c", Integer, s3, primary_key=True))
        assert s3.metadata is metadata

        t.create(connection, checkfirst=True)
        s3.drop(connection)

        # 't' is created, and 's3' won't be
        # re-created since it's linked to 't'.
        # 's1' and 's2' are, however.
        metadata.create_all(connection)
        assert self._has_sequence(connection, "s1")
        assert self._has_sequence(connection, "s2")
        assert not self._has_sequence(connection, "s3")

        s2.drop(connection)
        assert self._has_sequence(connection, "s1")
        assert not self._has_sequence(connection, "s2")

        metadata.drop_all(connection)
        assert not self._has_sequence(connection, "s1")
        assert not self._has_sequence(connection, "s2")

    @testing.requires.returning
    @testing.requires.supports_sequence_for_autoincrement_column
    @testing.provide_metadata
    def test_freestanding_sequence_via_autoinc(self, connection):
        t = Table(
            "some_table",
            self.metadata,
            Column(
                "id",
                Integer,
                autoincrement=True,
                primary_key=True,
                default=Sequence(
                    "my_sequence",
                    metadata=self.metadata).next_value(),
            ),
        )
        self.metadata.create_all(connection)

        result = connection.execute(t.insert())
        eq_(result.inserted_primary_key, (1,))
) class Property(IntegerIdModel): # TODO add unique key name = Column(String(255), nullable=False) granted = Column(Boolean, nullable=False) # many to one from Property to PropertyGroup # nullable=True property_group_id = Column(Integer, ForeignKey(PropertyGroup.id), nullable=False, index=True) #TODO prüfen, ob cascade Properties löscht, wenn zugehörige PGroup deleted property_group = relationship( PropertyGroup, backref=backref("properties", cascade="all, delete-orphan", collection_class=attribute_mapped_collection("name")) ) unix_account_uid_seq = Sequence('unix_account_uid_seq', start=1000, metadata=ModelBase.metadata) class UnixAccount(IntegerIdModel): uid = Column(Integer, nullable=False, unique=True, server_default=unix_account_uid_seq.next_value()) gid = Column(Integer, nullable=False, default=100) login_shell = Column(String, nullable=False, default="/bin/bash") home_directory = Column(String, nullable=False, unique=True)
metadata, Column('station_id', String(64), primary_key=True, index=True), Column('station_location', String(128)), Column('station_name', String(128)), Column('station_latitude', Float()), Column('station_longitude', Float()), Column('station_altitude', Float()), Column('station_country', String(128)), Column('station_state', String(128)), keep_existing=True, ) timeseries = Table( 'timeseries', metadata, Column('id', Integer, Sequence('mes_id_seq'), primary_key=True, index=True), Column('station_id', None, ForeignKey('stationmetacore.station_id'), index=True), Column('parameter', String(60)), Column('unit', String(60)), Column('averagingPeriod', String(80)), keep_existing=True, ) meseaurement = Table( 'measurement', metadata, Column('series_id', None, ForeignKey('timeseries.id'), index=True),
def test_implicit_connectionless(self):
    """A sequence bound via MetaData can be executed directly."""
    seq = Sequence("my_sequence", metadata=MetaData(testing.db))
    self._assert_seq_result(seq.execute())
def id_sequence(cls):
    """Return the class's id Sequence, qualified by the metadata schema."""
    return Sequence(cls.id_sequence_name, schema=cls.metadata.schema)
def test_explicit_optional(self):
    """The dialect executes a Sequence and returns nextval regardless
    of the "optional" flag."""
    seq = Sequence("my_sequence", optional=True)
    self._assert_seq_result(seq.execute(testing.db))
class transaction(Base):
    """A book-loan transaction plus a simple console menu for managing loans."""

    __tablename__ = 'transactions'

    id = Column(Integer, Sequence('user_id_seq'), primary_key=True)
    book_id = Column(Integer())
    customer_id = Column(Integer())
    cost = Column(Float(100))
    due_date = Column(String(50))
    status = Column(String(20))

    # In-memory list of transactions created during this session.
    Transactions = []

    def __init__(self, book_id, customer_id, due_date):
        self.book_id = book_id
        self.customer_id = customer_id
        self.due_date = due_date

    def __repr__(self):
        # FIX: due_date is a String(50) column (and may also hold the int 0
        # assigned by CheckOut), so %i raised TypeError for string values;
        # %s handles both.
        return "<User(id='%i',book_id='%i',customer_id='%i',cost='%f',due_date='%s',status='%s')>" % (
            self.id, self.book_id, self.customer_id, self.cost,
            self.due_date, self.status)

    @staticmethod
    def CheckOut():
        """Create a new transaction from console input and record it."""
        book_id = input("Insert Book ID:")
        customer_id = input("Insert Customer ID:")
        due_date = 0
        # for i in Books:
        #     break
        new_transaction = transaction(book_id, customer_id, due_date)
        transaction.Transactions.append(new_transaction)

    @staticmethod
    def CheckIn():
        pass  # TODO: not implemented yet

    @staticmethod
    def ListOverdue():
        pass  # TODO: not implemented yet

    @staticmethod
    def ListBooks():
        pass  # TODO: not implemented yet

    @staticmethod
    def UpdateStatus():
        pass  # TODO: not implemented yet

    @staticmethod
    def ReCheck():
        pass  # TODO: not implemented yet

    @staticmethod
    def EditLoan():
        pass  # TODO: not implemented yet

    @staticmethod
    def ReportLost():
        pass  # TODO: not implemented yet

    @staticmethod
    def menu():
        """Run the interactive transaction menu until the user chooses 9.

        FIX: options 4-8 previously fell through to stray ``break``
        statements that exited the loop after a single action,
        inconsistent with options 1-3; they now keep looping.
        """
        tranInput = -1
        while tranInput != 9:
            print("Transaction System\n")
            print(" [1] Check Out")
            print(" [2] Check In")
            print(" [3] List Overdue")
            print(" [4] List Books")
            print(" [5] Update Status")
            print(" [6] Recheck Out")
            print(" [7] Edit Loan")
            print(" [8] Report Lost")
            print(" [9] Return to Main Menu")
            tranInput = int(input())
            if tranInput == 1:
                # check out
                transaction.CheckOut()
            elif tranInput == 2:
                # check in
                transaction.CheckIn()
            elif tranInput == 3:
                # list overdue
                transaction.ListOverdue()
            elif tranInput == 4:
                # list books
                transaction.ListBooks()
            elif tranInput == 5:
                # update status
                transaction.UpdateStatus()
            elif tranInput == 6:
                # Recheck out
                transaction.ReCheck()
            elif tranInput == 7:
                # Edit Loan
                transaction.EditLoan()
            elif tranInput == 8:
                # Report Lost
                transaction.ReportLost()
            elif tranInput == 9:
                # return to main menu
                break
            else:
                print("Error, please try again.\n")
def test_decimal_notation(self):
    """Round-trip many decimal notations through a high-precision
    Numeric column."""
    numeric_table = Table(
        'numeric_table', metadata,
        Column('id', Integer, Sequence('numeric_id_seq', optional=True),
               primary_key=True),
        Column('numericcol',
               Numeric(precision=38, scale=20, asdecimal=True)))
    metadata.create_all()
    test_items = [
        decimal.Decimal(d) for d in (
            '1500000.00000000000000000000',
            '-1500000.00000000000000000000',
            '1500000',
            '0.0000000000000000002', '0.2', '-0.0000000000000000002',
            '-2E-2', '156666.458923543', '-156666.458923543',
            '1', '-1', '-1234', '1234', '2E-12', '4E8', '3E-6', '3E-7',
            '4.1', '1E-1', '1E-2', '1E-3', '1E-4', '1E-5', '1E-6', '1E-7',
            '1E-1', '1E-8', '0.2732E2', '-0.2432E2', '4.35656E2',
            '-02452E-2', '45125E-2', '1234.58965E-2', '1.521E+15',
            '-1E-25', '1E-25', '1254E-25', '-1203E-25',
            '0', '-0.00', '-0', '4585E12',
            '000000000000000000012', '000000000000.32E12',
            '00000000000000.1E+12', '000000000000.2E-32',
        )
    ]
    for item in test_items:
        numeric_table.insert().execute(numericcol=item)
    for row in select([numeric_table.c.numericcol]).execute():
        assert row[0] in test_items, "%r not in test_items" % row[0]