def upgrade(migrate_engine):
    metadata.bind = migrate_engine
    print(__doc__)
    metadata.reflect()
    try:
        SampleDataset_table = Table("sample_dataset", metadata, autoload=True)
    except NoSuchTableError:
        SampleDataset_table = None
        log.debug("Failed loading table 'sample_dataset'")
    if SampleDataset_table is not None:
        cmd = "SELECT id, file_path FROM sample_dataset"
        result = migrate_engine.execute(cmd)
        filepath_dict = {}
        for r in result:
            id = int(r[0])
            filepath_dict[id] = r[1]
        # remove the 'file_path' column
        try:
            SampleDataset_table.c.file_path.drop()
        except Exception:
            log.exception("Deleting column 'file_path' from the 'sample_dataset' table failed.")
        # create the column again
        try:
            col = Column("file_path", TEXT)
            col.create(SampleDataset_table)
            assert col is SampleDataset_table.c.file_path
        except Exception:
            log.exception("Creating column 'file_path' in the 'sample_dataset' table failed.")
        for id, file_path in filepath_dict.items():
            cmd = "update sample_dataset set file_path='%s' where id=%i" % (file_path, id)
            migrate_engine.execute(cmd)
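# A minimal sketch (not part of the original migration) showing how the same
# per-row file_path update could be issued with bound parameters via
# sqlalchemy.text(), avoiding the quoting problems of building SQL with string
# interpolation. Table and column names come from the snippet above; the
# helper name copy_file_paths is hypothetical.
from sqlalchemy import text


def copy_file_paths(migrate_engine, filepath_dict):
    # filepath_dict maps sample_dataset.id -> file_path, as collected above
    stmt = text("UPDATE sample_dataset SET file_path = :file_path WHERE id = :id")
    for id_, file_path in filepath_dict.items():
        migrate_engine.execute(stmt, file_path=file_path, id=id_)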
def upgrade(migrate_engine):
    meta.bind = migrate_engine
    records_table = Table('records', meta, autoload=True)

    disabled = Column('disabled', TINYINT(1), server_default='0')
    disabled.create(records_table)
def upgrade(migrate_engine):
    meta.bind = migrate_engine
    dialect = migrate_engine.url.get_dialect().name
    domains_table = Table('domains', meta, autoload=True)

    if dialect.startswith('sqlite'):
        # SQLite can't drop a constraint. Yay. This will be fun..
        # Create a new name column without the unique index
        name_tmp_column = Column('name_tmp', String(255))
        name_tmp_column.create(domains_table)

        # Copy the data over.
        query = update(domains_table).values(name_tmp=domains_table.c.name)
        migrate_engine.execute(query)

        # Delete the name column
        domains_table.c.name.drop()

        # Rename the name_tmp column to name
        domains_table.c.name_tmp.alter(name='name')
    elif dialect.startswith('postgresql'):
        constraint = UniqueConstraint('name', name='domains_name_key',
                                      table=domains_table)
        constraint.drop()
    else:
        constraint = UniqueConstraint('name', name='name', table=domains_table)
        constraint.drop()
def upgrade(migrate_engine):
    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata
    meta = MetaData(bind=migrate_engine)
    store = Table('store', meta, autoload=True)

    datecol = Column('timestamp', DateTime, default=datetime.datetime.utcnow)
    datecol.create(store)
def upgrade(migrate_engine):
    metadata.bind = migrate_engine

    # dummy definitions to satisfy foreign keys
    Table('instance', metadata, autoload=True)
    Table('group', metadata, autoload=True)

    # add the column for the polymorphic identity;
    # it has to be nullable=True because the values are
    # NULL when the column is created
    type_col = Column('type', String(40), nullable=True)
    type_col.create(badge_table)

    # fill column with the right values
    select = badge_table.select().with_only_columns(
        ['id', 'title', 'badge_delegateable', 'badge_delegateable_category'])
    badges_query_result = migrate_engine.execute(select)
    for values in badges_query_result:
        (id_, title, delegateable, category) = values
        if category:
            type_ = CATEGORY_BADGE
        elif delegateable:
            type_ = DELEGATEABLE_BADGE
        else:
            type_ = USER_BADGE
        update = badge_table.update().values(type=type_).where(
            badge_table.c.id == id_)
        migrate_engine.execute(update)

    # drop the old columns
    badge_table.c.badge_delegateable.drop()
    badge_table.c.badge_delegateable_category.drop()

    type_col.alter(nullable=False)
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    instances = Table('instances', meta, autoload=True)
    instance_types = Table('instance_types', meta, autoload=True)

    for table in (instances, instance_types):
        local_gb = Column('local_gb', Integer)
        local_gb.create(table)

    try:
        for table in (instances, instance_types):
            if FLAGS.connection_type == 'libvirt':
                column = table.c.ephemeral_gb
            else:
                column = table.c.root_gb
            table.update().values(local_gb=column).execute()
    except Exception:
        for table in (instances, instance_types):
            table.drop_column('local_gb')
        raise

    default_ephemeral_device = instances.c.default_ephemeral_device
    default_ephemeral_device.alter(name='default_local_device')

    for table in (instances, instance_types):
        table.drop_column('root_gb')
        table.drop_column('ephemeral_gb')
def upgrade(migrate_engine):
    metadata.bind = migrate_engine
    print(__doc__)
    metadata.reflect()
    try:
        LibraryDataset_table = Table("library_dataset", metadata, autoload=True)
        c = Column("purged", Boolean, index=True, default=False)
        c.create(LibraryDataset_table, index_name='ix_library_dataset_purged')
        assert c is LibraryDataset_table.c.purged
    except Exception:
        log.exception("Adding purged column to library_dataset table failed.")
    # Update the purged flag to the default False
    cmd = "UPDATE library_dataset SET purged = %s;" % engine_false(migrate_engine)
    try:
        migrate_engine.execute(cmd)
    except Exception:
        log.exception("Setting default data for library_dataset.purged column failed.")
    # Update the purged flag for those LibraryDatasets whose purged flag should be True.
    # This happens when the LibraryDataset has no active LibraryDatasetDatasetAssociations.
    cmd = "SELECT * FROM library_dataset WHERE deleted = %s;" % engine_true(migrate_engine)
    deleted_lds = migrate_engine.execute(cmd).fetchall()
    for row in deleted_lds:
        cmd = "SELECT * FROM library_dataset_dataset_association WHERE library_dataset_id = %d AND library_dataset_dataset_association.deleted = %s;" % (int(row.id), engine_false(migrate_engine))
        active_lddas = migrate_engine.execute(cmd).fetchall()
        if not active_lddas:
            print("Updating purged column to True for LibraryDataset id : ", int(row.id))
            cmd = "UPDATE library_dataset SET purged = %s WHERE id = %d;" % (engine_true(migrate_engine), int(row.id))
            migrate_engine.execute(cmd)
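# The snippet above calls engine_true()/engine_false() helpers that are not
# shown here. A minimal hedged sketch of what such helpers might look like,
# based only on the dialect names used in these migrations; the real helpers
# may return different literals.
def engine_false(migrate_engine):
    if migrate_engine.name in ('postgres', 'postgresql'):
        return "FALSE"
    elif migrate_engine.name in ('mysql', 'sqlite'):
        return 0
    raise Exception("Unknown database engine: %s" % migrate_engine.name)


def engine_true(migrate_engine):
    if migrate_engine.name in ('postgres', 'postgresql'):
        return "TRUE"
    elif migrate_engine.name in ('mysql', 'sqlite'):
        return 1
    raise Exception("Unknown database engine: %s" % migrate_engine.name)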
def upgrade(migrate_engine):
    meta.bind = migrate_engine

    instance_table = Table(
        'instance', meta,
        Column('id', Integer, primary_key=True),
        Column('key', Unicode(20), nullable=False, unique=True),
        Column('label', Unicode(255), nullable=False),
        Column('description', UnicodeText(), nullable=True),
        Column('required_majority', Float, nullable=False),
        Column('activation_delay', Integer, nullable=False),
        Column('create_time', DateTime, default=func.now()),
        Column('access_time', DateTime, default=func.now(), onupdate=func.now()),
        Column('delete_time', DateTime, nullable=True),
        Column('creator_id', Integer, ForeignKey('user.id'), nullable=False),
        Column('default_group_id', Integer, ForeignKey('group.id'), nullable=True),
        Column('allow_adopt', Boolean, default=True),
        Column('allow_delegate', Boolean, default=True),
        Column('allow_index', Boolean, default=True),
        Column('hidden', Boolean, default=False),
        Column('locale', Unicode(7), nullable=True),
        Column('css', UnicodeText(), nullable=True),
        Column('use_norms', Boolean, nullable=True, default=True)
    )

    propose = Column('allow_propose', Boolean, default=True)
    propose.create(instance_table)

    u = instance_table.update(values={'allow_propose': True})
    migrate_engine.execute(u)
def upgrade(migrate_engine):
    meta.bind = migrate_engine
    records_table = Table('records', meta, autoload=True)

    # Create the new inherit_ttl column
    inherit_ttl = Column('inherit_ttl', Boolean(), default=True)
    inherit_ttl.create(records_table)

    # Semi-populate the new inherit_ttl column. We'll need to do a cross-db
    # join from powerdns.records -> powerdns.domains -> designate.domains, so
    # we can't perform the second half here.
    query = records_table.update().values(inherit_ttl=False)
    query = query.where(records_table.c.ttl != None)
    query.execute()

    # If there are records without an explicitly configured TTL, we'll need
    # a manual post-migration step.
    query = records_table.select()
    query = query.where(records_table.c.ttl == None)
    c = len(migrate_engine.execute(query).fetchall())

    if c > 0:
        pmq = ('UPDATE powerdns.records JOIN powerdns.domains ON powerdns.reco'
               'rds.domain_id = powerdns.domains.id JOIN designate.domains ON '
               'powerdns.domains.designate_id = designate.domains.id SET power'
               'dns.records.ttl = designate.domains.ttl WHERE powerdns.records'
               '.inherit_ttl = 1;')

        LOG.warning(_LW('**** A manual post-migration step is required ****'))
        LOG.warning(_LW('Please issue this query: %s'), pmq)
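# A hedged alternative (not from the original migration) that counts the
# remaining rows on the server with COUNT() instead of fetching them all;
# it assumes the same reflected records_table and the old-style select([...])
# API used throughout these snippets.
from sqlalchemy import func, select


def count_records_without_ttl(migrate_engine, records_table):
    query = select([func.count(records_table.c.id)]).where(
        records_table.c.ttl == None)  # SQL "IS NULL"
    return migrate_engine.execute(query).scalar()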
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    instances = Table('instances', meta, autoload=True)
    integer_column = instances.c.instance_type_id
    string_column = Column('instance_type_id_str',
                           String(length=255, convert_unicode=False,
                                  assert_unicode=None, unicode_error=None,
                                  _warn_on_bytestring=False),
                           nullable=True)

    types = {}
    for instance in migrate_engine.execute(instances.select()):
        if instance.instance_type_id is None:
            types[instance.id] = None
        else:
            types[instance.id] = str(instance.instance_type_id)

    string_column.create(instances)
    for instance_id, instance_type_id in types.items():
        update = instances.update().\
            where(instances.c.id == instance_id).\
            values(instance_type_id_str=instance_type_id)
        migrate_engine.execute(update)

    integer_column.alter(name='instance_type_id_int')
    string_column.alter(name='instance_type_id')
    integer_column.drop()
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    instances = Table('instances', meta, autoload=True)
    volumes = Table('volumes', meta, autoload=True)
    instance_id_column = Column('instance_id', Integer)

    instance_id_column.create(volumes)
    try:
        volumes.update().values(
            instance_id=select(
                [instances.c.id],
                instances.c.uuid == volumes.c.instance_uuid)
        ).execute()
    except Exception:
        # remove the half-added column before re-raising
        instance_id_column.drop()
        raise

    fkeys = list(volumes.c.instance_id.foreign_keys)
    if fkeys:
        try:
            fk_name = fkeys[0].constraint.name
            ForeignKeyConstraint(
                columns=[volumes.c.instance_id],
                refcolumns=[instances.c.id],
                name=fk_name).create()
        except Exception:
            LOG.error(_("foreign key could not be created"))
            raise

    volumes.c.instance_uuid.drop()
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    instance_metadata = Table("instance_metadata", meta, autoload=True)
    instances = Table("instances", meta, autoload=True)
    uuid_column = Column("instance_uuid", String(36))
    uuid_column.create(instance_metadata)

    try:
        instance_metadata.update().values(
            instance_uuid=select(
                [instances.c.uuid],
                instances.c.id == instance_metadata.c.instance_id)
        ).execute()
    except Exception:
        uuid_column.drop()
        raise

    fkeys = list(instance_metadata.c.instance_id.foreign_keys)
    if fkeys:
        try:
            fkey_name = fkeys[0].constraint.name
            ForeignKeyConstraint(
                columns=[instance_metadata.c.instance_id],
                refcolumns=[instances.c.id],
                name=fkey_name).drop()
        except Exception:
            LOG.error(_("foreign key constraint couldn't be removed"))
            raise

    instance_metadata.c.instance_id.drop()
def upgrade(migrate_engine):
    metadata.bind = migrate_engine

    instance_table = Table(
        "instance", metadata,
        Column("id", Integer, primary_key=True),
        Column("key", Unicode(20), nullable=False, unique=True),
        Column("label", Unicode(255), nullable=False),
        Column("description", UnicodeText(), nullable=True),
        Column("required_majority", Float, nullable=False),
        Column("activation_delay", Integer, nullable=False),
        Column("create_time", DateTime, default=func.now()),
        Column("access_time", DateTime, default=func.now(), onupdate=func.now()),
        Column("delete_time", DateTime, nullable=True),
        Column("creator_id", Integer, ForeignKey("user.id"), nullable=False),
        Column("default_group_id", Integer, ForeignKey("group.id"), nullable=True),
        Column("allow_adopt", Boolean, default=True),
        Column("allow_delegate", Boolean, default=True),
        Column("allow_propose", Boolean, default=True),
        Column("allow_index", Boolean, default=True),
        Column("hidden", Boolean, default=False),
        Column("locale", Unicode(7), nullable=True),
        Column("css", UnicodeText(), nullable=True),
        Column("frozen", Boolean, default=False),
        Column("milestones", Boolean, default=False),
        Column("use_norms", Boolean, nullable=True, default=True),
        Column("require_selection", Boolean, nullable=True, default=False),
        Column("is_authenticated", Boolean, nullable=True, default=False),
    )

    hide_categories = Column("hide_global_categories", Boolean, nullable=True,
                             default=False)
    hide_categories.create(instance_table)

    u = instance_table.update(values={"hide_global_categories": False})
    migrate_engine.execute(u)
def upgrade(migrate_engine):
    metadata.bind = migrate_engine
    print(__doc__)
    metadata.reflect()
    try:
        if migrate_engine.name == 'mysql':
            # Strip slug index prior to creation so we can do it manually.
            slug_index = None
            for ix in Page_table.indexes:
                if ix.name == 'ix_page_slug':
                    slug_index = ix
            Page_table.indexes.remove(slug_index)
        Page_table.create()
        if migrate_engine.name == 'mysql':
            # Create slug index manually afterward.
            i = Index("ix_page_slug", Page_table.c.slug, mysql_length=200)
            i.create()
    except Exception:
        log.exception("Could not create page table")
    try:
        PageRevision_table.create()
    except Exception:
        log.exception("Could not create page_revision table")

    # Add 1 column to the user table
    User_table = Table("galaxy_user", metadata, autoload=True)
    col = Column('username', String(255), index=True, unique=True, default=False)
    col.create(User_table, index_name='ix_user_username', unique_name='username')
    assert col is User_table.c.username
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    instances = Table('instances', meta, autoload=True)

    types = {}
    for instance in migrate_engine.execute(instances.select()):
        if instance.instance_type_id is None:
            types[instance.id] = None
            continue
        try:
            types[instance.id] = int(instance.instance_type_id)
        except ValueError:
            LOG.warn("Instance %s did not have instance_type_id "
                     "converted to an integer because its value is %s" %
                     (instance.id, instance.instance_type_id))
            types[instance.id] = None

    integer_column = Column('instance_type_id_int', Integer(), nullable=True)
    string_column = instances.c.instance_type_id

    integer_column.create(instances)
    for instance_id, instance_type_id in types.items():
        update = instances.update().\
            where(instances.c.id == instance_id).\
            values(instance_type_id_int=instance_type_id)
        migrate_engine.execute(update)

    string_column.alter(name='instance_type_id_str')
    integer_column.alter(name='instance_type_id')
    string_column.drop()
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    dialect = migrate_engine.url.get_dialect().name

    instance_actions = Table('instance_actions', meta, autoload=True)
    instances = Table('instances', meta, autoload=True)
    uuid_column = Column('instance_uuid', String(36))
    uuid_column.create(instance_actions)

    try:
        instance_actions.update().values(
            instance_uuid=select(
                [instances.c.uuid],
                instances.c.id == instance_actions.c.instance_id)
        ).execute()
    except Exception:
        uuid_column.drop()
        raise

    if not dialect.startswith('sqlite'):
        fkeys = list(instance_actions.c.instance_id.foreign_keys)
        if fkeys:
            try:
                fkey_name = fkeys[0].constraint.name
                ForeignKeyConstraint(columns=[instance_actions.c.instance_id],
                                     refcolumns=[instances.c.id],
                                     name=fkey_name).drop()
            except Exception:
                LOG.error(_("foreign key constraint couldn't be removed"))
                raise

    instance_actions.c.instance_id.drop()
def upgrade(migrate_engine):
    meta.bind = migrate_engine
    records_table = Table('records', meta, autoload=True)

    recordset_id = Column('designate_recordset_id', UUID())
    recordset_id.create(records_table)
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    dialect = migrate_engine.url.get_dialect().name

    block_device_mapping = Table('block_device_mapping', meta, autoload=True)
    instances = Table('instances', meta, autoload=True)
    uuid_column = Column('instance_uuid', String(36))
    uuid_column.create(block_device_mapping)

    try:
        block_device_mapping.update().values(
            instance_uuid=select(
                [instances.c.uuid],
                instances.c.id == block_device_mapping.c.instance_id)
        ).execute()
    except Exception:
        uuid_column.drop()
        raise

    fkeys = list(block_device_mapping.c.instance_id.foreign_keys)
    if fkeys:
        try:
            fkey_name = fkeys[0].constraint.name
            ForeignKeyConstraint(
                columns=[block_device_mapping.c.instance_id],
                refcolumns=[instances.c.id],
                name=fkey_name).drop()
        except Exception:
            LOG.error(_("foreign key constraint couldn't be removed"))
            raise

    block_device_mapping.c.instance_id.drop()
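# Several of the migrations above repeat one pattern: add an instance_uuid
# column, backfill it from instances.uuid, drop the old integer foreign key,
# then drop the integer column. A hedged generic sketch of that pattern using
# the same sqlalchemy-migrate changeset API; the helper name
# convert_instance_fk_to_uuid is hypothetical and the table/column names are
# parameters rather than anything from the original code.
from migrate import ForeignKeyConstraint
from sqlalchemy import Column, MetaData, String, Table, select


def convert_instance_fk_to_uuid(migrate_engine, table_name,
                                fk_column='instance_id'):
    meta = MetaData()
    meta.bind = migrate_engine

    child = Table(table_name, meta, autoload=True)
    instances = Table('instances', meta, autoload=True)

    uuid_column = Column('instance_uuid', String(36))
    uuid_column.create(child)
    try:
        # Backfill the new column from the parent table.
        child.update().values(
            instance_uuid=select(
                [instances.c.uuid],
                instances.c.id == child.c[fk_column])).execute()
    except Exception:
        uuid_column.drop()
        raise

    # Drop the FK constraint (if any) before dropping the old column.
    fkeys = list(child.c[fk_column].foreign_keys)
    if fkeys and fkeys[0].constraint.name:
        ForeignKeyConstraint(columns=[child.c[fk_column]],
                             refcolumns=[instances.c.id],
                             name=fkeys[0].constraint.name).drop()

    child.c[fk_column].drop()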
def downgrade(migrate_engine):
    metadata.bind = migrate_engine
    metadata.reflect()
    # Drop missing_test_components and tool_test_results from the repository_metadata
    # table and add tool_test_errors to the repository_metadata table.
    RepositoryMetadata_table = Table("repository_metadata", metadata, autoload=True)
    # Drop the missing_test_components column.
    try:
        RepositoryMetadata_table.c.missing_test_components.drop()
    except Exception:
        log.exception("Dropping column missing_test_components from the repository_metadata table failed.")
    # Drop the tool_test_results column.
    try:
        RepositoryMetadata_table.c.tool_test_results.drop()
    except Exception:
        log.exception("Dropping column tool_test_results from the repository_metadata table failed.")
    # Create the tool_test_errors column.
    c = Column("tool_test_errors", JSONType, nullable=True)
    try:
        c.create(RepositoryMetadata_table)
        assert c is RepositoryMetadata_table.c.tool_test_errors
    except Exception:
        log.exception("Adding tool_test_errors column to the repository_metadata table failed.")
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    volumes = Table('volumes', meta, autoload=True)

    attach_string = Column('attachtime_string', String(255))
    attach_string.create(volumes)

    old_attachtime = volumes.c.attach_time

    try:
        volumes_list = list(volumes.select().execute())
        for v in volumes_list:
            attach_time = select(
                [volumes.c.attach_time],
                volumes.c.id == v['id'])
            volumes.update().\
                where(volumes.c.id == v['id']).\
                values(attach_string=attach_time).execute()
    except Exception:
        # remove the half-added column before re-raising
        attach_string.drop()
        raise

    old_attachtime.alter(name='attach_time_old')
    attach_string.alter(name='attach_time')
    old_attachtime.drop()
def upgrade(migrate_engine):
    metadata.bind = migrate_engine
    show_norms_navigation = Column('show_norms_navigation', Boolean, default=True)
    show_norms_navigation.create(instance_table)
def upgrade(migrate_engine):
    metadata.bind = migrate_engine
    print(__doc__)
    metadata.reflect()

    ToolShedRepository_table = Table("tool_shed_repository", metadata, autoload=True)
    col = Column("installed_changeset_revision", TrimmedString(255))
    try:
        col.create(ToolShedRepository_table)
        assert col is ToolShedRepository_table.c.installed_changeset_revision
    except Exception:
        log.exception("Adding installed_changeset_revision column to the tool_shed_repository table failed.")
    # Update each row by setting the value of installed_changeset_revision to be the value of changeset_revision.
    # This will be problematic if the value of changeset_revision was updated to something other than the value
    # that it was when the repository was installed (because the install path determined in real time will attempt to
    # find the repository using the updated changeset_revision instead of the required installed_changeset_revision),
    # but at the time this script was written, this scenario is extremely unlikely.
    cmd = "SELECT id AS id, " \
        + "installed_changeset_revision AS installed_changeset_revision, " \
        + "changeset_revision AS changeset_revision " \
        + "FROM tool_shed_repository;"
    tool_shed_repositories = migrate_engine.execute(cmd).fetchall()
    update_count = 0
    for row in tool_shed_repositories:
        cmd = "UPDATE tool_shed_repository " \
            + "SET installed_changeset_revision = '%s' " % row.changeset_revision \
            + "WHERE changeset_revision = '%s';" % row.changeset_revision
        migrate_engine.execute(cmd)
        update_count += 1
    print("Updated the installed_changeset_revision column for", update_count,
          "rows in the tool_shed_repository table.")
def wants_notifications(db):
    """Add a wants_notifications field to User model"""
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")
    col = Column('wants_notifications', Boolean, default=True)
    col.create(user_table)
    db.commit()
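# A hedged counterpart to the migration above (not present in the original):
# removing the column again with the same sqlalchemy-migrate column API and
# the same inspect_table helper. The function name drop_wants_notifications
# is hypothetical.
from sqlalchemy import MetaData


def drop_wants_notifications(db):
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")
    user_table.c.wants_notifications.drop()
    db.commit()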
def upgrade(migrate_engine):
    metadata.bind = migrate_engine
    message_table.create()
    message_recipient_table.create()

    email_messages = Column('email_messages', Boolean, default=True)
    email_messages.create(user_table)
def check_create_fk(self, from_table_id, to_table_id, ignoreexisting=False):
    from_type = setobject_type_registry.lookup_by_table(from_table_id)
    to_type = setobject_type_registry.lookup_by_table(to_table_id)
    pk = to_type.get_primary_key_attr_name()

    # Now add the foreign key if it does not exist yet
    if not field_exists(from_table_id, self.foreignkeycol):
        col = Column(
            self.foreignkeycol,
            getattr(to_type.get_table_class().c, pk).type,
            ForeignKey(to_table_id + "." + pk),
        )
        col.create(from_type.get_table_class())
        # The foreign key column has been newly created
        Session().flush()
        # deferred import
        from p2.datashackle.core.models.mapping import map_tables
        map_tables(exclude_sys_tables=True)
    else:
        # It exists; check whether it is what we want or something else.
        fkset = getattr(from_type.get_table_class().c, self.foreignkeycol).foreign_keys
        if len(fkset) > 0:
            for fk in fkset:
                if str(fk.column) == to_table_id + "." + pk and ignoreexisting:
                    return  # this is what we want! fine.
                raise UserException(
                    "A relation with a similar Data Field Name but targeting the table '"
                    + str(fk.column).split('.', 1)[0]
                    + "' already exists. Please use another Data Field Name.")
        raise UserException(
            "The column '" + self.foreignkeycol + "' in the table '" + to_table_id
            + "' already exists. Please choose a unique Data Field Name that "
            "doesn't collide with existing data columns.")
def upgrade(migrate_engine):
    metadata.bind = migrate_engine
    display_migration_details()
    # Load existing tables
    metadata.reflect()
    try:
        User_table = Table("galaxy_user", metadata, autoload=True)
    except NoSuchTableError:
        User_table = None
        log.debug("Failed loading table galaxy_user")
    if User_table is not None:
        try:
            col = Column("form_values_id", Integer, index=True)
            col.create(User_table, index_name='ix_user_form_values_id')
            assert col is User_table.c.form_values_id
        except Exception as e:
            log.debug("Adding column 'form_values_id' to galaxy_user table failed: %s" % str(e))
    try:
        FormValues_table = Table("form_values", metadata, autoload=True)
    except NoSuchTableError:
        FormValues_table = None
        log.debug("Failed loading table form_values")
    if migrate_engine.name != 'sqlite':
        # Add 1 foreign key constraint to the form_values table
        if User_table is not None and FormValues_table is not None:
            try:
                cons = ForeignKeyConstraint([User_table.c.form_values_id],
                                            [FormValues_table.c.id],
                                            name='user_form_values_id_fk')
                # Create the constraint
                cons.create()
            except Exception as e:
                log.debug("Adding foreign key constraint 'user_form_values_id_fk' to table 'galaxy_user' failed: %s" % str(e))
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()
    # Initialize.
    if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
        default_false = "0"
    elif migrate_engine.name in ['postgresql', 'postgres']:
        default_false = "false"
    try:
        RepositoryMetadata_table = Table("repository_metadata", metadata, autoload=True)
    except NoSuchTableError:
        RepositoryMetadata_table = None
        log.debug("Failed loading table repository_metadata.")
    if RepositoryMetadata_table is not None:
        # Create the test_install_error column.
        c = Column("test_install_error", Boolean, default=False, index=True)
        try:
            c.create(RepositoryMetadata_table, index_name="ix_repository_metadata_ttie")
            assert c is RepositoryMetadata_table.c.test_install_error
            migrate_engine.execute("UPDATE repository_metadata SET test_install_error=%s" % default_false)
        except Exception:
            log.exception("Adding test_install_error column to the repository_metadata table failed.")
    # Create skip_tool_test table.
    try:
        SkipToolTest_table.create()
    except Exception:
        log.exception("Creating the skip_tool_test table failed.")
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()
    # Initialize.
    if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
        default_false = "0"
    elif migrate_engine.name in ['postgresql', 'postgres']:
        default_false = "false"
    try:
        RepositoryMetadata_table = Table("repository_metadata", metadata, autoload=True)
    except NoSuchTableError:
        RepositoryMetadata_table = None
        log.debug("Failed loading table repository_metadata.")
    if RepositoryMetadata_table is not None:
        # Drop the tool_test_errors column from the repository_metadata table as it is
        # poorly named. It will be replaced with the new tool_test_results column.
        try:
            col = RepositoryMetadata_table.c.tool_test_errors
            col.drop()
        except Exception as e:
            log.debug("Dropping column 'tool_test_errors' from repository_metadata table failed: %s" % str(e))
        # Create the tool_test_results column to replace the ill-named tool_test_errors
        # column just dropped above.
        c = Column("tool_test_results", JSONType, nullable=True)
        try:
            c.create(RepositoryMetadata_table)
            assert c is RepositoryMetadata_table.c.tool_test_results
        except Exception as e:
            print("Adding tool_test_results column to the repository_metadata table failed: %s" % str(e))
def upgrade(migrate_engine):
    metadata.bind = migrate_engine
    print(__doc__)
    metadata.reflect()
    try:
        Request_table = Table("request", metadata, autoload=True)
    except NoSuchTableError:
        Request_table = None
        log.debug("Failed loading table 'request'")
    if Request_table is not None:
        # create the column again as JSONType
        try:
            col = Column("notification", JSONType())
            col.create(Request_table)
            assert col is Request_table.c.notification
        except Exception:
            log.exception("Creating column 'notification' in the 'request' table failed.")
        cmd = "SELECT id, user_id, notify FROM request"
        result = migrate_engine.execute(cmd)
        for r in result:
            id = int(r[0])
            notify_new = dict(email=[], sample_states=[], body='', subject='')
            cmd = "UPDATE request SET notification='%s' WHERE id=%i" % (dumps(notify_new), id)
            migrate_engine.execute(cmd)
        # remove the 'notify' column for non-sqlite databases.
        if migrate_engine.name != 'sqlite':
            try:
                Request_table.c.notify.drop()
            except Exception:
                log.exception("Deleting column 'notify' from the 'request' table failed.")
def upgrade(migrate_engine):
    meta = MetaData(bind=migrate_engine)
    table = Table('user', meta, autoload=True)

    col = Column('optional_attributes', MutationDict.as_mutable(JSONEncodedDict))
    col.create(table)
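# The migration above assumes MutationDict / JSONEncodedDict types that are
# defined elsewhere. A hedged sketch of what they typically look like,
# following the standard SQLAlchemy mutation-tracking recipe; the real
# project's definitions may differ.
import json

from sqlalchemy.ext.mutable import Mutable
from sqlalchemy.types import TypeDecorator, VARCHAR


class JSONEncodedDict(TypeDecorator):
    """Stores a dict as a JSON-encoded string."""
    impl = VARCHAR

    def process_bind_param(self, value, dialect):
        return json.dumps(value) if value is not None else None

    def process_result_value(self, value, dialect):
        return json.loads(value) if value is not None else None


class MutationDict(Mutable, dict):
    """Dict subclass that flags the parent object as dirty when it changes."""

    @classmethod
    def coerce(cls, key, value):
        if not isinstance(value, MutationDict):
            if isinstance(value, dict):
                return MutationDict(value)
            return Mutable.coerce(key, value)
        return value

    def __setitem__(self, key, value):
        dict.__setitem__(self, key, value)
        self.changed()

    def __delitem__(self, key):
        dict.__delitem__(self, key)
        self.changed()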
class Post(Base):
    __tablename__ = 'post'
    id = Column(Integer, primary_key=True, autoincrement=True)
    tags = Column(ARRAY(String(10)))
class Artist(BaseModel):
    __tablename__ = 'artist'
    name = Column(String(256))
    description = Column(String(256))
    albums = relationship('Album', backref='artist')
    genre_id = Column(Integer, ForeignKey('genre.id'))
class DummyModel2(Entity):
    list_attr = Column(JSONList())
    dict_attr = Column(JSONDict())
    uuid = Column(UUID())
from sqlalchemy import Integer, Column, Table, ForeignKey, Boolean, DateTime, Text, String, Enum
from sqlalchemy.orm import relationship, backref

app = AppFactory(DevelopmentConfig).get_app(__name__)
root = DevelopmentConfig.ROOT_PATH


class Super(super):
    def __getattr__(self, attr):
        return self.__self__.__getattr__(attr)


pages_macros = Table('pages_macros', BaseMixin.metadata,
                     Column('page_id', Integer, ForeignKey('pages.id')),
                     Column('macro_id', Integer, ForeignKey('macros.id')),
                     extend_existing=True)

pages_template_blocks = Table('pages_template_blocks', BaseMixin.metadata,
                              Column('page_id', Integer, ForeignKey('pages.id')),
                              Column('template_block_id', Integer,
                                     ForeignKey('template_blocks.id')),
                              extend_existing=True)


class Template(BaseMixin):
    _filename = None
class TrackerModel(Base):
    __tablename__ = 'tracker'

    id = Column(Integer, primary_key=True)
    track = Column(Boolean, default=True, server_default=expression.true())
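# Why the model above sets both default= and server_default=: a hedged usage
# sketch, assuming the module's declarative Base is available and using an
# illustrative in-memory SQLite engine (neither is part of the original
# snippet). default=True is applied by SQLAlchemy when it issues an INSERT
# without a value, while server_default=expression.true() emits "DEFAULT true"
# in the CREATE TABLE DDL so rows inserted outside SQLAlchemy also get a value.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)           # DDL carries the server default

session = sessionmaker(bind=engine)()
session.add(TrackerModel())                # client-side default fills `track`
session.commit()

engine.execute("INSERT INTO tracker DEFAULT VALUES")  # server default applies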
class Patient(Base):
    """
    SQLAlchemy ORM class for fictional patients.
    """
    __tablename__ = 'patient'
    __table_args__ = TABLE_KWARGS

    patient_id = Column(Integer, primary_key=True, autoincrement=False)
    forename = Column(String(50))
    surname = Column(String(50))
    dob = Column(Date)
    nullfield = Column(Integer)
    nhsnum = Column(BigInteger)
    phone = Column(String(50))
    postcode = Column(String(50))
    optout = Column(Boolean, default=False)
    related_patient_id = Column(Integer)
    colour = Column(Enum(EnumColours), nullable=True)  # new in v0.18.41
class Page(BaseMixin): _use_base_template = None name = Column(String(255)) description = Column(Text) template_id = Column(Integer, ForeignKey('templates.id')) template = relationship(Template) slug = Column(String(255)) title = Column(String(255)) add_to_nav = Column(Boolean, default=False) add_left_sidebar = Column(Boolean, default=False) add_right_sidebar = Column(Boolean, default=False) date_added = Column(DateTime, default=datetime.datetime) visible = Column(Boolean, default=False) meta_title = Column(String(255)) added_by = relationship('flask_cms.auth.models.User') user_id = Column(Integer, ForeignKey('users.id')) short_url = Column(String(255)) macros = relationship(Macro, secondary='pages_macros', lazy='dynamic') _blocks = relationship(TemplateBlock, secondary='pages_template_blocks', lazy='dynamic') content = Column(Text) def __repr__(self): if self.name is None: rtn = '<Page: Unnamed | {}'.format(self.line_count) else: rtn = '<Page: {} | {} lines'.format(self.name, self.line_count) return rtn @property def use_base_template(self): return bool( self.template ) if self._use_base_template is None else self._use_base_template @use_base_template.setter def use_base_template(self, val): self._use_base_template = val def _get_page_url(self): return url_for('page.pages', slug=self.slug) @staticmethod def _get_create_url(): return url_for('admin.add_page') @classmethod def get_by_slug(cls, slug): return cls.query.filter(cls.slug == slug).first() def _get_absolute_url(self): return url_for('admin.page_view', slug=self.slug) def _get_edit_url(self): return url_for('admin.edit_page', item_id=int(self.id)) def _get_edit_content_url(self): return url_for('admin.edit_page_content', item_id=int(self.id)) @property def line_count(self): try: rtn = len(self.content.split('\n')) except: rtn = 0 return rtn @property def template_name(self): return self.template.name or '' @property def block_count(self): return self._template.blocks.count() @property def blocks(self): return self._template.blocks.keys() def get_block(self, block): return self._template.blocks[block] return str(self._blocks.get_by_name(block)) @property def _template(self): class template(object): def __init__(self): self.blocks = { 'blocka': '', 'blockb': '', 'blockc': 'x', 'blockd': '' } return template()
class Template(BaseMixin): _filename = None name = Column(String(255), nullable=False, unique=True) description = Column(Text) body = Column(Text) base_template = Column(String(255)) def __init__(self, *args, **kwargs): self._raw_template = '' self._head = '' base = kwargs.pop('base_template', None) super(Template, self).__init__(*args, **kwargs) if base is None: self.is_base_template = True else: self.is_base_template = False self._add_to_head(self._create_extend(base)) if self.body: self._set_template() def _add_to_head(self, itm): self._head = self._head + '\n' + itm def _create_extend(self, parent): return '{% extends "%s" %}' % parent def _set_template(self): from jinja2 import Template self._raw_template = Template(self._head + self.body[:]) @property def filename(self): return self._filename if self._filename is not None else "{}.html".format( self.name) @property def block_count(self): return len(Template(self._head + self.body[:]).blocks) @property def blocks(self): return self._raw_template.blocks.keys() def set_body(self, data): self.body = data self._set_template() @property def body_body(self): return self.body @property def content(self): return self.body[:] if self.body is not None else "" def _get_edit_url(self): return url_for('admin.edit_template', item_id=int(self.id)) def _get_absolute_url(self): return url_for('admin.template_view', name=self.name) def __repr_(self): if self.name is None: rtn = '<Template: Unnamed | {} lines'.format(self.line_count) else: rtn = '<Template: {} | {} lines'.format(self.name, self.line_count) return rtn def __str__(self): return self.body or '' @staticmethod def _get_create_url(): return url_for('admin.add_template') @property def line_count(self): try: rtn = len(self.body.split('\n')) except: rtn = 0 return rtn def get_block_count(self): return len(self._raw_template.blocks.keys()) @staticmethod def get_base_templates(): return [] @property def filename(self): return "{}.html".format(self.name if self.name else self.id)
class Macro(BaseMixin):
    MACRO_FILE = os.path.join(os.path.abspath(root), 'macros.html')

    name = Column(String(255), nullable=False)
    content = Column(Text)
    _args = Column(Text)

    def __init__(self, name='', content='', arguments=''):
        self.name = name
        self.content = content
        self.args = arguments

    @hybrid_property
    def args(self):
        return pickle.loads(self._args)

    @args.setter
    def args(self, args):
        if args:
            self._args = pickle.dumps(args)

    @property
    def head(self):
        start = '{% macro '
        mid = '%s' % self.name
        end = '('
        if self._args:
            arg_amount = len(self.args)
            for k, v in self.args:
                end += '%s=%s' % (k, v)
                if k != self.args[-1][0]:
                    end += ','
        end += ') %}'
        return start + mid + end

    @property
    def foot(self):
        return '{% endmacro %}'

    def __str__(self):
        return self.head + '\n' + \
            self.content + '\n' + \
            self.foot + '\n'

    @property
    def import_statement(self):
        return self._generate_import()

    def _generate_import(self):
        start = '{% from "'
        end = ' %}'
        mid = '%s" import %s' % (self.MACRO_FILE, self.name)
        return start + mid + end

    @staticmethod
    def _generate_macro_file(names=None):
        res = ''
        if names is None:
            macros = Macro.query.all()
        else:
            macros = [Macro.query.filter(Macro.name == x).all()[0] for x in names]
        for macro in macros:
            res += str(macro)
            res += '\n'
        try:
            with open(Macro.MACRO_FILE, 'w') as fp:
                fp.write(res)
        except IOError:
            return False
        return True
class Block(BaseMixin): name = Column(String(255)) content = Column(Text) def __init__(self, name='', content=''): self.name = name self.content = content @property def head(self): start = '{% block ' mid = self.name end = ' %}' return start + mid + end @property def foot(self): start = '{% endblock ' mid = self.name end = ' %}' return start + mid + end def set_content(self, data): self.content = data def __call__(self): return self.render() def __str__(self): return self.head + '\n' + self.content + '\n' + self.foot def render(self): s = self.content.split('\n') return self.head + '<br />'.join(map(str, s)) + self.foot def __repr__(self): if self.name is None: rtn = '<Block: Unnamed | {} lines'.format(self.line_count) else: rtn = '<block: {} | {} lines'.format(self.name, self.line_count) return rtn @staticmethod def _get_create_url(): return url_for('admin.add_block') def _get_edit_content_url(self): return url_for('admin.edit_block_content', item_id=int(self.id)) def _get_edit_url(self): return url_for('admin.edit_block', item_id=int(self.id)) def _get_absolute_url(self): return url_for('admin.block_view', name=self.name) @property def line_count(self): try: rtn = len(self.content.split('\n')) except: rtn = 0 return rtn
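# A small hedged usage sketch for the Block model above; it assumes an
# application context where Block is importable, and the values below are
# illustrative only.
block = Block(name='content', content='line one\nline two')
print(block.head)      # {% block content %}
print(block.render())  # head + lines joined with <br /> + foot
print(block.foot)      # {% endblock content %}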
class Pet(Base): __tablename__ = 'pet' id = Column(Integer, primary_key=True) _kind = Column(String(50), nullable=False) _place = Column(String(100)) _history = Column(String(1000)) _owner_id = Column(Integer, ForeignKey('owner.id'), nullable=False) _owner = relationship('Owner', backref=backref('pets', lazy=True)) _cuteness_level = Column(Integer) _hungry_level = Column(Integer) _color = Column(String(100)) _gender = Column(String(40)) _breed = Column(String(100)) _weight = Column(Integer) _height = Column(Integer) _name = Column(String(100)) def __init__(self, kind="unknown", place="unknown", cuteness_level=10, hungry_level=10, # owner="unknown", color="unknown", gender="unknown", breed="unknown", weight=1, height=1, name="unknown"): """Constructor for Pet class for default arguments :_kind: TODO :_gender: TODO :_cuteness_level: TODO :_hungry_level: TODO :_owner: TODO :_color: TODO :_gender: TODO :_breed: TODO :_weight: TODO :_height: TODO :_name: TODO """ self._kind = kind self._cuteness_level = cuteness_level self._hungry_level = hungry_level # self._owner = owner self._color = color self._gender = gender self._breed = breed self._weight = weight self._height = height self._name = name self.update_place(place) def update_place(self, place): self._place = place try: places = defaultdict(list, loads(self._history)) except TypeError: places = defaultdict(list) places[place].append(datetime.utcnow().timestamp()) self._history = dumps(places) def __repr__(self): return "Pet({0._kind!r}, {0._place!r}, {0._cuteness_level!r}, {0._hungry_level!r}, {0._color!r}, {0._gender!r}, {0._breed!r}, {0._weight!r}, {0._height!r}, {0._name!r})".format( self)
class Classification(db.Model):
    __tablename__ = 'Classification'
    id = Column(Integer, primary_key=True)
    UNCLASSIFIED = Column(String)
class Domains(db.Model):
    __tablename__ = 'domains'
    id = Column(Integer, primary_key=True)
    domain = Column(String)
    last_run = Column(DateTime)
    scan_spf = Column(Boolean)
    scan_dmarc = Column(Boolean)
    scan_dmarc_psl = Column(Boolean)
    scan_mx = Column(Boolean)
    scan_dkim = Column(Boolean)
    scan_https = Column(Boolean)
    scan_ssl = Column(Boolean)
    dmarc_phase = Column(Integer)
    organization_id = Column(Integer, ForeignKey('organizations.id'))
    organization = relationship("Organizations", back_populates="domains", cascade="all, delete")
    scans = relationship("Scans", back_populates="domain", cascade="all, delete")
class IncidentType(Base):
    id = Column(Integer, primary_key=True)
    name = Column(String, unique=True)
    slug = Column(String)
    description = Column(String)
    exclude_from_metrics = Column(Boolean, default=False)
    default = Column(Boolean, default=False)
    visibility = Column(String, default=Visibility.open.value)
    plugin_metadata = Column(JSON, default=[])

    template_document_id = Column(Integer, ForeignKey("document.id"))
    template_document = relationship("Document")

    commander_service_id = Column(Integer, ForeignKey("service.id"))
    commander_service = relationship("Service", foreign_keys=[commander_service_id])

    liaison_service_id = Column(Integer, ForeignKey("service.id"))
    liaison_service = relationship("Service", foreign_keys=[liaison_service_id])

    search_vector = Column(TSVectorType("name", "description"))

    @hybrid_method
    def get_meta(self, slug):
        if not self.plugin_metadata:
            return

        for m in self.plugin_metadata:
            if m["slug"] == slug:
                return m
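# A small hedged usage sketch for IncidentType.get_meta(); the plugin slug and
# metadata values below are invented for illustration.
incident_type = IncidentType(
    name="Denial of Service",
    plugin_metadata=[{"slug": "pagerduty", "service_id": "P123"}],
)
assert incident_type.get_meta("pagerduty")["service_id"] == "P123"
assert incident_type.get_meta("unknown-plugin") is None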
class Ssl_scans(db.Model):
    __tablename__ = 'ssl_scans'
    id = Column(Integer, ForeignKey('scans.id'), primary_key=True)
    ssl_scan = Column(JSONB)
    ssl_flagged_scan = relationship("Scans", back_populates="ssl", cascade="all, delete")
class Store(Base):
    __tablename__ = 'store'
    id = Column(Integer, primary_key=True)
    name = Column(String(150), nullable=False)
    user_id = Column(Integer, ForeignKey('user.id'))
class Flat(BASE): """ SQLAlchemy ORM model to store a flat. """ __tablename__ = "flats" __searchable__ = ["title", "text", "station", "location", "details"] # Weboob data id = Column(String, primary_key=True) area = Column(Float) bedrooms = Column(Float) cost = Column(Float) currency = Column(String) utilities = Column(Enum(FlatUtilities), default=FlatUtilities.unknown) date = Column(DateTime) details = Column(MagicJSON) location = Column(String) phone = Column(String) photos = Column(MagicJSON) rooms = Column(Float) station = Column(String) text = Column(Text) title = Column(String) urls = Column(MagicJSON) merged_ids = Column(MagicJSON) notes = Column(Text) notation = Column(SmallInteger, default=0) # Flatisfy data # TODO: Should be in another table with relationships flatisfy_stations = Column(MagicJSON) flatisfy_postal_code = Column(String) flatisfy_time_to = Column(MagicJSON) flatisfy_constraint = Column(String) # Status status = Column(Enum(FlatStatus), default=FlatStatus.new) @staticmethod def from_dict(flat_dict): """ Create a Flat object from a flat dict as manipulated by the filtering pass. """ # Handle flatisfy metadata flat_dict = flat_dict.copy() if "flatisfy" in flat_dict: flat_dict["flatisfy_stations"] = (flat_dict["flatisfy"].get( "matched_stations", [])) flat_dict["flatisfy_postal_code"] = (flat_dict["flatisfy"].get( "postal_code", None)) flat_dict["flatisfy_time_to"] = (flat_dict["flatisfy"].get( "time_to", {})) flat_dict["flatisfy_constraint"] = (flat_dict["flatisfy"].get( "constraint", "default")) del flat_dict["flatisfy"] # Handle utilities field if not isinstance(flat_dict["utilities"], FlatUtilities): if flat_dict["utilities"] == "C.C.": flat_dict["utilities"] = FlatUtilities.included elif flat_dict["utilities"] == "H.C.": flat_dict["utilities"] = FlatUtilities.excluded else: flat_dict["utilities"] = FlatUtilities.unknown # Handle status field flat_status = flat_dict.get("status", "new") if not isinstance(flat_status, FlatStatus): try: flat_dict["status"] = getattr(FlatStatus, flat_status) except AttributeError: if "status" in flat_dict: del flat_dict["status"] LOGGER.warn("Unkown flat status %s, ignoring it.", flat_status) # Handle date field flat_dict["date"] = arrow.get(flat_dict["date"]).naive flat_object = Flat() flat_object.__dict__.update(flat_dict) return flat_object def __repr__(self): return "<Flat(id=%s, urls=%s)>" % (self.id, self.urls) def json_api_repr(self): """ Return a dict representation of this flat object that is JSON serializable. """ flat_repr = { k: v for k, v in self.__dict__.items() if not k.startswith("_") } if isinstance(flat_repr["status"], FlatStatus): flat_repr["status"] = flat_repr["status"].name if isinstance(flat_repr["utilities"], FlatUtilities): flat_repr["utilities"] = flat_repr["utilities"].name return flat_repr
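# A hedged usage sketch for Flat.from_dict() above; the field values are
# invented and only the keys that from_dict() actually reads are included.
flat = Flat.from_dict({
    "id": "flat-0001@dummy",
    "utilities": "C.C.",            # mapped to FlatUtilities.included
    "status": "new",                # mapped to FlatStatus.new
    "date": "2021-01-01T00:00:00",
    "flatisfy": {"postal_code": "75011", "matched_stations": [], "time_to": {}},
})
assert flat.utilities is FlatUtilities.included
print(flat.json_api_repr())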
class Ciphers(db.Model):
    __tablename__ = 'ciphers'
    id = Column(Integer, primary_key=True)
    cipher_type = Column(String)
from datetime import datetime

from sqlalchemy.orm import relationship, backref
from sqlalchemy import Column, Integer, String, Table, ForeignKey, Boolean

from modules.core.source import Base

# which groups the user belongs to
user_group_association = Table(
    'schedule_user_group_association', Base.metadata,
    Column('user', Integer, ForeignKey('schedule_users.id')),
    Column('group', Integer, ForeignKey('schedule_groups.name')))


class User(Base):
    __tablename__ = "schedule_users"

    id = Column(Integer, primary_key=True)
    alias = Column(String)
    is_configured = Column(Boolean)
    # chosen groups are saved
    groups = relationship("Group", secondary=user_group_association)

    def __init__(self, id_, alias_):
        self.id = id_
        self.alias = alias_

    def __repr__(self):
        return f"User({self.id}, {self.alias})"


class Group(Base):
class Permission(Base): """A class to hold permissions. Permissions in Stalker defines what one can do or do not. A Permission instance is composed by three attributes; access, action and class_name. Permissions for all the classes in SOM are generally created by Stalker when initializing the database. If you created any custom classes to extend SOM you are also responsible to create the Permissions for it by calling :meth:`stalker.db.register` and passing your class to it. See the :mod:`stalker.db` documentation for details. :param str access: An Enum value which can have the one of the values of ``Allow`` or ``Deny``. :param str action: An Enum value from the list ['Create', 'Read', 'Update', 'Delete', 'List']. Can not be None. The list can be changed from stalker.config.Config.default_actions. :param str class_name: The name of the class that this action is applied to. Can not be None or an empty string. Example: Let say that you want to create a Permission specifying a Group of Users are allowed to create Projects:: from stalker import db from stalker import db from stalker.models.auth import User, Group, Permission # first setup the db with the default database # # stalker.db.init() will create all the Actions possible with the # SOM classes automatically # # What is left to you is to create the permissions db.setup() user1 = User( name='Test User', login='******', password='******', email='*****@*****.**' ) user2 = User( name='Test User', login='******', password='******', email='*****@*****.**' ) group1 = Group(name='users') group1.users = [user1, user2] # get the permissions for the Project class project_permissions = Permission.query\ .filter(Permission.access='Allow')\ .filter(Permission.action='Create')\ .filter(Permission.class_name='Project')\ .first() # now we have the permission specifying the allowance of creating a # Project # to make group1 users able to create a Project we simply add this # Permission to the groups permission attribute group1.permissions.append(permission) # and persist this information in the database DBSession.add(group) DBSession.commit() """ __tablename__ = 'Permissions' __table_args__ = ( UniqueConstraint('access', 'action', 'class_name'), {"extend_existing": True} ) id = Column(Integer, primary_key=True) _access = Column('access', Enum('Allow', 'Deny', name='AccessNames')) _action = Column('action', Enum(*defaults.actions, name='AuthenticationActions')) _class_name = Column('class_name', String(32)) def __init__(self, access, action, class_name): self._access = self._validate_access(access) self._action = self._validate_action(action) self._class_name = self._validate_class_name(class_name) def _validate_access(self, access): """validates the given access value """ from stalker import __string_types__ if not isinstance(access, __string_types__): raise TypeError( '%s.access should be an instance of str not %s' % ( self.__class__.__name__, access.__class__.__name__ ) ) if access not in ['Allow', 'Deny']: raise ValueError( '%s.access should be "Allow" or "Deny" not %s' % (self.__class__.__name__, access) ) return access def _access_getter(self): """returns the _access value """ return self._access access = synonym('_access', descriptor=property(_access_getter)) def _validate_class_name(self, class_name): """validates the given class_name value """ from stalker import __string_types__ if not isinstance(class_name, __string_types__): raise TypeError( '%s.class_name should be an instance of str not %s' % (self.__class__.__name__, 
class_name.__class__.__name__) ) return class_name def _class_name_getter(self): """returns the _class_name attribute value """ return self._class_name class_name = synonym( '_class_name', descriptor=property(_class_name_getter) ) def _validate_action(self, action): """validates the given action value """ from stalker import __string_types__ if not isinstance(action, __string_types__): raise TypeError( '%s.action should be an instance of str not %s' % (self.__class__.__name__, action.__class__.__name__) ) if action not in defaults.actions: raise ValueError( "%s.action should be one of the values of %s not '%s'" % (self.__class__.__name__, defaults.actions, action) ) return action def _action_getter(self): """returns the _action value """ return self._action action = synonym('_action', descriptor=property(_action_getter)) def __eq__(self, other): """the equality of two Permissions """ return isinstance(other, Permission) \ and other.access == self.access \ and other.action == self.action \ and other.class_name == self.class_name
from dataclasses import dataclass from sqlalchemy import Table, Column, Integer, String, ForeignKey, DateTime from sqlalchemy.orm import relationship from typing import List from . import Base association_table = Table('works_cvs', Base.metadata, Column('work_uid', String(10), ForeignKey('works.uid')), Column('cv_id', Integer, ForeignKey('cvs.id')) ) @dataclass class Record(Base): __tablename__ = "records" id = Column(Integer, primary_key=True) uid = Column(String(10), ForeignKey('works.uid')) work = relationship("Work", back_populates="records") timestamp:DateTime = Column(DateTime) dl_count:int = Column(Integer) wishlist_count:int = Column(Integer) @dataclass class CV(Base): __tablename__ = 'cvs' id = Column(Integer, primary_key=True) name:str = Column(String(50))
class Lesson(Base): __tablename__ = "schedule_lessons" id = Column(Integer, primary_key=True) group = Column(String, ForeignKey('schedule_groups.name')) subject = Column(String) teacher = Column(String) day = Column(Integer) start = Column(String) end = Column(String) room = Column(Integer) def __init__(self, group, subject, teacher, day, start, end, room): self.group = group self.subject = subject self.teacher = teacher self.day = day self.start = start self.end = end self.room = room def __repr__(self): return f"Lesson({self.subject}, {self.start})" @property def start_struct(self): """ Converts start time from string to time object :return: datetime """ return datetime.now().replace(hour=int(self.start[:2]), minute=int(self.start[3:])) @property def end_struct(self): """ Converts end time from string to time object :return: datetime """ return datetime.now().replace(hour=int(self.end[:2]), minute=int(self.end[3:])) @property def minutes_until_start(self): """ Total number of minutes until lesson begins :return: int """ seconds_left = (self.start_struct - datetime.now()).total_seconds() return round(seconds_left / 60) @property def minutes_until_end(self): """ Total number of minutes until lesson ends :return: int """ seconds_left = (self.end_struct - datetime.now()).total_seconds() return round(seconds_left / 60) def __lt__(self, other): """ Compares this lesson with given. Used in lesson sort :param other: Lesson :return: boolean """ return self.start_struct < other.start_struct def __str__(self): """ Converts current lesson to string for easy output :return: String """ return f"{self.subject}\n"\ f"👨🏫 {self.teacher}\n"\ f"🕐 {self.start} — {self.end}\n" \ f"🚪 {self.room if self.room != -1 else '?'}\n" def get_str_current(self): """ Returns string, which indicates how many time left until current lesson will be finished. Used when NOW button is pressed and current lesson is going :return: String """ hours_until_end = self.minutes_until_end // 60 return f"{self}⏸️ {str(hours_until_end)+'h ' if hours_until_end > 0 else ''}" \ f"{self.minutes_until_end % 60}m\n" def get_str_future(self): """ Returns string, which indicates how many time left until current lesson will be started. Used when NOW button is pressed and current lesson will start next :return: String """ hours_until_start = self.minutes_until_start // 60 return f"{self}▶ ️{str(hours_until_start)+'h ' if hours_until_start > 0 else ''}" \ f"{self.minutes_until_start % 60}m\n"
class Group(Entity, ACLMixin): """Creates groups for users to be used in authorization system. A Group instance is nothing more than a list of :class:`.User`\ s created to be able to assign permissions in a group level. The Group class, as with the :class:`.User` class, is mixed with the :class:`.ACLMixin` which adds ability to hold :class:`.Permission` instances and serve ACLs to Pyramid. :param str name: The name of this group. :param list users: A list of :class:`.User` instances, holding the desired users in this group. """ __auto_name__ = False __tablename__ = 'Groups' __mapper_args__ = {'polymorphic_identity': 'Group'} gid = Column("id", Integer, ForeignKey("Entities.id"), primary_key=True) users = relationship( "User", secondary="Group_Users", back_populates="groups", doc="""Users in this group. Accepts:class:`.User` instance. """ ) def __init__(self, name='', users=None, permissions=None, **kwargs): if users is None: users = [] if permissions is None: permissions = [] kwargs.update({'name': name}) super(Group, self).__init__(**kwargs) self.users = users self.permissions = permissions @validates('users') def _validate_users(self, key, user): """validates the given user instance """ if not isinstance(user, User): raise TypeError( '%s.users attribute must all be stalker.models.auth.User ' 'instances not %s' % (self.__class__.__name__, user.__class__.__name__) ) return user def __hash__(self): """the overridden __hash__ method """ return super(Group, self).__hash__()
""" from stalker.models.client import ClientUser return ClientUser(client=client) def create_project_user(project): """helper function to create ProjectUser instance on association proxy """ from stalker.models.project import ProjectUser return ProjectUser(project=project) # Group_Users Group_Users = Table( "Group_Users", Base.metadata, Column("uid", Integer, ForeignKey("Users.id"), primary_key=True), Column("gid", Integer, ForeignKey("Groups.id"), primary_key=True) ) class AuthenticationLog(SimpleEntity): """Keeps track of login/logout dates and the action (login or logout). """ __auto_name__ = True __tablename__ = "AuthenticationLogs" __mapper_args__ = {"polymorphic_identity": "AuthenticationLog"} log_id = Column( 'id', Integer,
class AuthenticationLog(SimpleEntity): """Keeps track of login/logout dates and the action (login or logout). """ __auto_name__ = True __tablename__ = "AuthenticationLogs" __mapper_args__ = {"polymorphic_identity": "AuthenticationLog"} log_id = Column( 'id', Integer, ForeignKey('SimpleEntities.id'), primary_key=True ) user_id = Column( 'uid', Integer, ForeignKey('Users.id'), nullable=False ) user = relationship( 'User', primaryjoin='AuthenticationLogs.c.uid==Users.c.id', uselist=False, back_populates='authentication_logs', doc="The :class:`.User` instance that this AuthenticationLog is " "created for" ) action = Column( 'action', Enum(LOGIN, LOGOUT, name='ActionNames'), nullable=False ) date = Column( DateTime(timezone=True), nullable=False ) def __init__(self, user=None, date=None, action=LOGIN, **kwargs): super(AuthenticationLog, self).__init__(**kwargs) self.user = user self.date = date self.action = action @validates('user') def __validate_user__(self, key, user): """validates the given user argument value """ if not isinstance(user, User): raise TypeError( '%s.user should be a User instance, not %s' % ( self.__class__.__name__, user.__class__.__name__ ) ) return user @validates('action') def __validate_action__(self, key, action): """validates the given action argument value """ if action is None: import copy action = copy.copy(LOGIN) if action not in [LOGIN, LOGOUT]: raise ValueError( '%s.action should be one of "login" or "logout", not "%s"' % ( self.__class__.__name__, action ) ) return action @validates('date') def __validate_date__(self, key, date): """validates the given date value """ if date is None: date = datetime.datetime.now(pytz.utc) if not isinstance(date, datetime.datetime): raise TypeError( '%s.date should be a "datetime.datetime" instance, not %s' % ( self.__class__.__name__, date.__class__.__name__ ) ) return date
class User(Entity, ACLMixin): """The user class is designed to hold data about a User in the system. .. note:: .. versionadded 0.2.0: Task Watchers New to version 0.2.0 users can be assigned to a :class:`.Task` as a **Watcher**. Which can be used to inform the users in watchers list about the updates of certain Tasks. .. note:: .. versionadded 0.2.0: Vacations It is now possible to define Vacations per user. .. note:: .. versionadded 0.2.7: Resource Efficiency .. note:: .. versionadded 0.2.11: Users not have a :attr:`.rate` attribute. :param rate: For future usage a rate attribute is added to the User to record the daily cost of this user as a resource. It should be either 0 or a positive integer or float value. Default is 0. :param efficiency: The efficiency is a multiplier for a user as a resource to a task and defines how much of the time spent for that particular task is counted as an actual effort. The default value is 1.0, lowest possible value is 0.0 and there is no upper limit. The efficiency of a resource can be used for three purposes. First you can use it as a crude way to model a team. A team of 5 people should have an efficiency of 5.0. Keep in mind that you cannot track the members of the team individually if you use this feature. They always act as a group. Another use is to model performance variations between your resources. Again, this is a fairly crude mechanism and should be used with care. A resource that isn't every good at some task might be pretty good at another. This can't be taken into account as the resource efficiency can only set globally for all tasks. One another and interesting use is to model the availability of passive resources like a meeting room or a workstation or something that needs to be free for a task to take place but does not contribute to a task as an active resource. All resources that do not contribute effort to the task, that is a passive resource, should have an efficiency of 0.0. Again a typical example would be a conference room. It's necessary for a meeting, but it does not contribute any work. :param email: holds the e-mail of the user, should be in [part1]@[part2] format :type email: str :param login: This is the login name of the user, it should be all lower case. Giving a string that has uppercase letters, it will be converted to lower case. It can not be an empty string or None and it can not contain any white space inside. :type login: str :param departments: It is the departments that the user is a part of. It should be a list of Department objects. One user can be listed in multiple departments. :type departments: list of :class:`.Department`\ s :param password: it is the password of the user, can contain any character. Stalker doesn't store the raw passwords of the users. To check a stored password with a raw password use :meth:`.check_password` and to set the password you can use the :attr:`.password` property directly. :type password: str :param groups: It is a list of :class:`.Group` instances that this user belongs to. 
:type groups: list of :class:`.Group` :param tasks: it is a list of Task objects which hold the tasks that this user has been assigned to :type tasks: list of :class:`.Task`\ s :param last_login: it is a datetime.datetime object that holds the last login date of the user (not implemented yet) :type last_login: datetime.datetime """ __auto_name__ = False __tablename__ = "Users" __mapper_args__ = {"polymorphic_identity": "User"} user_id = Column( "id", Integer, ForeignKey("Entities.id"), primary_key=True ) departments = association_proxy( 'department_role', 'department', creator=lambda d: create_department_user(d) ) department_role = relationship( 'DepartmentUser', back_populates='user', cascade='all, delete-orphan', primaryjoin='Users.c.id==Department_Users.c.uid', doc="""A list of :class:`.Department`\ s that this user is a part of""" ) companies = association_proxy( 'company_role', 'client', creator=lambda n: create_client_user(n) ) company_role = relationship( "ClientUser", back_populates="user", cascade='all, delete-orphan', primaryjoin="Users.c.id==Client_Users.c.uid", doc="""A list of :class:`.Client`\ s that this user is a part of.""" ) email = Column( String(256), unique=True, nullable=False, doc="email of the user, accepts string" ) password = Column( String(256), nullable=False, doc="""The password of the user. It is scrambled before it is stored. """ ) login = Column( String(256), nullable=False, unique=True, doc="""The login name of the user. Can not be empty. """ ) authentication_logs = relationship( "AuthenticationLog", primaryjoin="AuthenticationLogs.c.uid==Users.c.id", back_populates="user", cascade='all, delete-orphan', doc="""A list of :class:`.AuthenticationLog` instances which hold the login/logout info for this :class:`.User`. """ ) groups = relationship( 'Group', secondary='Group_Users', back_populates='users', doc="""Permission groups that this user is a member of. Accepts :class:`.Group` objects. """ ) projects = association_proxy( 'project_role', 'project', creator=lambda p: create_project_user(p) ) project_role = relationship( 'ProjectUser', back_populates='user', cascade='all, delete-orphan', primaryjoin='Users.c.id==Project_Users.c.user_id' ) tasks = relationship( "Task", secondary="Task_Resources", back_populates="resources", doc=""":class:`.Task`\ s assigned to this user. It is a list of :class:`.Task` instances. """ ) watching = relationship( 'Task', secondary='Task_Watchers', back_populates='watchers', doc=''':class:`.Task`\ s that this user is assigned to as a watcher. It is a list of :class:`.Task` instances. ''' ) responsible_of = relationship( 'Task', secondary='Task_Responsible', primaryjoin='Users.c.id==Task_Responsible.c.responsible_id', secondaryjoin='Task_Responsible.c.task_id==Tasks.c.id', back_populates='_responsible', uselist=True, doc="""A list of :class:`.Task` instances that this user is responsible for.""" ) time_logs = relationship( "TimeLog", primaryjoin="TimeLogs.c.resource_id==Users.c.id", back_populates="resource", cascade='all, delete-orphan', doc="""A list of :class:`.TimeLog` instances which hold the time logs created for this :class:`.User`.
""" ) vacations = relationship( 'Vacation', primaryjoin='Vacations.c.user_id==Users.c.id', back_populates='user', cascade='all, delete-orphan', doc="""A list of :class:`.Vacation` instances which holds the vacations created for this :class:`.User` """ ) efficiency = Column(Float, default=1.0) rate = Column(Float, default=0.0) def __init__( self, name=None, login=None, email=None, password=None, departments=None, companies=None, groups=None, efficiency=1.0, rate=0.0, **kwargs): kwargs['name'] = name super(User, self).__init__(**kwargs) self.login = login if departments is None: departments = [] # from stalker import DepartmentUser # for department in departments: # self.department_role.append( # DepartmentUser(user=self, department=department) # ) self.departments = departments if companies is None: companies = [] self.companies = companies self.email = email # to be able to mangle the password do it like this self.password = password if groups is None: groups = [] self.groups = groups self.tasks = [] self.last_login = None self.efficiency = efficiency self.rate = rate def __repr__(self): """return the representation of the current User """ return "<%s ('%s') (User)>" % (self.name, self.login) def __eq__(self, other): """the equality operator """ return super(User, self).__eq__(other) and \ isinstance(other, User) and \ self.email == other.email and \ self.login == other.login and \ self.name == other.name def __hash__(self): """the overridden __hash__ method """ return super(User, self).__hash__() @validates("login") def _validate_login(self, key, login): """validates the given login value """ if login is None: raise TypeError( '%s.login can not be None' % self.__class__.__name__ ) login = self._format_login(login) # raise a ValueError if the login is an empty string after formatting if login == '': raise ValueError( '%s.login can not be an empty string' % self.__class__.__name__ ) logger.debug("name out: %s" % login) return login @validates("email") def _validate_email(self, key, email_in): """validates the given email value """ # check if email_in is an instance of string from stalker import __string_types__ if not isinstance(email_in, __string_types__): raise TypeError( "%s.email should be an instance of str not %s" % (self.__class__.__name__, email_in.__class__.__name__) ) return self._validate_email_format(email_in) def _validate_email_format(self, email_in): """formats the email """ # split the mail from @ sign splits = email_in.split("@") len_splits = len(splits) # there should be one and only one @ sign if len_splits > 2: raise ValueError( "check the formatting of %s.email, there are more than one @ " "sign" % self.__class__.__name__ ) if len_splits < 2: raise ValueError( "check the formatting of %s.email, there is no @ sign" % self.__class__.__name__ ) if splits[0] == "": raise ValueError( "check the formatting of %s.email, the name part is missing" % self.__class__.__name__ ) if splits[1] == "": raise ValueError( "check the formatting %s.email, the domain part is missing" % self.__class__.__name__ ) return email_in @validates("last_login") def _validate_last_login(self, key, last_login_in): """validates the given last_login argument """ if not isinstance(last_login_in, datetime.datetime) \ and last_login_in is not None: raise TypeError( "%s.last_login should be an instance of datetime.datetime or " "None not %s" % (self.__class__.__name__, last_login_in.__class__.__name__) ) return last_login_in @classmethod def _format_login(cls, login): """formats the given login value """ # 
strip white spaces from start and end login = login.strip() # remove all the spaces login = login.replace(" ", "") # make it lowercase login = login.lower() # remove any illegal characters login = re.sub("[^\\(a-zA-Z0-9)]+", "", login) # remove any number at the beginning login = re.sub("^[0-9]+", "", login) return login @validates("password") def _validate_password(self, key, password_in): """validates the given password Note: This function was updated to support both Python 2.7 and 3.5. It will now explicitly convert the base64 bytes object into a string object. """ if password_in is None: raise TypeError( "%s.password cannot be None" % self.__class__.__name__ ) if password_in == "": raise ValueError( "%s.password can not be an empty string" % self.__class__.__name__ ) # mangle the password mangled_password_bytes = base64.b64encode(password_in.encode('utf-8')) if sys.version_info.major == 2: mangled_password_str = str(mangled_password_bytes) else: # Assuming Python >= 3.5 mangled_password_str = \ str(mangled_password_bytes.decode('utf-8')) return mangled_password_str def check_password(self, raw_password): """Checks the given raw_password. Checks the given raw_password with the current User object's mangled password. Handles the encryption process behind the scenes. Note: This function was updated to support both Python 2.7 and 3.5. It will now compare the string (str) versions of the given raw_password and the current User object's encrypted password. """ mangled_password_str = str(self.password) raw_password_bytes = base64.b64encode( bytes(raw_password.encode('utf-8'))) if sys.version_info.major == 2: raw_password_encrypted_str = str(raw_password_bytes) else: # Assuming Python >= 3.5 raw_password_encrypted_str = \ str(raw_password_bytes.decode('utf-8')) return mangled_password_str == raw_password_encrypted_str @validates("groups") def _validate_groups(self, key, group): """check the given group """ if not isinstance(group, Group): raise TypeError( "Any group in %s.groups should be an instance of " "stalker.models.auth.Group not %s" % (self.__class__.__name__, group.__class__.__name__) ) return group @validates("tasks") def _validate_tasks(self, key, task): """validates the given tasks attribute """ from stalker.models.task import Task if not isinstance(task, Task): raise TypeError( "Any element in %s.tasks should be an instance of " "stalker.models.task.Task not %s" % (self.__class__.__name__, task.__class__.__name__) ) return task @validates("watching") def _validate_watching(self, key, task): """validates the given watching attribute """ from stalker.models.task import Task if not isinstance(task, Task): raise TypeError( "Any element in %s.watching should be an instance of " "stalker.models.task.Task not %s" % (self.__class__.__name__, task.__class__.__name__) ) return task @validates('vacations') def _validate_vacations(self, key, vacation): """validates the given vacation value """ from stalker.models.studio import Vacation if not isinstance(vacation, Vacation): raise TypeError( "All of the elements in %s.vacations should be a " "stalker.models.studio.Vacation instance, not %s" % (self.__class__.__name__, vacation.__class__.__name__) ) return vacation @validates('efficiency') def _validate_efficiency(self, key, efficiency): """validates the given efficiency value """ if efficiency is None: efficiency = 1.0 if not isinstance(efficiency, (int, float)): raise TypeError( '%(class)s.efficiency should be a float number greater or ' 'equal to 0.0, not %(efficiency_class)s' % { 'class':
self.__class__.__name__, 'efficiency_class': efficiency.__class__.__name__ } ) if efficiency < 0: raise ValueError( '%(class)s.efficiency should be a float number greater or ' 'equal to 0.0, not %(efficiency)s' % { 'class': self.__class__.__name__, 'efficiency': efficiency } ) return efficiency @validates('rate') def _validate_rate(self, key, rate): """validates the given rate value """ if rate is None: rate = 0.0 if not isinstance(rate, (int, float)): raise TypeError( '%(class)s.rate should be a float number greater or ' 'equal to 0.0, not %(rate_class)s' % { 'class': self.__class__.__name__, 'rate_class': rate.__class__.__name__ } ) if rate < 0: raise ValueError( '%(class)s.rate should be a float number greater or ' 'equal to 0.0, not %(rate)s' % { 'class': self.__class__.__name__, 'rate': rate } ) return rate @property def tickets(self): """The list of :class:`.Ticket`\ s that this user has. Returns a list of :class:`.Ticket` instances which this user is the owner of. """ # do it with sqlalchemy from stalker import Ticket return Ticket.query \ .filter(Ticket.owner == self) \ .all() @property def open_tickets(self): """The list of open :class:`.Ticket`\ s that this user has. Returns a list of :class:`.Ticket` instances which have a status of `Open` and which this user is assigned to as the owner. """ from stalker import Ticket, Status return Ticket.query \ .join(Status, Ticket.status) \ .filter(Ticket.owner == self) \ .filter(Status.code != 'CLS') \ .all() @property def to_tjp(self): """outputs a TaskJuggler formatted string """ from jinja2 import Template temp = Template(defaults.tjp_user_template, trim_blocks=True) return temp.render({'user': self})
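# --- Usage sketch (not part of the original source) ----------------------
# Hedged illustration of the password handling above: the password is only
# base64 "mangled" before storage, so check_password() re-encodes the raw
# value and compares the resulting strings. The constructor values are
# hypothetical.
user = User(
    name='Example User',
    login='euser',
    email='euser@example.com',
    password='secret',        # stored as base64('secret'), not a salted hash
)
assert user.check_password('secret')
assert not user.check_password('wrong')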
class Hoa_don(Base): """Invoice model (hoa_don is Vietnamese for "invoice"); each invoice belongs to a customer (khach_hang). """ __tablename__ = 'hoa_don' ma_hoa_don = Column(Integer, nullable=False, primary_key=True) ngay_tao_hoa_don = Column(DateTime, nullable=False) ma_khach_hang = Column(Integer, ForeignKey('khach_hang.ma_khach_hang')) tong_tien = Column(Float, nullable=False) ma_hoa_don_sendo = Column(String(50)) nha_van_chuyen = Column(String(255)) ma_van_don = Column(String(100)) trang_thai = Column(Integer) ghi_chu = Column(Text) da_in_hd = Column(Integer, default=0) da_cap_nhat_kho = Column(Integer, default=0) khach_hang = relationship(Khach_hang, backref='hoa_don') def __repr__(self): return "<Ma_hoa_don = %d>" % self.ma_hoa_don def get_id(self): return self.ma_hoa_don
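# --- Usage sketch (not part of the original source) ----------------------
# A hedged example of creating an invoice (Hoa_don) linked to a customer
# (Khach_hang). `session` and the Khach_hang constructor arguments are
# hypothetical; the Khach_hang model is defined elsewhere in the source.
import datetime

customer = Khach_hang(ma_khach_hang=1)
invoice = Hoa_don(
    ngay_tao_hoa_don=datetime.datetime.now(),  # creation date, NOT NULL
    tong_tien=150000.0,                        # total amount, NOT NULL
    trang_thai=0,                              # status code
    khach_hang=customer,                       # uses the relationship above
)
session.add_all([customer, invoice])
session.commit()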
class Genre(BaseModel): __tablename__ = 'genre' name = Column(String(256)) description = Column(String(256))
class Loai_nguoi_dung(Base): """User type model (loai_nguoi_dung is Vietnamese for "user type"). """ __tablename__ = 'loai_nguoi_dung' ma_loai_nguoi_dung = Column(Integer, nullable=False, primary_key=True) ten_loai_nguoi_dung = Column(String(100), nullable=False) def __str__(self): return self.ten_loai_nguoi_dung
class SiteDB(Base): """ Database model for the site table """ __tablename__ = 'sites' __table_args__ = {'extend_existing': True} id = Column(Integer, primary_key=True) sitename = Column(String, unique=True) site_type = Column(String) cache_type = Column(String) site_path = Column(String) # Use default=func.now() to set the default created time # of a site to be the current time when a # Site record was created created_on = Column(DateTime, default=func.now()) is_enabled = Column(Boolean, unique=False, default=True, nullable=False) is_ssl = Column(Boolean, unique=False, default=False) storage_fs = Column(String) storage_db = Column(String) db_name = Column(String) db_user = Column(String) db_password = Column(String) db_host = Column(String) is_hhvm = Column(Boolean, unique=False, default=False) is_pagespeed = Column(Boolean, unique=False, default=False) def __init__(self, sitename=None, site_type=None, cache_type=None, site_path=None, site_enabled=None, is_ssl=None, storage_fs=None, storage_db=None, db_name=None, db_user=None, db_password=None, db_host='localhost', hhvm=None, pagespeed=None): self.sitename = sitename self.site_type = site_type self.cache_type = cache_type self.site_path = site_path self.is_enabled = site_enabled self.is_ssl = is_ssl self.storage_fs = storage_fs self.storage_db = storage_db self.db_name = db_name self.db_user = db_user self.db_password = db_password self.db_host = db_host self.is_hhvm = hhvm self.is_pagespeed = pagespeed # def __repr__(self): # return '<Site %r>' % (self.site_type) # # def getType(self): # return '%r>' % (self.site_type)
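# --- Usage sketch (not part of the original source) ----------------------
# Hedged example of adding a site record. `session` is a hypothetical,
# already-configured SQLAlchemy session. created_on is not passed because
# default=func.now() fills it on the database side at insert time.
site = SiteDB(
    sitename='example.com',
    site_type='wp',
    cache_type='basic',
    site_path='/var/www/example.com',
    site_enabled=True,           # mapped onto the is_enabled column
    is_ssl=False,
    storage_fs='ext4',
    storage_db='mysql',
    db_name='example_db',
    db_user='example_user',
    db_password='change-me',
)
session.add(site)
session.commit()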