class DbUser(Base):
    """SQLAlchemy model for an AiiDA user account (table ``db_dbuser``)."""
    __tablename__ = "db_dbuser"

    id = Column(Integer, primary_key=True)
    email = Column(String(254), unique=True, index=True)
    # NOTE(review): nothing here enforces hashing -- presumably the application
    # hashes before assignment; confirm this is not stored as clear text.
    password = Column(String(128))

    # Not in django model definition, but comes from inheritance?
    is_superuser = Column(Boolean, default=False, nullable=False)

    first_name = Column(String(254), nullable=True)
    last_name = Column(String(254), nullable=True)
    institution = Column(String(254), nullable=True)

    is_staff = Column(Boolean, default=False)
    is_active = Column(Boolean, default=False)

    last_login = Column(DateTime(timezone=True), default=timezone.now)
    date_joined = Column(DateTime(timezone=True), default=timezone.now)

    # XXX is it safe to set name and institution to an empty string ?
    def __init__(self, email, first_name="", last_name="", institution="", **kwargs):
        """Create a user; any extra column values may be passed as kwargs."""
        self.email = email
        self.first_name = first_name
        self.last_name = last_name
        self.institution = institution
        # Idiom: Python 3 zero-argument super() instead of super(DbUser, self).
        super().__init__(**kwargs)

    def get_full_name(self):
        """Return "First Last (email)", omitting whichever name parts are empty.

        Falls back to just the email when both name parts are missing --
        exactly the four cases the previous if/elif chain enumerated.
        """
        name = " ".join(part for part in (self.first_name, self.last_name) if part)
        if name:
            return "{} ({})".format(name, self.email)
        return self.email

    def get_short_name(self):
        """Return the email, which doubles as the short display name."""
        return self.email

    def __str__(self):
        return self.email

    def get_aiida_class(self):
        """Return the frontend ``aiida.orm`` ``User`` wrapping this row."""
        from aiida.orm.user import User
        return User(dbuser=self)
class DbApiKey(Base): __tablename__ = 'api_key' # fields id = Column(Integer, primary_key=True, nullable=False) secret_hash = Column(String(1024), nullable=False) creation_date = Column(DateTime(), nullable=False) last_modification_date = Column(DateTime()) secret_prefix = Column(String(8), nullable=False) account = relationship("DbAccount") # foreign keys account_id = Column(Integer, ForeignKey('account.id'), nullable=False)
class Token(Base):
    """Persisted authentication token (table ``token``)."""
    __tablename__ = 'token'

    id = Column(Integer(), primary_key=True)
    # Binary token value; ``size.password`` is a project-level length constant.
    token = Column(LargeBinary(size.password))
    activated = Column(Boolean)
    lastlogin = Column(DateTime())
    expiration = Column(DateTime())
    deleted = Column(Boolean())

    def __init__(self, **kw):
        # NOTE(review): requires the 'token', 'activated' and 'deleted' keys
        # and raises KeyError otherwise; lastlogin/expiration are deliberately
        # left unset here -- confirm they are populated elsewhere.
        self.token = kw['token']
        self.activated = kw['activated']
        self.deleted = kw['deleted']
class JobApplication(Base):
    """One applicant's application to a job posting (table ``job_applications``)."""
    __tablename__ = 'job_applications'

    job_application_id = Column(Integer(), primary_key=True)
    job_id = Column(Integer(), ForeignKey('jobs.job_id'), nullable=False)
    applicant_id = Column(String(100), nullable=False)
    job_application_status = Column(Enum(JobApplicationStatus), nullable=False)
    # Presumably paths/URLs to uploaded documents -- TODO confirm format.
    resumes = Column(String(255), nullable=False)
    cover_letters = Column(String(255), nullable=False)
    # Client-side timestamps; updated_on is refreshed on every UPDATE.
    created_on = Column(DateTime(), default=datetime.now)
    updated_on = Column(DateTime(), default=datetime.now, onupdate=datetime.now)
class DbComment(Base):
    """Class to store comments using SQLA backend."""
    __tablename__ = 'db_dbcomment'

    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
    uuid = Column(UUID(as_uuid=True), default=get_new_uuid, unique=True)
    dbnode_id = Column(
        Integer,
        ForeignKey('db_dbnode.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'))
    ctime = Column(DateTime(timezone=True), default=timezone.now)
    mtime = Column(DateTime(timezone=True), default=timezone.now, onupdate=timezone.now)
    user_id = Column(
        Integer,
        ForeignKey('db_dbuser.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'))
    content = Column(Text, nullable=True)

    dbnode = relationship('DbNode', backref='dbcomments')
    user = relationship('DbUser')

    def __str__(self):
        """Human-readable summary: node type, node pk and localized creation date."""
        return 'DbComment for [{} {}] on {}'.format(
            self.dbnode.get_simple_name(), self.dbnode.id,
            timezone.localtime(self.ctime).strftime('%Y-%m-%d'))

    def __init__(self, *args, **kwargs):
        """Adding mtime attribute if not present."""
        super().__init__(*args, **kwargs)
        # The behavior of an unstored Comment instance should be that all its attributes should be initialized in
        # accordance with the defaults specified on the columns, i.e. if a default is specified for the `uuid` column,
        # then an unstored `DbComment` instance should have a default value for the `uuid` attribute. The exception here
        # is the `mtime`, that we do not want to be set upon instantiation, but only upon storing. However, in
        # SqlAlchemy a default *has* to be defined if one wants to get that value upon storing. But since defining a
        # default on the column in combination with the hack in `aiida.backend.SqlAlchemy.models.__init__` to force all
        # defaults to be populated upon instantiation, we have to unset the `mtime` attribute here manually.
        #
        # The only time that we allow mtime not to be null is when we explicitly pass mtime as a kwarg. This covers
        # the case that a node is constructed based on some very predefined data like when we create nodes at the
        # AiiDA import functions.
        if 'mtime' not in kwargs:
            self.mtime = None
class PingConfiguration(Base):
    """One distinct (image, vendor, product, dualboot) configuration seen in pings."""
    __tablename__ = 'ping_configuration_v1'

    id = Column(Integer, primary_key=True)
    image = Column(Unicode, nullable=False)
    vendor = Column(Unicode, nullable=False)
    product = Column(Unicode, nullable=False)
    dualboot = Column(NullableBoolean, nullable=False, server_default='unknown')
    created_at = Column(DateTime(timezone=True), nullable=False, index=True)
    # Denormalized fields parsed out of `image` (see parse_endless_os_image).
    image_product = Column(Unicode, index=True)
    image_branch = Column(Unicode, index=True)
    image_arch = Column(Unicode, index=True)
    image_platform = Column(Unicode, index=True)
    image_timestamp = Column(DateTime(timezone=True), index=True)
    image_personality = Column(Unicode, index=True)

    # This constraint is also what the upsert below resolves conflicts against.
    __table_args__ = (
        UniqueConstraint(image, vendor, product, dualboot,
                         name='uq_ping_configuration_v1_image_vendor_product_dualboot'),
    )

    @classmethod
    def id_from_serialized(cls, serialized: bytes, dbsession: DbSession) -> int:
        """Insert (or find) the configuration in the UTF-8 JSON payload
        ``serialized`` and return its primary key. Commits the session."""
        record = json.loads(serialized.decode('utf-8'))
        # Keep only keys that map to mapped attributes of this model.
        columns = inspect(cls).attrs
        record = {k: v for (k, v) in record.items() if k in columns}
        record['vendor'] = normalize_vendor(record.get('vendor', 'unknown'))
        # Let's make the case of a missing "image" fail at the SQL level
        if 'image' in record:  # pragma: no branch
            record.update(**parse_endless_os_image(record['image']))
        # Postgresql's 'INSERT … ON CONFLICT …' is not available at the ORM layer, so let's
        # drop down to the SQL layer
        stmt = insert(PingConfiguration.__table__).values(**record)
        stmt = stmt.returning(PingConfiguration.__table__.c.id)
        # We have to use 'ON CONFLICT … DO UPDATE …' because 'ON CONFLICT DO NOTHING' does not
        # return anything, and we need to get the id back; in addition we have to actually
        # update something, anything, so let's arbitrarily update the image to its existing value
        stmt = stmt.on_conflict_do_update(
            constraint='uq_ping_configuration_v1_image_vendor_product_dualboot',
            set_={'image': record['image']}
        )
        result = dbsession.connection().execute(stmt)
        dbsession.commit()
        return result.first()[0]
class User(db.Model):
    """Flask-login User model for session management."""
    __tablename__ = 'user'

    id = Column(UUIDType(binary=True), default=gen_uuid, primary_key=True, index=True)
    __table_args__ = {'extend_existing': True}
    active = Column(Boolean())
    username = Column(String(255), unique=True)
    email = Column(String(255))
    last_login_at = Column(DateTime())
    last_seen = Column(DateTime())
    current_login_at = Column(DateTime())
    last_login_ip = Column(String(100))
    current_login_ip = Column(String(100))
    login_count = Column(Integer)
    confirmed_at = Column(DateTime())
    job_ids = Column(JSON, unique=True)
    task_ids = Column(JSON, unique=True)
    is_admin = Column(Boolean())
    password = Column(Unicode(100))

    def is_active(self):
        """Required method for flask-login User class"""
        return self.active

    def get_id(self):
        """Required method for flask-login User class"""
        return self.id

    def is_anonymous(self):
        """Required method for flask-login User class"""
        return False

    def is_authenticated(self):
        """Required method for flask-login User class"""
        return True

    def __repr__(self):
        """Required method for flask-login User class"""
        ret = {
            'user': self.username,
            'job_ids': self.job_ids,
            # BUG FIX: this key previously re-serialized self.job_ids, so the
            # user's task ids never appeared in the repr.
            'task_ids': self.task_ids,
            'email': self.email,
            'last_seen': self.last_seen,
            # SECURITY(review): including the password here leaks credentials
            # into any log that reprs a User -- consider dropping this key.
            'password': self.password
        }
        return json.dumps(ret)
class News(Base):
    """An ingested news article and its analysis state (table ``news``)."""
    __tablename__ = 'news'

    id = Column(Integer, primary_key=True)
    # Source URL; uniqueness prevents ingesting the same article twice.
    link = Column(Unicode, unique=True)
    received_date = Column(DateTime(), default=datetime.datetime.now)
    analysed_date = Column(DateTime())
    status = Column(Unicode)
    info = Column(Unicode)
    publish_date = Column(DateTime())
    text = Column(Unicode)
    link_image = Column(Unicode)
    keywords = Column(Unicode)
    videos = Column(Unicode)
    summary = Column(Unicode)
    title = Column(Unicode)
class Project(Base):
    """Project model (table ``Project``)."""
    __tablename__ = 'Project'

    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(225), unique=True)
    explan = Column(String(225), default=None)
    por_type = Column(Integer, nullable=False)
    # BUG FIX: these were DateTime(datetime.date), which passed the `date`
    # class as DateTime's `timezone` flag (DateTime takes no date-type
    # argument); a truthy class silently enabled timezone support. Use a
    # plain DateTime column as evidently intended.
    star_time = Column(DateTime(), default=None)
    end_time = Column(DateTime(), default=None)
    create_time = Column(DateTime(), default=None)

    def __repr__(self):
        return '<Project %r>' % self.name
class Reports(Base):
    """A single scanned port/service result (table ``result_ports``)."""
    __tablename__ = 'result_ports'

    id = Column('id', Integer, primary_key=True)
    taskid = Column('taskid', String(256))
    # BUG FIX: the default was the literal string 'now', which is not a valid
    # DateTime value; use the callable so rows created without __init__ get
    # the actual current timestamp (matching what __init__ does explicitly).
    inserted = Column('inserted', DateTime(), default=datetime.now)
    address = Column('address', String(256))
    port = Column('port', Integer)
    service = Column('service', String(256))
    state = Column('state', String(12))
    protocol = Column('protocol', String(12))
    product = Column('product', String(64))
    product_version = Column('product_version', String(64))
    product_extrainfo = Column('product_extrainfo', String(128))
    # banner = Column('banner', String(256))
    scripts_results = Column('scripts_results', Text)

    def __init__(self, service_struct):
        """Copy scan results off ``service_struct``, stringifying each field."""
        self.taskid = service_struct.taskid
        # Idiom: equivalent to datetime.fromtimestamp(time.time()).
        self.inserted = datetime.now()
        self.address = str(service_struct.address)
        self.port = service_struct.port
        self.service = str(service_struct.service)
        self.state = str(service_struct.state)
        self.protocol = str(service_struct.protocol)
        self.product = str(service_struct.product)
        self.product_version = str(service_struct.product_version)
        self.product_extrainfo = str(service_struct.product_extrainfo)
        self.scripts_results = str(service_struct.scripts_results)
class utc_now(FunctionElement):
    r"""Current timestamp in UTC timezone

    :Dialects:
        - mysql
        - postgresql
        - sqlite

    :return: :class:`FunctionElement`

    E.g.::

        from sqlalchemy import select
        from sqla_ext import func as func_ext

        query = select([
            func_ext.datetime.utc_now()
        ])

    The above statement will produce SQL resembling::

        SELECT timezone('utc', current_timestamp)
    """

    # BUG FIX: `name` was "to_array", evidently copied from another function
    # element; it must identify *this* element so default SQL rendering and
    # lookups refer to utc_now rather than an array function.
    name = "utc_now"

    # The expression yields a DateTime value.
    type = DateTime()

    def __init__(self) -> None:
        super().__init__()
def _sqlalchemy_type(self, col):
    """Map a pandas column/array dtype to a SQLAlchemy column type.

    Datetimes map to ``DateTime`` (timezone-aware when the column carries a
    ``tzinfo`` attribute), timedeltas are stored as integer nanoseconds with
    a warning, and anything unrecognized falls back to ``Text``.
    """
    from sqlalchemy.types import (BigInteger, Float, Text, Boolean, DateTime,
                                  Date, Time)

    if com.is_datetime64_dtype(col):
        # BUG FIX: this used a bare `except:` around an unused `tz = col.tzinfo`
        # assignment, swallowing every exception type. Only the presence of the
        # attribute matters, so test for it directly.
        if hasattr(col, 'tzinfo'):
            return DateTime(timezone=True)
        return DateTime

    if com.is_timedelta64_dtype(col):
        warnings.warn(
            "the 'timedelta' type is not supported, and will be "
            "written as integer values (ns frequency) to the "
            "database.", UserWarning)
        return BigInteger
    elif com.is_float_dtype(col):
        return Float
    elif com.is_integer_dtype(col):
        # TODO: Refine integer size.
        return BigInteger
    elif com.is_bool_dtype(col):
        return Boolean

    inferred = lib.infer_dtype(com._ensure_object(col))
    if inferred == 'date':
        return Date
    if inferred == 'time':
        return Time
    return Text
class Link(TableBase):
    u"""URL associated with a package."""

    __tablename__ = 'links'
    # A package cannot list the same URL twice.
    __table_args__ = (UniqueConstraint('collection_package_id', 'url'), {
        'sqlite_autoincrement': True
    })

    id = IDColumn()
    collection_package_id = Column(ForeignKey(CollectionPackage.id), nullable=False)
    url = Column(Unicode(), nullable=False)
    type = Column(Enum('homepage', 'bug', 'repo'), nullable=False)
    note = Column(Unicode(), nullable=True, doc='Type-specific note about the link')
    last_update = Column(
        DateTime(), nullable=True,
        doc="Datetime of the last known change of the Link's contents")
    collection_package = relationship('CollectionPackage', backref=backref('links'))

    def __repr__(self):
        """Debug representation: link type, owning package name and URL."""
        return '<{} {} for {}: {}>'.format(
            type(self).__qualname__, self.type, self.collection_package.name,
            self.url)
def date_in_timezone(date_, timezone):
    """Turn a naive date into a tz-aware timestamp at that date's midnight.

    Postgres ``date`` columns carry no timezone information, so ``date_``
    arrives naive. Casting it to a (naive) timestamp and applying
    ``timezone(tz, ts)`` yields the timezone-aware instant at which that
    calendar date begins in the given timezone, regardless of the session's
    own time zone setting.

    For example, with the session in 'America/New_York'::

        SET SESSION TIME ZONE 'America/New_York';
        CREATE TABLE tz_trouble (to_date date, timezone text);
        INSERT INTO tz_trouble(to_date, timezone) VALUES
        ('2021-03-10'::date, 'Australia/Sydney'),
        ('2021-03-20'::date, 'Europe/Berlin'),
        ('2021-04-15'::date, 'America/New_York');

        SELECT timezone(timezone, to_date::timestamp) FROM tz_trouble;

    gives::

              timezone
        ------------------------
        2021-03-09 08:00:00-05
        2021-03-19 19:00:00-04
        2021-04-15 00:00:00-04
    """
    naive_timestamp = cast(date_, DateTime(timezone=False))
    return func.timezone(timezone, naive_timestamp)
class DbLog(Base):
    """A log record persisted from the logging facility (table ``db_dblog``)."""
    __tablename__ = "db_dblog"

    id = Column(Integer, primary_key=True)
    time = Column(DateTime(timezone=True), default=timezone.now)
    loggername = Column(String(255), index=True)
    levelname = Column(String(255), index=True)
    objname = Column(String(255), index=True)
    objpk = Column(Integer, index=True, nullable=True)
    message = Column(Text(), nullable=True)
    # Physical column is named "metadata"; the attribute is underscored to
    # avoid clashing with SQLAlchemy's declarative `metadata` attribute.
    _metadata = Column('metadata', JSONB)

    def __init__(self, loggername="", levelname="", objname="",
                 objpk=None, message=None, metadata=None):
        """Create a log entry; ``loggername`` and ``levelname`` are mandatory.

        :raises ValidationError: if loggername or levelname is empty.
        """
        if not loggername or not levelname:
            raise ValidationError(
                "The loggername and levelname can't be empty")

        self.loggername = loggername
        self.levelname = levelname
        self.objname = objname
        self.objpk = objpk
        self.message = message
        self._metadata = metadata or {}

    def __str__(self):
        # BUG FIX: this was copied from DbComment.__str__ and referenced
        # self.dbnode / self.ctime, attributes DbLog does not have, so it
        # raised AttributeError whenever called. Describe the entry using
        # this model's own columns instead.
        return "DbLog [{}] {} for {} {}: {}".format(
            self.levelname, self.loggername, self.objname, self.objpk,
            self.message)

    @classmethod
    def add_from_logrecord(cls, record):
        """
        Add a new entry from a LogRecord (from the standard python logging
        facility). No exceptions are managed here.
        """
        objpk = record.__dict__.get('objpk', None)
        objname = record.__dict__.get('objname', None)

        # Filter: Do not store in DB if no objpk and objname is given
        if objpk is None or objname is None:
            return

        new_entry = cls(loggername=record.name,
                        levelname=record.levelname,
                        objname=objname,
                        objpk=objpk,
                        message=record.getMessage(),
                        metadata=record.__dict__)
        new_entry.save()
class Genre(Base): """Genre of the creative work""" #: (:class:`int`) The primary key integer. id = Column(Integer, primary_key=True) #: (:class:`str`) The name of the genre. name = Column(String, nullable=False, index=True) #: (:class:`collections.abc.MutableSet`) The set of #: :class:`WorkGenre`\ s that the genre has. work_genres = relationship('WorkGenre', cascade='delete, merge, save-update', collection_class=set) #: (:class:`collections.abc.MutableSet`) The set of #: :class:`Work`\ s that fall into the genre. works = relationship(lambda: Work, secondary='work_genres', collection_class=set) #: (:class:`datetime.datetime`) The date and time on which #: the record was created. created_at = Column(DateTime(timezone=True), nullable=False, default=now(), index=True) __tablename__ = 'genres' __repr_columns__ = id, name
class Credit(Base): """Relationship between the work, the person, and the team. Describe that the person participated in making the work. """ #: (:class:`int`) :class:`Work.id` of :attr:`work`. work_id = Column(Integer, ForeignKey('works.id'), primary_key=True) #: (:class:`Work`) The work which the :attr:`person` made. work = relationship(lambda: Work) #: (:class:`int`) :class:`cliche.people.Person.id` of :attr:`person`. person_id = Column(Integer, ForeignKey('people.id'), primary_key=True) #: (:class:`cliche.people.Person`) The person who made the :attr:`work`. person = relationship('Person') #: The person's role in making the work. role = Column(EnumType(Role, name='credits_role'), primary_key=True, default=Role.unknown) #: (:class:`int`) :class:`Team.id` of :attr:`team`. (optional) team_id = Column(Integer, ForeignKey('teams.id')) #: The team which the person belonged when work had been made. team = relationship('Team') #: (:class:`datetime.datetime`) The date and time on which #: the record was created. created_at = Column(DateTime(timezone=True), nullable=False, default=now()) __tablename__ = 'credits' __repr_columns__ = person_id, work_id, role, team_id
class FrostObservation(Base):
    """An observation row from the Frost API tied to a regobs registration
    (table ``frost_observations``)."""
    __tablename__ = 'frost_observations'

    # Composite primary key: (source_id, element, time, reg_id).
    source_id = Column(
        String(20),
        # TODO: Add FK constraint
        # ForeignKey('frost_sources.id'),
        primary_key=True,
        nullable=False
    )
    element = Column(String(50), primary_key=True, nullable=False)
    time = Column(DateTime(), primary_key=True, nullable=False)
    reg_id = Column(
        Integer(),
        # TODO: Add FK constraint
        # ForeignKey('regobs_data.reg_id'),
        primary_key=True,
        nullable=False
    )
    # Distance between source and registration -- units not visible here;
    # TODO confirm against the data producer.
    distance = Column(Float())
    # Values and metadata are stored as strings, as delivered by the API.
    value = Column(String(20))
    orig_value = Column(String(20))
    unit = Column(String(20))
    code_table = Column(String(50))
    level_type = Column(String(50))
    level_unit = Column(String(20))
    level_series_id = Column(String(20))
    performance_category = Column(String(5))
    exposure_category = Column(String(5))
    quality_code = Column(String(5))
    control_info = Column(String(50))
    data_version = Column(String(20))
class DbCalcState(Base):
    """One (calculation node, state) entry of a calculation's state history
    (table ``db_dbcalcstate``)."""
    __tablename__ = "db_dbcalcstate"

    id = Column(Integer, primary_key=True)
    dbnode_id = Column(
        Integer,
        ForeignKey('db_dbnode.id', ondelete="CASCADE", deferrable=True,
                   initially="DEFERRED"))
    dbnode = relationship(
        'DbNode', backref=backref('dbstates', passive_deletes=True),
    )
    # Note: this is suboptimal: calc_states is not sorted
    # therefore the order is not the expected one. If we
    # were to use the correct order here, we could directly sort
    # without specifying a custom order. This is probably faster,
    # but requires a schema migration at this point
    state = Column(ChoiceType((_, _) for _ in calc_states), index=True)

    time = Column(DateTime(timezone=True), default=timezone.now)

    # A node may be in each state at most once.
    __table_args__ = (UniqueConstraint('dbnode_id', 'state'), )
class Items(Base):
    """An inventory item row (table ``items``)."""
    __tablename__ = 'items'

    id = Column(Integer, primary_key=True)
    name = Column(String(256))
    quantity = Column(Integer)
    description = Column(String(256))
    # The insert timestamp is produced by the database server, not the app.
    date_added = Column(DateTime(), server_default=func.now())

    def __init__(self, name, quantity, description, date_added):
        """Populate all user-supplied fields of a new item."""
        self.name = name
        self.quantity = quantity
        self.description = description
        self.date_added = date_added

    def __repr__(self):
        """Space-separated dump of all fields, id first."""
        return f'{self.id} {self.name} {self.quantity} {self.description} {self.date_added}'
class Content(Base):
    """A retrieved content blob belonging to an Entry (table ``content``)."""
    __tablename__ = 'content'

    id = Column(Integer, primary_key=True)
    entry_id = Column(Integer, ForeignKey(Entry.id), nullable=False)
    type = Column(String(MAX_CONTENT_TYPE_LEN), nullable=False)
    # 40-character digest -- presumably hex SHA-1; confirm with the producer.
    hash = Column(String(40), nullable=False)
    retrieved_at = Column(DateTime(timezone=True), nullable=False,
                          default=datetime.now)
    expired = Column(Boolean, nullable=False, default=False)
    summary = Column(Boolean, nullable=False, default=False)
    # Deferred: the (possibly large) body is only loaded on attribute access.
    data = deferred(Column(Text, nullable=False, default=""))
    entry = relationship(Entry, backref='content')

    def to_json(self):
        """Return a JSON-safe dict of metadata; the body itself is reduced to
        its length (note: len(self.data) triggers the deferred load)."""
        return {
            'id': self.id,
            'type': self.type,
            'hash': self.hash,
            'expired': self.expired,
            'summary': self.summary,
            'length': len(self.data),
        }
def upgrade():
    """Create the ``user`` table with a unique email per account."""
    user_columns = [
        Column('id', UUID(), nullable=False),
        Column('created_at', DateTime(timezone=True), nullable=False),
        Column('email', Unicode(), nullable=False),
        Column('password', PasswordType(), nullable=False),
    ]
    op.create_table(
        'user',
        *user_columns,
        PrimaryKeyConstraint('id'),
        UniqueConstraint('email'),
    )
def test_validate_column_types_devmode(self):
    """Strict value/column type checking is enforced in developer mode."""
    # when in devmode, strict type checking is enforced
    with EnvironmentVarGuard() as env:
        env["SQLTASK_DEVELOPER_MODE"] = "1"
        validate = BaseEngineSpec.validate_column_value
        str10_column = Column("str10_col", String(10), nullable=False)
        str_column = Column("str_col", String, nullable=False)
        int_column = Column("int_col", Integer())
        float_column = Column("float_col", Float(), nullable=False)
        # BUG FIX: the date and datetime columns were both named "float_col"
        # (copy-paste from the line above); name them after their real types.
        date_column = Column("date_col", Date(), nullable=False)
        datetime_column = Column("datetime_col", DateTime(), nullable=False)

        # Valid value/column combinations: validate returns None silently.
        self.assertIsNone(validate(date(2019, 12, 31), date_column))
        self.assertIsNone(validate(date(2019, 12, 31), datetime_column))
        self.assertIsNone(validate("abc", str10_column))
        self.assertIsNone(validate("1234567890", str10_column))
        self.assertIsNone(validate("123456789012345", str_column))
        self.assertIsNone(validate(Decimal("1234.567"), float_column))
        self.assertIsNone(validate(1.1, float_column))
        self.assertIsNone(validate(1, float_column))
        self.assertIsNone(validate(1, int_column))
        self.assertIsNone(validate(None, int_column))

        # Invalid combinations must raise ValueError.
        self.assertRaises(ValueError, validate, datetime.utcnow(), date_column)
        self.assertRaises(ValueError, validate, None, str_column)
        self.assertRaises(ValueError, validate, "12345678901", str10_column)
        self.assertRaises(ValueError, validate, 12345, str_column)
        self.assertRaises(ValueError, validate, 12345.5, int_column)
class PeriodicTask(GenericTable):
    """A cron-style scheduled task definition (table ``periodictasks``)."""
    __tablename__ = "periodictasks"

    id = Column(Integer, primary_key=True)
    name = Column(String(100), nullable=False)
    task = Column(String(100), nullable=False)
    enabled = Column(Boolean, nullable=False, default=True)
    # Crontab-style schedule fields; "*" means "every".
    crontab_minute = Column(String(20), nullable=False, default="*")
    crontab_hour = Column(String(20), nullable=False, default="*")
    crontab_day_of_week = Column(String(20), nullable=False, default="*")
    crontab_day_of_month = Column(String(20), nullable=False, default="*")
    crontab_month_of_year = Column(String(20), nullable=False, default="*")
    last_run_at = Column(DateTime(timezone=True), nullable=True)
    # BUG FIX: literal [] / {} defaults are single shared objects, so every
    # row created without explicit values would reference the *same*
    # list/dict; pass the constructors so each insert gets a fresh container.
    args = Column(JSONType, nullable=False, default=list)
    kwargs = Column(JSONType, nullable=False, default=dict)

    @property
    def is_run_action(self):
        """True when this task is pyfaf's generic "run action" wrapper."""
        return self.task == "pyfaf.celery_tasks.run_action"

    @property
    def args_parsed(self):
        # NOTE(review): no `_foo` attribute is defined on this model, so this
        # property raises AttributeError -- it looks like an unfinished
        # placeholder. Left unchanged pending the intended parsing logic.
        return self._foo

    @property
    def nice_name(self):
        """Display name for UIs; currently just the configured name."""
        return self.name

    @property
    def nice_task(self):
        """Display label: "Action <name>" for run-action tasks, else the task path."""
        if self.is_run_action and self.args:
            return "Action {0}".format(self.args[0])
        return self.task
class UtcDateTime(TypeDecorator):
    """DateTime column type that only ever deals in timezone-aware values.

    Behaves like :class:`~sqlalchemy.types.DateTime` with ``timezone=True``,
    except that it:

    - rejects naive :class:`~datetime.datetime` values with
      :exc:`ValueError` instead of silently accepting them;
    - normalizes every bound value's :attr:`~datetime.datetime.tzinfo`
      to UTC;
    - always returns timezone-aware values, even on backends such as
      SQLite or MySQL that would otherwise yield naive ones.
    """

    impl = DateTime(timezone=True)

    def process_bind_param(self, value, dialect):
        """Validate an outgoing value and convert it to UTC."""
        if value is None:
            return None
        if not isinstance(value, datetime.datetime):
            raise TypeError('expected datetime.datetime, not ' +
                            repr(value))
        if value.tzinfo is None:
            raise ValueError('naive datetime is disallowed')
        return value.astimezone(utc)

    def process_result_value(self, value, dialect):
        """Attach UTC tzinfo to naive values coming back from the driver."""
        if value is None or value.tzinfo is not None:
            return value
        return value.replace(tzinfo=utc)
class ClickthroughsByCodename(Base):
    """Clickthrough counts for ads."""

    __tablename__ = "traffic_click"

    # Python attribute names differ from the physical column names
    # ("fullname", "unique", "total") for readability.
    codename = Column("fullname", String(), nullable=False, primary_key=True)
    date = Column(DateTime(), nullable=False, primary_key=True)
    # Aggregation interval the row belongs to; part of the composite key.
    interval = Column(String(), nullable=False, primary_key=True)
    unique_count = Column("unique", Integer())
    pageview_count = Column("total", Integer())

    @classmethod
    @memoize_traffic(time=3600)
    def history(cls, interval, codename):
        """Time series of (unique, pageview) counts for one codename, with
        missing time points filled in by ``fill_gaps``. Cached for an hour."""
        time_points, q = make_history_query(cls, interval)
        q = q.filter(cls.codename == codename)
        return fill_gaps(time_points, q, "unique_count", "pageview_count")

    @classmethod
    @memoize_traffic(time=3600)
    def promotion_history(cls, codename, start, stop):
        """Delegate to the module-level ``promotion_history`` helper."""
        return promotion_history(cls, codename, start, stop)

    @classmethod
    @memoize_traffic(time=3600)
    def historical_totals(cls, interval):
        """Aggregate totals over this table for the given interval."""
        return totals(cls, interval)

    @classmethod
    @memoize_traffic(time=3600)
    def total_by_codename(cls, codenames):
        """Delegate to the module-level ``total_by_codename`` helper."""
        return total_by_codename(cls, codenames)
class Items(Base):
    """Items table"""
    __tablename__ = 'items'

    id = Column(Integer, primary_key=True)
    name = Column(String(256))
    quantity = Column(Integer)
    description = Column(String(256))
    date_added = Column(DateTime())

    def __init__(self, name=None, quantity=0, description=None,
                 date_added=None):
        # NOTE(review): __repr__/__str__ format quantity with %d, so it must
        # remain an int -- the default of 0 guarantees that when omitted.
        self.name = name
        self.quantity = quantity
        self.description = description
        self.date_added = date_added

    def __repr__(self):
        """Compact angle-bracket debug form."""
        return '<Item %s (%d), %s, %s>' % (self.name, self.quantity,
                                           self.description, self.date_added)

    def __str__(self):
        """Labelled, human-readable form of all fields."""
        return 'Name: %s | Quantity: %d | Description: %s | Date_added: %s' % (
            self.name, self.quantity, self.description, self.date_added)
def generate_repmgr_metadata(schema_name=None, bind=None):
    """Build a MetaData describing repmgr's monitoring tables.

    :param schema_name: optional schema to qualify the tables with
    :param bind: optional engine/connection associated with the metadata
    :return: MetaData containing ``repl_status`` and ``repl_nodes``
    """
    metadata = MetaData(schema=schema_name, bind=bind)

    # Replication monitoring snapshot, one row per primary/standby pair.
    Table(
        'repl_status', metadata,
        Column('primary_node', Integer, nullable=False),
        Column('standby_node', Integer, nullable=False),
        Column('last_wal_primary_location', Text(255), nullable=False),
        Column('last_wal_standby_location', Text(255), nullable=False),
        Column('replication_lag', Text(255), nullable=False),
        Column('apply_lag', Text(255), nullable=False),
        Column('communication_time_lag', Interval, nullable=False,
               default=datetime.timedelta(0)),
        # Sentinel "never monitored" timestamp: midnight on 2000-01-01.
        Column('last_monitor_time', DateTime(True), nullable=False,
               default=datetime.datetime(2000, 1, 1)),
    )

    # Registered cluster nodes.
    Table(
        'repl_nodes', metadata,
        Column('id', Integer, primary_key=True),
        Column('cluster', Text(255), nullable=False),
        Column('name', Text(255), nullable=False),
        Column('conninfo', Text(255), nullable=False),
    )

    return metadata
class DbSetting(Base):
    """Key/value settings store with one typed value column per datatype
    (table ``db_dbsetting``)."""
    __tablename__ = "db_dbsetting"

    id = Column(Integer, primary_key=True)
    key = Column(String(255), index=True, nullable=False)
    # Discriminator naming which of the value columns below is meaningful.
    datatype = Column(String(10), index=True, nullable=False)
    tval = Column(String(255), default='', nullable=True)
    fval = Column(Float, default=None, nullable=True)
    ival = Column(Integer, default=None, nullable=True)
    bval = Column(Boolean, default=None, nullable=True)
    dval = Column(DateTime(timezone=True), default=None, nullable=True)
    # BUG FIX: a literal {} default is a single dict shared by every row;
    # pass the constructor so each insert gets a fresh empty dict.
    val = Column(JSONB, default=dict)

    description = Column(String(255), default='', nullable=True)
    time = Column(DateTime(timezone=True), default=timezone.now)
class AuthUserLog(Base):
    """
    Per-user audit log of authentication events (table ``auth_user_log``).

    event:
        L - Login
        R - Register
        P - Password
        F - Forgot
    """
    __tablename__ = 'auth_user_log'
    __table_args__ = {'sqlite_autoincrement': True}

    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey(Users.id), index=True)
    # Deleting a user also deletes their log rows (delete-orphan cascade).
    user = relationship(Users, backref=backref('user_log',
                                               cascade='all, delete-orphan'))
    time = Column(DateTime(), default=func.now())
    # 39 characters fits a fully expanded IPv6 address.
    ip_addr = Column(Unicode(39), nullable=False)
    event = Column(Enum(u'L', u'R', u'P', u'F', name=u'event'), default=u'L')

    @property
    def timestamp(self):
        """Human-friendly rendering of ``time``: relative ("... ago") for
        today, "yesterday at HH:MMam/pm" for yesterday, else a short date."""
        today = datetime.date.today()
        yesterday = today - datetime.timedelta(days=1)
        if self.time.date() == today:
            return time_ago_in_words(self.time, granularity="minute") + " ago"
        elif self.time.date() == yesterday:
            return self.time.strftime("yesterday at %I:%M%p").lower()
        else:
            return self.time.strftime("%a %b %d, %Y")