comments = Table(
    "comments",
    db.metadata,
    Column("id", Integer(), primary_key=True, nullable=False),
    Column(
        "rating",
        Integer(),
        ForeignKey("ratings.id", ondelete="CASCADE"),
    ),
    Column(
        "user_name",
        CIText(),
        ForeignKey("accounts_user.username", ondelete="CASCADE"),
    ),
    Column("date", DateTime(timezone=False)),
    Column("message", Text()),
    Column(
        "in_reply_to",
        Integer(),
        ForeignKey("comments.id", ondelete="CASCADE"),
    ),
)

comments_journal = Table(
    "comments_journal",
    db.metadata,
    Column("name", Text()),
    Column("version", Text()),
    Column("id", Integer()),
    Column(
class User(db.Model, UserMixin):
    """
    A user who can log in and work with Dexter.
    """
    __tablename__ = "users"

    id = Column(Integer, primary_key=True)
    email = Column(String(50), index=True, nullable=False, unique=True)
    first_name = Column(String(50), nullable=False)
    last_name = Column(String(50), nullable=False)
    admin = Column(Boolean, default=False)
    disabled = Column(Boolean, default=False)
    password = Column(String(100), default='')
    default_analysis_nature_id = Column(Integer, ForeignKey('analysis_natures.id'),
                                        default=1, nullable=False)
    country_id = Column(Integer, ForeignKey('countries.id'), nullable=False)
    created_at = Column(DateTime(timezone=True), index=True, unique=False,
                        nullable=False, server_default=func.now())
    updated_at = Column(DateTime(timezone=True), server_default=func.now(),
                        onupdate=func.current_timestamp())

    # associations
    default_analysis_nature = relationship("AnalysisNature")
    country = relationship("Country")
    roles = db.relationship('Role', secondary='roles_users',
                            backref=db.backref('users', lazy='dynamic'))

    def short_name(self):
        s = ""
        if self.first_name:
            s += self.first_name

        if self.last_name:
            if s:
                s += " " + self.last_name[0] + "."
            else:
                s = self.last_name

        if not s:
            s = self.email

        return s

    def full_name(self):
        s = '%s %s' % (self.first_name or '', self.last_name or '')
        s = s.strip()

        if not s:
            s = self.email

        return s

    def __repr__(self):
        return "<User email=%s>" % (self.email,)

    # Flask-Security requires an active attribute
    @property
    def active(self):
        return not self.disabled

    @active.setter
    def active(self, value):
        self.disabled = not value

    @classmethod
    def create_defaults(cls):
        from . import Country
        from flask_security.utils import encrypt_password

        admin_user = User()
        admin_user.first_name = "Admin"
        admin_user.last_name = "Admin"
        admin_user.admin = True
        admin_user.email = "*****@*****.**"
        admin_user.country = Country.query.filter(
            Country.name == 'South Africa').one()
        admin_user.password = encrypt_password('admin')

        return [admin_user]
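
# A hedged sanity check of the name helpers above; it exercises only the
# pure-Python logic, and the sample values are invented for illustration.
u = User(first_name="Jane", last_name="Doe", email="jane@example.com")
assert u.short_name() == "Jane D."
assert u.full_name() == "Jane Doe"

# With no names set, both helpers fall back to the email address.
u2 = User(email="jane@example.com")
assert u2.short_name() == "jane@example.com"
assert u2.full_name() == "jane@example.com"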
class CommentOptionsOperation(Base):
    """
    Steem Blockchain Example
    ========================
    {
      "allow_curation_rewards": true,
      "allow_votes": true,
      "permlink": "testing6",
      "percent_steem_dollars": 5000,
      "max_accepted_payout": "1000.000 SBD",
      "author": "testing001",
      "extensions": []
    }
    """

    __tablename__ = 'sbds_op_comment_option'
    __table_args__ = (
        PrimaryKeyConstraint('block_num', 'transaction_num', 'operation_num'),
        ForeignKeyConstraint(['author'], ['sbds_meta_accounts.name'],
                             deferrable=True, initially='DEFERRED',
                             use_alter=True),
    )

    block_num = Column(Integer, nullable=False, index=True)
    transaction_num = Column(SmallInteger, nullable=False, index=True)
    operation_num = Column(SmallInteger, nullable=False, index=True)
    trx_id = Column(String(40), nullable=False)
    timestamp = Column(DateTime(timezone=False))
    author = Column(String(16))  # steem_type:account_name_type
    permlink = Column(Unicode(256), index=True)  # name:permlink
    max_accepted_payout = Column(Numeric(20, 6), nullable=False)  # steem_type:asset
    max_accepted_payout_symbol = Column(String(5))  # steem_type:asset
    percent_steem_dollars = Column(Integer)  # steem_type:uint16_t
    allow_votes = Column(Boolean)  # steem_type:bool
    allow_curation_rewards = Column(Boolean)  # steem_type:bool
    extensions = Column(JSONB)  # steem_type:steemit::protocol::comment_options_extensions_type
    operation_type = Column(operation_types_enum, nullable=False, index=True,
                            default='comment_options')

    _fields = dict(
        max_accepted_payout=lambda x: amount_field(
            x.get('max_accepted_payout'), num_func=float),  # steem_type:asset
        max_accepted_payout_symbol=lambda x: amount_symbol_field(
            x.get('max_accepted_payout')),  # steem_type:asset
        extensions=lambda x: json_string_field(
            x.get('extensions')),  # steem_type:steemit::protocol::comment_options_extensions_type
    )

    _account_fields = frozenset([
        'author',
    ])

    def dump(self):
        return dissoc(self.__dict__, '_sa_instance_state')

    def to_dict(self, decode_json=True):
        data_dict = self.dump()
        if isinstance(data_dict.get('json_metadata'), str) and decode_json:
            data_dict['json_metadata'] = sbds.sbds_json.loads(
                data_dict['json_metadata'])
        return data_dict

    def to_json(self):
        data_dict = self.to_dict()
        return sbds.sbds_json.dumps(data_dict)

    def __repr__(self):
        return "<%s (block_num:%s transaction_num: %s operation_num: %s keys: %s)>" % (
            self.__class__.__name__, self.block_num, self.transaction_num,
            self.operation_num, tuple(self.dump().keys()))

    def __str__(self):
        return str(self.dump())
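
# The _fields lambdas above split one Steem asset string into the numeric
# column and its symbol column. A hedged sketch of the expected behavior:
# amount_field and amount_symbol_field are this project's own helpers, and
# the results shown are inferred from the docstring example, not verified.
op = {"max_accepted_payout": "1000.000 SBD"}
amount = amount_field(op.get('max_accepted_payout'), num_func=float)  # expected: 1000.0
symbol = amount_symbol_field(op.get('max_accepted_payout'))           # expected: 'SBD'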
def build_tables():
    """Builds the database postgres schema."""
    user_subscribed_tags = \
        Table('user_to_tags', core.Base.metadata,
              Column('user_id', BigInteger,
                     ForeignKey('users.id', onupdate="CASCADE"),
                     primary_key=True, nullable=False),
              Column('tag_id', BigInteger,
                     ForeignKey('tags.id', onupdate="CASCADE"),
                     primary_key=True, nullable=False),
              Column('created', DateTime(timezone=False),
                     default=datetime.utcnow, nullable=False),
              UniqueConstraint('user_id', 'tag_id', name='user_tag_uix')
              )

    question_tags = \
        Table('question_to_tags', core.Base.metadata,
              Column('question_id', BigInteger,
                     ForeignKey('questions.id', onupdate="CASCADE"),
                     primary_key=True, nullable=False),
              Column('tag_id', BigInteger,
                     ForeignKey('tags.id', onupdate="CASCADE"),
                     primary_key=True, nullable=False),
              Column('created', DateTime(timezone=False),
                     default=datetime.utcnow, nullable=False),
              UniqueConstraint('question_id', 'tag_id', name='question_tag_uix')
              )

    # Create everything registered on the shared metadata; calling
    # create_all() on a fresh MetaData() instance would know about no
    # tables and create nothing.
    core.Base.metadata.create_all(engine)
class _SSOUser(Base):
    __tablename__ = 'zato_sso_user'
    __table_args__ = (
        UniqueConstraint('username', name='zato_u_usrn_uq'),
        UniqueConstraint('user_id', name='zato_user_id_uq'),
        Index('zato_u_email_idx', 'email', unique=False, mysql_length={'email': 767}),
        Index('zato_u_appr_stat_idx', 'approval_status', unique=False),
        Index('zato_u_dspn_idx', 'display_name_upper', unique=False),
        Index('zato_u_alln_idx', 'first_name_upper', 'middle_name_upper', 'last_name_upper', unique=False),
        Index('zato_u_lastn_idx', 'last_name_upper', unique=False),
        Index('zato_u_sigst_idx', 'sign_up_status', unique=False),
        Index('zato_u_sigctok_idx', 'sign_up_confirm_token', unique=True),
        {})

    # Not exposed publicly, used only for SQL joins
    id = Column(Integer, Sequence('zato_sso_user_id_seq'), primary_key=True)

    # Publicly visible
    user_id = Column(String(191), nullable=False)

    is_active = Column(Boolean(), nullable=False)  # Currently unused and always set to True
    is_internal = Column(Boolean(), nullable=False, default=False)
    is_super_user = Column(Boolean(), nullable=False, default=False)
    is_locked = Column(Boolean(), nullable=False, default=False)
    locked_time = Column(DateTime(), nullable=True)

    # Creation metadata, e.g. what this user's remote IP was
    creation_ctx = Column(Text(), nullable=False)

    # Note that this is not an FK - this is on purpose to keep this
    # information around even if the parent row is deleted.
    locked_by = Column(String(191), nullable=True)

    approval_status = Column(String(191), nullable=False)
    approval_status_mod_time = Column(DateTime(), nullable=False)  # When the user was approved or rejected
    approval_status_mod_by = Column(String(191), nullable=False)  # Same comment as in locked_by

    # Basic information, always required
    username = Column(String(191), nullable=False)
    password = Column(Text(), nullable=False)
    password_is_set = Column(Boolean(), nullable=False)
    password_must_change = Column(Boolean(), nullable=False)
    password_last_set = Column(DateTime(), nullable=False)
    password_expiry = Column(DateTime(), nullable=False)

    # Sign-up information, possibly used in API workflows
    sign_up_status = Column(String(191), nullable=False)
    sign_up_time = Column(DateTime(), nullable=False)
    sign_up_confirm_time = Column(DateTime(), nullable=True)
    sign_up_confirm_token = Column(String(191), nullable=False)

    # Won't always be needed
    email = Column(Text(), nullable=True)

    # Various cultures don't have a notion of first or last name and
    # display_name is the one that can be used in that case.
    display_name = Column(String(191), nullable=True)
    first_name = Column(String(191), nullable=True)
    middle_name = Column(String(191), nullable=True)
    last_name = Column(String(191), nullable=True)

    # Same as above but upper-cased for look-up / indexing purposes
    display_name_upper = Column(String(191), nullable=True)
    first_name_upper = Column(String(191), nullable=True)
    middle_name_upper = Column(String(191), nullable=True)
    last_name_upper = Column(String(191), nullable=True)

    # Rate limiting
    is_rate_limit_active = Column(Boolean(), nullable=True)
    rate_limit_type = Column(String(40), nullable=True)
    rate_limit_def = Column(Text(), nullable=True)
    rate_limit_check_parent_def = Column(Boolean(), nullable=True)

    # TOTP
    is_totp_enabled = Column(Boolean(), nullable=False, server_default=sa_false())
    totp_key = Column(Text(), nullable=True)
    totp_label = Column(Text(), nullable=True)

    # JSON data is here
    opaque1 = Column(_JSON(), nullable=True)
class BaseJob(Base, LoggingMixin):
    """
    Abstract class to be derived for jobs. Jobs are processing items with
    state and duration that aren't task instances. For instance a
    BackfillJob is a collection of task instance runs, but should have its
    own state, start and end time.
    """

    __tablename__ = "job"

    id = Column(Integer, primary_key=True)
    dag_id = Column(String(ID_LEN),)
    state = Column(String(20))
    job_type = Column(String(30))
    start_date = Column(DateTime())
    end_date = Column(DateTime())
    latest_heartbeat = Column(DateTime())
    executor_class = Column(String(500))
    hostname = Column(String(500))
    unixname = Column(String(1000))

    __mapper_args__ = {
        'polymorphic_on': job_type,
        'polymorphic_identity': 'BaseJob'
    }

    __table_args__ = (
        Index('job_type_heart', job_type, latest_heartbeat),
    )

    def __init__(self,
                 executor=executors.DEFAULT_EXECUTOR,
                 heartrate=configuration.getfloat('scheduler', 'JOB_HEARTBEAT_SEC'),
                 *args, **kwargs):
        self.hostname = socket.gethostname()
        self.executor = executor
        self.executor_class = executor.__class__.__name__
        self.start_date = datetime.now()
        self.latest_heartbeat = datetime.now()
        self.heartrate = heartrate
        self.unixname = getpass.getuser()
        super(BaseJob, self).__init__(*args, **kwargs)

    def is_alive(self):
        return (
            (datetime.now() - self.latest_heartbeat).seconds <
            (configuration.getint('scheduler', 'JOB_HEARTBEAT_SEC') * 2.1)
        )

    def kill(self):
        session = settings.Session()
        job = session.query(BaseJob).filter(BaseJob.id == self.id).first()
        job.end_date = datetime.now()
        try:
            self.on_kill()
        except Exception:
            self.logger.error('on_kill() method failed')
        session.merge(job)
        session.commit()
        session.close()
        raise AirflowException("Job shut down externally.")

    def on_kill(self):
        '''
        Will be called when an external kill command is received
        '''
        pass

    def heartbeat_callback(self):
        pass

    def heartbeat(self):
        '''
        Heartbeats update the job's entry in the database with a timestamp
        for the latest_heartbeat and allow the job to be killed externally.
        This allows the system to monitor what is actually active. For
        instance, an old heartbeat for SchedulerJob would mean something is
        wrong.

        This also allows any job to be killed externally, regardless of who
        is running it or on which machine it is running.

        Note that if your heartbeat is set to 60 seconds and you call this
        method after 10 seconds of processing since the last heartbeat, it
        will sleep 50 seconds to complete the 60 seconds and keep a steady
        heart rate. If you go over 60 seconds before calling it, it won't
        sleep at all.
        '''
        session = settings.Session()
        job = session.query(BaseJob).filter(BaseJob.id == self.id).first()

        if job.state == State.SHUTDOWN:
            self.kill()

        if job.latest_heartbeat:
            sleep_for = self.heartrate - (
                datetime.now() - job.latest_heartbeat).total_seconds()
            if sleep_for > 0:
                sleep(sleep_for)

        job.latest_heartbeat = datetime.now()

        session.merge(job)
        session.commit()
        session.close()

        self.heartbeat_callback()
        self.logger.debug('[heart] Boom.')

    def run(self):
        if statsd:
            statsd.incr(self.__class__.__name__.lower() + '_start', 1, 1)
        # Adding an entry in the DB
        session = settings.Session()
        self.state = State.RUNNING
        session.add(self)
        session.commit()
        id_ = self.id
        make_transient(self)
        self.id = id_

        # Run
        self._execute()

        # Marking the success in the DB
        self.end_date = datetime.now()
        self.state = State.SUCCESS
        session.merge(self)
        session.commit()
        session.close()

        if statsd:
            statsd.incr(self.__class__.__name__.lower() + '_end', 1, 1)

    def _execute(self):
        raise NotImplementedError("This method needs to be overridden")
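
# BaseJob is abstract: run() owns the state bookkeeping and delegates the
# actual work to _execute(), while job_type drives the polymorphic mapping.
# A hedged sketch of the minimal subclass shape this implies (NoOpJob is
# hypothetical, not an Airflow class):
class NoOpJob(BaseJob):

    __mapper_args__ = {'polymorphic_identity': 'NoOpJob'}

    def _execute(self):
        # Only the work goes here; run() marks RUNNING/SUCCESS around it,
        # and heartbeat()/kill() are inherited unchanged.
        self.logger.info('nothing to do')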
from sqlalchemy import DateTime, Enum, Integer, String
from sqlalchemy.schema import Table, Column, MetaData

from designate.openstack.common import timeutils
from designate.openstack.common.uuidutils import generate_uuid
from designate.sqlalchemy.types import UUID

meta = MetaData()

TSIG_ALGORITHMS = [
    'hmac-md5', 'hmac-sha1', 'hmac-sha224', 'hmac-sha256',
    'hmac-sha384', 'hmac-sha512'
]

tsigkeys = Table(
    'tsigkeys', meta,
    Column('id', UUID(), default=generate_uuid, primary_key=True),
    Column('created_at', DateTime(), default=timeutils.utcnow),
    Column('updated_at', DateTime(), onupdate=timeutils.utcnow),
    Column('version', Integer(), default=1, nullable=False),
    Column('name', String(255), nullable=False, unique=True),
    Column('algorithm', Enum(name='tsig_algorithms', *TSIG_ALGORITHMS),
           nullable=False),
    Column('secret', String(255), nullable=False))


def upgrade(migrate_engine):
    meta.bind = migrate_engine

    tsigkeys.create()


def downgrade(migrate_engine):
    meta.bind = migrate_engine

    # Mirror of upgrade(): drop the table again.
    tsigkeys.drop()
class States(Base):  # type: ignore
    """State change history."""

    __tablename__ = "states"
    state_id = Column(Integer, primary_key=True)
    domain = Column(String(64))
    entity_id = Column(String(255), index=True)
    state = Column(String(255))
    attributes = Column(Text)
    event_id = Column(Integer, ForeignKey("events.event_id"), index=True)
    last_changed = Column(DateTime(timezone=True), default=dt_util.utcnow)
    last_updated = Column(DateTime(timezone=True), default=dt_util.utcnow, index=True)
    created = Column(DateTime(timezone=True), default=dt_util.utcnow)
    context_id = Column(String(36), index=True)
    context_user_id = Column(String(36), index=True)
    # context_parent_id = Column(String(36), index=True)

    __table_args__ = (
        # Used for fetching the state of entities at a specific time
        # (get_states in history.py)
        Index("ix_states_entity_id_last_updated", "entity_id", "last_updated"),
    )

    @staticmethod
    def from_event(event):
        """Create object from a state_changed event."""
        entity_id = event.data["entity_id"]
        state = event.data.get("new_state")

        dbstate = States(
            entity_id=entity_id,
            context_id=event.context.id,
            context_user_id=event.context.user_id,
            # context_parent_id=event.context.parent_id,
        )

        # State got deleted
        if state is None:
            dbstate.state = ""
            dbstate.domain = split_entity_id(entity_id)[0]
            dbstate.attributes = "{}"
            dbstate.last_changed = event.time_fired
            dbstate.last_updated = event.time_fired
        else:
            dbstate.domain = state.domain
            dbstate.state = state.state
            dbstate.attributes = json.dumps(dict(state.attributes), cls=JSONEncoder)
            dbstate.last_changed = state.last_changed
            dbstate.last_updated = state.last_updated

        return dbstate

    def to_native(self):
        """Convert to an HA state object."""
        context = Context(id=self.context_id, user_id=self.context_user_id)
        try:
            return State(
                self.entity_id,
                self.state,
                json.loads(self.attributes),
                process_timestamp(self.last_changed),
                process_timestamp(self.last_updated),
                context=context,
                # Temp, because database can still store invalid entity IDs
                # Remove with 1.0 or in 2020.
                temp_invalid_id_bypass=True,
            )
        except ValueError:
            # When json.loads fails
            _LOGGER.exception("Error converting row to state: %s", self)
            return None
class utcnow(expression.FunctionElement):
    type = DateTime()
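
# On its own the FunctionElement above renders no SQL; the standard
# SQLAlchemy recipe pairs it with a compiler hook per dialect. A minimal
# sketch for PostgreSQL - the dialect choice is an assumption, matching
# the expression the roundtrip tests below compare against:
from sqlalchemy.ext.compiler import compiles


@compiles(utcnow, 'postgresql')
def pg_utcnow(element, compiler, **kw):
    return "TIMEZONE('utc', CURRENT_TIMESTAMP)"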
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    # New table
    quota_classes = Table('quota_classes', meta,
                          Column('created_at', DateTime(timezone=False)),
                          Column('updated_at', DateTime(timezone=False)),
                          Column('deleted_at', DateTime(timezone=False)),
                          Column('deleted', Boolean(create_constraint=True,
                                                    name=None)),
                          Column('id', Integer(), primary_key=True),
                          Column('class_name', String(length=255), index=True),
                          Column('resource', String(length=255)),
                          Column('hard_limit', Integer(), nullable=True),
                          mysql_engine='InnoDB',
                          mysql_charset='utf8',
                          )

    try:
        quota_classes.create()
    except Exception:
        LOG.error(_LE("Table |%s| not created!"), repr(quota_classes))
        raise

    quota_usages = Table('quota_usages', meta,
                         Column('created_at', DateTime(timezone=False)),
                         Column('updated_at', DateTime(timezone=False)),
                         Column('deleted_at', DateTime(timezone=False)),
                         Column('deleted', Boolean(create_constraint=True,
                                                   name=None)),
                         Column('id', Integer(), primary_key=True),
                         Column('project_id', String(length=255), index=True),
                         Column('resource', String(length=255)),
                         Column('in_use', Integer(), nullable=False),
                         Column('reserved', Integer(), nullable=False),
                         Column('until_refresh', Integer(), nullable=True),
                         mysql_engine='InnoDB',
                         mysql_charset='utf8',
                         )

    try:
        quota_usages.create()
    except Exception:
        LOG.error(_LE("Table |%s| not created!"), repr(quota_usages))
        raise

    reservations = Table('reservations', meta,
                         Column('created_at', DateTime(timezone=False)),
                         Column('updated_at', DateTime(timezone=False)),
                         Column('deleted_at', DateTime(timezone=False)),
                         Column('deleted', Boolean(create_constraint=True,
                                                   name=None)),
                         Column('id', Integer(), primary_key=True),
                         Column('uuid', String(length=36), nullable=False),
                         Column('usage_id', Integer(),
                                ForeignKey('quota_usages.id'),
                                nullable=False),
                         Column('project_id', String(length=255), index=True),
                         Column('resource', String(length=255)),
                         Column('delta', Integer(), nullable=False),
                         Column('expire', DateTime(timezone=False)),
                         mysql_engine='InnoDB',
                         mysql_charset='utf8',
                         )

    try:
        reservations.create()
    except Exception:
        LOG.error(_LE("Table |%s| not created!"), repr(reservations))
        raise
class User(Base, UserMixin):
    # General user properties
    id = Column(Integer(), primary_key=True)
    first_name = Column(String(255))
    last_name = Column(String(255))
    email = Column(String(255))
    proof = Column(String(255), default='')
    password = Column(String(255))
    active = Column(Boolean())
    confirmed_at = Column(DateTime())

    # Foreign key relationships
    roles = relationship("Role", backref='user', lazy='dynamic')
    authentications = relationship("Authentication", backref='user', lazy='dynamic')
    assignments = relationship("Assignment", backref='user', lazy='dynamic')

    STAFF_ROLES = [
        "urn:lti:role:ims/lis/teachingassistant",
        "instructor",
        "contentdeveloper",
        "teachingassistant",
        "urn:lti:role:ims/lis/instructor",
        "urn:lti:role:ims/lis/contentdeveloper"
    ]

    def encode_json(self, use_owner=True):
        return {
            'id': self.id,
            'first_name': self.first_name,
            'last_name': self.last_name,
            'email': self.email
        }

    @staticmethod
    def new_from_instructor(email, first_name='', last_name=''):
        new_user = User(first_name=first_name, last_name=last_name, email=email)
        db.session.add(new_user)
        db.session.commit()
        return new_user

    @staticmethod
    def find_student(email):
        # Hack: We have to lowercase emails because apparently some LMSes
        # want to SHOUT EMAIL ADDRESSES
        return User.query.filter(
            func.lower(User.email) == func.lower(email)).first()

    def get_roles(self):
        return models.Role.query.filter_by(user_id=self.id).all()

    def get_course_roles(self, course_id):
        return models.Role.query.filter_by(user_id=self.id,
                                           course_id=course_id).all()

    def get_editable_courses(self):
        return (db.session.query(models.Course)
                .filter(models.Role.user_id == self.id,
                        models.Role.course_id == models.Course.id,
                        (models.Role.name == 'instructor') |
                        (models.Role.name == 'admin'))
                .order_by(models.Course.name)
                .distinct())

    def get_courses(self):
        return (db.session.query(models.Course, models.Role)
                .filter(models.Role.user_id == self.id,
                        models.Role.course_id == models.Course.id)
                .order_by(models.Role.name)
                .all())

    def __str__(self):
        return '<User {} ({})>'.format(self.id, self.email)

    def name(self):
        return ' '.join((self.first_name or "", self.last_name or ""))

    def get_filename(self, extension='.json'):
        return secure_filename(self.name().replace(' ', "_")) + extension

    def in_course(self, course_id):
        return bool(models.Role.query.filter_by(course_id=course_id,
                                                user_id=self.id).first())

    def is_admin(self):
        return 'admin' in {role.name.lower() for role in self.roles.all()}

    def is_instructor(self, course_id=None):
        if course_id is not None:
            return 'instructor' in {
                role.name.lower()
                for role in self.roles.all()
                if role.course_id == course_id
            }
        return 'instructor' in {role.name.lower() for role in self.roles.all()}

    def is_grader(self, course_id=None):
        if course_id is not None:
            role_strings = {
                role.name.lower()
                for role in self.roles.all()
                if role.course_id == course_id
            }
        else:
            role_strings = {role.name.lower() for role in self.roles.all()}
        return ('instructor' in role_strings or
                'urn:lti:sysrole:ims/lis/none' in role_strings or
                'urn:lti:role:ims/lis/teachingassistant' in role_strings)

    def is_student(self, course_id=None):
        if course_id is not None:
            return 'learner' in {
                role.name.lower()
                for role in self.roles.all()
                if role.course_id == course_id
            }
        return 'learner' in {role.name.lower() for role in self.roles.all()}

    def add_role(self, name, course_id):
        new_role = models.Role(name=name, user_id=self.id, course_id=course_id)
        db.session.add(new_role)
        db.session.commit()

    def update_roles(self, new_roles, course_id):
        old_roles = [role for role in self.roles.all()
                     if role.course_id == course_id]
        new_role_names = set(new_role_name.lower() for new_role_name in new_roles)
        for old_role in old_roles:
            if old_role.name.lower() not in new_role_names:
                models.Role.query.filter(models.Role.id == old_role.id).delete()
        old_role_names = set(role.name.lower() for role in old_roles)
        for new_role_name in new_roles:
            if new_role_name.lower() not in old_role_names:
                new_role = models.Role(name=new_role_name.lower(),
                                       user_id=self.id,
                                       course_id=course_id)
                db.session.add(new_role)
        db.session.commit()

    def determine_role(self, assignments, submissions):
        '''
        Note that when you use an assignment from another course, you are
        implicitly giving all the graders from that course access to your
        students' submissions in the editor menu. Of course, it would be
        very unusual to be able to access submissions from that menu, but
        in theory that's what this role delegation means.

        :param assignments:
        :param submissions:
        :return:
        '''
        role = 'student'
        if assignments and self.is_grader(assignments[0].course_id):
            role = 'owner'
        elif submissions and self.is_grader(submissions[0].course_id):
            role = 'grader'
        return role

    @staticmethod
    def is_lti_instructor(given_roles):
        return any(role.lower() for role in User.STAFF_ROLES
                   if role in given_roles)

    @staticmethod
    def new_lti_user(service, lti_user_id, lti_email, lti_first_name,
                     lti_last_name):
        new_user = User(first_name=lti_first_name,
                        last_name=lti_last_name,
                        email=lti_email.lower(),
                        password="",
                        active=True,
                        confirmed_at=None)
        db.session.add(new_user)
        db.session.flush()
        new_authentication = models.Authentication(type=service,
                                                   value=lti_user_id,
                                                   user_id=new_user.id)
        db.session.add(new_authentication)
        db.session.commit()
        return new_user

    def register_authentication(self, service, lti_user_id):
        new_authentication = models.Authentication(type=service,
                                                   value=lti_user_id,
                                                   user_id=self.id)
        db.session.add(new_authentication)
        db.session.commit()
        return self

    @staticmethod
    def from_lti(service, lti_user_id, lti_email, lti_first_name,
                 lti_last_name):
        """
        For a given service (e.g., "canvas") and a user_id in the LTI
        system, find or create the matching local user.
        """
        lti = models.Authentication.query.filter_by(type=service,
                                                    value=lti_user_id).first()
        if lti is None:
            user = User.find_student(lti_email)
            if user:
                user.register_authentication(service, lti_user_id)
                return user
            else:
                return User.new_lti_user(service, lti_user_id, lti_email,
                                         lti_first_name, lti_last_name)
        else:
            return lti.user
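
# The add/remove logic in update_roles() above is a plain set difference;
# reduced to standalone Python with illustrative role names:
old = {'learner'}       # roles currently stored for the course
new = {'instructor'}    # roles reported by the latest LMS launch

print(old - new)        # {'learner'}    -> Role rows deleted
print(new - old)        # {'instructor'} -> Role rows inserted
print(old & new)        # set()          -> rows left untouched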
class Pessoa(Base):
    """
    Represents table "tb_pessoa"

    1. Recursive One to One relationship - colaborador
       One Pessoa can only be one colaborador

    2. Recursive One to One relationship - idealizador
       One Pessoa can only be one idealizador

    3. Recursive One to One relationship - aliado
       One Pessoa can only be one aliado

    Attributes:
        id: Integer, Primary key
        usuario: String
        email: String, unique and indexed
        senha: String
        nome: String
        data_criacao: Datetime - default uses DB function Now()
        data_atualizacao: Datetime - default uses function Now() on the server
        data_nascimento: Date
        telefone: String
        ativo: Boolean
        superusuario: Boolean
        colaborador: Boolean
        idealizador: Boolean
        aliado: Boolean
    """

    __tablename__ = "tb_pessoa"

    id = Column(Integer, primary_key=True, index=True)
    usuario = Column(String, unique=True)
    email = Column(String, unique=True, index=True, nullable=False)
    senha = Column(String, nullable=False)
    nome = Column(String)
    # data_criacao uses server time with timezone and not user time by default
    data_criacao = Column(DateTime(timezone=True), server_default=func.now())
    data_atualizacao = Column(DateTime(timezone=True), onupdate=func.now())
    data_nascimento = Column(Date, default=date(year=1990, month=1, day=1))
    telefone = Column(String)
    foto_perfil = Column(String)

    ativo = Column(Boolean, default=True)
    superusuario = Column(Boolean, default=False)

    experiencia_profissional = relationship("ExperienciaProf")
    experiencia_projetos = relationship("ExperienciaProj")
    experiencia_academica = relationship("ExperienciaAcad")

    pessoa_projeto = relationship("PessoaProjeto", back_populates="pessoa")

    areas = relationship("Area", secondary=PessoaArea)
    habilidades = relationship("Habilidades", secondary=HabilidadesPessoa)

    colaborador = Column(Boolean, default=False)
    idealizador = Column(Boolean, default=False)
    aliado = Column(Boolean, default=False)

    def __repr__(self):
        return f"<Pessoa {self.id}, {self.email}, {self.superusuario}>"
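
# The two timestamp columns above use different mechanisms: server_default
# lets the database fill data_criacao on INSERT, while onupdate is emitted
# by SQLAlchemy as part of every UPDATE for data_atualizacao. The pattern
# in isolation, as a self-contained sketch (class and table names are
# hypothetical):
from sqlalchemy import Column, DateTime, Integer, func
from sqlalchemy.orm import declarative_base

SketchBase = declarative_base()


class Carimbo(SketchBase):
    __tablename__ = "tb_exemplo"

    id = Column(Integer, primary_key=True)
    criado = Column(DateTime(timezone=True), server_default=func.now())  # set by the DB on INSERT
    atualizado = Column(DateTime(timezone=True), onupdate=func.now())    # re-evaluated on each UPDATE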
def load_world_bank_health_n_pop():
    """Loads the world bank health dataset, slices and a dashboard"""
    tbl_name = 'wb_health_population'
    with gzip.open(os.path.join(DATA_FOLDER, 'countries.json.gz')) as f:
        pdf = pd.read_json(f)
    pdf.columns = [col.replace('.', '_') for col in pdf.columns]
    pdf.year = pd.to_datetime(pdf.year)
    pdf.to_sql(
        tbl_name,
        db.engine,
        if_exists='replace',
        chunksize=50,
        dtype={
            'year': DateTime(),
            'country_code': String(3),
            'country_name': String(255),
            'region': String(255),
        },
        index=False)
    print("Creating table [wb_health_population] reference")
    tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first()
    if not tbl:
        tbl = TBL(table_name=tbl_name)
    tbl.description = utils.readfile(os.path.join(DATA_FOLDER, 'countries.md'))
    tbl.main_dttm_col = 'year'
    tbl.database = get_or_create_main_db()
    tbl.filter_select_enabled = True
    db.session.merge(tbl)
    db.session.commit()
    tbl.fetch_metadata()

    defaults = {
        "compare_lag": "10",
        "compare_suffix": "o10Y",
        "limit": "25",
        "granularity": "year",
        "groupby": [],
        "metric": 'sum__SP_POP_TOTL',
        "metrics": ["sum__SP_POP_TOTL"],
        "row_limit": config.get("ROW_LIMIT"),
        "since": "2014-01-01",
        "until": "2014-01-02",
        "where": "",
        "markup_type": "markdown",
        "country_fieldtype": "cca3",
        "secondary_metric": "sum__SP_POP_TOTL",
        "entity": "country_code",
        "show_bubbles": True,
    }

    print("Creating slices")
    slices = [
        Slice(
            slice_name="Region Filter",
            viz_type='filter_box',
            datasource_type='table',
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type='filter_box',
                groupby=['region', 'country_name'])),
        Slice(
            slice_name="World's Population",
            viz_type='big_number',
            datasource_type='table',
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                since='2000',
                viz_type='big_number',
                compare_lag="10",
                metric='sum__SP_POP_TOTL',
                compare_suffix="over 10Y")),
        Slice(
            slice_name="Most Populated Countries",
            viz_type='table',
            datasource_type='table',
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type='table',
                metrics=["sum__SP_POP_TOTL"],
                groupby=['country_name'])),
        Slice(
            slice_name="Growth Rate",
            viz_type='line',
            datasource_type='table',
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type='line',
                since="1960-01-01",
                metrics=["sum__SP_POP_TOTL"],
                num_period_compare="10",
                groupby=['country_name'])),
        Slice(
            slice_name="% Rural",
            viz_type='world_map',
            datasource_type='table',
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type='world_map',
                metric="sum__SP_RUR_TOTL_ZS",
                num_period_compare="10")),
        Slice(
            slice_name="Life Expectancy VS Rural %",
            viz_type='bubble',
            datasource_type='table',
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type='bubble',
                since="2011-01-01",
                until="2011-01-02",
                series="region",
                limit=0,
                entity="country_name",
                x="sum__SP_RUR_TOTL_ZS",
                y="sum__SP_DYN_LE00_IN",
                size="sum__SP_POP_TOTL",
                max_bubble_size="50",
                filters=[{
                    "col": "country_code",
                    "val": [
                        "TCA", "MNP", "DMA", "MHL", "MCO", "SXM", "CYM",
                        "TUV", "IMY", "KNA", "ASM", "ADO", "AMA", "PLW",
                    ],
                    "op": "not in"
                }],
            )),
        Slice(
            slice_name="Rural Breakdown",
            viz_type='sunburst',
            datasource_type='table',
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type='sunburst',
                groupby=["region", "country_name"],
                secondary_metric="sum__SP_RUR_TOTL",
                since="2011-01-01",
                until="2011-01-01",
            )),
        Slice(
            slice_name="World's Pop Growth",
            viz_type='area',
            datasource_type='table',
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                since="1960-01-01",
                until="now",
                viz_type='area',
                groupby=["region"],
            )),
        Slice(
            slice_name="Box plot",
            viz_type='box_plot',
            datasource_type='table',
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                since="1960-01-01",
                until="now",
                whisker_options="Min/max (no outliers)",
                viz_type='box_plot',
                groupby=["region"],
            )),
        Slice(
            slice_name="Treemap",
            viz_type='treemap',
            datasource_type='table',
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                since="1960-01-01",
                until="now",
                viz_type='treemap',
                metrics=["sum__SP_POP_TOTL"],
                groupby=["region", "country_code"],
            )),
        Slice(
            slice_name="Parallel Coordinates",
            viz_type='para',
            datasource_type='table',
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                since="2011-01-01",
                until="2011-01-01",
                viz_type='para',
                limit=100,
                metrics=[
                    "sum__SP_POP_TOTL",
                    'sum__SP_RUR_TOTL_ZS',
                    'sum__SH_DYN_AIDS'
                ],
                secondary_metric='sum__SP_POP_TOTL',
                series="country_name",
            )),
    ]
    misc_dash_slices.append(slices[-1].slice_name)
    for slc in slices:
        merge_slice(slc)

    print("Creating a World's Health Bank dashboard")
    dash_name = "World's Bank Data"
    slug = "world_health"
    dash = db.session.query(Dash).filter_by(slug=slug).first()

    if not dash:
        dash = Dash()
    js = textwrap.dedent("""\
    [
        { "col": 1, "row": 0, "size_x": 2, "size_y": 2, "slice_id": "1231" },
        { "col": 1, "row": 2, "size_x": 2, "size_y": 2, "slice_id": "1232" },
        { "col": 10, "row": 0, "size_x": 3, "size_y": 7, "slice_id": "1233" },
        { "col": 1, "row": 4, "size_x": 6, "size_y": 3, "slice_id": "1234" },
        { "col": 3, "row": 0, "size_x": 7, "size_y": 4, "slice_id": "1235" },
        { "col": 5, "row": 7, "size_x": 8, "size_y": 4, "slice_id": "1236" },
        { "col": 7, "row": 4, "size_x": 3, "size_y": 3, "slice_id": "1237" },
        { "col": 1, "row": 7, "size_x": 4, "size_y": 4, "slice_id": "1238" },
        { "col": 9, "row": 11, "size_x": 4, "size_y": 4, "slice_id": "1239" },
        { "col": 1, "row": 11, "size_x": 8, "size_y": 4, "slice_id": "1240" }
    ]
    """)
    positions = json.loads(js)
    for i, pos in enumerate(positions):
        pos['slice_id'] = str(slices[i].id)
    dash.dashboard_title = dash_name
    dash.position_json = json.dumps(positions, indent=4)
    dash.slug = slug
    dash.slices = slices[:-1]
    db.session.merge(dash)
    db.session.commit()
    Column('id', Integer(), primary_key=True, nullable=False),
)

volumes = Table(
    'volumes', meta,
    Column('id', Integer(), primary_key=True, nullable=False),
)

#
# New Tables
#

certificates = Table(
    'certificates', meta,
    Column('created_at', DateTime(timezone=False)),
    Column('updated_at', DateTime(timezone=False)),
    Column('deleted_at', DateTime(timezone=False)),
    Column('deleted', Boolean(create_constraint=True, name=None)),
    Column('id', Integer(), primary_key=True, nullable=False),
    Column(
        'user_id',
        String(length=255, convert_unicode=False, assert_unicode=None,
               unicode_error=None, _warn_on_bytestring=False)),
    Column(
        'project_id',
        String(length=255, convert_unicode=False,
class File(db.Model):

    __tablename__ = "release_files"

    @declared_attr
    def __table_args__(cls):  # noqa
        return (
            ForeignKeyConstraint(
                ["name", "version"],
                ["releases.name", "releases.version"],
                onupdate="CASCADE",
            ),

            CheckConstraint("sha256_digest ~* '^[A-F0-9]{64}$'"),
            CheckConstraint("blake2_256_digest ~* '^[A-F0-9]{64}$'"),

            Index("release_files_name_version_idx", "name", "version"),
            Index("release_files_packagetype_idx", "packagetype"),
            Index("release_files_version_idx", "version"),
            Index(
                "release_files_single_sdist",
                "name",
                "version",
                "packagetype",
                unique=True,
                postgresql_where=(
                    (cls.packagetype == 'sdist') &
                    (cls.allow_multiple_sdist == False)  # noqa
                ),
            ),
        )

    name = Column(Text)
    version = Column(Text)
    python_version = Column(Text)
    requires_python = Column(Text)
    packagetype = Column(
        Enum(
            "bdist_dmg",
            "bdist_dumb",
            "bdist_egg",
            "bdist_msi",
            "bdist_rpm",
            "bdist_wheel",
            "bdist_wininst",
            "sdist",
        ),
    )
    comment_text = Column(Text)
    filename = Column(Text, unique=True)
    path = Column(Text, unique=True, nullable=False)
    size = Column(Integer)
    has_signature = Column(Boolean)
    md5_digest = Column(Text, unique=True, nullable=False)
    sha256_digest = Column(CIText, unique=True, nullable=False)
    blake2_256_digest = Column(CIText, unique=True, nullable=False)
    upload_time = Column(DateTime(timezone=False), server_default=func.now())

    # We need this column to allow us to handle the currently existing
    # "double" sdists that exist in our database. Eventually we should try
    # to get rid of all of them and then remove this column.
    allow_multiple_sdist = Column(
        Boolean,
        nullable=False,
        server_default=sql.false(),
    )

    # TODO: Once Legacy PyPI is gone, then we should remove this column
    #       completely as we no longer use it.
    downloads = Column(Integer, server_default=sql.text("0"))

    @hybrid_property
    def pgp_path(self):
        return self.path + ".asc"

    @pgp_path.expression
    def pgp_path(self):
        return func.concat(self.path, ".asc")

    @validates("requires_python")
    def validates_requires_python(self, *args, **kwargs):
        raise RuntimeError("Cannot set File.requires_python")
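
# The hybrid_property/expression pair above gives pgp_path two behaviors:
# plain string concatenation on instances, and SQL concat() inside queries.
# A hedged sketch (the path value is illustrative; the query line assumes
# a configured session):
f = File(path="source/f/foo/foo-1.0.tar.gz")
print(f.pgp_path)  # source/f/foo/foo-1.0.tar.gz.asc - Python-side getter

# At class level the same attribute compiles to SQL instead:
# session.query(File).filter(File.pgp_path == "source/f/foo/foo-1.0.tar.gz.asc")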
class Song(Base):
    __tablename__ = 'song'

    id = Column(Integer, primary_key=True)
    title = Column(String(250), nullable=False)
    has_lyrics = Column(Boolean)
    lyrics = Column(Text)
    is_bonus_track = Column(Boolean)
    duration = Column(Integer)
    mb_id = Column(Integer)
    music_brainz_data = Column(JsonEncodedDict)
    genius_id = Column(Integer, nullable=False)
    genius_data = Column(JsonEncodedDict)
    # total_words = Column(Text)
    word_count = Column(Integer)
    unique_simple = Column(JsonEncodedDict)
    unique_complex = Column(JsonEncodedDict)
    percent_simple = Column(Numeric(4, 2))
    percent_complex = Column(Numeric(4, 2))
    total_simple = Column(Integer)
    total_complex = Column(Integer)
    artist_id = Column(Integer, ForeignKey('artist.id'))
    artist = relationship(Artist)
    album_id = Column(Integer, ForeignKey('album.id'))
    album = relationship(Album)
    time_created = Column(DateTime(timezone=True), server_default=func.now())
    time_updated = Column(DateTime(timezone=True), onupdate=func.now())

    @classmethod
    def find_by_name(cls, session, title, artist_id):
        obj = session.query(cls).filter_by(title=title,
                                           artist_id=artist_id).all()
        if len(obj) == 0:
            return None
        elif len(obj) == 1:
            return obj[0]
        else:
            print('More than one Song found matching the name criteria provided')
            return obj

    @classmethod
    def find_songs_by_album(cls, session, album_id):
        obj = session.query(cls).filter_by(album_id=album_id).all()
        if len(obj) == 0:
            return None
        else:
            return obj

    @classmethod
    def find_songs_by_artist(cls, session, artist_id):
        obj = session.query(cls).filter_by(artist_id=artist_id).all()
        if len(obj) == 0:
            return None
        else:
            return obj

    @classmethod
    def remove_songs_by_artist(cls, session: Session, artist_id):
        songs = session.query(cls).filter_by(artist_id=artist_id).all()
        if len(songs) == 0:
            return
        else:
            for s in songs:
                session.delete(s)
            session.commit()
class Project(SitemapMixin, db.ModelBase):

    __tablename__ = "packages"
    __table_args__ = (
        CheckConstraint(
            "name ~* '^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$'::text",
            name="packages_valid_name",
        ),
    )

    __repr__ = make_repr("name")

    name = Column(Text, primary_key=True, nullable=False)
    normalized_name = orm.column_property(func.normalize_pep426_name(name))
    stable_version = Column(Text)
    autohide = Column(Boolean, server_default=sql.true())
    comments = Column(Boolean, server_default=sql.true())
    bugtrack_url = Column(Text)
    hosting_mode = Column(Text, nullable=False, server_default="pypi-only")
    created = Column(
        DateTime(timezone=False),
        nullable=False,
        server_default=sql.func.now(),
    )
    has_docs = Column(Boolean)
    upload_limit = Column(Integer, nullable=True)
    last_serial = Column(Integer, nullable=False, server_default=sql.text("0"))
    allow_legacy_files = Column(
        Boolean,
        nullable=False,
        server_default=sql.false(),
    )
    zscore = Column(Float, nullable=True)

    users = orm.relationship(
        User,
        secondary=Role.__table__,
        backref="projects",
    )

    releases = orm.relationship(
        "Release",
        backref="project",
        cascade="all, delete-orphan",
        order_by=lambda: Release._pypi_ordering.desc(),
    )

    def __getitem__(self, version):
        session = orm.object_session(self)

        try:
            return (
                session.query(Release)
                .filter((Release.project == self) &
                        (Release.version == version))
                .one()
            )
        except NoResultFound:
            raise KeyError from None

    def __acl__(self):
        session = orm.object_session(self)
        acls = [
            (Allow, "group:admins", "admin"),
        ]

        # Get all of the users for this project.
        query = session.query(Role).filter(Role.project == self)
        query = query.options(orm.lazyload("project"))
        query = query.options(orm.joinedload("user").lazyload("emails"))

        for role in sorted(
                query.all(),
                key=lambda x: ["Owner", "Maintainer"].index(x.role_name)):
            if role.role_name == "Owner":
                acls.append((Allow, str(role.user.id), ["manage", "upload"]))
            else:
                acls.append((Allow, str(role.user.id), ["upload"]))
        return acls

    @property
    def documentation_url(self):
        # TODO: Move this into the database and eliminate the use of the
        #       threadlocal here.
        request = get_current_request()

        # If the project doesn't have docs, then we'll just return None here.
        if not self.has_docs:
            return

        return request.route_url("legacy.docs", project=self.name)
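
# The sorted(...) call in __acl__ above orders owners ahead of maintainers
# by indexing into a two-element list; the key function in isolation:
role_names = ["Maintainer", "Owner", "Maintainer"]
print(sorted(role_names, key=["Owner", "Maintainer"].index))
# ['Owner', 'Maintainer', 'Maintainer'] - owners come first, so their
# broader ("manage" + "upload") ACL entries are appended before the
# maintainers' upload-only entries.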
class Album(Base):
    __tablename__ = 'album'

    id = Column(Integer, primary_key=True)
    name = Column(String(250), nullable=False)
    year = Column(Integer)
    album_art = Column(JsonEncodedDict)
    mb_id = Column(String(250))
    label = Column(Text)
    music_brainz_data = Column(JsonEncodedDict)
    total_words = Column(Text)
    word_count = Column(Integer)
    unique_simple = Column(JsonEncodedDict)
    unique_complex = Column(JsonEncodedDict)
    percent_simple = Column(Numeric(4, 2))
    percent_complex = Column(Numeric(4, 2))
    total_simple = Column(Integer)
    total_complex = Column(Integer)
    artist_name = Column(Text)
    artist_id = Column(Integer, ForeignKey('artist.id'))
    artist = relationship(Artist)
    time_created = Column(DateTime(timezone=True), server_default=func.now())
    time_updated = Column(DateTime(timezone=True), onupdate=func.now())

    @classmethod
    def find_by_name(cls, session, name, artist_id):
        obj = session.query(cls).filter_by(name=name,
                                           artist_id=artist_id).all()
        if len(obj) == 0:
            return None
        elif len(obj) == 1:
            return obj[0]
        else:
            print('More than one Album found matching the criteria provided')
            return obj

    @classmethod
    def get_all_albums(cls, session):
        obj = session.query(cls).all()
        if len(obj) == 0:
            return None
        else:
            return obj

    @classmethod
    def find_albums_by_artist(cls, session, artist_id):
        obj = session.query(cls).filter_by(artist_id=artist_id).all()
        if len(obj) == 0:
            return None
        else:
            return obj

    @classmethod
    def remove_album_by_artist(cls, session: Session, artist_id):
        albums = session.query(cls).filter_by(artist_id=artist_id).all()
        if len(albums) == 0:
            return
        else:
            for a in albums:
                session.delete(a)
            session.commit()
class Bid(Sessionized, Base):
    __tablename__ = "bids"

    STATUS = Enum("AUTH", "CHARGE", "REFUND", "VOID")

    # will be unique from authorize
    transaction = Column(BigInteger, primary_key=True, autoincrement=False)

    # identifying characteristics
    account_id = Column(BigInteger, index=True, nullable=False)
    pay_id = Column(BigInteger, index=True, nullable=False)
    thing_id = Column(BigInteger, index=True, nullable=False)

    # breadcrumbs
    ip = Column(Inet)
    date = Column(DateTime(timezone=True), default=safunc.now(),
                  nullable=False)

    # bid information:
    bid = Column(Float, nullable=False)
    charge = Column(Float)

    status = Column(Integer, nullable=False, default=STATUS.AUTH)

    # make this a primary key as well so that we can have more than
    # one freebie per campaign
    campaign = Column(Integer, default=0, primary_key=True)

    @classmethod
    def _new(cls, trans_id, user, pay_id, thing_id, bid, campaign=0):
        bid = Bid(trans_id,
                  user,
                  pay_id,
                  thing_id,
                  getattr(request, 'ip', '0.0.0.0'),
                  bid=bid,
                  campaign=campaign)
        bid._commit()
        return bid

    # @classmethod
    # def for_transactions(cls, transids):
    #     transids = filter(lambda x: x != 0, transids)
    #     if transids:
    #         q = cls.query()
    #         q = q.filter(or_(*[cls.transaction == i for i in transids]))
    #         return dict((p.transaction, p) for p in q)
    #     return {}

    def set_status(self, status):
        if self.status != status:
            self.status = status
            self._commit()

    def auth(self):
        self.set_status(self.STATUS.AUTH)

    def is_auth(self):
        return (self.status == self.STATUS.AUTH)

    def void(self):
        self.set_status(self.STATUS.VOID)

    def is_void(self):
        return (self.status == self.STATUS.VOID)

    def charged(self):
        self.set_status(self.STATUS.CHARGE)

    def is_charged(self):
        """
        Returns True if the transaction has been charged with authorize.net
        or is a freebie with "charged" status.
        """
        return (self.status == self.STATUS.CHARGE)

    def refund(self):
        self.set_status(self.STATUS.REFUND)
def test_compare_current_timestamp_text(self):
    self._compare_default_roundtrip(
        DateTime(),
        text("TIMEZONE('utc', CURRENT_TIMESTAMP)"),
    )
class DebcheckIssue(Base):
    '''
    Data for a package migration excuse, as emitted by Britney
    '''
    __tablename__ = 'debcheck_issues'

    uuid = Column(UUID(as_uuid=True), primary_key=True, default=uuid4)

    # Time when this excuse was created
    time = Column(DateTime(), default=datetime.utcnow)

    package_type = Column(Enum(PackageType))

    repo_id = Column(Integer, ForeignKey('archive_repositories.id'))
    repo = relationship('ArchiveRepository')

    suite_id = Column(Integer, ForeignKey('archive_suites.id',
                                          ondelete='cascade'))
    suite = relationship('ArchiveSuite',
                         backref=backref('debcheck_issues',
                                         passive_deletes=True))

    # Architectures this issue affects; may be a wildcard like "any" or a
    # (list of) architecture expressions
    architectures = Column(ARRAY(Text()), default=['any'])

    package_name = Column(String(256))  # Name of the package this issue affects
    package_version = Column(DebVersion())  # Version of the package this issue affects

    _missing_json = Column('missing', JSON)  # information about missing packages
    _conflicts_json = Column('conflicts', JSON)  # information about conflicts

    _missing = None
    _conflicts = None

    @property
    def missing(self):
        if self._missing is not None:
            return self._missing
        if not self._missing_json:
            return []
        jlist = json.loads(self._missing_json)
        schema = PackageIssue()
        self._missing = [schema.load(d) for d in jlist]
        return self._missing

    @missing.setter
    def missing(self, v):
        self._missing = None
        schema = PackageIssue()
        self._missing_json = json.dumps([schema.dump(e) for e in v])

    @property
    def conflicts(self):
        if self._conflicts is not None:
            return self._conflicts
        if not self._conflicts_json:
            return []
        jlist = json.loads(self._conflicts_json)
        schema = PackageConflict()
        self._conflicts = [schema.load(d) for d in jlist]
        return self._conflicts

    @conflicts.setter
    def conflicts(self, v):
        self._conflicts = None
        schema = PackageConflict()
        self._conflicts_json = json.dumps([schema.dump(e) for e in v])
def test_compare_current_timestamp_fn_w_binds(self):
    self._compare_default_roundtrip(
        DateTime(),
        func.timezone("utc", func.current_timestamp()),
    )
import logging

from sqlalchemy import (CheckConstraint, Column, DateTime, SmallInteger,
                        String, Table)
from sqlalchemy.dialects import postgresql as postgres
from sqlalchemy.sql import func

from . import _core, _sql

_LOG = logging.getLogger(__name__)

METADATA_TYPE = Table(
    'metadata_type', _core.METADATA,
    Column('id', SmallInteger, primary_key=True, autoincrement=True),
    Column('name', String, unique=True, nullable=False),
    Column('definition', postgres.JSONB, nullable=False),

    # When it was added and by whom.
    Column('added', DateTime(timezone=True), server_default=func.now(),
           nullable=False),
    Column('added_by', _sql.PGNAME, server_default=func.current_user(),
           nullable=False),

    # Name must be alphanumeric + underscores.
    CheckConstraint(r"name ~* '^\w+$'", name='alphanumeric_name'),
)

DATASET_TYPE = Table(
    'dataset_type', _core.METADATA,
    Column('id', SmallInteger, primary_key=True, autoincrement=True),
    Column('key', String(255), nullable=False),
    Column('description', String(255), nullable=True),
    Column('tenant_id', String(36), nullable=False),
    Column('target_tenant_id', String(36), nullable=True),
    Column(
        'status',
        Enum(name='resource_statuses', *TASK_STATUSES),
        nullable=False,
        server_default='ACTIVE',
    ),
    Column('created_at', DateTime()),
    Column('updated_at', DateTime()),
    Column('version', Integer(), nullable=False),

    mysql_engine='INNODB',
    mysql_charset='utf8')

zone_transfer_accepts = Table(
    'zone_transfer_accepts', meta,
    Column('id', UUID(), primary_key=True),
    Column('domain_id', UUID, ForeignKey('domains.id'), nullable=False),
    Column('zone_transfer_request_id', UUID,
           ForeignKey('zone_transfer_requests.id',
class NginxLog(Base):
    __tablename__ = 't_nginx_log'

    id = Column(Integer, primary_key=True, autoincrement=True)
    dst_ip = Column(String(50))  # 10.255.252.4
    request_time = Column(DateTime())  # 16/Mar/2018:20:29:57 +0800
    request_line = Column(VARCHAR(2048))  # GET /zh-CN/baby_credits?baby_id=761756&v=2&page=0 HTTP/1.1
    request_method = Column(String(32))
    request_url = Column(VARCHAR(1024))
    request_url_origin = Column(VARCHAR(1024))
    http_version = Column(String(128))
    host = Column(String(128))  # api.shiguangxiaowu.cn
    http_status = Column(String(128))  # 200
    size = Column(String(128))  # 1238
    referrer = Column(VARCHAR(1024))  # 'https://www.baidu.com/link?url=-YzFM4p2GLR8g_&wd=&eqid=e5c101dd0007cbed0000000
    user_agent = Column(VARCHAR(2048))  # com.liveyap.timehut/5.2.2.1 (android 6.0.1, OPPO A57) (SOURCE/oppostore, VERSION_CODE/255)
    phone_type = Column(String(16))
    phone_os_version = Column(String(32))
    phone_model = Column(String(128))
    src_ip = Column(String(128))  # 58.53.78.68
    backend_address = Column(String(128))  # unix:///tmp/saturn.sock
    backend_status = Column(String(128))  # 200
    backend_time = Column(String(128))  # 0.077
    response_time = Column(String(128))  # 0.077
    phone_brand = Column(String(32))
    create_time = Column(DateTime())
    phone_os_type = Column(String(16))

    def __init__(self, dst_ip, request_time, request_line, request_method,
                 request_url, http_version, host, http_status, size,
                 referrer, user_agent, phone_type, src_ip, backend_address,
                 backend_status, backend_time, response_time,
                 phone_os_version, phone_model, request_url_origin,
                 phone_brand, phone_os_type):
        self.dst_ip = dst_ip
        self.request_time = request_time
        self.request_line = request_line
        self.request_method = request_method
        self.request_url = request_url
        self.http_version = http_version
        self.host = host
        self.http_status = http_status
        self.size = size
        self.referrer = referrer
        self.user_agent = user_agent
        self.phone_type = phone_type
        self.src_ip = src_ip
        self.backend_address = backend_address
        self.backend_status = backend_status
        self.backend_time = backend_time
        self.response_time = response_time
        self.phone_os_version = phone_os_version
        self.phone_model = phone_model
        self.request_url_origin = request_url_origin
        self.phone_brand = phone_brand
        self.phone_os_type = phone_os_type
        self.create_time = datetime.datetime.now()
class AccountCreateOperation(Base):
    """
    Steem Blockchain Example
    ========================
    {
      "creator": "hello",
      "json_metadata": "{}",
      "owner": {
        "key_auths": [
          [
            "STM8MN3FNBa8WbEpxz3wGL3L1mkt6sGnncH8iuto7r8Wa3T9NSSGT",
            1
          ]
        ],
        "account_auths": [],
        "weight_threshold": 1
      },
      "memo_key": "STM6Gkj27XMkoGsr4zwEvkjNhh4dykbXmPFzHhT8g86jWsqu3U38X",
      "fee": "0.000 STEEM",
      "active": {
        "key_auths": [
          [
            "STM8HCf7QLUexogEviN8x1SpKRhFwg2sc8LrWuJqv7QsmWrua6ZyR",
            1
          ]
        ],
        "account_auths": [],
        "weight_threshold": 1
      },
      "posting": {
        "key_auths": [
          [
            "STM8EhGWcEuQ2pqCKkGHnbmcTNpWYZDjGTT7ketVBp4gUStDr2brz",
            1
          ]
        ],
        "account_auths": [],
        "weight_threshold": 1
      },
      "new_account_name": "fabian"
    }
    """

    __tablename__ = 'sbds_op_account_creates'
    __table_args__ = (
        PrimaryKeyConstraint('block_num', 'transaction_num', 'operation_num'),
        ForeignKeyConstraint(['creator'], ['sbds_meta_accounts.name'],
                             deferrable=True, initially='DEFERRED',
                             use_alter=True),
        ForeignKeyConstraint(['new_account_name'], ['sbds_meta_accounts.name'],
                             deferrable=True, initially='DEFERRED',
                             use_alter=True),
    )

    block_num = Column(Integer, nullable=False, index=True)
    transaction_num = Column(SmallInteger, nullable=False, index=True)
    operation_num = Column(SmallInteger, nullable=False, index=True)
    trx_id = Column(String(40), nullable=False)
    timestamp = Column(DateTime(timezone=False))
    fee = Column(Numeric(20, 6), nullable=False)  # steem_type:asset
    fee_symbol = Column(String(5))  # steem_type:asset
    creator = Column(String(16))  # steem_type:account_name_type
    new_account_name = Column(String(16))  # steem_type:account_name_type
    owner = Column(JSONB)  # steem_type:authority
    active = Column(JSONB)  # steem_type:authority
    posting = Column(JSONB)  # steem_type:authority
    memo_key = Column(String(60), nullable=False)  # steem_type:public_key_type
    json_metadata = Column(JSONB)  # name:json_metadata
    operation_type = Column(operation_types_enum, nullable=False, index=True,
                            default='account_create')

    _fields = dict(
        fee=lambda x: amount_field(x.get('fee'), num_func=float),  # steem_type:asset
        fee_symbol=lambda x: amount_symbol_field(x.get('fee')),  # steem_type:asset
        owner=lambda x: json_string_field(x.get('owner')),  # steem_type:authority
        active=lambda x: json_string_field(x.get('active')),  # name:active
        posting=lambda x: json_string_field(x.get('posting')),  # name:posting
        json_metadata=lambda x: json_string_field(x.get('json_metadata')),  # name:json_metadata
    )

    _account_fields = frozenset([
        'creator',
        'new_account_name',
    ])

    def dump(self):
        return dissoc(self.__dict__, '_sa_instance_state')

    def to_dict(self, decode_json=True):
        data_dict = self.dump()
        if isinstance(data_dict.get('json_metadata'), str) and decode_json:
            data_dict['json_metadata'] = sbds.sbds_json.loads(
                data_dict['json_metadata'])
        return data_dict

    def to_json(self):
        data_dict = self.to_dict()
        return sbds.sbds_json.dumps(data_dict)

    def __repr__(self):
        return "<%s (block_num:%s transaction_num: %s operation_num: %s keys: %s)>" % (
            self.__class__.__name__, self.block_num, self.transaction_num,
            self.operation_num, tuple(self.dump().keys()))

    def __str__(self):
        return str(self.dump())
    Warehouse.metadata,
    Column("name", UnicodeText(), primary_key=True, nullable=False),
    Column("stable_version", UnicodeText()),
    Column("normalized_name", UnicodeText()),
    Column("autohide", Boolean(), server_default=sql.true()),
    Column("comments", Boolean(), server_default=sql.true()),
    Column("bugtrack_url", UnicodeText()),
    Column(
        "hosting_mode",
        UnicodeText(),
        nullable=False,
        server_default="pypi-explicit",
    ),
    Column(
        "created",
        DateTime(),
        nullable=False,
        server_default=sql.func.now(),
    ),

    # Validate that packages begin and end with an alpha numeric and
    # contain only alpha numeric, ., _, and -.
    CheckConstraint(
        "name ~* '^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$'",
        name="packages_valid_name",
    ),
)


releases = Table(
    "releases",
    Warehouse.metadata,
class Release(db.ModelBase):

    __tablename__ = "releases"

    @declared_attr
    def __table_args__(cls):  # noqa
        return (
            Index("release_created_idx", cls.created.desc()),
            Index("release_name_created_idx", cls.name, cls.created.desc()),
            Index("release_name_idx", cls.name),
            Index("release_pypi_hidden_idx", cls._pypi_hidden),
            Index("release_version_idx", cls.version),
        )

    __repr__ = make_repr("name", "version")

    name = Column(
        Text,
        ForeignKey("packages.name", onupdate="CASCADE"),
        primary_key=True,
    )
    version = Column(Text, primary_key=True)
    is_prerelease = orm.column_property(func.pep440_is_prerelease(version))
    author = Column(Text)
    author_email = Column(Text)
    maintainer = Column(Text)
    maintainer_email = Column(Text)
    home_page = Column(Text)
    license = Column(Text)
    summary = Column(Text)
    description = Column(Text)
    keywords = Column(Text)
    platform = Column(Text)
    download_url = Column(Text)
    _pypi_ordering = Column(Integer)
    _pypi_hidden = Column(Boolean)
    cheesecake_installability_id = Column(
        Integer,
        ForeignKey("cheesecake_main_indices.id"),
    )
    cheesecake_documentation_id = Column(
        Integer,
        ForeignKey("cheesecake_main_indices.id"),
    )
    cheesecake_code_kwalitee_id = Column(
        Integer,
        ForeignKey("cheesecake_main_indices.id"),
    )
    requires_python = Column(Text)
    description_from_readme = Column(Boolean)
    created = Column(
        DateTime(timezone=False),
        nullable=False,
        server_default=sql.func.now(),
    )

    _classifiers = orm.relationship(
        Classifier,
        backref="project_releases",
        secondary=lambda: release_classifiers,
        order_by=Classifier.classifier,
    )
    classifiers = association_proxy("_classifiers", "classifier")

    files = orm.relationship(
        "File",
        backref="release",
        cascade="all, delete-orphan",
        lazy="dynamic",
        order_by=lambda: File.filename,
    )

    dependencies = orm.relationship("Dependency")

    _requires = _dependency_relation(DependencyKind.requires)
    requires = association_proxy("_requires", "specifier")

    _provides = _dependency_relation(DependencyKind.provides)
    provides = association_proxy("_provides", "specifier")

    _obsoletes = _dependency_relation(DependencyKind.obsoletes)
    obsoletes = association_proxy("_obsoletes", "specifier")

    _requires_dist = _dependency_relation(DependencyKind.requires_dist)
    requires_dist = association_proxy("_requires_dist", "specifier")

    _provides_dist = _dependency_relation(DependencyKind.provides_dist)
    provides_dist = association_proxy("_provides_dist", "specifier")

    _obsoletes_dist = _dependency_relation(DependencyKind.obsoletes_dist)
    obsoletes_dist = association_proxy("_obsoletes_dist", "specifier")

    _requires_external = _dependency_relation(DependencyKind.requires_external)
    requires_external = association_proxy("_requires_external", "specifier")

    _project_urls = _dependency_relation(DependencyKind.project_url)
    project_urls = association_proxy("_project_urls", "specifier")

    uploader = orm.relationship(
        "User",
        secondary=lambda: JournalEntry.__table__,
        primaryjoin=lambda: (
            (JournalEntry.name == orm.foreign(Release.name)) &
            (JournalEntry.version == orm.foreign(Release.version)) &
            (JournalEntry.action == "new release")),
        secondaryjoin=lambda: (
            (User.username == orm.foreign(JournalEntry._submitted_by))),
        order_by=lambda: JournalEntry.submitted_date.desc(),
        # TODO: We have uselist=False here which raises a warning because
        #       multiple items were returned. This should only be temporary
        #       because we should add a nullable FK to JournalEntry so we
        #       don't need to rely on ordering and implicitly selecting the
        #       first object to make this happen.
        uselist=False,
        viewonly=True,
    )

    @property
    def urls(self):
        _urls = OrderedDict()

        if self.home_page:
            _urls["Homepage"] = self.home_page

        for urlspec in self.project_urls:
            name, url = [x.strip() for x in urlspec.split(",", 1)]
            _urls[name] = url

        if self.download_url and "Download" not in _urls:
            _urls["Download"] = self.download_url

        return _urls

    @property
    def has_meta(self):
        return any([
            self.license,
            self.keywords,
            self.author, self.author_email,
            self.maintainer, self.maintainer_email,
            self.requires_python,
        ])
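
# The project_urls specifiers consumed by urls() above are "Name, URL"
# strings; the split is on the first comma only, so commas inside the URL
# survive. An illustrative value:
spec = "Documentation, https://docs.example.com/en,latest/"
name, url = [x.strip() for x in spec.split(",", 1)]
print(name)  # Documentation
print(url)   # https://docs.example.com/en,latest/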
class LiquidityRewardVirtualOperation(Base):
    """
    dPay Blockchain Example
    =======================
    """

    __tablename__ = 'dpds_op_virtual_liquidity_rewards'
    __table_args__ = (
        ForeignKeyConstraint(['owner'], ['dpds_meta_accounts.name'],
                             deferrable=True, initially='DEFERRED',
                             use_alter=True),
    )

    id = Column(Integer, primary_key=True)
    block_num = Column(Integer, nullable=False, index=True)
    transaction_num = Column(SmallInteger, nullable=False, index=True)
    operation_num = Column(SmallInteger, nullable=False, index=True)
    trx_id = Column(String(40), nullable=False)
    timestamp = Column(DateTime(timezone=False))
    owner = Column(String(16))  # dpay_type:account_name_type
    payout = Column(Numeric(20, 6), nullable=False)  # dpay_type:asset
    payout_symbol = Column(String(5))  # dpay_type:asset
    operation_type = Column(operation_types_enum, nullable=False, index=True,
                            default='liquidity_reward')

    _fields = dict(
        payout=lambda x: amount_field(x.get('payout'), num_func=float),  # dpay_type:asset
        payout_symbol=lambda x: amount_symbol_field(x.get('payout')),  # dpay_type:asset
    )

    _account_fields = frozenset([
        'owner',
    ])

    def dump(self):
        return dissoc(self.__dict__, '_sa_instance_state')

    def to_dict(self, decode_json=True):
        data_dict = self.dump()
        if isinstance(data_dict.get('json_metadata'), str) and decode_json:
            data_dict['json_metadata'] = dpds.dpds_json.loads(
                data_dict['json_metadata'])
        return data_dict

    def to_json(self):
        data_dict = self.to_dict()
        return dpds.dpds_json.dumps(data_dict)

    def __repr__(self):
        return "<%s (block_num:%s transaction_num: %s operation_num: %s keys: %s)>" % (
            self.__class__.__name__, self.block_num, self.transaction_num,
            self.operation_num, tuple(self.dump().keys()))

    def __str__(self):
        return str(self.dump())
class TimePeriod(ExtMixin, Base):
    """Object representing a block of time in a calendar."""

    startdatetime = Column(DateTime(timezone=True))
    enddatetime = Column(DateTime(timezone=True))
    description = Column(String(128))
    task_id = Column(Integer, ForeignKey('task.id'))
    task = relationship("Task", back_populates="timeperiods")

    def __init__(self, start, end):
        if start.tzinfo:
            self.startdatetime = start.astimezone(pytz.utc)
        else:
            self.startdatetime = start
        if end.tzinfo:
            self.enddatetime = end.astimezone(pytz.utc)
        else:
            self.enddatetime = end

    @property
    def available(self):
        """Is the time period available for assignment."""
        return not self.task

    @property
    def duration(self):
        """Duration of the time period in minutes."""
        try:
            duration = ceil(
                (self.enddatetime - self.startdatetime)
                .total_seconds() / 60.0
            )
        except AttributeError:
            # fix for Python 2.6, where total_seconds() doesn't exist -
            # compute it from the timedelta components instead
            td = self.enddatetime - self.startdatetime
            duration = (
                td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6
            ) / 10**6
            duration = ceil(duration / 60.0)
        return duration

    @classmethod
    def unassigned_in_range(cls, startdate, enddate):
        """Get the first unassigned time period in the supplied date range,
        ordered by startdatetime."""
        return session.query(cls).filter(cls.task_id == None) \
            .filter(cls.startdatetime >= startdate) \
            .filter(cls.enddatetime <= enddate) \
            .order_by(cls.startdatetime).first()

    @classmethod
    def get_assigned(cls):
        """Get all assigned time periods."""
        return session.query(cls).filter(cls.task_id != None).all()

    def as_event(self):
        """Output the time period in a dict format that can be easily
        added as an event to Google calendar."""
        if self.task:
            summary = self.task.taskref
            desc = self.task.description
        else:
            summary = None
            desc = self.description
        return {
            'summary': summary,
            'description': desc,
            'start': {
                'dateTime': self.startdatetime.isoformat(),
                'timeZone': pytz.utc.zone
            },
            'end': {
                'dateTime': self.enddatetime.isoformat(),
                'timeZone': pytz.utc.zone
            }
        }
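
# A hedged sketch of the class above in use; it touches only the
# pure-Python parts (constructor, duration, available, as_event) and the
# datetimes are invented:
from datetime import datetime

import pytz

tp = TimePeriod(
    datetime(2021, 1, 4, 9, 0, tzinfo=pytz.utc),
    datetime(2021, 1, 4, 11, 0, tzinfo=pytz.utc),
)
print(tp.duration)    # 120 - minutes, rounded up
print(tp.available)   # True - no task assigned yet
print(tp.as_event())  # dict shaped for the Google Calendar events API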