class TaskMixin(object):
    """Mixin declaring the columns shared by Celery task-tracking models.

    NOTE(review): relies on Column, JSON, UUID, DateTime, Enum, TaskState,
    func, etc. being imported at module level (not visible in this chunk).
    """

    args = Column(JSON())  # positional arguments the task was invoked with
    celeryUuid = Column(UUID(as_uuid=True), index=True, nullable=False)  # Celery task id
    creationTime = Column(DateTime(), nullable=False, server_default=func.now())
    hostname = Column(String(64))  # worker host — presumably; confirm against writer
    isEager = Column(Boolean())  # True when the task ran eagerly (in-process)
    kwargs = Column(JSON())  # keyword arguments the task was invoked with
    name = Column(String(256), nullable=False)  # registered task name
    queue = Column(String(64))
    planificationTime = Column(DateTime())  # scheduled/ETA time — TODO confirm semantics
    result = Column(JSON())  # serialized return value
    state = Column(Enum(TaskState), nullable=False)
    startTime = Column(DateTime())
    stopTime = Column(DateTime())
    traceback = Column(Text())  # traceback text captured on failure
class Fixture(db.Model, JsonSerializableMixin):
    """Fixture model with cached tag/pattern lookups and a download counter.

    Cache keys live in the 'model' region: 'fixture:<id>',
    'fixture:<id>:tag_ids', 'fixture:<id>:pattern_ids'.
    """
    __tablename__ = 'fixtures'

    id = db.Column(db.Integer(), primary_key=True)
    name = db.Column(db.Unicode(64), unique=True, nullable=False)
    intro = db.Column(db.UnicodeText(), nullable=False)
    profile = db.Column(JSON())
    attachment = db.Column(JSON)  # NOTE(review): bare JSON vs JSON() elsewhere — both valid
    assets = db.Column(JSON())
    cover = db.Column(db.Unicode(256), nullable=False)
    create_at = db.Column(db.DateTime(), default=datetime.datetime.utcnow)
    update_at = db.Column(db.DateTime(), nullable=True)

    # Association rows are removed together with the fixture (delete-orphan).
    _tags = db.relationship('FixtureTag', passive_deletes=True, cascade="all, delete-orphan")
    _patterns = db.relationship('FixturePattern', passive_deletes=True, cascade="all, delete-orphan")

    @property
    def tags(self):
        """Tag models linked to this fixture; the id list is cached per fixture."""
        tag_ids = db.session.query(FixtureTag).options(FromCache('model', 'fixture:%s:tag_ids' % self.id)). \
            with_entities(FixtureTag.tag_id).filter(FixtureTag.fixture_id == self.id).all()
        tag_model = get_model('Tag')
        return [tag_model.from_cache_by_id(tag_id) for (tag_id,) in tag_ids]

    @property
    def patterns(self):
        """Pattern models linked to this fixture; the id list is cached per fixture."""
        pattern_ids = db.session.query(FixturePattern).options(FromCache('model', 'fixture:%s:pattern_ids' % self.id)). \
            with_entities(FixturePattern.pattern_id).filter(FixturePattern.fixture_id == self.id).all()
        pattern_model = get_model('Pattern')
        return [pattern_model.from_cache_by_id(pattern_id) for (pattern_id,) in pattern_ids]

    @property
    def download_times(self):
        """Download counter from FixtureDownload (None when no row exists)."""
        return db.session.query(FixtureDownload).with_entities(FixtureDownload.number_of_times). \
            filter(FixtureDownload.fixture_id == self.id).scalar()

    def increase_download(self):
        """Atomically bump the download counter (session not synchronized)."""
        FixtureDownload.query.filter(FixtureDownload.fixture_id == self.id). \
            update({FixtureDownload.number_of_times: FixtureDownload.number_of_times + 1},
                   synchronize_session=False)

    @classmethod
    def from_cache_by_id(cls, fixture_id):
        """Load a fixture by id through the 'model' cache region."""
        return Fixture.query.options(FromCache('model', 'fixture:%s' % fixture_id)). \
            filter(Fixture.id == fixture_id).first()

    def __eq__(self, other):
        # Equality is by unique name, consistent with __hash__ below.
        if isinstance(other, Fixture) and other.name == self.name:
            return True
        else:
            return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        return hash(self.name)

    def __repr__(self):
        return '<Fixture<id=%d>>' % self.id
def test_json_type(self):
    """The PostgreSQL JSON type renders with astext_type on SQLAlchemy >= 1.1."""
    if config.requirements.sqlalchemy_110.enabled:
        expected = "postgresql.JSON(astext_type=sa.Text())"
    else:
        expected = "postgresql.JSON()"
    rendered = autogenerate.render._repr_type(JSON(), self.autogen_context)
    eq_ignore_whitespace(rendered, expected)
class RecipeData(Base):
    """Self-referential recipe node; children point back via parent_id."""
    __tablename__ = 'recipe_data'

    id = Column(Integer(), primary_key=True)
    message = Column(String(255))
    created_at = Column(TIMESTAMP, default=datetime.utcnow, nullable=False)
    ingredients = Column(JSON())
    steps = Column(JSON())
    parent_id = Column(Integer(), ForeignKey('recipe_data.id'))
    # Adjacency list: each child gets an eagerly-joined 'parent' backref.
    children = relationship('RecipeData',
                            backref=backref('parent', lazy='joined', remote_side=[id]))
class Users(Model):
    """User account keyed by a unique external identity string."""
    __tablename__ = "users"

    identity = db.Column(db.String(), nullable=False, index=True, unique=True)
    claim = db.Column(JSON(), nullable=True)  # presumably auth claims payload — confirm
    groups = relationship("Groups", back_populates="user")
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Add an array-of-JSON column to 'intents' (PostgreSQL ARRAY of JSON).
    op.add_column(
        'intents',
        sa.Column('patterns', sa.ARRAY(JSON(astext_type=sa.Text())), nullable=True))
class SessionOrmModel(OrmModelBase):
    """Server-side session row: opaque JSON payload plus expiry."""
    __tablename__ = 'session'

    session_id = Column(
        String(32), primary_key=True)  # Length of a uuid4 with the - stripped
    data = Column(JSON(), nullable=False)
    # NOTE(review): presumably an epoch timestamp — confirm units against writer.
    expires = Column(BigInteger(), nullable=False, index=True)
class Queue(Base):
    """Queued-job row with enqueue/dequeue/schedule timestamps and JSON payload."""
    __tablename__ = 'queue'

    id = Column(BIGINT(), primary_key=True, nullable=False)
    # NOTE(review): assigned as a plain class attribute; constraints normally go
    # in __table_args__ — confirm this is actually picked up by the mapper.
    queue_pkey = PrimaryKeyConstraint('id')
    enqueued_at = Column(TIMESTAMP(timezone=True), server_default=text('now()'),
                         autoincrement=False, nullable=False)
    dequeued_at = Column(TIMESTAMP(timezone=True), autoincrement=False, nullable=True)
    expected_at = Column(TIMESTAMP(timezone=True), autoincrement=False, nullable=True)
    schedule_at = Column(TIMESTAMP(timezone=True), autoincrement=False, nullable=True)
    # Queue name must be non-empty (enforced by the CHECK constraint).
    q_name = Column(TEXT(), CheckConstraint('length(q_name) > 0', name='queue_q_name_check'),
                    autoincrement=False, nullable=False)
    data = Column(JSON(astext_type=Text()), autoincrement=False, nullable=False)

    __table_args__ = (Index('priority_idx', "schedule_at", "expected_at"), )
class Article(ApiHandler, Model, HasExternalThumbUrlMixin, HasSharesMixin, HasThumbMixin,
              SoftDeletableMixin, VersionedMixin):
    """Article record with external URL, review flags and a scoring helper."""

    authors = Column(Text())
    isReviewable = Column(Boolean())
    isValidatedAsPeerPublication = Column(Boolean(), nullable=False, default=False)
    publishedDate = Column(DateTime())
    source = Column(JSON())
    summary = Column(Text())
    tags = Column(Text())
    theme = Column(String(140))
    title = Column(String(140))
    url = Column(String(220), nullable=False, unique=True)

    def get_score(self):
        """Return the score contribution: -10 when tagged 'PeerVerified', else 0."""
        return -10 if self.tags and 'PeerVerified' in self.tags else 0
def upgrade():
    # We want to avoid an error trying to use the uuid_generate_v4(), so we have to install this extension
    # Example Error: sqlalchemy.exc.ProgrammingError: (psycopg2.ProgrammingError) function uuid_generate_v4() does not exist
    op.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";')
    # Users table: UUID primary key generated server-side, unique email,
    # optional password (system users may have none) and a free-form JSON meta.
    op.create_table(
        'users',
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
        sa.Column('id', UUID(), nullable=False, server_default=func.uuid_generate_v4()),
        sa.Column('email', sa.String(), nullable=False),
        sa.Column('password', sa.String(), nullable=True),
        sa.Column('meta', JSON(), nullable=True),
        sa.Column('active', sa.Boolean(), nullable=False),
        sa.Column('is_system_user', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index(op.f('ix_users_created_at'), 'users', ['created_at'], unique=False)
    op.create_index(op.f('ix_users_updated_at'), 'users', ['updated_at'], unique=False)
    op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
    op.create_index(op.f('ix_users_active'), 'users', ['active'], unique=False)
def get_table(name):
    """Return the search table for *name*, creating and registering it on
    first use.

    The table carries a tsvector body (GIN-indexed) plus point/linestring/
    polygon geometry columns (GiST-indexed) for spatial search.
    """
    name = clean_table_name(name)
    table = metadata.tables.get(name, None)
    if table is None:
        table = sqlalchemy.Table(
            name, metadata,
            sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True),
            sqlalchemy.Column('content_type', sqlalchemy.String(length=255)),
            sqlalchemy.Column('object_id', sqlalchemy.Text()),
            sqlalchemy.Column('title', sqlalchemy.String(length=255)),
            sqlalchemy.Column('body', TsVector()),
            sqlalchemy.Column('data', JSON(), nullable=False),
            sqlalchemy.Column('geom_point', Geometry(geometry_type='MULTIPOINT', srid=4326)),
            # BUG FIX: the linestring and polygon columns were copy-pasted with
            # geometry_type='MULTIPOINT'; use the geometry types matching the
            # column names.
            sqlalchemy.Column('geom_linestring', Geometry(geometry_type='MULTILINESTRING', srid=4326)),
            sqlalchemy.Column('geom_polygon', Geometry(geometry_type='MULTIPOLYGON', srid=4326)),
            sqlalchemy.Index('%s_body_idx' % name, 'body', postgresql_using='gin'),
            sqlalchemy.Index('%s_geom_point_idx' % name, 'geom_point', postgresql_using='gist'),
            sqlalchemy.Index('%s_geom_linestring_idx' % name, 'geom_linestring', postgresql_using='gist'),
            sqlalchemy.Index('%s_geom_polygon_idx' % name, 'geom_polygon', postgresql_using='gist'),
        )
    return table
class Pattern(db.Model, JsonSerializableMixin):
    """Pattern model with cached lookups and a download counter.

    Mirrors the Fixture model's caching scheme: entries live in the 'model'
    cache region under 'pattern:<id>' / 'pattern:id_with_names'.
    """
    __tablename__ = 'patterns'

    id = db.Column(db.Integer(), primary_key=True)
    name = db.Column(db.Unicode(), unique=True, nullable=False)
    intro = db.Column(db.UnicodeText(), nullable=True)
    profile = db.Column(db.Unicode(256), nullable=False)
    references = db.Column(JSON())
    attachment = db.Column(JSON())
    assets = db.Column(JSON())

    @classmethod
    def from_cache_by_id(cls, pattern_id):
        """Load a pattern by id through the 'model' cache region."""
        return Pattern.query.options(FromCache('model', 'pattern:%s' % pattern_id)). \
            filter(Pattern.id == pattern_id).first()

    @classmethod
    def id_with_names(cls):
        """Return [{'id': ..., 'name': ...}] for all patterns, sorted by name (cached)."""
        return [
            dict(id=id, name=name) for (id, name) in Pattern.query.
            options(FromCache('model', 'pattern:id_with_names')).with_entities(
                Pattern.id, Pattern.name).order_by(Pattern.name.asc()).all()
        ]

    @property
    def download_times(self):
        """Download counter for this pattern (None when no row exists).

        BUG FIX: previously filtered on PatternDownload.fixture_id — a
        copy-paste from the Fixture model. PatternDownload rows are keyed by
        pattern_id, as increase_download() below already assumes.
        """
        return db.session.query(PatternDownload).with_entities(PatternDownload.number_of_times). \
            filter(PatternDownload.pattern_id == self.id).scalar()

    def increase_download(self):
        """Atomically bump the download counter (session not synchronized)."""
        PatternDownload.query.filter(PatternDownload.pattern_id == self.id). \
            update({PatternDownload.number_of_times: PatternDownload.number_of_times + 1},
                   synchronize_session=False)

    def __eq__(self, other):
        # Equality is by unique name, consistent with __hash__ below.
        return isinstance(other, Pattern) and other.name == self.name

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        return hash(self.name)

    def __repr__(self):
        return '<Pattern<id=%d>>' % self.id
class Comment(Base):
    """Comment on a target object, authored by a user; body stored as JSON."""

    id = Column(UUID(as_uuid=True), default=uuid4, primary_key=True)
    # NOTE(review): no ForeignKey declared for author/target — confirm intentional.
    author_id = Column(UUID(as_uuid=True), nullable=False)
    target_id = Column(UUID(as_uuid=True), nullable=False, )
    # utcnow() here presumably yields a server-side expression (sqlalchemy-utc) — confirm.
    created_at = Column(UtcDateTime(), nullable=False, default=utcnow())
    comment = Column(JSON(), nullable=False)
    __tablename__ = 'comments'
class Pub(Base):
    """Publication record keyed by Scopus id; raw source kept in 'data'."""
    __tablename__ = 'pubs'

    id = Column('id', Integer, primary_key=True)
    scopus_id = Column('scopus_id', String(32), index=True)
    doctype = Column('doctype', String(8))
    pub_date = Column('pub_date', Date())
    cited_count = Column('cited_count', Integer(), default=0)
    data = Column('data', JSON())  # full source record as JSON
class Check(db.Model):
    """Health-check definition for one app dyno type and URL."""

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid4)
    app_id = db.Column(UUID(as_uuid=True), db.ForeignKey('app.id'))
    url = db.Column(db.String(256))
    dynotype = db.Column(db.String(64))
    # BUG FIX: default={} shared a single mutable dict object across all new
    # rows; a callable default gives each row its own empty dict.
    params = db.Column(JSON(), default=dict)

    def __repr__(self):
        return '<Check {}/{} {}>'.format(self.app.name, self.dynotype, self.url)
def load_dialect_impl(self, dialect):
    """Choose the concrete column type for *dialect*.

    On PostgreSQL, prefer the native JSON type when available, otherwise the
    fallback PostgresJSONType; every other dialect uses the generic impl.
    """
    if dialect.name != 'postgresql':
        return dialect.type_descriptor(self.impl)
    chosen = JSON() if has_postgres_json else PostgresJSONType()
    return dialect.type_descriptor(chosen)
def upgrade(op):
    """Create the 'task' table (columns mirror the TaskMixin model)."""
    # NOTE(review): this Enum local is never used below — the 'state' column
    # uses sa.Enum(TaskState) instead. Confirm whether 'taskstate' should be
    # the type applied to the column.
    task_state = sa.Enum('CREATED', 'FAILED', 'PUBLISHED', 'RECEIVED', 'RERUNNED',
                         'STARTED', 'STOPPED', 'SUCCEED', name='taskstate')
    op.create_table('task',
                    sa.Column('args', JSON()),
                    sa.Column('celeryUuid', UUID(as_uuid=True), index=True, nullable=False),
                    sa.Column('creationTime', sa.DateTime(), nullable=False, server_default=func.now()),
                    sa.Column('hostname', sa.String(64)),
                    sa.Column('isEager', sa.Boolean()),
                    sa.Column('kwargs', JSON()),
                    sa.Column('name', sa.String(256), nullable=False),
                    sa.Column('queue', sa.String(64)),
                    sa.Column('planificationTime', sa.DateTime()),
                    sa.Column('result', JSON()),
                    sa.Column('state', sa.Enum(TaskState), nullable=False),
                    sa.Column('startTime', sa.DateTime()),
                    sa.Column('stopTime', sa.DateTime()),
                    sa.Column('traceback', sa.Text()))
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # feed_post_meta: per-content JSON metadata, addressed by unique contentid.
    op.create_table('feed_post_meta',
                    sa.Column('id', sa.BigInteger(), nullable=False),
                    sa.Column('contentid', sa.Text(), nullable=False),
                    sa.Column('meta', JSON(), nullable=True),
                    sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_feed_post_meta_contentid'), 'feed_post_meta', ['contentid'], unique=True)
class VerificationOrmModel(OrmModelBase):
    """Pending verification row with source address and expiry."""
    __tablename__ = 'verification'

    verification_id = Column(
        String(32), primary_key=True)  # Length of a uuid4 with the - stripped
    # NOTE(review): presumably an IPv4 address packed as an integer — confirm encoding.
    ip4 = Column(BigInteger(), nullable=False, index=True)
    expires = Column(BigInteger(), nullable=False, index=True)
    # TODO: remove
    data = Column(JSON(), nullable=False)
class user_info_table(_db.Model):
    """User account row with profile JSON blobs and content relationships."""

    id = _db.Column(_db.Integer, primary_key=True)
    name = _db.Column(_db.String(64), nullable=False)
    account = _db.Column(_db.String(128), unique=True, nullable=False)
    passwordHash = _db.Column(_db.String(128), nullable=False)  # sha512
    salt = _db.Column(_db.String(32), nullable=False)
    socialInfo = _db.Column(JSON(), nullable=False)  # social links/profile — presumably; confirm schema
    verifyAccount = _db.Column(_db.Boolean, default=False)
    followersCount = _db.Column(BIGINT, default=0)
    userImage = _db.Column(JSON(), nullable=True)
    createDT = _db.Column(_db.DateTime, nullable=False, default=datetime.utcnow)

    # relation
    rs_posts_dy = _db.relationship('articles_table', backref=_db.backref('rs_author', lazy=True),
                                   lazy='dynamic')
    rs_likes_dy = _db.relationship('post_likes_table', lazy='dynamic')
    rs_comments_dy = _db.relationship('post_comments_table', lazy='dynamic')
    rs_follows_dy = _db.relationship('follows_table', lazy='dynamic')
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('comhealth_customer_info_items', 'order',
                    existing_type=sa.INTEGER(),
                    nullable=True)
    # Revert the info 'data' column from JSONB back to plain JSON.
    op.alter_column(
        'comhealth_customer_info',
        'data',
        existing_type=JSONB(),
        type_=JSON(),
    )
class Transaction(db.Model):
    """Customer/vendor transaction holding gradient-priced products.

    The row's UUID doubles as the secret behind the encrypted ``key`` used to
    validate external references back to this transaction.
    """
    Status = TransactionStatus  # convenience alias for callers

    id = db.Column(db.Integer(), primary_key=True)
    uuid = db.Column(UUID(as_uuid=True), index=True, default=uuid4)
    created_at = db.Column(db.DateTime(), default=datetime.utcnow)
    updated_at = db.Column(db.DateTime(), default=datetime.utcnow, onupdate=datetime.utcnow)
    properties = db.Column(JSON())

    customer = db.relationship('Customer', backref=db.backref('transactions'))
    customer_id = db.Column(db.Integer(), db.ForeignKey('customer.id'))
    vendor = db.relationship('Vendor', backref=db.backref('transactions'))
    vendor_id = db.Column(db.Integer(), db.ForeignKey('vendor.id'), nullable=False)
    status = db.Column(db.Enum(TransactionStatus), default=TransactionStatus.OPEN, nullable=False)

    # Products reached through the GradientPrice association objects.
    products = association_proxy('gradient_prices', 'product')

    @property
    def total(self):
        # Sum of all line-item prices.
        return sum(gp.price for gp in self.gradient_prices)

    def add_product(self, product, price, max_price, min_price):
        # Attach a product with its price band via a new GradientPrice row.
        self.gradient_prices \
            .append(GradientPrice(product=product, price=price, max_price=max_price, min_price=min_price))

    @property
    def key(self):
        ''' Generates an encrypted key based off of this transaction's UUID '''
        # NOTE(review): 'f' is presumably a module-level Fernet instance — confirm.
        return f.encrypt(str(self.uuid).encode('utf8')).decode('utf8')

    def validate_key(self, key):
        ''' Asserts that the supplied key, when decrypted, matches this transaction's UUID '''
        return f.decrypt(key.encode('utf8')).decode('utf8') == str(self.uuid)
class post_comments_table(_db.Model):
    """Comment on an article; body stored as JSON."""

    id = _db.Column(BIGINT(), primary_key=True, nullable=False)
    post_id = _db.Column(_db.Integer, _db.ForeignKey('articles_table.id'), nullable=False)
    user_id = _db.Column(_db.Integer, _db.ForeignKey('user_info_table.id'), nullable=False)
    # NOTE(review): no FK declared — presumably the id of the parent comment; confirm.
    replyTo = _db.Column(BIGINT)
    content = _db.Column(JSON(), nullable=False)
    commentDT = _db.Column(_db.DateTime, nullable=False, default=datetime.utcnow)
class IntegrationBase(Base, BaseModel):
    """
    Model from which all integrations inherit using polymorphic approach
    """

    __tablename__ = "integrations"

    # UI / alerting capabilities; subclasses override as needed.
    front_visible = False
    as_alert_channel = False
    supports_report_alerting = False

    id = sa.Column(sa.Integer, primary_key=True)
    resource_id = sa.Column(sa.Integer, sa.ForeignKey("applications.resource_id"))
    integration_name = sa.Column(sa.Unicode(64))
    # Encrypted config blob; read/written through the `config` hybrid property.
    # NOTE(review): default="" is not a dict as decrypt_dictionary_keys would
    # presumably expect — confirm intended default.
    _config = sa.Column("config", JSON(), nullable=False, default="")
    modified_date = sa.Column(sa.DateTime)

    channel = sa.orm.relationship(
        "AlertChannel",
        cascade="all,delete-orphan",
        passive_deletes=True,
        passive_updates=True,
        uselist=False,
        backref="integration",
    )

    __mapper_args__ = {
        "polymorphic_on": "integration_name",
        "polymorphic_identity": "integration",
    }

    @classmethod
    def by_app_id_and_integration_name(
        cls, resource_id, integration_name, db_session=None
    ):
        """Return the first integration matching the app resource id and name."""
        db_session = get_db_session(db_session)
        query = db_session.query(cls)
        query = query.filter(cls.integration_name == integration_name)
        query = query.filter(cls.resource_id == resource_id)
        return query.first()

    @hybrid_property
    def config(self):
        """Decrypted view of the stored configuration dictionary."""
        return decrypt_dictionary_keys(self._config)

    @config.setter
    def config(self, value):
        # Key-wise encryption only supports flat dictionaries.
        if not hasattr(value, "items"):
            raise Exception("IntegrationBase.config only accepts " "flat dictionaries")
        self._config = encrypt_dictionary_keys(value)
class Dashboard(Resource):
    """Charting dashboard resource: a layout config plus a set of charts."""

    __tablename__ = "ae_charting_ee_dashboards"
    __mapper_args__ = {"polymorphic_identity": "dashboard"}
    # lists configurable possible permissions for this resource type
    __possible_permissions__ = ("view", "update")

    resource_id = sa.Column(
        sa.Integer(),
        sa.ForeignKey("resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"),
        primary_key=True,
    )
    uuid = sa.Column(sa.String(), default=gen_uuid)
    public = sa.Column(sa.Boolean(), default=False, nullable=False)
    description = sa.Column(sa.UnicodeText, nullable=False, default="")
    # BUG FIX: default=[] shared one mutable list object between all new rows;
    # a callable default gives each row its own empty list.
    layout_config = sa.Column(JSON(), default=list, nullable=False)

    charts = sa.orm.relationship(
        "DashboardChart",
        cascade="all, delete-orphan",
        passive_deletes=True,
        passive_updates=True,
        lazy="dynamic",
    )

    def get_dict(self, *args, **kwargs):
        """Serialize the dashboard, its per-chart config and its public URL."""
        request = kwargs.pop("request", None)
        result = super(Dashboard, self).get_dict(*args, **kwargs)
        result["chart_config"] = {}
        result["public_url"] = self.get_public_url(request=request)
        for chart in self.charts:
            # Charts may still carry a legacy config format; normalize first.
            chart.migrate_json_config()
            result["chart_config"][chart.uuid] = chart.get_dict()
        return result

    def get_public_url(self, request=None, _app_url=None):
        """
        Returns url that user can use to visit specific report
        """
        if not request:
            request = get_current_request()
        url = request.route_url("/", _app_url=_app_url)
        return (url + "#/dashboard/{}").format(self.uuid)

    def get_chart(self, uuid):
        """Return the chart with the given uuid, or None when absent."""
        return self.charts.filter(DashboardChart.uuid == uuid).first()
class State(Base):
    """Single key/value state row; keys come from the StateKey enum."""
    __tablename__ = 'vld_state'

    class StateKey(enum.Enum):
        sequence_number = 'sequence_number'

    key = Column(Enum(StateKey, name='vld_statekey_enum'), primary_key=True)
    value = Column(JSON(), nullable=True)

    def __init__(self, key, value):
        self.key = key
        self.value = value

    def __repr__(self):
        return '<State: %s %s>' % (self.key, self.value)
def getTable(self, table_name): """sqlaclhemy table. autogenerated from engine""" # probably inefficent to generate the table every time! # maybe add to dict # N.B. for testing w/pytest-pgsql + transacted + pyesql-helper # engine might be somewhere else fix_columns = () if table_name == 'visit_summary': fix_columns = (sqla.Column('notes', JSON(none_as_null=True)), ) tbl = sqla.Table(table_name, self.sqlmeta, *fix_columns, extend_existing=True, autoload=True, autoload_with=self.engine) return (tbl)
class WikiMap(BASE):
    """Wiki map keyed by (id, title); rendered graph data stored as JSON."""
    __tablename__ = "maps"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    title = Column(String(200), primary_key=True, index=True)
    json_data = Column(JSON())
    levels = Column(SmallInteger())
    lpp = Column(SmallInteger())

    def __repr__(self):
        # json_data is deliberately omitted — it can be very large.
        fields = [
            "id={},".format(self.id),
            "title='{}',".format(self.title),
            "json_data - not displayed,",
            "levels={},".format(self.levels),
            "lpp (links per page)={}".format(self.lpp),
        ]
        return "<WikiMap: (\n\t" + "\n\t".join(fields) + "\n)>"
class Layer(db.Model):
    """Map layer belonging to a Service; raw layer metadata kept as JSON."""
    __tablename__ = 'layers'

    id = db.Column(db.Integer, primary_key=True)
    created_at = db.Column(db.DateTime(), default=func.now())
    service_id = db.Column(db.Integer, db.ForeignKey('services.id'))
    layer_data = db.Column(JSON())
    name = db.Column(db.String())
    # Deleting a service cascades to its layers.
    service = db.relationship(
        Service,
        backref=db.backref(
            'layers',
            lazy='dynamic',
            cascade='delete,all',
        )
    )
class Metric(Base, BaseModel):
    """Time-series metric row, day-partitioned and mirrored to Elasticsearch."""

    __tablename__ = "metrics"
    __table_args__ = {"implicit_returning": False}

    pkey = sa.Column(sa.BigInteger(), primary_key=True)
    resource_id = sa.Column(
        sa.Integer(),
        sa.ForeignKey("applications.resource_id"),
        nullable=False,
        primary_key=True,
    )
    timestamp = sa.Column(sa.DateTime(), default=datetime.utcnow, server_default=sa.func.now())
    # BUG FIX: default={} shared a single mutable dict object between all new
    # rows; a callable default gives each row its own empty dict.
    tags = sa.Column(JSON(), default=dict)
    namespace = sa.Column(sa.Unicode(255))

    @property
    def partition_id(self):
        """Daily partition name, e.g. 'rcae_m_2024_01_31'."""
        return "rcae_m_%s" % self.timestamp.strftime("%Y_%m_%d")

    def es_doc(self):
        """Build the Elasticsearch document for this metric."""
        tags = {}
        tag_list = []
        for name, value in self.tags.items():
            # replace dot in indexed tag name
            name = name.replace(".", "_")
            tag_list.append(name)
            tags[name] = {
                "values": convert_es_type(value),
                # keep the raw numeric value; bools are excluded even though
                # bool is an int subclass
                "numeric_values": value
                if (isinstance(value, (int, float)) and not isinstance(value, bool))
                else None,
            }
        return {
            "metric_id": self.pkey,
            "resource_id": self.resource_id,
            "timestamp": self.timestamp,
            "namespace": self.namespace,
            "tags": tags,
            "tag_list": tag_list,
        }
def __init__(self, none_as_null=False):
    """Initialize the type, forwarding *none_as_null* to the JSON base type."""
    # Delegate straight to the JSON base initializer.
    JSON.__init__(self, none_as_null=none_as_null)