class Statistics(db.Model):
    """Periodic snapshot of aggregate system statistics."""

    __tablename__ = 'statistics'

    id = Column(db.Integer, primary_key=True)
    # Callable default: evaluated per row at insert time.
    update_at = Column(db.DateTime, nullable=False, default=get_current_time)
    managed_nodes = Column(db.Integer, nullable=False, default=0)
    system_capacity = Column(db.Integer, nullable=False, default=0)
    system_utilization = Column(JSONType(10000))
    # BUG FIX: default was the float 0.0 on an Integer column; use int 0
    # for consistency with the other counters.
    user_count = Column(db.Integer, nullable=False, default=0)
    registered_master = Column(db.Integer, nullable=False, default=0)
    total_task = Column(db.Integer, nullable=False, default=0)
    service_level = Column(JSONType(10000), nullable=False, default='')
    uptime = Column(db.Integer, nullable=False, default=0)
    page_visit_count = Column(db.Integer, nullable=False, default=0)
    api_visit_count = Column(db.Integer, nullable=False, default=0)

    @classmethod
    def update(cls):
        """Refresh the statistics snapshot (not implemented yet).

        BUG FIX: this was declared ``@staticmethod`` while taking a
        ``cls`` parameter, so ``Statistics.update()`` raised TypeError;
        ``@classmethod`` matches the signature's clear intent.
        """
        pass

    @classmethod
    def get_count(cls):
        """Return the total number of rows without loading them.

        NOTE(review): the list argument to ``with_only_columns`` is the
        legacy pre-1.4 SQLAlchemy form — confirm the pinned version
        before modernizing.
        """
        count_q = cls.query.statement.with_only_columns(
            [func.count()]).order_by(None)
        count = db.session.execute(count_q).scalar()
        return count
class OAIProvider(db.Model):
    """Configuration record for a single OAI-PMH harvesting provider."""

    __tablename__ = "oarepo_oai_provider"

    id = db.Column(db.Integer, primary_key=True)
    # Short unique slug; used as the lookup key into the parser registry.
    code = db.Column(db.String(16), nullable=False, unique=True)
    description = db.Column(db.String(2048), nullable=True)
    # Base URL of the remote OAI-PMH endpoint to harvest from.
    oai_endpoint = db.Column(db.String(2048), nullable=False)
    # Attribute named ``set_`` to avoid shadowing the builtin; the actual
    # database column is named "set".
    set_ = db.Column(db.String(256), name="set")
    metadata_prefix = db.Column(db.String(32), default="oai_dc")
    # Fields merged into every harvested record: native JSONB on
    # PostgreSQL, generic JSONType on sqlite and mysql.
    constant_fields = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ), default=lambda: dict(), nullable=True)

    def get_parsers(self):
        """Return parsers registered under this provider's code, or {}."""
        return registry.parsers.get(self.code) or {}

    def get_rules(self, parser_name):
        """Return the rules registered for *parser_name* (None if absent)."""
        return registry.rules.get(parser_name)
def upgrade():
    """Create the initial user / device / script / log / task schema."""
    # --- user: account with hashed password ---
    op.create_table('user',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('username', sa.String(length=32), nullable=False),
        sa.Column('email', sa.String(length=64), nullable=False),
        # NOTE(review): ``sa.Binary`` was removed in SQLAlchemy 1.4
        # (replaced by ``sa.LargeBinary``) — confirm the pinned version.
        sa.Column('_password_hash', sa.Binary(), nullable=False),
        sa.Column('created_at', ArrowType(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_user_id'), 'user', ['id'], unique=False)
    # --- device: registered device owned by a user ---
    op.create_table('device',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('uuid', UUIDType(), nullable=False),
        sa.Column('name', sa.String(length=128), nullable=True),
        # 15 chars fits dotted IPv4 only — presumably IPv6 is not expected.
        sa.Column('last_address', sa.String(length=15), nullable=True),
        sa.Column('registered_at', ArrowType(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_device_id'), 'device', ['id'], unique=False)
    op.create_index(op.f('ix_device_last_address'), 'device', ['last_address'], unique=False)
    op.create_index(op.f('ix_device_name'), 'device', ['name'], unique=False)
    op.create_index(op.f('ix_device_registered_at'), 'device', ['registered_at'], unique=False)
    op.create_index(op.f('ix_device_uuid'), 'device', ['uuid'], unique=True)
    # --- script: user-owned script with call/runtime counters ---
    op.create_table('script',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(length=64), nullable=False),
        sa.Column('tag', sa.String(length=64), nullable=False),
        sa.Column('calls', sa.Integer(), nullable=True),
        sa.Column('runtime', sa.Integer(), nullable=True),
        sa.Column('created_at', ArrowType(), nullable=True),
        sa.Column('updated_at', ArrowType(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_script_id'), 'script', ['id'], unique=False)
    # --- device_log: JSON log payloads per device ---
    op.create_table('device_log',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('log', JSONType(), nullable=False),
        sa.Column('created_at', ArrowType(), nullable=True),
        sa.Column('device_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['device_id'], ['device.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        # NOTE(review): unique constraint on 'id' is redundant with the PK.
        sa.UniqueConstraint('id')
    )
    op.create_index(op.f('ix_device_log_created_at'), 'device_log', ['created_at'], unique=False)
    # --- device_task: JSON task payloads per device ---
    op.create_table('device_task',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('device_id', sa.Integer(), nullable=False),
        sa.Column('task', JSONType(), nullable=False),
        sa.Column('created_at', ArrowType(), nullable=True),
        sa.ForeignKeyConstraint(['device_id'], ['device.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        # NOTE(review): redundant with the PK, as above.
        sa.UniqueConstraint('id')
    )
def __init__(self, db_uri='sqlite:///immobilien.db', debug=False):
    """Create the engine, define all tables, create them and connect.

    :param db_uri: SQLAlchemy database URI (defaults to a local SQLite file).
    :param debug: when True, echo all generated SQL.
    """
    self.debug = debug
    self.db_uri = db_uri
    self.engine = create_engine(db_uri, echo=self.debug)
    self.metadata = MetaData(self.engine)
    # self.encoding =
    self.immobilien = Table(
        'immobilien', self.metadata,
        Column('id', Integer(), primary_key=True),
        Column('search_url', String(2000)),
        # BUG FIX: the default was ``...timestamp()`` evaluated ONCE at
        # definition time, so every row got the same fixed timestamp.
        # A callable default is evaluated on each INSERT instead.
        Column('unixtimestamp', Integer(),
               default=lambda: int(datetime.datetime.now(
                   datetime.timezone.utc).timestamp())),
        Column('cwid', String()),
        Column('shortlisted', String()),
        Column('privateoffer', String()),
        Column('title', String(255)),
        Column('address', String(255)),
        Column('district', String(255)),
        Column('city', String(255)),
        Column('zip', Integer()),
        Column('distanceinkm', Integer()),
        Column('hasnewflag', String()),
        Column('hasfloorplan', String()),
        Column('hasvaluation', String()),
        Column('realtorlogoforresultlisturl', String()),
        Column(u'realtorcompanyname', String()),
        Column('contactname', String()),
        Column('kaltmiete', Numeric()),
        Column('kaufpreis', Numeric()),
        Column('wohnfläche', Numeric()),
        Column('grundstück', Integer()),
        Column('zimmer', Integer()),
        Column('idtohide', Integer()),
        Column('listingsize', String(3)),
        Column('latitude', Numeric()),
        Column('longitude', Numeric()),
        Column('checkedattributes', JSONType()),
        Column('gallerypictures', JSONType()))
    self.checkedAttributes = Table(
        'checkedAttributes', self.metadata,
        Column('id', Integer(), primary_key=True),
        Column('attribute', String(), unique=True))
    # Association table linking listings to checked attributes.
    self.immobilienAttributes = Table(
        'immobilienAttributes', self.metadata,
        Column('id', Integer(), primary_key=True),
        Column('immobilie_fk', Integer(), ForeignKey('immobilien.id')),
        # BUG FIX: this foreign key referenced 'immobilien.id' (copy-paste
        # error); the column name says it must point at checkedAttributes.
        Column('checkedAttributes_fk', Integer(),
               ForeignKey('checkedAttributes.id'), unique=True))
    self.url = Table(
        'url', self.metadata,
        Column('id', Integer(), primary_key=True),
        # NOTE(review): a URL stored as Integer looks wrong — confirm
        # whether this should be String before relying on it.
        Column('url', Integer()))
    self.metadata.create_all()
    self.conn = self.engine.connect()
def upgrade():
    """Create the ``events`` table referencing users, articles and amendements."""
    op.create_table(
        "events",
        sa.Column("pk", UUIDType(), nullable=False),
        sa.Column("type", sa.String(length=64), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("user_pk", sa.Integer(), nullable=True),
        # Arbitrary event payload and metadata blobs.
        sa.Column("data", JSONType(), nullable=True),
        sa.Column("meta", JSONType(), nullable=True),
        sa.Column("article_pk", sa.Integer(), nullable=True),
        sa.Column("amendement_pk", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(["article_pk"], ["articles.pk"]),
        sa.ForeignKeyConstraint(["amendement_pk"], ["amendements.pk"]),
        sa.ForeignKeyConstraint(["user_pk"], ["users.pk"]),
        sa.PrimaryKeyConstraint("pk"),
    )
def _include_sqlalchemy(obj, engine=None):
    """Attach all required SQLAlchemy type aliases to *obj*.

    Uses MySQL dialect types when ``engine == 'mysql'``, falling back to
    the generic SQLAlchemy types otherwise.
    """
    if engine == 'mysql':
        from sqlalchemy.dialects import mysql as engine_types
    else:
        from sqlalchemy import types as engine_types

    # Length is provided to JSONType so MySQL uses LONGTEXT (4 GB) instead
    # of TEXT, which only provides 64 KB of storage.
    obj.JSON = JSONType(length=2 ** 32 - 2)
    obj.Char = engine_types.CHAR
    # Fall back to generic types when the dialect lacks the specific one.
    obj.TinyText = getattr(engine_types, 'TINYTEXT', engine_types.TEXT)
    obj.hybrid_property = hybrid_property
    obj.Double = getattr(engine_types, 'DOUBLE', engine_types.FLOAT)

    # All binary aliases map onto LargeBinary.
    for alias in ('Binary', 'iBinary', 'iLargeBinary', 'iMediumBinary'):
        setattr(obj, alias, sqlalchemy.types.LargeBinary)

    obj.UUID = GUID

    # Legacy integer/boolean aliases.
    for alias, legacy_type in (
            ('Integer', LegacyInteger),
            ('MediumInteger', LegacyMediumInteger),
            ('SmallInteger', LegacySmallInteger),
            ('TinyInteger', LegacyTinyInteger),
            ('BigInteger', LegacyBigInteger),
            ('Boolean', LegacyBoolean)):
        setattr(obj, alias, legacy_type)

    if engine == 'mysql':
        from .engines import mysql as dummy_mysql  # noqa

    obj.AsBINARY = AsBINARY
    obj.MarshalBinary = MarshalBinary
    obj.PickleBinary = PickleBinary

    from sqlalchemy.ext.mutable import MutableDict

    # Overwrite :meth:`MutableDict.update` so bulk updates mark the
    # dict as changed (otherwise the mutation goes undetected).
    def update_mutable_dict(self, *args, **kwargs):
        super(MutableDict, self).update(*args, **kwargs)
        self.changed()

    MutableDict.update = update_mutable_dict
    obj.MutableDict = MutableDict
def _json_column(**kwargs):
    """Return a nullable JSON column (native JSON on PostgreSQL).

    Extra keyword arguments are forwarded to :class:`db.Column`.
    """
    column_type = JSONType().with_variant(
        postgresql.JSON(none_as_null=True),
        'postgresql',
    )
    return db.Column(column_type, nullable=True, **kwargs)
def upgrade():
    """Create the ``oauth`` table storing per-user provider tokens."""
    op.create_table(
        "oauth",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("provider", sa.String(length=50), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        # Raw OAuth token payload as returned by the provider.
        sa.Column("token", JSONType(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
class Record(BaseTable):
    """Base stored record; subclasses specialize via the ``type`` column."""

    __tablename__ = 'records'

    name = db.Column(db.String(128), unique=True, nullable=False)
    description = db.Column(db.String(200))
    starred = db.Column(db.Boolean, default=False)
    reprompt = db.Column(db.Boolean, default=False)
    color = db.Column(db.String(36))
    notes = db.Column(db.String(128))
    # Free-form structured payload of the record.
    details = db.Column(JSONType())
    attachments = db.Column(ScalarListType())
    history = db.Column(ScalarListType())
    # Discriminator column for SQLAlchemy polymorphic inheritance.
    type = db.Column(db.String(50))

    __mapper_args__ = {
        'polymorphic_identity': 'records',
        'polymorphic_on': type
    }

    def _get_name(self):
        """Return the record name."""
        return self.name

    def _get_description(self):
        """Return the record description."""
        return self.description

    def _get_starred(self):
        """Return whether the record is starred."""
        return self.starred

    def _get_reprompt(self):
        """Return the reprompt flag."""
        return self.reprompt

    def _get_color(self):
        """Return the record color."""
        return self.color

    def _get_notes(self):
        """Return the record notes."""
        return self.notes

    def _get_flags(self):
        """Not implemented yet."""
        pass

    def _get_tags(self):
        """Not implemented yet."""
        pass

    def _get_attachments(self):
        """Not implemented yet."""
        pass

    def _get_history(self):
        """Not implemented yet."""
        pass

    def change_description(self):
        """Not implemented yet."""
        pass

    def change_color(self):
        """Not implemented yet."""
        pass
class Execution(Base):
    """One execution of a GraphQL document, linked to its operations/fragments."""

    __tablename__ = "execution"

    id = Column(UUIDType, primary_key=True, default=uuid.uuid4)
    # NOTE(review): ``utcnow`` produces a naive datetime — confirm readers
    # treat it as UTC.
    dt = Column(DateTime, nullable=True, default=datetime.datetime.utcnow)
    # Presumably a duration in seconds — verify against the writer.
    ds = Column(Float, nullable=True)
    request_id = Column(UUIDType, ForeignKey('request.id'))
    request = relationship('Request', back_populates='executions')
    # Many-to-many links via association tables.
    operations = relationship(
        "Operation",
        secondary=execution_operation_link,
        back_populates="executions",
    )
    fragments = relationship(
        "Fragment",
        secondary=execution_fragment_link,
        back_populates="executions",
    )
    variables = Column(JSONType())

    @classmethod
    def from_document(cls, session, document, **kwargs):
        """Build an Execution from a parsed GraphQL *document*.

        Splits the document's definitions into operations and fragments,
        resolving each through its model's get-or-create helper; extra
        keyword arguments are forwarded to the constructor.
        """
        operations = [
            Operation.get_or_create_from_definition(session, definition)
            for definition in document.definitions
            if isinstance(definition, graphql.language.ast.OperationDefinition)
        ]
        fragments = [
            Fragment.get_or_create_from_definition(session, definition)
            for definition in document.definitions
            if isinstance(definition, graphql.language.ast.FragmentDefinition)
        ]
        return cls(operations=operations, fragments=fragments, **kwargs)
def test_should_jsontype_convert_jsonstring():
    """A JSONType column should convert to the JSONString graphene type."""
    converted = get_field(JSONType())
    assert converted.type == JSONString
def upgrade():
    """Upgrade database."""
    # Parent record metadata table shared by drafts and records.
    op.create_table(
        'rdm_parents_metadata',
        sa.Column(
            'created',
            # Microsecond precision on MySQL via fsp=6.
            sa.DateTime().with_variant(mysql.DATETIME(fsp=6), 'mysql'),
            nullable=False,
        ),
        sa.Column(
            'updated',
            sa.DateTime().with_variant(mysql.DATETIME(fsp=6), 'mysql'),
            nullable=False,
        ),
        sa.Column(
            'id',
            UUIDType(),
            nullable=False,
        ),
        sa.Column(
            'json',
            # Native JSONB on PostgreSQL; generic JSONType elsewhere.
            sa.JSON().with_variant(JSONType(), 'mysql').with_variant(
                postgresql.JSONB(none_as_null=True, astext_type=sa.Text()),
                'postgresql').with_variant(JSONType(), 'sqlite'),
            nullable=True,
        ),
        sa.Column(
            'version_id',
            sa.Integer(),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint(
            'id',
            name=op.f('pk_rdm_parents_metadata'),
        ),
    )
    # Drafts table FK to parent
    op.add_column('rdm_drafts_metadata',
                  sa.Column('parent_id', UUIDType(), nullable=True))
    op.create_foreign_key(
        op.f('fk_rdm_drafts_metadata_parent_id_rdm_parents_metadata'),
        'rdm_drafts_metadata',
        'rdm_parents_metadata',
        ['parent_id'],
        ['id'],
        ondelete='RESTRICT',
    )
    op.add_column('rdm_drafts_metadata',
                  sa.Column('index', sa.Integer, nullable=True))
    # Records table FK to parent
    op.add_column('rdm_records_metadata',
                  sa.Column('parent_id', UUIDType(), nullable=True))
    op.create_foreign_key(
        op.f('fk_rdm_records_metadata_parent_id_rdm_parents_metadata'),
        'rdm_records_metadata',
        'rdm_parents_metadata',
        ['parent_id'],
        ['id'],
        ondelete='RESTRICT',
    )
    op.add_column('rdm_records_metadata',
                  sa.Column('index', sa.Integer, nullable=True))
    # Records revisions table FK to parent
    # NOTE(review): no FK constraint is created for the version table's
    # parent_id here — confirm that is intentional.
    op.add_column('rdm_records_metadata_version',
                  sa.Column('parent_id', UUIDType(), nullable=True))
    op.add_column('rdm_records_metadata_version',
                  sa.Column('index', sa.Integer, nullable=True))
    # Create versions state table
    op.create_table(
        'rdm_versions_state',
        sa.Column('latest_index', sa.Integer(), nullable=True),
        sa.Column('parent_id', UUIDType(), nullable=False),
        sa.Column('latest_id', UUIDType(), nullable=True),
        sa.Column('next_draft_id', UUIDType(), nullable=True),
        sa.ForeignKeyConstraint(
            ['latest_id'],
            ['rdm_records_metadata.id'],
            name=op.f('fk_rdm_versions_state_latest_id_rdm_records_metadata'),
        ),
        sa.ForeignKeyConstraint(
            ['next_draft_id'],
            ['rdm_drafts_metadata.id'],
            name=op.f(
                'fk_rdm_versions_state_next_draft_id_rdm_drafts_metadata'),
        ),
        sa.ForeignKeyConstraint(
            ['parent_id'],
            ['rdm_parents_metadata.id'],
            name=op.f('fk_rdm_versions_state_parent_id_rdm_parents_metadata'),
            ondelete='CASCADE',
        ),
        sa.PrimaryKeyConstraint('parent_id',
                                name=op.f('pk_rdm_versions_state')),
    )
def upgrade():
    """Create the initial CVE-monitoring schema (cves, users, alerts, ...)."""
    # --- cves: one row per CVE with JSONB payload and GIN-indexed facets ---
    op.create_table(
        "cves",
        sa.Column("id", UUIDType(binary=False), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("cve_id", sa.String(), nullable=False),
        sa.Column("json", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column("vendors", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column("cwes", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column("summary", sa.String(), nullable=False),
        sa.Column("cvss2", sa.Float(), nullable=True),
        sa.Column("cvss3", sa.Float(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_cves_created_at"), "cves", ["created_at"], unique=False)
    op.create_index(op.f("ix_cves_cve_id"), "cves", ["cve_id"], unique=False)
    op.create_index(op.f("ix_cves_updated_at"), "cves", ["updated_at"], unique=False)
    # GIN indexes for containment queries on the JSONB facet columns.
    op.create_index(
        "ix_cves_vendors",
        "cves",
        ["vendors"],
        unique=False,
        postgresql_using="gin",
    )
    op.create_index("ix_cves_cwes", "cves", ["cwes"], unique=False, postgresql_using="gin")
    # --- cwes: weakness catalogue ---
    op.create_table(
        "cwes",
        sa.Column("id", UUIDType(binary=False), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("cwe_id", sa.String(), nullable=True),
        sa.Column("name", sa.String(), nullable=True),
        sa.Column("description", sa.String(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_cwes_created_at"), "cwes", ["created_at"], unique=False)
    # --- metas: simple name/value store ---
    op.create_table(
        "metas",
        sa.Column("id", UUIDType(binary=False), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("value", sa.String(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_metas_created_at"),
                    "metas", ["created_at"], unique=False)
    # --- tasks ---
    op.create_table(
        "tasks",
        sa.Column("id", UUIDType(binary=False), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_tasks_created_at"), "tasks", ["created_at"], unique=False)
    # --- users: account plus notification preferences ---
    op.create_table(
        "users",
        sa.Column("id", UUIDType(binary=False), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("username", sa.String(length=50), nullable=False),
        sa.Column("password", sa.String(length=255), server_default="", nullable=False),
        sa.Column(
            "reset_password_token",
            sa.String(length=100),
            server_default="",
            nullable=False,
        ),
        sa.Column("email", sa.String(length=255), nullable=False),
        sa.Column("email_confirmed_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column(
            "enable_notifications",
            sa.Boolean(),
            server_default=sa.text("true"),
            nullable=False,
        ),
        sa.Column("filters_notifications", JSONType(), nullable=True),
        sa.Column(
            "frequency_notifications",
            sa.Enum("once", "always", name="notification_frequencies"),
            nullable=True,
        ),
        sa.Column("is_active", sa.Boolean(), server_default=sa.text("false"), nullable=False),
        sa.Column("first_name", sa.String(length=100), server_default="", nullable=False),
        sa.Column("last_name", sa.String(length=100), server_default="", nullable=False),
        sa.Column("admin", sa.Boolean(), server_default=sa.text("false"), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("email"),
        sa.UniqueConstraint("username"),
    )
    op.create_index(op.f("ix_users_created_at"), "users", ["created_at"], unique=False)
    # --- vendors ---
    op.create_table(
        "vendors",
        sa.Column("id", UUIDType(binary=False), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name"),
    )
    op.create_index(op.f("ix_vendors_created_at"), "vendors", ["created_at"], unique=False)
    # --- changes: per-task CVE change payloads ---
    op.create_table(
        "changes",
        sa.Column("id", UUIDType(binary=False), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("json", JSONType(), nullable=True),
        sa.Column("cve_id", UUIDType(binary=False), nullable=True),
        sa.Column("task_id", UUIDType(binary=False), nullable=True),
        sa.ForeignKeyConstraint(
            ["cve_id"],
            ["cves.id"],
        ),
        sa.ForeignKeyConstraint(
            ["task_id"],
            ["tasks.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_changes_created_at"), "changes", ["created_at"], unique=False)
    # --- products: per-vendor products ---
    op.create_table(
        "products",
        sa.Column("id", UUIDType(binary=False), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("vendor_id", UUIDType(binary=False), nullable=True),
        sa.ForeignKeyConstraint(
            ["vendor_id"],
            ["vendors.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_products_created_at"), "products", ["created_at"], unique=False)
    op.create_index(op.f("ix_products_name"), "products", ["name"], unique=False)
    # --- reports: per-user report with shareable link ---
    op.create_table(
        "reports",
        sa.Column("id", UUIDType(binary=False), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("public_link", sa.String(), nullable=True),
        sa.Column("seen", sa.Boolean(), nullable=True),
        sa.Column("details", JSONType(), nullable=True),
        sa.Column("user_id", UUIDType(binary=False), nullable=True),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_reports_created_at"), "reports",
                    ["created_at"], unique=False)
    # --- users_vendors: user subscriptions to vendors (M:N) ---
    op.create_table(
        "users_vendors",
        sa.Column("user_id", UUIDType(binary=False), nullable=False),
        sa.Column("vendor_id", UUIDType(binary=False), nullable=False),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.ForeignKeyConstraint(
            ["vendor_id"],
            ["vendors.id"],
        ),
        sa.PrimaryKeyConstraint("user_id", "vendor_id"),
    )
    # --- alerts: per-user notifications tied to a CVE and report ---
    op.create_table(
        "alerts",
        sa.Column("id", UUIDType(binary=False), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("details", JSONType(), nullable=True),
        sa.Column("notify", sa.Boolean(), nullable=True),
        sa.Column("user_id", UUIDType(binary=False), nullable=True),
        sa.Column("cve_id", UUIDType(binary=False), nullable=True),
        sa.Column("report_id", UUIDType(binary=False), nullable=True),
        sa.ForeignKeyConstraint(
            ["cve_id"],
            ["cves.id"],
        ),
        sa.ForeignKeyConstraint(
            ["report_id"],
            ["reports.id"],
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_alerts_created_at"), "alerts", ["created_at"], unique=False)
    # --- events: typed CVE change events ---
    op.create_table(
        "events",
        sa.Column("id", UUIDType(binary=False), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column(
            "type",
            sa.Enum(
                "new_cve",
                "references",
                "cpes",
                "cvss",
                "summary",
                "cwes",
                name="event_types",
            ),
            nullable=True,
        ),
        sa.Column("details", JSONType(), nullable=True),
        sa.Column("review", sa.Boolean(), nullable=True),
        sa.Column("cve_id", UUIDType(binary=False), nullable=True),
        sa.Column("change_id", UUIDType(binary=False), nullable=True),
        sa.ForeignKeyConstraint(
            ["change_id"],
            ["changes.id"],
        ),
        sa.ForeignKeyConstraint(
            ["cve_id"],
            ["cves.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_events_created_at"), "events", ["created_at"], unique=False)
    # --- users_products: user subscriptions to products (M:N) ---
    op.create_table(
        "users_products",
        sa.Column("user_id", UUIDType(binary=False), nullable=False),
        sa.Column("product_id", UUIDType(binary=False), nullable=False),
        sa.ForeignKeyConstraint(
            ["product_id"],
            ["products.id"],
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("user_id", "product_id"),
    )
    # --- alerts_events: alerts grouped with their events (M:N) ---
    op.create_table(
        "alerts_events",
        sa.Column("alert_id", UUIDType(binary=False), nullable=False),
        sa.Column("event_id", UUIDType(binary=False), nullable=False),
        sa.ForeignKeyConstraint(
            ["alert_id"],
            ["alerts.id"],
        ),
        sa.ForeignKeyConstraint(
            ["event_id"],
            ["events.id"],
        ),
        sa.PrimaryKeyConstraint("alert_id", "event_id"),
    )
def upgrade():
    """Create the initial team/rules/checks monitoring schema."""
    # --- logs: leveled, keyed log messages ---
    op.create_table(
        'logs',
        sa.Column('id', UUIDType(binary=False), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('level', sa.String(length=10), nullable=False),
        sa.Column('message', sa.String(), nullable=False),
        sa.Column('key', sa.String(length=50), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_logs_created_at'), 'logs', ['created_at'], unique=False)
    # --- teams ---
    op.create_table(
        'teams',
        sa.Column('id', UUIDType(binary=False), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_teams_created_at'), 'teams', ['created_at'], unique=False)
    # --- users: name must be unique ---
    op.create_table(
        'users',
        sa.Column('id', UUIDType(binary=False), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('admin', sa.Boolean(), nullable=True),
        sa.Column('active', sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    op.create_index(op.f('ix_users_created_at'), 'users', ['created_at'], unique=False)
    # --- configs: JSON config blobs per team ---
    op.create_table(
        'configs',
        sa.Column('id', UUIDType(binary=False), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('team_id', UUIDType(binary=False), nullable=False),
        sa.Column('data', JSONType(), nullable=False),
        sa.ForeignKeyConstraint(['team_id'], ['teams.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_configs_created_at'), 'configs', ['created_at'], unique=False)
    # --- grants: role of a user within a team ---
    op.create_table(
        'grants',
        sa.Column('id', UUIDType(binary=False), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('user_id', UUIDType(binary=False), nullable=True),
        sa.Column('role', sa.Enum('member', 'editor', 'manager', name='rolenames'), nullable=False),
        sa.Column('team_id', UUIDType(binary=False), nullable=True),
        sa.ForeignKeyConstraint(['team_id'], ['teams.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_grants_created_at'), 'grants', ['created_at'], unique=False)
    # --- rules: name unique per team ---
    op.create_table(
        'rules',
        sa.Column('id', UUIDType(binary=False), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.String(), nullable=True),
        sa.Column('team_id', UUIDType(binary=False), nullable=True),
        sa.ForeignKeyConstraint(['team_id'], ['teams.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('team_id', 'name', name='team_rule_uc')
    )
    op.create_index(op.f('ix_rules_created_at'), 'rules', ['created_at'], unique=False)
    # --- sources: plugin-backed data sources; configuration stored opaque ---
    op.create_table(
        'sources',
        sa.Column('id', UUIDType(binary=False), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('plugin', sa.String(length=255), nullable=False),
        sa.Column('configuration', sa.LargeBinary(), nullable=True),
        sa.Column('team_id', UUIDType(binary=False), nullable=True),
        sa.ForeignKeyConstraint(['team_id'], ['teams.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_sources_created_at'), 'sources', ['created_at'], unique=False)
    # --- worst: per-team periodic aggregates ---
    op.create_table(
        'worst',
        sa.Column('id', UUIDType(binary=False), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('team_id', UUIDType(binary=False), nullable=False),
        sa.Column('date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('label', sa.String(length=255), nullable=False),
        sa.Column('period', sa.Enum('daily', 'monthly', name='periods'), nullable=False),
        sa.Column('data', JSONType(), nullable=False),
        sa.ForeignKeyConstraint(['team_id'], ['teams.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('team_id', 'label', 'date', 'period', name='team_label_date_period_uc')
    )
    op.create_index(op.f('ix_worst_created_at'), 'worst', ['created_at'], unique=False)
    # --- checks: typed checks belonging to a source ---
    op.create_table(
        'checks',
        sa.Column('id', UUIDType(binary=False), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('source_id', UUIDType(binary=False), nullable=False),
        sa.Column('type', sa.String(length=255), nullable=False),
        sa.Column('parameters', JSONType(), nullable=True),
        sa.ForeignKeyConstraint(['source_id'], ['sources.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_checks_created_at'), 'checks', ['created_at'], unique=False)
    # --- rule_check_association: rules <-> checks (M:N) ---
    # NOTE(review): the extra unique constraint duplicates the composite PK.
    op.create_table(
        'rule_check_association',
        sa.Column('rule_id', UUIDType(binary=False), nullable=False),
        sa.Column('check_id', UUIDType(binary=False), nullable=False),
        sa.ForeignKeyConstraint(['check_id'], ['checks.id'], ),
        sa.ForeignKeyConstraint(['rule_id'], ['rules.id'], ),
        sa.PrimaryKeyConstraint('rule_id', 'check_id'),
        sa.UniqueConstraint('rule_id', 'check_id', name='rule_check_uix')
    )
    # --- variables: typed values scoped to team/rule/source/check ---
    op.create_table(
        'variables',
        sa.Column('id', UUIDType(binary=False), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('value', sa.String(), nullable=False),
        sa.Column('type', sa.String(length=255), nullable=False),
        sa.Column('rule_id', UUIDType(binary=False), nullable=True),
        sa.Column('team_id', UUIDType(binary=False), nullable=False),
        sa.Column('source_id', UUIDType(binary=False), nullable=True),
        sa.Column('check_id', UUIDType(binary=False), nullable=True),
        sa.ForeignKeyConstraint(['check_id'], ['checks.id'], ),
        sa.ForeignKeyConstraint(['rule_id'], ['rules.id'], ),
        sa.ForeignKeyConstraint(['source_id'], ['sources.id'], ),
        sa.ForeignKeyConstraint(['team_id'], ['teams.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_variables_created_at'), 'variables', ['created_at'], unique=False)
def test_should_jsontype_convert_jsonstring():
    """JSONType columns must convert to the JSONString scalar type."""
    column_type = JSONType()
    assert_column_conversion(column_type, JSONString)
def upgrade(): """Upgrade database.""" # ### commands auto generated by Alembic - please adjust! ### op.create_table('schema', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=128), nullable=False), sa.Column('major', sa.Integer(), nullable=False), sa.Column('minor', sa.Integer(), nullable=False), sa.Column('patch', sa.Integer(), nullable=False), sa.Column('json', JSONType().with_variant( postgresql.JSONB(none_as_null=True), 'postgresql', ).with_variant( JSONType(), 'sqlite', ), default=lambda: dict(), nullable=True ), sa.PrimaryKeyConstraint('id', name=op.f('pk_schema')), sa.UniqueConstraint('name', 'major', 'minor', 'patch', name='unique_schema_version') ) op.create_table('reana', sa.Column('id', UUIDType(), nullable=False ), sa.Column('user_id', sa.Integer(), nullable=False), sa.Column('record_id', UUIDType(), nullable=False ), sa.Column('name', sa.String(length=100), nullable=False), sa.Column('params', JSONType().with_variant( postgresql.JSONB(none_as_null=True), 'postgresql', ).with_variant( JSONType(), 'sqlite', ), default=lambda: dict(), nullable=True ), sa.Column('output', JSONType().with_variant( postgresql.JSONB(none_as_null=True), 'postgresql', ).with_variant( JSONType(), 'sqlite', ), default=lambda: dict(), nullable=True), sa.ForeignKeyConstraint( ['record_id'], [u'records_metadata.id'], name=op.f('fk_reana_record_id_records_metadata') ), sa.ForeignKeyConstraint( ['user_id'], [u'accounts_user.id'], name=op.f('fk_reana_user_id_accounts_user') ), sa.PrimaryKeyConstraint('id', name=op.f('pk_reana')) )
class OARepoCommunityModel(db.Model, Timestamp):
    """Database model for an OARepo community and its role/action grants."""

    __tablename__ = 'oarepo_communities'
    __table_args__ = {'extend_existing': True}
    # Versioning is explicitly disabled for this model.
    __versioned__ = {'versioning': False}

    id = db.Column(
        db.String(63),
        primary_key=True,
    )
    """Primary Community identifier slug."""

    title = db.Column(db.String(128), )
    """Community title name."""

    type = db.Column(ChoiceType(choices=OAREPO_COMMUNITIES_TYPES, impl=db.VARCHAR(16)),
                     default='other', nullable=False)
    """Community type or focus."""

    json = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True), 'postgresql',
    ).with_variant(
        JSONType(), 'sqlite',
    ).with_variant(
        JSONType(), 'mysql',
    ), default=lambda: dict(), nullable=True)
    """Store community metadata in JSON format."""

    is_deleted = db.Column(
        db.Boolean(name="ck_oarepo_community_metadata_is_deleted"),
        nullable=True,
        default=False)
    """Was the OARepo community soft-deleted."""

    # Roles attached to this community via the association table; the
    # backref gives each Role a dynamic `community` query.
    roles = db.relationship('Role', secondary=oarepo_communities_role,
                            backref=db.backref('community', lazy='dynamic'))

    def delete_roles(self):
        """Delete roles associated with this community."""
        # Runs in a nested transaction (SAVEPOINT); the outer commit is
        # the caller's responsibility.
        with db.session.begin_nested():
            for r in self.roles:
                db.session.delete(r)

    def delete(self):
        """Mark the community for deletion."""
        # Soft delete: flag the row, then remove its associated roles.
        self.is_deleted = True
        self.delete_roles()

    def _validate_role_action(self, role, action, system=False):
        # Reject actions not whitelisted by the extension configuration.
        if action not in current_oarepo_communities.allowed_actions:
            raise AttributeError(f'Action {action} not allowed')
        if system:
            # System roles are matched by value against the access registry.
            if role.value not in current_access.system_roles:
                raise AttributeError(f'Role {role} not in system roles')
        elif role not in self.roles:
            raise AttributeError(f'Role {role} not in community roles')

    def allow_action(self, role, action, system=False):
        """Allow action for a role."""
        self._validate_role_action(role, action, system)
        with db.session.begin_nested():
            if system:
                # Idempotent: reuse an existing grant if one already exists.
                ar = ActionSystemRoles.query.filter_by(
                    action=action, argument=self.id,
                    role_name=role.value).first()
                if not ar:
                    ar = ActionSystemRoles(action=action, argument=self.id,
                                           role_name=role.value)
            else:
                ar = ActionRoles.query.filter_by(action=action,
                                                 argument=self.id,
                                                 role_id=role.id).first()
                if not ar:
                    ar = ActionRoles(action=action, argument=self.id,
                                     role=role)
            # add() is a no-op for an already-persistent instance, so this
            # is safe in both the "found" and "created" cases.
            db.session.add(ar)
        return ar

    def deny_action(self, role, action, system=False):
        """Deny action for a role."""
        self._validate_role_action(role, action, system)
        with db.session.begin_nested():
            if system:
                ar = ActionSystemRoles.query.filter_by(
                    action=action, argument=self.id,
                    role_name=role.value).all()
            else:
                # NOTE(review): filters by `role=role` while allow_action
                # filters by `role_id=role.id` — presumably equivalent for
                # a persistent Role; confirm.
                ar = ActionRoles.query.filter_by(action=action,
                                                 argument=self.id,
                                                 role=role).all()
            # Remove every matching grant.
            for a in ar:
                db.session.delete(a)

    @property
    def actions(self):
        # Both queries are ordered by `action`, which is required for
        # itertools.groupby below to group correctly.
        ars = ActionRoles.query \
            .filter_by(argument=self.id) \
            .order_by(ActionRoles.action).all()
        sars = ActionSystemRoles.query \
            .filter_by(argument=self.id) \
            .order_by(ActionSystemRoles.action).all()
        # Returns (role grants, system-role grants) as lists of
        # single-key {action: [grants]} dicts.
        ars = [{k: list(g)} for k, g in groupby(ars, key=attrgetter('action'))]
        sars = [{
            k: list(g)
        } for k, g in groupby(sars, key=attrgetter('action'))]
        return ars, sars

    @property
    def excluded_facets(self):
        # NOTE(review): assumes self.json is a dict; the column is nullable,
        # so a row with json=None would raise here — confirm callers.
        return self.json.get('excluded_facets', {})

    def to_json(self):
        # `type` is a ChoiceType value; str() yields its stored code.
        return dict(id=self.id, title=self.title, type=str(self.type),
                    metadata=self.json)
class Masterdb(db.Model):
    """A SaltStack master host and the salt-api endpoint/credentials
    used to talk to it.

    ``location`` links the master to its provider; ``minion_data``
    caches minion inventory data as JSON.
    """

    __tablename__ = 'masterdb'

    def __repr__(self):
        return '<Master APi %r>' % self.master_name

    # Consistency fix: auto-generate the UUID primary key, matching
    # Nodedb.id; previously callers had to supply an id explicitly on
    # insert (backward-compatible — an explicit id still wins).
    id = Column(UUIDType(binary=False), default=uuid.uuid4, primary_key=True)
    master_name = Column(db.String(STRING_LEN), nullable=False,
                         unique=True, index=True, info={
                             'verbose_name': u'主机名',
                         })
    master_ip = Column(db.String(STRING_LEN), nullable=False,
                       unique=False, info={
                           'verbose_name': u'主机IP',
                       })
    master_port = Column(db.String(STRING_LEN), nullable=False,
                         default="", info={
                             'verbose_name': u'主机端口',
                         })
    master_api_url = Column(db.String(STRING_LEN), nullable=False,
                            default="", info={
                                'verbose_name': u'主机API地址',
                            })
    master_api_port = Column(db.Integer, nullable=False, default=0, info={
        'verbose_name': u'主机API端口',
    })
    # Default salt-api login credentials.
    username = Column(db.String(STRING_LEN), nullable=False, default='salt')
    password = Column(db.String(STRING_LEN), nullable=False, default='sugar')
    location_id = Column(UUIDType(binary=False), db.ForeignKey('location.id'),
                         nullable=False, default="", info={
                             'verbose_name': u'提供商',
                         })
    location = db.relationship('Location', backref='masters')
    bio = Column(db.Text, default="", info={
        'verbose_name': u'备注',
    })
    ssh_key = Column(db.String(STRING_LEN))
    create_at = Column(db.DateTime, nullable=False,
                       default=get_current_time, info={
                           'verbose_name': u'创建时间',
                       })
    update_at = Column(db.DateTime, info={
        'verbose_name': u'更新时间',
    })
    operator = Column(UUIDType(binary=False), nullable=True, info={
        'verbose_name': u'Master',
    })
    avatar = Column(db.String(STRING_LEN), nullable=False, default='')
    # salt-api auth token; token_expire is presumably an epoch timestamp —
    # TODO confirm against the code that refreshes it.
    token = Column(db.String(STRING_LEN), nullable=False, default='')
    token_expire = Column(db.Float, nullable=False, default=0.0)
    minion_data = Column(JSONType(1000), nullable=False, default='')

    def ret_api(self):
        """Return the master's API URL.

        NOTE: the API port is intentionally not appended; an earlier
        variant returned ``url + ":" + str(port)``.
        """
        return self.master_api_url

    @classmethod
    def get_count(cls):
        """Return the total row count via a COUNT(*) query."""
        # with_only_columns([...]) is the legacy (pre-1.4) SQLAlchemy
        # list-argument form used throughout this module.
        count_q = cls.query.statement.with_only_columns(
            [func.count()]).order_by(None)
        count = db.session.execute(count_q).scalar()
        return count

    @classmethod
    def get_list(cls, page=1):
        """Return one page of masters, most recently updated first."""
        q = cls.query.order_by(cls.update_at.desc())
        return cls.paginate(query=q, page=page)

    @staticmethod
    def paginate(query, page, per_page=20, error_out=False):
        """Paginate ``query`` and return a :class:`Pagination`.

        When ``error_out`` is true, an out-of-range page aborts with 404.
        """
        if error_out and page < 1:
            abort(404)
        items = query.limit(per_page).offset((page - 1) * per_page).all()
        if not items and page != 1 and error_out:
            abort(404)
        # No need to count if we're on the first page and there are fewer
        # items than we expected.
        if page == 1 and len(items) < per_page:
            total = len(items)
        else:
            total = query.order_by(None).count()
        return Pagination(query, page, per_page, total, items)
def upgrade():
    """Add FHIR complex-type columns and (re)create DB triggers.

    Adds ``contained``, ``extension``, ``modifierExtension`` and ``text``
    columns to the ``organization`` and ``structuredefinition`` tables,
    then recreates, for every resource table, a unique index plus trigger
    on ``(meta).version_id`` and per-field cardinality-validation
    triggers derived from ``constraints``.
    """
    ### commands auto generated by Alembic - please adjust! ###
    # `contained`: array of arbitrary inline resources stored as JSON.
    op.add_column(
        "organization",
        sa.Column(
            "contained",
            sqlalchemy_utils.types.pg_composite.CompositeArray(JSONType()),
            nullable=True,
        ),
    )
    # `extension`: array of the `fhir_extension` composite (url + value).
    op.add_column(
        "organization",
        sa.Column(
            "extension",
            sqlalchemy_utils.types.pg_composite.CompositeArray(
                PgComposite(
                    "fhir_extension",
                    [
                        Column("url", StringField(), nullable=False),
                        Column("value", OpenType()),
                    ],
                )),
            nullable=True,
        ),
    )
    op.add_column(
        "organization",
        sa.Column(
            "modifierExtension",
            sqlalchemy_utils.types.pg_composite.CompositeArray(
                PgComposite(
                    "fhir_extension",
                    [
                        Column("url", StringField(), nullable=False),
                        Column("value", OpenType()),
                    ],
                )),
            nullable=True,
        ),
    )
    # `text`: single `fhir_narrative` composite (extension, id, div, status).
    op.add_column(
        "organization",
        sa.Column(
            "text",
            fhir_server.elements.base.complex_mixin.PgComposite(
                "fhir_narrative",
                [
                    Column(
                        "extension",
                        PgComposite(
                            "fhir_extension",
                            [
                                Column("url", StringField(), nullable=False),
                                Column("value", OpenType()),
                            ],
                        ),
                    ),
                    Column("id", StringField()),
                    Column("div", StringField(), nullable=False),
                    Column("status", CodeField(), nullable=False),
                ],
            ),
            nullable=True,
        ),
    )
    # Same four columns for `structuredefinition`.
    op.add_column(
        "structuredefinition",
        sa.Column(
            "contained",
            sqlalchemy_utils.types.pg_composite.CompositeArray(JSONType()),
            nullable=True,
        ),
    )
    op.add_column(
        "structuredefinition",
        sa.Column(
            "extension",
            sqlalchemy_utils.types.pg_composite.CompositeArray(
                PgComposite(
                    "fhir_extension",
                    [
                        Column("url", StringField(), nullable=False),
                        Column("value", OpenType()),
                    ],
                )),
            nullable=True,
        ),
    )
    op.add_column(
        "structuredefinition",
        sa.Column(
            "modifierExtension",
            sqlalchemy_utils.types.pg_composite.CompositeArray(
                PgComposite(
                    "fhir_extension",
                    [
                        Column("url", StringField(), nullable=False),
                        Column("value", OpenType()),
                    ],
                )),
            nullable=True,
        ),
    )
    op.add_column(
        "structuredefinition",
        sa.Column(
            "text",
            fhir_server.elements.base.complex_mixin.PgComposite(
                "fhir_narrative",
                [
                    Column(
                        "extension",
                        PgComposite(
                            "fhir_extension",
                            [
                                Column("url", StringField(), nullable=False),
                                Column("value", OpenType()),
                            ],
                        ),
                    ),
                    Column("id", StringField()),
                    Column("div", StringField(), nullable=False),
                    Column("status", CodeField(), nullable=False),
                ],
            ),
            nullable=True,
        ),
    )
    ### end Alembic commands ###
    conn = op.get_bind()
    # NOTE(review): the SQL below is assembled with str.format. Identifiers
    # come from in-repo model metadata / constraints (not user input), but
    # this is not parameterized SQL — keep those sources trusted.
    for resource in all_resources:
        # Unique index + trigger enforcing optimistic concurrency on the
        # composite field (meta).version_id.
        conn.execute(
            sa.sql.text("""
                DROP INDEX IF EXISTS {0}_meta_version_id;
                CREATE UNIQUE INDEX {0}_meta_version_id ON {0} (
                    ((meta).version_id));
                DROP TRIGGER IF EXISTS meta_version_id_concurrency ON {0};
                CREATE TRIGGER meta_version_id_concurrency
                    BEFORE INSERT OR UPDATE ON {0}
                    FOR EACH ROW EXECUTE PROCEDURE
                    meta_version_id_concurrency();
                """.format(resource.__tablename__)))
    # Map each profiled resource name to its constrained fields.
    profiled_resources = {
        con.get("resource"): con.get("fields")
        for con in constraints
    }
    for resource, const in profiled_resources.items():
        for c in const:
            field = c.get("name")
            # Cardinality bounds are passed to the trigger as strings.
            mini = str(c.get("cardinality").get("mini"))
            maxi = str(c.get("cardinality").get("maxi"))
            conn.execute(
                sa.sql.text("""
                    DROP TRIGGER IF EXISTS validate_meta_fields_{1} ON {0};
                    CREATE TRIGGER validate_meta_fields_{1}
                        BEFORE INSERT OR UPDATE ON {0}
                        FOR EACH ROW EXECUTE PROCEDURE
                        validate_meta_fields({1}, '{2}', '{3}');
                    """.format(resource.lower(), field, mini, maxi)))
class Nodedb(db.Model):
    """A salt minion (node) managed by a :class:`Masterdb` master."""

    __tablename__ = 'nodedb'

    def __repr__(self):
        return '<node %r>' % self.node_name

    #id = Column(db.Integer, primary_key=True)
    id = Column(UUIDType(binary=False), default=uuid.uuid4, primary_key=True)
    node_name = Column(db.String(STRING_LEN), nullable=False,
                       unique=True, index=True, info={
                           'verbose_name': u'Node名',
                       })
    # IP data is stored as JSON (replaced the plain-string column below),
    # presumably to hold multiple addresses per node — TODO confirm.
    #node_ip = Column(db.String(STRING_LEN), nullable=False,
    #                 unique=False, info={'verbose_name': u'Node IP', })
    node_ip = Column(JSONType(10000), nullable=False, default='')
    node_port = Column(db.String(STRING_LEN), nullable=False,
                       default="", info={
                           'verbose_name': u'Node 端口',
                       })
    # Default login credentials.
    username = Column(db.String(STRING_LEN), nullable=False, default='salt')
    password = Column(db.String(STRING_LEN), nullable=False, default='sugar')
    #location = Column(db.String(STRING_LEN), nullable=False, default="")
    #location_id = Column(UUIDType(binary=False), db.ForeignKey(
    #    'location.id'), nullable=False, default="", info={'verbose_name': u'提供商', })
    #location = db.relationship('Location', backref='nodes')
    bio = Column(db.Text, default="", info={
        'verbose_name': u'备注',
    })
    ssh_key = Column(db.String(STRING_LEN))
    create_at = Column(db.DateTime, nullable=False,
                       default=get_current_time, info={
                           'verbose_name': u'创建时间',
                       })
    update_at = Column(db.DateTime, info={
        'verbose_name': u'更新时间',
    })
    # Owning master; backref exposes `master.nodes`.
    master_id = Column(UUIDType(binary=False), db.ForeignKey('masterdb.id'),
                       nullable=False, default="", info={
                           'verbose_name': u'Master',
                       })
    master = db.relationship('Masterdb', backref='nodes')
    avatar = Column(db.String(STRING_LEN), nullable=False, default='')
    # Raw minion grains/inventory cached as JSON.
    minion_data = Column(JSONType(10000), nullable=False, default='')
    os = Column(db.String(STRING_LEN), nullable=False, default='')
    # NOTE(review): 'kenel' looks like a typo for 'kernel'; renaming it
    # would require a schema migration, so it is left as-is here.
    kenel = Column(db.String(STRING_LEN), nullable=False, default='')
    core = Column(db.Integer, nullable=False, default=0)
    cpu = Column(db.String(STRING_LEN), nullable=False, default='')
    mem = Column(db.String(STRING_LEN), nullable=False, default='')
    host = Column(db.String(STRING_LEN), nullable=False, default='')
    status = Column(db.String(STRING_LEN), nullable=False, default='')

    @classmethod
    def get_nodes(cls):
        """Return all node names (list of one-element row tuples)."""
        q = cls.query.with_entities(cls.node_name).all()
        return q

    @classmethod
    def get_count(cls):
        """Return the total row count via a COUNT(*) query."""
        # with_only_columns([...]) is the legacy (pre-1.4) SQLAlchemy
        # list-argument form.
        count_q = cls.query.statement.with_only_columns([func.count()
                                                         ]).order_by(None)
        count = db.session.execute(count_q).scalar()
        return count

    @classmethod
    def get_list(cls, page=1):
        """Return one page of nodes, most recently updated first."""
        q = cls.query.order_by(cls.update_at.desc())
        return cls.paginate(query=q, page=page)

    @staticmethod
    def paginate(query, page, per_page=20, error_out=False):
        """Paginate ``query`` and return a :class:`Pagination`.

        When ``error_out`` is true, an out-of-range page aborts with 404.
        """
        if error_out and page < 1:
            abort(404)
        items = query.limit(per_page).offset((page - 1) * per_page).all()
        if not items and page != 1 and error_out:
            abort(404)
        # No need to count if we're on the first page and there are fewer
        # items than we expected.
        if page == 1 and len(items) < per_page:
            total = len(items)
        else:
            total = query.order_by(None).count()
        return Pagination(query, page, per_page, total, items)