class Sales(Base):
    """ORM model for the ``sales`` table: one row per sale of a book."""

    __tablename__ = 'sales'

    # Surrogate, auto-incrementing primary key.
    id = sa.Column(
        postgresql_types.BIGINT(),
        primary_key=True,
        unique=True,
        nullable=False,
        autoincrement=True,
        doc="testtest",
    )

    # Owning book; row is removed when the referenced book is deleted.
    book_id = sa.Column(
        postgresql_types.UUID(),
        sa.ForeignKey(column="book.id", ondelete="CASCADE"),
        primary_key=False,
        unique=False,
        nullable=False,
        doc="testtest",
    )

    # Moment the sale happened.
    sold = sa.Column(
        postgresql_types.TIMESTAMP(),
        primary_key=False,
        unique=False,
        nullable=False,
        doc="testtest",
    )

    # Optional reservation duration (PostgreSQL INTERVAL).
    reservation = sa.Column(
        postgresql_types.INTERVAL(),
        primary_key=False,
        unique=False,
        nullable=True,
        doc="testtest",
    )

    # Optional network address the sale originated from.
    source = sa.Column(
        postgresql_types.INET(),
        primary_key=False,
        unique=False,
        nullable=True,
        doc="testtest",
    )

    # NOTE(review): the string "[sales.c.book_id]" must be resolvable in
    # SQLAlchemy's declarative eval namespace — confirm `sales` (the Table)
    # is registered there; the usual generated form is "[Sales.book_id]".
    book = sa.orm.relationship(
        'Book',
        foreign_keys="[sales.c.book_id]",
        remote_side=None,
        backref="sales",
    )
# Fix: `import uuid` and `from typing import Optional` were buried between
# executable statements mid-module; all imports now sit grouped at the top
# (stdlib first, then third-party), per PEP 8. No names were removed.
import uuid
from typing import Optional

import sqlalchemy
from sqlalchemy import BigInteger, Column, DateTime
from sqlalchemy.dialects import postgresql, sqlite
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy.sql import func
from sqlalchemy_utils import JSONType, UUIDType

# BigInteger with per-dialect variants: native BIGINT on PostgreSQL,
# plain INTEGER on SQLite (SQLite cannot autoincrement a BIGINT pk).
BigIntegerType = BigInteger()
BigIntegerType = BigIntegerType.with_variant(postgresql.BIGINT(), "postgresql")
BigIntegerType = BigIntegerType.with_variant(sqlite.INTEGER(), "sqlite")

DeclarativeBase = declarative_base()

# Shared column types: internal integer pk and external non-binary UUID.
PkType = BigIntegerType
ExtPkType = UUIDType(binary=False)


class Base(DeclarativeBase):  # type: ignore
    """Abstract base model giving every table an integer primary key,
    a unique client-generated external UUID, and a creation timestamp."""

    __abstract__ = True

    id = Column(PkType, primary_key=True)
    # External identifier generated client-side, one fresh UUID per row.
    ext_id = Column(ExtPkType, default=lambda: uuid.uuid4(), nullable=False, unique=True)
    # Filled in by the database server at INSERT time.
    created_at = Column(DateTime, server_default=func.now())
from Pegasus.db.ensembles import EnsembleWorkflow, EnsembleWorkflowStates log = logging.getLogger(__name__) metadata = MetaData() # for SQLite warnings.filterwarnings('ignore', '.*does \*not\* support Decimal*.') # These are keywords that all tables should have table_keywords = {} table_keywords['mysql_charset'] = 'latin1' table_keywords['mysql_engine'] = 'InnoDB' KeyInteger = BigInteger() KeyInteger = KeyInteger.with_variant(postgresql.BIGINT(), 'postgresql') KeyInteger = KeyInteger.with_variant(mysql.BIGINT(), 'mysql') KeyInteger = KeyInteger.with_variant(sqlite.INTEGER(), 'sqlite') # -------------------------------------------------------------------- # Method to verify if tables exist or are according to the schema # -------------------------------------------------------------------- def get_missing_tables(db): tables = [ db_version, # WORKFLOW st_workflow, st_workflowstate, st_workflow_meta, st_workflow_files,
from sqlalchemy import BigInteger
from sqlalchemy.dialects import postgresql, mysql, sqlite

# SQLite refuses a BigInteger as an auto-incrementing primary key, so map
# it to INTEGER there while keeping native BIGINT on PostgreSQL and MySQL.
BigIntegerVariantType = (
    BigInteger()
    .with_variant(postgresql.BIGINT(), 'postgresql')
    .with_variant(mysql.BIGINT(), 'mysql')
    .with_variant(sqlite.INTEGER(), 'sqlite')
)
from sqlalchemy import Column, String, BigInteger, func
from auditing.db import session
from sqlalchemy.dialects import postgresql, sqlite
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

# Native BIGINT on PostgreSQL; generic BigInteger elsewhere.
BigIntegerType = BigInteger().with_variant(postgresql.BIGINT(), 'postgresql')


class Organization(Base):
    """ORM model for the ``organization`` table (id + unique name)."""

    __tablename__ = "organization"

    id = Column(BigIntegerType, primary_key=True)
    name = Column(String(120), unique=True)

    @classmethod
    def find_one(cls, id=None):
        """Return the organization with the given id, or the first row
        in the table when no id is supplied.

        Fix: the original used ``if id:``, so a legitimate ``id == 0``
        was falsy and the filter was silently dropped, returning an
        arbitrary row. Test identity against None instead.
        (``id`` shadows the builtin, but the name is part of the public
        keyword interface and is kept.)
        """
        query = session.query(cls)
        if id is not None:
            query = query.filter(cls.id == id)
        return query.first()

    @classmethod
    def count(cls):
        """Return the number of organization rows."""
        return session.query(func.count(cls.id)).scalar()

    def save(self):
        """Add this instance to the session, flush, and commit."""
        session.add(self)
        session.flush()
        session.commit()
""" SQLAlchemy Database Models used to generate and update db tables """ from . import db from sqlalchemy import BigInteger from sqlalchemy.dialects import postgresql, sqlite from flask_login import UserMixin from werkzeug.security import generate_password_hash, check_password_hash from datetime import datetime from application import UNDERGRAD_MAJORS as majors # Fix BigInteger not working in sqlite but still works with postgresql BigInt = BigInteger() BigInt = BigInt.with_variant(postgresql.BIGINT(), 'postgresql') BigInt = BigInt.with_variant(sqlite.INTEGER(), 'sqlite') class User(UserMixin, db.Model): """Model for user accounts.""" __tablename__ = 'users' id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(64), index=False, unique=False, nullable=False) email = db.Column(db.String(80), index=True, unique=True, nullable=False) password = db.Column(db.String(128), index=False, unique=False, nullable=False) admin = db.Column(db.Boolean(), default=False)
def upgrade():
    """Create the full project/issue-tracking schema.

    Fix: the two foreign-key constraints on ``known_bugs_count_history``
    were copy-pasted from ``project_history`` with their names unchanged
    (``project_history_project_id_fk`` / ``project_history_owner_team_id_fk``).
    Foreign-key names must be unique per schema on MySQL and the duplicates
    are misleading on every backend; they are renamed to
    ``known_bugs_count_history_project_id_fk`` /
    ``known_bugs_count_history_team_id_fk``.
    NOTE(review): if this migration has already been applied anywhere,
    ship the rename as a follow-up migration instead of editing this one.
    """
    op.create_table(
        'project',
        sa.Column('id', postgresql.INTEGER(), nullable=False, primary_key=True),
        sa.Column('name', postgresql.VARCHAR(255), nullable=False),
        sa.Column('data_source_query', postgresql.VARCHAR(1024), nullable=True)
    )
    op.create_table(
        'team',
        sa.Column('id', postgresql.INTEGER(), nullable=False, primary_key=True),
        sa.Column('name', postgresql.VARCHAR(255), nullable=False),
        sa.Column('bugfix_rate', postgresql.FLOAT(), nullable=False),
    )
    # Temporal (begin/end) history of project milestones and ownership.
    op.create_table(
        'project_history',
        sa.Column('begin_datetime', postgresql.TIMESTAMP(), nullable=False),
        sa.Column('end_datetime', postgresql.TIMESTAMP(), nullable=True),
        sa.Column('project_id', postgresql.INTEGER(), nullable=False),
        sa.Column('scope_complete_datetime', postgresql.TIMESTAMP(), nullable=False),
        sa.Column('feature_complete_datetime', postgresql.TIMESTAMP(), nullable=False),
        sa.Column('ready_to_manufacture_datetime', postgresql.TIMESTAMP(), nullable=False),
        sa.Column('owner_team_id', postgresql.INTEGER(), nullable=False),
        sa.Column('comment', postgresql.TEXT(), nullable=True),
        sa.ForeignKeyConstraint(
            ('project_id',),
            ('project.id',),
            name='project_history_project_id_fk',
        ),
        sa.ForeignKeyConstraint(
            ('owner_team_id',),
            ('team.id',),
            name='project_history_owner_team_id_fk',
        ),
        sa.UniqueConstraint('begin_datetime', 'project_id',
                            name='project_history_begin_datetime_project_id_idx'),
    )
    op.create_table(
        'known_bugs_count_history',
        sa.Column('begin_datetime', postgresql.TIMESTAMP(), nullable=False),
        sa.Column('end_datetime', postgresql.TIMESTAMP(), nullable=True),
        sa.Column('project_id', postgresql.INTEGER(), nullable=False),
        sa.Column('team_id', postgresql.INTEGER(), nullable=False),
        sa.Column('known_bugs_count', postgresql.INTEGER(), nullable=False),
        # Renamed from the copy-pasted 'project_history_*' names (see docstring).
        sa.ForeignKeyConstraint(
            ('project_id',),
            ('project.id',),
            name='known_bugs_count_history_project_id_fk',
        ),
        sa.ForeignKeyConstraint(
            ('team_id',),
            ('team.id',),
            name='known_bugs_count_history_team_id_fk',
        ),
        sa.UniqueConstraint(
            'begin_datetime',
            'project_id',
            'team_id',
            name='known_bugs_count_history_begin_datetime_project_id_team_id_idx'
        ),
    )
    op.create_table(
        'person',
        sa.Column('id', postgresql.INTEGER(), nullable=False, primary_key=True),
        sa.Column('firstname', postgresql.VARCHAR(255), nullable=False),
        sa.Column('lastname', postgresql.VARCHAR(255), nullable=False),
        sa.Column('issue_tracker_name', postgresql.VARCHAR(255), nullable=False),
    )
    # Temporal history of each person's team/project assignment fraction.
    op.create_table(
        'person_history',
        sa.Column('begin_datetime', postgresql.TIMESTAMP(), nullable=False),
        sa.Column('end_datetime', postgresql.TIMESTAMP(), nullable=True),
        sa.Column('person_id', postgresql.INTEGER(), nullable=False),
        sa.Column('team_id', postgresql.INTEGER(), nullable=False),
        sa.Column('project_id', postgresql.INTEGER(), nullable=False),
        sa.Column('project_assignment', postgresql.FLOAT(), nullable=False),
        sa.Column('comment', postgresql.TEXT(), nullable=True),
        sa.ForeignKeyConstraint(
            ('person_id',),
            ('person.id',),
            name='person_history_person_id_fk',
        ),
        sa.ForeignKeyConstraint(
            ('team_id',),
            ('team.id',),
            name='person_history_team_id_fk',
        ),
        sa.ForeignKeyConstraint(
            ('project_id',),
            ('project.id',),
            name='person_history_project_id_fk',
        ),
        sa.UniqueConstraint(
            'begin_datetime',
            'person_id',
            'team_id',
            'project_id',
            name='person_history_begin_datetime_person_id_team_id_project_id_idx'
        ),
    )
    op.create_table(
        'issue_type',
        sa.Column('id', postgresql.INTEGER(), nullable=False, primary_key=True),
        sa.Column('name', postgresql.VARCHAR(255), nullable=False),
    )
    op.create_table(
        'implementation_type',
        sa.Column('id', postgresql.INTEGER(), nullable=False, primary_key=True),
        sa.Column('name', postgresql.VARCHAR(255), nullable=False),
    )
    op.create_table(
        'issue',
        sa.Column('id', postgresql.BIGINT(), nullable=False, primary_key=True),
        sa.Column('key', postgresql.VARCHAR(64), nullable=False),
        sa.Column('url', postgresql.VARCHAR(255), nullable=False),
        sa.Column('created', postgresql.TIMESTAMP(), nullable=False),
    )
    # Temporal history of an issue's mutable fields (status, assignee, ...).
    op.create_table(
        'issue_history',
        sa.Column('begin_datetime', postgresql.TIMESTAMP(), nullable=False),
        sa.Column('end_datetime', postgresql.TIMESTAMP(), nullable=True),
        sa.Column('issue_id', postgresql.BIGINT(), nullable=False),
        sa.Column('type_id', postgresql.INTEGER(), nullable=False),
        sa.Column('resolved', postgresql.INTEGER(), nullable=True, default=None),
        sa.Column('due', postgresql.DATE(), nullable=True, default=None),
        sa.Column('summary', postgresql.VARCHAR(255), nullable=False),
        sa.Column('components', postgresql.VARCHAR(255), nullable=True),
        sa.Column('assignee_id', postgresql.INTEGER(), nullable=False),
        sa.Column('reporter_id', postgresql.INTEGER(), nullable=False),
        sa.Column('status', postgresql.VARCHAR(255), nullable=False),
        sa.Column('reqs_level', postgresql.FLOAT(), nullable=True),
        sa.Column('design_level', postgresql.FLOAT(), nullable=True),
        sa.Column('comment', postgresql.TEXT(), nullable=True),
        sa.ForeignKeyConstraint(
            ('issue_id',),
            ('issue.id',),
            name='issue_history_issue_id_fk',
        ),
        sa.ForeignKeyConstraint(
            ('type_id',),
            ('issue_type.id',),
            name='issue_history_type_id_fk',
        ),
        sa.ForeignKeyConstraint(
            ('assignee_id',),
            ('person.id',),
            name='issue_history_assignee_id_fk',
        ),
        sa.ForeignKeyConstraint(
            ('reporter_id',),
            ('person.id',),
            name='issue_history_reporter_id_fk',
        ),
        sa.UniqueConstraint(
            'begin_datetime',
            'issue_id',
            name='issue_history_begin_datetime_issue_id_idx'
        ),
    )
    op.create_table(
        'original_estimate',
        sa.Column('begin_datetime', postgresql.TIMESTAMP(), nullable=False),
        sa.Column('end_datetime', postgresql.TIMESTAMP(), nullable=True),
        sa.Column('issue_id', postgresql.BIGINT(), nullable=False),
        sa.Column('team_id', postgresql.INTEGER(), nullable=False),
        sa.Column('implementation_type_id', postgresql.INTEGER(), nullable=False),
        sa.Column('estimation', postgresql.FLOAT(), nullable=True),
        sa.Column('comment', postgresql.TEXT(), nullable=True),
        sa.ForeignKeyConstraint(
            ('issue_id',),
            ('issue.id',),
            name='original_estimate_issue_id_fk',
        ),
        sa.ForeignKeyConstraint(
            ('team_id',),
            ('team.id',),
            name='original_estimate_team_id_fk',
        ),
        sa.ForeignKeyConstraint(
            ('implementation_type_id',),
            ('implementation_type.id',),
            name='original_estimate_implementation_type_id_fk',
        ),
        sa.UniqueConstraint(
            'begin_datetime',
            'issue_id',
            'team_id',
            name='original_estimate_begin_datetime_issue_id_team_id_idx'
        ),
    )
    op.create_table(
        'remaining_estimate',
        sa.Column('begin_datetime', postgresql.TIMESTAMP(), nullable=False),
        sa.Column('end_datetime', postgresql.TIMESTAMP(), nullable=True),
        sa.Column('issue_id', postgresql.BIGINT(), nullable=False),
        sa.Column('team_id', postgresql.INTEGER(), nullable=False),
        sa.Column('implementation_type_id', postgresql.INTEGER(), nullable=False),
        sa.Column('estimation', postgresql.FLOAT(), nullable=True),
        sa.Column('comment', postgresql.TEXT(), nullable=True),
        sa.ForeignKeyConstraint(
            ('issue_id',),
            ('issue.id',),
            name='remaining_estimate_issue_id_fk',
        ),
        sa.ForeignKeyConstraint(
            ('team_id',),
            ('team.id',),
            name='remaining_estimate_team_id_fk',
        ),
        sa.ForeignKeyConstraint(
            ('implementation_type_id',),
            ('implementation_type.id',),
            name='remaining_estimate_implementation_type_id_fk',
        ),
        sa.UniqueConstraint(
            'begin_datetime',
            'issue_id',
            'team_id',
            name='remaining_estimate_begin_datetime_issue_id_team_id_idx'
        ),
    )
short_map = { MYSQL: mysql.SMALLINT(), ORACLE: oracle.NUMBER(5), POSTGRES: postgresql.SMALLINT(), } # Skip Oracle int_map = { MYSQL: mysql.INTEGER(), POSTGRES: postgresql.INTEGER(), } long_map = { MYSQL: mysql.BIGINT(), ORACLE: oracle.NUMBER(38, 0), POSTGRES: postgresql.BIGINT(), } float_map = { MYSQL: mysql.FLOAT(), ORACLE: oracle.BINARY_FLOAT(), POSTGRES: postgresql.FLOAT(), } double_map = { MYSQL: mysql.DOUBLE(), ORACLE: oracle.BINARY_DOUBLE(), POSTGRES: postgresql.DOUBLE_PRECISION(), } char_map = {