Example #1
0
def upgrade():
    """Create the initial database schema.

    Tables are created in foreign-key dependency order: ``role`` first
    (it is referenced by nearly every other table), then ``collection``,
    then the tables that reference them. Secondary indexes are created
    immediately after their table.

    Fix applied in review: ``permission.collection_id`` was declared
    ``nullable=False`` but had no foreign key to ``collection.id``,
    unlike ``document``, ``entity`` and ``match`` which all constrain
    their collection reference. The missing constraint is added below.
    NOTE(review): if this revision has already been applied in
    production, ship the added constraint as a new migration instead of
    editing this one — editing applied revisions causes schema drift.
    """
    # Accounts: users, groups and system roles (see the "type" enum).
    op.create_table(
        "role",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("deleted_at", sa.DateTime(), nullable=True),
        sa.Column("foreign_id", sa.Unicode(length=2048), nullable=False),
        sa.Column("name", sa.Unicode(), nullable=False),
        sa.Column("email", sa.Unicode(), nullable=True),
        sa.Column("api_key", sa.Unicode(), nullable=True),
        sa.Column("is_admin", sa.Boolean(), nullable=False),
        sa.Column("is_muted", sa.Boolean(), nullable=False),
        sa.Column("type",
                  sa.Enum("user", "group", "system", name="role_type"),
                  nullable=False),
        sa.Column("password_digest", sa.Unicode(), nullable=True),
        sa.Column("reset_token", sa.Unicode(), nullable=True),
        sa.Column("notified_at", sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("foreign_id"),
    )
    # Saved search alerts owned by a role.
    op.create_table(
        "alert",
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("deleted_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("query", sa.Unicode(), nullable=True),
        sa.Column("notified_at", sa.DateTime(), nullable=True),
        sa.Column("role_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["role_id"],
            ["role.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_alert_role_id"),
                    "alert", ["role_id"],
                    unique=False)
    # Datasets/casefiles; referenced by permission, document, entity, match.
    op.create_table(
        "collection",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("deleted_at", sa.DateTime(), nullable=True),
        sa.Column("label", sa.Unicode(), nullable=True),
        sa.Column("summary", sa.Unicode(), nullable=True),
        sa.Column("category", sa.Unicode(), nullable=True),
        sa.Column("countries", postgresql.ARRAY(sa.Unicode()), nullable=True),
        sa.Column("languages", postgresql.ARRAY(sa.Unicode()), nullable=True),
        sa.Column("foreign_id", sa.Unicode(), nullable=False),
        sa.Column("publisher", sa.Unicode(), nullable=True),
        sa.Column("publisher_url", sa.Unicode(), nullable=True),
        sa.Column("info_url", sa.Unicode(), nullable=True),
        sa.Column("data_url", sa.Unicode(), nullable=True),
        sa.Column("casefile", sa.Boolean(), nullable=True),
        sa.Column("creator_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["creator_id"],
            ["role.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("foreign_id"),
    )
    op.create_table(
        "notification",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("event", sa.String(length=255), nullable=False),
        sa.Column("channels",
                  postgresql.ARRAY(sa.String(length=255)),
                  nullable=True),
        sa.Column("params",
                  postgresql.JSONB(astext_type=sa.Text()),
                  nullable=True),
        sa.Column("actor_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["actor_id"],
            ["role.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_notification_channels"),
                    "notification", ["channels"],
                    unique=False)
    # Per-role access grants on a collection.
    op.create_table(
        "permission",
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("deleted_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("role_id", sa.Integer(), nullable=True),
        sa.Column("read", sa.Boolean(), nullable=True),
        sa.Column("write", sa.Boolean(), nullable=True),
        sa.Column("collection_id", sa.Integer(), nullable=False),
        # Added in review: constrain collection_id like the other tables
        # referencing collection (document, entity, match). Without this,
        # permissions could point at non-existent collections.
        sa.ForeignKeyConstraint(
            ["collection_id"],
            ["collection.id"],
        ),
        sa.ForeignKeyConstraint(
            ["role_id"],
            ["role.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_permission_role_id"),
                    "permission", ["role_id"],
                    unique=False)
    op.create_table(
        "query_log",
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("query", sa.Unicode(), nullable=True),
        sa.Column("session_id", sa.Unicode(), nullable=True),
        sa.Column("role_id", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(
            ["role_id"],
            ["role.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_query_log_role_id"),
                    "query_log", ["role_id"],
                    unique=False)
    # Group membership association table (no surrogate primary key).
    op.create_table(
        "role_membership",
        sa.Column("group_id", sa.Integer(), nullable=True),
        sa.Column("member_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["group_id"],
            ["role.id"],
        ),
        sa.ForeignKeyConstraint(
            ["member_id"],
            ["role.id"],
        ),
    )
    # Documents form a tree via the self-referencing parent_id FK.
    op.create_table(
        "document",
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("content_hash", sa.Unicode(length=65), nullable=True),
        sa.Column("foreign_id", sa.Unicode(), nullable=True),
        sa.Column("schema", sa.String(length=255), nullable=False),
        sa.Column("meta",
                  postgresql.JSONB(astext_type=sa.Text()),
                  nullable=True),
        sa.Column("uploader_id", sa.Integer(), nullable=True),
        sa.Column("parent_id", sa.BigInteger(), nullable=True),
        sa.Column("collection_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ["collection_id"],
            ["collection.id"],
        ),
        sa.ForeignKeyConstraint(
            ["parent_id"],
            ["document.id"],
        ),
        sa.ForeignKeyConstraint(
            ["uploader_id"],
            ["role.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_document_collection_id"),
                    "document", ["collection_id"],
                    unique=False)
    op.create_index(op.f("ix_document_content_hash"),
                    "document", ["content_hash"],
                    unique=False)
    op.create_index(op.f("ix_document_foreign_id"),
                    "document", ["foreign_id"],
                    unique=False)
    op.create_index(op.f("ix_document_parent_id"),
                    "document", ["parent_id"],
                    unique=False)
    op.create_table(
        "entity",
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("deleted_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.String(length=128), nullable=False),
        sa.Column("name", sa.Unicode(), nullable=True),
        sa.Column("schema", sa.String(length=255), nullable=True),
        sa.Column("foreign_id", sa.Unicode(), nullable=True),
        sa.Column("data",
                  postgresql.JSONB(astext_type=sa.Text()),
                  nullable=True),
        sa.Column("collection_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["collection_id"],
            ["collection.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_entity_collection_id"),
                    "entity", ["collection_id"],
                    unique=False)
    op.create_index(op.f("ix_entity_schema"),
                    "entity", ["schema"],
                    unique=False)
    # Cross-collection entity match scores.
    op.create_table(
        "match",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("entity_id", sa.String(length=128), nullable=True),
        sa.Column("collection_id", sa.Integer(), nullable=True),
        sa.Column("match_id", sa.String(length=128), nullable=True),
        sa.Column("match_collection_id", sa.Integer(), nullable=True),
        sa.Column("score", sa.Float(), nullable=True),
        sa.ForeignKeyConstraint(
            ["collection_id"],
            ["collection.id"],
        ),
        sa.ForeignKeyConstraint(
            ["match_collection_id"],
            ["collection.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_match_collection_id"),
                    "match", ["collection_id"],
                    unique=False)
    op.create_index(
        op.f("ix_match_match_collection_id"),
        "match",
        ["match_collection_id"],
        unique=False,
    )
Example #2
0
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'deployment', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('hostnames', postgresql.ARRAY(sa.String()), nullable=False),
        sa.Column('allow_observer_submission_edit',
                  sa.Boolean(),
                  nullable=True), sa.Column('logo', sa.String(),
                                            nullable=True),
        sa.Column('include_rejected_in_votes', sa.Boolean(), nullable=True),
        sa.Column('is_initialized', sa.Boolean(), nullable=True),
        sa.Column('dashboard_full_locations', sa.Boolean(), nullable=True),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'phone', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('number', sa.String(), nullable=False),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('number'))
    op.create_table(
        'form_set', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('slug', sa.String(), nullable=True),
        sa.Column('deployment_id', sa.Integer(), nullable=False),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['deployment_id'], ['deployment.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'location_set', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('slug', sa.String(), nullable=True),
        sa.Column('deployment_id', sa.Integer(), nullable=False),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['deployment_id'], ['deployment.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'permission', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('description', sa.String(), nullable=True),
        sa.Column('deployment_id', sa.Integer(), nullable=False),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['deployment_id'], ['deployment.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'resource', sa.Column('resource_id', sa.Integer(), nullable=False),
        sa.Column('resource_type', sa.String(), nullable=False),
        sa.Column('deployment_id', sa.Integer(), nullable=False),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['deployment_id'], ['deployment.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('resource_id'))
    op.create_table(
        'role', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('deployment_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=True),
        sa.Column('description', sa.String(), nullable=True),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['deployment_id'], ['deployment.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('deployment_id', 'name'))
    op.create_table(
        'user', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('deployment_id', sa.Integer(), nullable=False),
        sa.Column('email', sa.String(), nullable=False),
        sa.Column('username', sa.String(), nullable=False),
        sa.Column('password', sa.String(), nullable=False),
        sa.Column('last_name', sa.String(), nullable=True),
        sa.Column('first_name', sa.String(), nullable=True),
        sa.Column('active', sa.Boolean(), nullable=True),
        sa.Column('confirmed_at', sa.DateTime(), nullable=True),
        sa.Column('current_login_at', sa.DateTime(), nullable=True),
        sa.Column('last_login_at', sa.DateTime(), nullable=True),
        sa.Column('current_login_ip', sa.String(), nullable=True),
        sa.Column('last_login_ip', sa.String(), nullable=True),
        sa.Column('login_count', sa.Integer(), nullable=True),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['deployment_id'], ['deployment.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'form', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('prefix', sa.String(), nullable=False),
        sa.Column('form_type',
                  sqlalchemy_utils.types.choice.ChoiceType(Form.FORM_TYPES),
                  nullable=False),
        sa.Column('require_exclamation', sa.Boolean(), nullable=True),
        sa.Column('data',
                  postgresql.JSONB(astext_type=sa.Text()),
                  nullable=True),
        sa.Column('version_identifier', sa.String(), nullable=True),
        sa.Column('form_set_id', sa.Integer(), nullable=False),
        sa.Column('resource_id', sa.Integer(), nullable=True),
        sa.Column('quality_checks',
                  postgresql.JSONB(astext_type=sa.Text()),
                  nullable=True),
        sa.Column('party_mappings',
                  postgresql.JSONB(astext_type=sa.Text()),
                  nullable=True),
        sa.Column('calculate_moe', sa.Boolean(), nullable=True),
        sa.Column('accredited_voters_tag', sa.String(), nullable=True),
        sa.Column('quality_checks_enabled', sa.Boolean(), nullable=True),
        sa.Column('invalid_votes_tag', sa.String(), nullable=True),
        sa.Column('registered_voters_tag', sa.String(), nullable=True),
        sa.Column('blank_votes_tag', sa.String(), nullable=True),
        sa.ForeignKeyConstraint(
            ['form_set_id'],
            ['form_set.id'],
        ),
        sa.ForeignKeyConstraint(['resource_id'], ['resource.resource_id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'location_data_field', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('location_set_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('label', sa.String(), nullable=False),
        sa.Column('visible_in_lists', sa.Boolean(), nullable=True),
        sa.Column('resource_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['location_set_id'], ['location_set.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['resource_id'], ['resource.resource_id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'location_type', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('is_administrative', sa.Boolean(), nullable=True),
        sa.Column('is_political', sa.Boolean(), nullable=True),
        sa.Column('has_registered_voters', sa.Boolean(), nullable=True),
        sa.Column('slug', sa.String(), nullable=True),
        sa.Column('location_set_id', sa.Integer(), nullable=False),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['location_set_id'], ['location_set.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'participant_set', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('slug', sa.String(), nullable=True),
        sa.Column('location_set_id', sa.Integer(), nullable=False),
        sa.Column('deployment_id', sa.Integer(), nullable=False),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['deployment_id'], ['deployment.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['location_set_id'], ['location_set.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'role_resource_permissions',
        sa.Column('role_id', sa.Integer(), nullable=False),
        sa.Column('resource_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['resource_id'], ['resource.resource_id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['role_id'], ['role.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('role_id', 'resource_id'))
    op.create_table(
        'roles_permissions', sa.Column('role_id', sa.Integer(),
                                       nullable=False),
        sa.Column('permission_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['permission_id'], ['permission.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['role_id'], ['role.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('role_id', 'permission_id'))
    op.create_table(
        'roles_users', sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('role_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['role_id'], ['role.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('user_id', 'role_id'))
    op.create_table(
        'sample', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('location_set_id', sa.Integer(), nullable=False),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['location_set_id'], ['location_set.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'user_resource_permissions',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('resource_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['resource_id'], ['resource.resource_id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('user_id', 'resource_id'))
    op.create_table(
        'user_upload', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('deployment_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('created', sa.DateTime(), nullable=True),
        sa.Column('upload_filename', sa.String(), nullable=True),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['deployment_id'], ['deployment.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'users_permissions', sa.Column('user_id', sa.Integer(),
                                       nullable=False),
        sa.Column('permission_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['permission_id'], ['permission.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('user_id', 'permission_id'))
    op.create_table(
        'event', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('start', sa.DateTime(), nullable=False),
        sa.Column('end', sa.DateTime(), nullable=False),
        sa.Column('form_set_id', sa.Integer(), nullable=True),
        sa.Column('resource_id', sa.Integer(), nullable=True),
        sa.Column('location_set_id', sa.Integer(), nullable=True),
        sa.Column('participant_set_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['form_set_id'], ['form_set.id'],
                                ondelete='SET NULL'),
        sa.ForeignKeyConstraint(['location_set_id'], ['location_set.id'],
                                ondelete='SET NULL'),
        sa.ForeignKeyConstraint(['participant_set_id'], ['participant_set.id'],
                                ondelete='SET NULL'),
        sa.ForeignKeyConstraint(['resource_id'], ['resource.resource_id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'location', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('code', sa.String(), nullable=False),
        sa.Column('registered_voters', sa.Integer(), nullable=True),
        sa.Column('location_set_id', sa.Integer(), nullable=False),
        sa.Column('location_type_id', sa.Integer(), nullable=False),
        sa.Column('lat', sa.Float(), nullable=True),
        sa.Column('lon', sa.Float(), nullable=True),
        sa.Column('extra_data',
                  postgresql.JSONB(astext_type=sa.Text()),
                  nullable=True),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['location_set_id'], ['location_set.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['location_type_id'], ['location_type.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('location_set_id', 'code'))
    op.create_index(op.f('ix_location_code'),
                    'location', ['code'],
                    unique=False)
    op.create_table(
        'location_type_path',
        sa.Column('location_set_id', sa.Integer(), nullable=False),
        sa.Column('ancestor_id', sa.Integer(), nullable=False),
        sa.Column('descendant_id', sa.Integer(), nullable=False),
        sa.Column('depth', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['ancestor_id'], ['location_type.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['descendant_id'], ['location_type.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['location_set_id'], ['location_set.id']),
        sa.PrimaryKeyConstraint('ancestor_id', 'descendant_id'))
    op.create_index('location_type_paths_ancestor_idx',
                    'location_type_path', ['ancestor_id'],
                    unique=False)
    op.create_index('location_type_paths_descendant_idx',
                    'location_type_path', ['descendant_id'],
                    unique=False)
    op.create_table(
        'participant_data_field', sa.Column('id', sa.Integer(),
                                            nullable=False),
        sa.Column('participant_set_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('label', sa.String(), nullable=False),
        sa.Column('visible_in_lists', sa.Boolean(), nullable=True),
        sa.Column('resource_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['participant_set_id'], ['participant_set.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['resource_id'], ['resource.resource_id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'participant_group_type', sa.Column('id', sa.Integer(),
                                            nullable=False),
        sa.Column('name', sa.String(), nullable=True),
        sa.Column('participant_set_id', sa.Integer(), nullable=False),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['participant_set_id'], ['participant_set.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'participant_partner', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('participant_set_id', sa.Integer(), nullable=False),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['participant_set_id'], ['participant_set.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'participant_role', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('participant_set_id', sa.Integer(), nullable=False),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['participant_set_id'], ['participant_set.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'location_path',
        sa.Column('location_set_id', sa.Integer(), nullable=False),
        sa.Column('ancestor_id', sa.Integer(), nullable=False),
        sa.Column('descendant_id', sa.Integer(), nullable=False),
        sa.Column('depth', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['ancestor_id'], ['location.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['descendant_id'], ['location.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['location_set_id'], ['location_set.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('ancestor_id', 'descendant_id'))
    op.create_index('location_paths_ancestor_idx',
                    'location_path', ['ancestor_id'],
                    unique=False)
    op.create_index('location_paths_descendant_idx',
                    'location_path', ['descendant_id'],
                    unique=False)
    op.create_table(
        'participant', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=True),
        sa.Column('participant_id', sa.String(), nullable=True),
        sa.Column('role_id', sa.Integer(), nullable=True),
        sa.Column('partner_id', sa.Integer(), nullable=True),
        sa.Column('supervisor_id', sa.Integer(), nullable=True),
        sa.Column('gender',
                  sqlalchemy_utils.types.choice.ChoiceType(Participant.GENDER),
                  nullable=True), sa.Column('email',
                                            sa.String(),
                                            nullable=True),
        sa.Column('location_id', sa.Integer(), nullable=True),
        sa.Column('participant_set_id', sa.Integer(), nullable=False),
        sa.Column('message_count', sa.Integer(), nullable=True),
        sa.Column('accurate_message_count', sa.Integer(), nullable=True),
        sa.Column('completion_rating', sa.Float(), nullable=True),
        sa.Column('device_id', sa.String(), nullable=True),
        sa.Column('password', sa.String(), nullable=True),
        sa.Column('extra_data',
                  postgresql.JSONB(astext_type=sa.Text()),
                  nullable=True),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['location_id'], ['location.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['participant_set_id'], ['participant_set.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['partner_id'], ['participant_partner.id'],
                                ondelete='SET NULL'),
        sa.ForeignKeyConstraint(['role_id'], ['participant_role.id'],
                                ondelete='SET NULL'),
        sa.ForeignKeyConstraint(['supervisor_id'], ['participant.id'],
                                ondelete='SET NULL'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'participant_group', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('group_type_id', sa.Integer(), nullable=False),
        sa.Column('participant_set_id', sa.Integer(), nullable=False),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['group_type_id'],
                                ['participant_group_type.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['participant_set_id'], ['participant_set.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'samples_locations',
        sa.Column('sample_id', sa.Integer(), nullable=False),
        sa.Column('location_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['location_id'], ['location.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['sample_id'], ['sample.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('sample_id', 'location_id'))
    op.create_table(
        'participant_groups_participants',
        sa.Column('group_id', sa.Integer(), nullable=False),
        sa.Column('participant_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['group_id'], ['participant_group.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['participant_id'], ['participant.id'],
                                ondelete='CASCADE'))
    op.create_table(
        'participant_phone',
        sa.Column('participant_id', sa.Integer(), nullable=False),
        sa.Column('phone_id', sa.Integer(), nullable=False),
        sa.Column('last_seen', sa.DateTime(), nullable=True),
        sa.Column('verified', sa.Boolean(), nullable=True),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['participant_id'], ['participant.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['phone_id'], ['phone.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('participant_id', 'phone_id'))
    op.create_table(
        'submission', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('deployment_id', sa.Integer(), nullable=False),
        sa.Column('event_id', sa.Integer(), nullable=False),
        sa.Column('form_id', sa.Integer(), nullable=False),
        sa.Column('participant_id', sa.Integer(), nullable=True),
        sa.Column('location_id', sa.Integer(), nullable=False),
        sa.Column('data',
                  postgresql.JSONB(astext_type=sa.Text()),
                  nullable=True),
        sa.Column('extra_data',
                  postgresql.JSONB(astext_type=sa.Text()),
                  nullable=True),
        sa.Column('submission_type',
                  sqlalchemy_utils.types.choice.ChoiceType(
                      Submission.SUBMISSION_TYPES),
                  nullable=True),
        sa.Column('created', sa.DateTime(), nullable=True),
        sa.Column('updated', sa.DateTime(), nullable=True),
        sa.Column('sender_verified', sa.Boolean(), nullable=True),
        sa.Column('quarantine_status',
                  sqlalchemy_utils.types.choice.ChoiceType(
                      Submission.QUARANTINE_STATUSES),
                  nullable=True),
        sa.Column('verification_status',
                  sqlalchemy_utils.types.choice.ChoiceType(
                      Submission.VERIFICATION_STATUSES),
                  nullable=True),
        sa.Column('incident_description', sa.String(), nullable=True),
        sa.Column('incident_status',
                  sqlalchemy_utils.types.choice.ChoiceType(
                      Submission.INCIDENT_STATUSES),
                  nullable=True),
        sa.Column('overridden_fields',
                  postgresql.ARRAY(sa.String()),
                  nullable=True),
        sa.Column('conflicts',
                  postgresql.JSONB(astext_type=sa.Text()),
                  nullable=True),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['deployment_id'], ['deployment.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['event_id'], ['event.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['form_id'], ['form.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['location_id'], ['location.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['participant_id'], ['participant.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_index('submission_data_idx',
                    'submission', ['data'],
                    unique=False,
                    postgresql_using='gin')
    op.create_table(
        'message', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('direction',
                  sqlalchemy_utils.types.choice.ChoiceType(Message.DIRECTIONS),
                  nullable=False),
        sa.Column('recipient', sa.String(), nullable=True),
        sa.Column('sender', sa.String(), nullable=True),
        sa.Column('text', sa.String(), nullable=True),
        sa.Column('received', sa.DateTime(), nullable=True),
        sa.Column('delivered', sa.DateTime(), nullable=True),
        sa.Column('deployment_id', sa.Integer(), nullable=False),
        sa.Column('event_id', sa.Integer(), nullable=False),
        sa.Column('submission_id', sa.Integer(), nullable=True),
        sa.Column('participant_id', sa.Integer(), nullable=True),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['deployment_id'], ['deployment.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['event_id'], ['event.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['participant_id'], ['participant.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['submission_id'], ['submission.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_message_received'),
                    'message', ['received'],
                    unique=False)
    op.create_table(
        'submission_comment', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('submission_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('comment', sa.String(), nullable=True),
        sa.Column('submit_date', sa.DateTime(), nullable=True),
        sa.Column('deployment_id', sa.Integer(), nullable=False),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['deployment_id'], ['deployment.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['submission_id'], ['submission.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(
            ['user_id'],
            ['user.id'],
        ), sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'submission_version', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('submission_id', sa.Integer(), nullable=False),
        sa.Column('data',
                  postgresql.JSONB(astext_type=sa.Text()),
                  nullable=True),
        sa.Column('timestamp', sa.DateTime(), nullable=True),
        sa.Column('channel',
                  sqlalchemy_utils.types.choice.ChoiceType(
                      SubmissionVersion.CHANNEL_CHOICES),
                  nullable=True),
        sa.Column('deployment_id', sa.Integer(), nullable=False),
        sa.Column('identity', sa.String(), nullable=False),
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['deployment_id'], ['deployment.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['submission_id'], ['submission.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))

    # ----- initial fixtures -----
    conn = op.get_bind()
    conn.execute(text("""INSERT INTO deployment (
        id, name, hostnames, allow_observer_submission_edit,
        include_rejected_in_votes, is_initialized, dashboard_full_locations,
        uuid)
        VALUES (1, 'Default', '{\"localhost\"}', 't', 'f', 'f', 't',
        :uuid)"""),
                 uuid=uuid4().hex)
    op.execute("""
    SELECT nextval(pg_get_serial_sequence('deployment', 'id'))
    """)
    conn.execute(text("""INSERT INTO resource (
        resource_id, resource_type, deployment_id, uuid)
        VALUES (1, 'event', 1, :uuid)"""),
                 uuid=uuid4().hex)
    op.execute("""
    SELECT nextval(pg_get_serial_sequence('resource', 'resource_id'))
    """)

    timestamp = datetime(1970, 1, 1, tzinfo=tzutc())
    conn.execute(text("""INSERT INTO event (
        id, name, start, \"end\", resource_id)
        VALUES (1, 'Default', :ts, :ts, 1)
        """),
                 ts=timestamp)
    op.execute("""
    SELECT nextval(pg_get_serial_sequence('event', 'id'))
    """)
    conn.execute(text("""INSERT INTO role (
        id, deployment_id, name, uuid) VALUES (1, 1, 'admin', :uuid)"""),
                 uuid=uuid4().hex)
    op.execute("""
    SELECT nextval(pg_get_serial_sequence('role', 'id'))
    """)
    conn.execute(text("""INSERT INTO role (
        id, deployment_id, name, uuid) VALUES (2, 1, 'analyst', :uuid)"""),
                 uuid=uuid4().hex)
    op.execute("""
    SELECT nextval(pg_get_serial_sequence('role', 'id'))
    """)
    conn.execute(text("""INSERT INTO role (
        id, deployment_id, name, uuid) VALUES (3, 1, 'manager', :uuid)"""),
                 uuid=uuid4().hex)
    op.execute("""
    SELECT nextval(pg_get_serial_sequence('role', 'id'))
    """)
    conn.execute(text("""INSERT INTO role (
        id, deployment_id, name, uuid) VALUES (4, 1, 'clerk', :uuid)"""),
                 uuid=uuid4().hex)
    op.execute("""
    SELECT nextval(pg_get_serial_sequence('role', 'id'))
    """)
    conn.execute(text("""INSERT INTO role (
        id, deployment_id, name, uuid) VALUES (5, 1, 'field-coordinator', :uuid)"""
                      ),
                 uuid=uuid4().hex)
    op.execute("""
    SELECT nextval(pg_get_serial_sequence('role', 'id'))
    """)
    password = hash_password('admin')
    conn.execute(text("""INSERT INTO \"user\" (
        id, deployment_id, email, username, password, active, uuid)
        VALUES (1, 1, '*****@*****.**', 'admin',
        :password, 't', :uuid)"""),
                 password=password,
                 uuid=uuid4().hex)
    op.execute("INSERT INTO roles_users (user_id, role_id) VALUES (1, 1)")
    op.execute("""
    SELECT nextval(pg_get_serial_sequence('user', 'id'))
    """)

    for name in dir(permissions):
        item = getattr(permissions, name, None)
        if isinstance(item, Permission):
            for need in item.needs:
                if need.method == 'action':
                    conn.execute(text(
                        """INSERT INTO permission (name, deployment_id, uuid)
                                 VALUES (:value, 1, :uuid)"""),
                                 uuid=uuid4().hex,
                                 value=need.value)

    # clerk
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='clerk',
                 permission='add_submission')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='clerk',
                 permission='edit_both_submissions')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='clerk',
                 permission='edit_submission')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='clerk',
                 permission='view_messages')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='clerk',
                 permission='view_participants')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='clerk',
                 permission='view_events')

    # manager
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='manager',
                 permission='add_submission')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='manager',
                 permission='edit_both_submissions')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='manager',
                 permission='edit_submission')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='manager',
                 permission='edit_submission_quarantine_status')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='manager',
                 permission='edit_submission_verification_status')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='manager',
                 permission='send_messages')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='manager',
                 permission='view_events')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='manager',
                 permission='view_messages')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='manager',
                 permission='view_participants')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='manager',
                 permission='view_quality_assurance')

    # analyst
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='analyst',
                 permission='add_submission')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='analyst',
                 permission='edit_both_submissions')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='analyst',
                 permission='edit_participant')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='analyst',
                 permission='edit_submission')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='analyst',
                 permission='edit_submission_quarantine_status')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='analyst',
                 permission='edit_submission_verification_status')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='analyst',
                 permission='export_messages')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='analyst',
                 permission='export_participants')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='analyst',
                 permission='export_submissions')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='analyst',
                 permission='send_messages')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='analyst',
                 permission='view_events')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='analyst',
                 permission='view_messages')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='analyst',
                 permission='view_participants')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='analyst',
                 permission='view_quality_assurance')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='analyst',
                 permission='view_process_analysis')
    conn.execute(text("""INSERT INTO roles_permissions (role_id, permission_id)
                 VALUES ((SELECT id FROM role WHERE deployment_id=1 AND name=:role),
                 (SELECT id FROM permission WHERE deployment_id=1 AND name=:permission))
                 ON CONFLICT DO NOTHING"""),
                 role='analyst',
                 permission='view_result_analysis')
# ---------------------------------------------------------------------------
# Exemple #3 (score: 0) — next scraped migration sample begins below
# ---------------------------------------------------------------------------
def upgrade():

    op.create_table(
        'itemsequence',
        sa.Column('parent_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('value', sa.Integer(), server_default='0', nullable=False),
        sa.PrimaryKeyConstraint('parent_id', 'value'))
    op.execute(NEXT_ITEM_VALUE_FUNCTION)
    op.create_table(
        'itemoption', sa.Column('id',
                                zeus.db.types.guid.GUID(),
                                nullable=False),
        sa.Column('item_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('name', sa.String(length=64), nullable=False),
        sa.Column('value', sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('item_id', 'name', name='unq_itemoption_name'))
    op.create_table(
        'itemstat', sa.Column('id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('item_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('name', sa.String(length=64), nullable=False),
        sa.Column('value', sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('item_id', 'name', name='unq_itemstat_name'))
    op.create_table(
        'organization', sa.Column('name',
                                  sa.String(length=200),
                                  nullable=False),
        sa.Column('id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('date_created',
                  sa.TIMESTAMP(timezone=True),
                  server_default=sa.text('now()'),
                  nullable=False), sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'))
    op.create_table(
        'user', sa.Column('email', sa.String(length=128), nullable=False),
        sa.Column('id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('date_created',
                  sa.TIMESTAMP(timezone=True),
                  server_default=sa.text('now()'),
                  nullable=False), sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'))
    op.create_table(
        'author', sa.Column('id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('name', sa.String(length=128), nullable=False),
        sa.Column('email', sa.String(length=128), nullable=True),
        sa.Column('organization_id', zeus.db.types.guid.GUID(),
                  nullable=False),
        sa.ForeignKeyConstraint(['organization_id'], ['organization.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('organization_id',
                            'email',
                            name='unq_author_email'))
    op.create_index(op.f('ix_author_organization_id'),
                    'author', ['organization_id'],
                    unique=False)
    op.create_table(
        'identity',
        sa.Column('user_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('external_id', sa.String(length=64), nullable=False),
        sa.Column('provider', sa.String(length=32), nullable=False),
        sa.Column('config',
                  zeus.db.types.json.JSONEncodedDict(),
                  nullable=False),
        sa.Column('id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('date_created',
                  sa.TIMESTAMP(timezone=True),
                  server_default=sa.text('now()'),
                  nullable=False),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('external_id'),
        sa.UniqueConstraint('user_id', 'provider', name='unq_identity_user'))
    op.create_index(op.f('ix_identity_user_id'),
                    'identity', ['user_id'],
                    unique=False)
    op.create_table(
        'organization_access',
        sa.Column('organization_id', zeus.db.types.guid.GUID(),
                  nullable=False),
        sa.Column('user_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.ForeignKeyConstraint(
            ['organization_id'],
            ['organization.id'],
        ), sa.ForeignKeyConstraint(
            ['user_id'],
            ['user.id'],
        ), sa.PrimaryKeyConstraint('organization_id', 'user_id'))
    op.create_table(
        'repository',
        sa.Column('provider', zeus.db.types.enum.StrEnum(), nullable=False),
        sa.Column('external_id', sa.String(length=64), nullable=True),
        sa.Column('url', sa.String(length=200), nullable=False),
        sa.Column('backend', zeus.db.types.enum.Enum(), nullable=False),
        sa.Column('status', zeus.db.types.enum.Enum(), nullable=False),
        sa.Column('data', zeus.db.types.json.JSONEncodedDict(), nullable=True),
        sa.Column('last_update', sa.TIMESTAMP(timezone=True), nullable=True),
        sa.Column('last_update_attempt',
                  sa.TIMESTAMP(timezone=True),
                  nullable=True),
        sa.Column('organization_id', zeus.db.types.guid.GUID(),
                  nullable=False),
        sa.Column('id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('date_created',
                  sa.TIMESTAMP(timezone=True),
                  server_default=sa.text('now()'),
                  nullable=False),
        sa.ForeignKeyConstraint(['organization_id'], ['organization.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('organization_id',
                            'provider',
                            'external_id',
                            name='unq_external_id'))
    op.create_index(op.f('ix_repository_organization_id'),
                    'repository', ['organization_id'],
                    unique=False)
    op.create_table(
        'project', sa.Column('name', sa.String(length=200), nullable=False),
        sa.Column('repository_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('organization_id', zeus.db.types.guid.GUID(),
                  nullable=False),
        sa.Column('id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('date_created',
                  sa.TIMESTAMP(timezone=True),
                  server_default=sa.text('now()'),
                  nullable=False),
        sa.ForeignKeyConstraint(['organization_id'], ['organization.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['repository_id'], ['repository.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('organization_id', 'name',
                            name='unq_project_name'))
    op.create_index(op.f('ix_project_organization_id'),
                    'project', ['organization_id'],
                    unique=False)
    op.create_index(op.f('ix_project_repository_id'),
                    'project', ['repository_id'],
                    unique=False)
    op.create_table(
        'repository_access',
        sa.Column('repository_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('user_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('organization_id', zeus.db.types.guid.GUID(),
                  nullable=False),
        sa.ForeignKeyConstraint(['organization_id'], ['organization.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(
            ['repository_id'],
            ['repository.id'],
        ), sa.ForeignKeyConstraint(
            ['user_id'],
            ['user.id'],
        ), sa.PrimaryKeyConstraint('repository_id', 'user_id'))
    op.create_index(op.f('ix_repository_access_organization_id'),
                    'repository_access', ['organization_id'],
                    unique=False)
    op.create_table(
        'revision', sa.Column('sha', sa.String(length=40), nullable=False),
        sa.Column('author_id', zeus.db.types.guid.GUID(), nullable=True),
        sa.Column('committer_id', zeus.db.types.guid.GUID(), nullable=True),
        sa.Column('message', sa.Text(), nullable=True),
        sa.Column('parents',
                  postgresql.ARRAY(sa.String(length=40)),
                  nullable=True),
        sa.Column('branches',
                  postgresql.ARRAY(sa.String(length=128)),
                  nullable=True),
        sa.Column('date_created',
                  sa.TIMESTAMP(timezone=True),
                  server_default=sa.text('now()'),
                  nullable=False),
        sa.Column('date_committed',
                  sa.TIMESTAMP(timezone=True),
                  server_default=sa.text('now()'),
                  nullable=False),
        sa.Column('repository_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('organization_id', zeus.db.types.guid.GUID(),
                  nullable=False),
        sa.ForeignKeyConstraint(
            ['author_id'],
            ['author.id'],
        ), sa.ForeignKeyConstraint(
            ['committer_id'],
            ['author.id'],
        ),
        sa.ForeignKeyConstraint(['organization_id'], ['organization.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['repository_id'], ['repository.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('sha'),
        sa.UniqueConstraint('repository_id', 'sha', name='unq_revision'))
    op.create_index(op.f('ix_revision_author_id'),
                    'revision', ['author_id'],
                    unique=False)
    op.create_index(op.f('ix_revision_committer_id'),
                    'revision', ['committer_id'],
                    unique=False)
    op.create_index(op.f('ix_revision_organization_id'),
                    'revision', ['organization_id'],
                    unique=False)
    op.create_index(op.f('ix_revision_repository_id'),
                    'revision', ['repository_id'],
                    unique=False)
    op.create_table(
        'hook', sa.Column('token', sa.LargeBinary(length=64), nullable=False),
        sa.Column('provider', sa.String(length=64), nullable=False),
        sa.Column('project_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('organization_id', zeus.db.types.guid.GUID(),
                  nullable=False),
        sa.Column('id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('date_created',
                  sa.TIMESTAMP(timezone=True),
                  server_default=sa.text('now()'),
                  nullable=False),
        sa.ForeignKeyConstraint(['organization_id'], ['organization.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['project_id'], ['project.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('token'))
    op.create_index(op.f('ix_hook_organization_id'),
                    'hook', ['organization_id'],
                    unique=False)
    op.create_index(op.f('ix_hook_project_id'),
                    'hook', ['project_id'],
                    unique=False)
    op.create_table(
        'patch',
        sa.Column('parent_revision_sha', sa.String(length=40), nullable=False),
        sa.Column('diff', sa.Text(), nullable=False),
        sa.Column('repository_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('organization_id', zeus.db.types.guid.GUID(),
                  nullable=False),
        sa.Column('id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('date_created',
                  sa.TIMESTAMP(timezone=True),
                  server_default=sa.text('now()'),
                  nullable=False),
        sa.ForeignKeyConstraint(['organization_id'], ['organization.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(
            ['repository_id', 'parent_revision_sha'],
            ['revision.repository_id', 'revision.sha'],
        ),
        sa.ForeignKeyConstraint(['repository_id'], ['repository.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_index('idx_repo_sha',
                    'patch', ['repository_id', 'parent_revision_sha'],
                    unique=False)
    op.create_index(op.f('ix_patch_organization_id'),
                    'patch', ['organization_id'],
                    unique=False)
    op.create_index(op.f('ix_patch_repository_id'),
                    'patch', ['repository_id'],
                    unique=False)
    op.create_table(
        'source',
        sa.Column('patch_id', zeus.db.types.guid.GUID(), nullable=True),
        sa.Column('revision_sha', sa.String(length=40), nullable=False),
        sa.Column('data', zeus.db.types.json.JSONEncodedDict(), nullable=True),
        sa.Column('author_id', zeus.db.types.guid.GUID(), nullable=True),
        sa.Column('repository_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('organization_id', zeus.db.types.guid.GUID(),
                  nullable=False),
        sa.Column('id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('date_created',
                  sa.TIMESTAMP(timezone=True),
                  server_default=sa.text('now()'),
                  nullable=False),
        sa.ForeignKeyConstraint(
            ['author_id'],
            ['author.id'],
        ),
        sa.ForeignKeyConstraint(['organization_id'], ['organization.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(
            ['patch_id'],
            ['patch.id'],
        ),
        sa.ForeignKeyConstraint(
            ['repository_id', 'revision_sha'],
            ['revision.repository_id', 'revision.sha'],
        ),
        sa.ForeignKeyConstraint(['repository_id'], ['repository.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('patch_id'),
        sa.UniqueConstraint('repository_id',
                            'revision_sha',
                            'patch_id',
                            name='unq_source_revision'))
    op.create_index('idx_source_repo_sha',
                    'source', ['repository_id', 'revision_sha'],
                    unique=False)
    op.create_index(op.f('ix_source_author_id'),
                    'source', ['author_id'],
                    unique=False)
    op.create_index(op.f('ix_source_organization_id'),
                    'source', ['organization_id'],
                    unique=False)
    op.create_index(op.f('ix_source_repository_id'),
                    'source', ['repository_id'],
                    unique=False)
    op.create_table(
        'build',
        sa.Column('source_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('number', sa.Integer(), nullable=False),
        sa.Column('label', sa.String(), nullable=False),
        sa.Column('status', zeus.db.types.enum.Enum(), nullable=False),
        sa.Column('result', zeus.db.types.enum.Enum(), nullable=False),
        sa.Column('date_started', sa.TIMESTAMP(timezone=True), nullable=True),
        sa.Column('date_finished', sa.TIMESTAMP(timezone=True), nullable=True),
        sa.Column('data', zeus.db.types.json.JSONEncodedDict(), nullable=True),
        sa.Column('provider', sa.String(), nullable=True),
        sa.Column('external_id', sa.String(length=64), nullable=True),
        sa.Column('project_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('organization_id', zeus.db.types.guid.GUID(),
                  nullable=False),
        sa.Column('id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('date_created',
                  sa.TIMESTAMP(timezone=True),
                  server_default=sa.text('now()'),
                  nullable=False),
        sa.ForeignKeyConstraint(['organization_id'], ['organization.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['project_id'], ['project.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['source_id'], ['source.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('project_id', 'number', name='unq_build_number'),
        sa.UniqueConstraint('project_id',
                            'provider',
                            'external_id',
                            name='unq_build_provider'))
    op.create_index(op.f('ix_build_organization_id'),
                    'build', ['organization_id'],
                    unique=False)
    op.create_index(op.f('ix_build_project_id'),
                    'build', ['project_id'],
                    unique=False)
    op.create_index(op.f('ix_build_source_id'),
                    'build', ['source_id'],
                    unique=False)
    op.create_table(
        'job', sa.Column('id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('build_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('number', sa.Integer(), nullable=False),
        sa.Column('label', sa.String(), nullable=True),
        sa.Column('status', zeus.db.types.enum.Enum(), nullable=False),
        sa.Column('result', zeus.db.types.enum.Enum(), nullable=False),
        sa.Column('date_started', sa.TIMESTAMP(timezone=True), nullable=True),
        sa.Column('date_finished', sa.TIMESTAMP(timezone=True), nullable=True),
        sa.Column('data', zeus.db.types.json.JSONEncodedDict(), nullable=True),
        sa.Column('provider', sa.String(), nullable=True),
        sa.Column('external_id', sa.String(length=64), nullable=True),
        sa.Column('project_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('organization_id', zeus.db.types.guid.GUID(),
                  nullable=False),
        sa.Column('date_created',
                  sa.TIMESTAMP(timezone=True),
                  server_default=sa.text('now()'),
                  nullable=False),
        sa.ForeignKeyConstraint(['build_id'], ['build.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['organization_id'], ['organization.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['project_id'], ['project.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('build_id', 'number', name='unq_job_number'),
        sa.UniqueConstraint('build_id',
                            'provider',
                            'external_id',
                            name='unq_job_provider'))
    op.create_index(op.f('ix_job_build_id'), 'job', ['build_id'], unique=False)
    op.create_index(op.f('ix_job_organization_id'),
                    'job', ['organization_id'],
                    unique=False)
    op.create_index(op.f('ix_job_project_id'),
                    'job', ['project_id'],
                    unique=False)
    op.create_table(
        'filecoverage',
        sa.Column('id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('job_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('filename', sa.String(length=256), nullable=False),
        sa.Column('data', sa.Text(), nullable=False),
        sa.Column('lines_covered', sa.Integer(), nullable=False),
        sa.Column('lines_uncovered', sa.Integer(), nullable=False),
        sa.Column('diff_lines_covered', sa.Integer(), nullable=False),
        sa.Column('diff_lines_uncovered', sa.Integer(), nullable=False),
        sa.Column('project_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('organization_id', zeus.db.types.guid.GUID(),
                  nullable=False),
        sa.ForeignKeyConstraint(['job_id'], ['job.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['organization_id'], ['organization.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['project_id'], ['project.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', 'filename'),
        sa.UniqueConstraint('job_id', 'filename', name='unq_job_filname'))
    op.create_index(op.f('ix_filecoverage_organization_id'),
                    'filecoverage', ['organization_id'],
                    unique=False)
    op.create_index(op.f('ix_filecoverage_project_id'),
                    'filecoverage', ['project_id'],
                    unique=False)
    op.create_table(
        'testcase', sa.Column('id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('job_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('hash', sa.String(length=40), nullable=False),
        sa.Column('name', sa.Text(), nullable=False),
        sa.Column('result', zeus.db.types.enum.Enum(), nullable=False),
        sa.Column('duration', sa.Integer(), nullable=True),
        sa.Column('message', sa.Text(), nullable=True),
        sa.Column('project_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('organization_id', zeus.db.types.guid.GUID(),
                  nullable=False),
        sa.ForeignKeyConstraint(['job_id'], ['job.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['organization_id'], ['organization.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['project_id'], ['project.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('job_id', 'hash', name='unq_testcase_hash'))
    op.create_index(op.f('ix_testcase_organization_id'),
                    'testcase', ['organization_id'],
                    unique=False)
    op.create_index(op.f('ix_testcase_project_id'),
                    'testcase', ['project_id'],
                    unique=False)
    op.create_table(
        'artifact',
        sa.Column('job_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('testcase_id', zeus.db.types.guid.GUID(), nullable=True),
        sa.Column('name', sa.String(length=256), nullable=False),
        sa.Column('type',
                  zeus.db.types.enum.Enum(),
                  server_default='0',
                  nullable=False),
        sa.Column('file', zeus.db.types.file.File(), nullable=False),
        sa.Column('project_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('organization_id', zeus.db.types.guid.GUID(),
                  nullable=False),
        sa.Column('id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('date_created',
                  sa.TIMESTAMP(timezone=True),
                  server_default=sa.text('now()'),
                  nullable=False),
        sa.ForeignKeyConstraint(['job_id'], ['job.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['organization_id'], ['organization.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['project_id'], ['project.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['testcase_id'], ['testcase.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_artifact_organization_id'),
                    'artifact', ['organization_id'],
                    unique=False)
    op.create_index(op.f('ix_artifact_project_id'),
                    'artifact', ['project_id'],
                    unique=False)
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Relax the NOT NULL constraint on recipes.utilities; the existing
    # TEXT[] column type is left unchanged.
    op.alter_column(
        'recipes',
        'utilities',
        existing_type=postgresql.ARRAY(sa.TEXT()),
        nullable=True,
    )
class ImplicitSpanMention(Context, TemporaryImplicitSpanMention):
    """A span of characters that may not appear verbatim in the source text.

    It is identified by Context id, character-index start and end (inclusive),
    as well as a key representing what 'expander' function drew the ImplicitSpanMention
    from an existing SpanMention, and a position (where position=0 corresponds to the
    first ImplicitSpanMention produced from the expander function).

    The character-index start and end point to the segment of text that was
    expanded to produce the ImplicitSpanMention.
    """

    __tablename__ = "implicit_span_mention"

    #: The unique id of the ``ImplicitSpanMention``.
    id = Column(Integer,
                ForeignKey("context.id", ondelete="CASCADE"),
                primary_key=True)

    #: The id of the parent ``Sentence``.
    sentence_id = Column(Integer,
                         ForeignKey("context.id", ondelete="CASCADE"),
                         primary_key=True)
    #: The parent ``Sentence``.
    sentence = relationship("Context", foreign_keys=sentence_id)

    #: The starting character-index of the ``ImplicitSpanMention``.
    char_start = Column(Integer, nullable=False)

    #: The ending character-index of the ``ImplicitSpanMention`` (inclusive).
    char_end = Column(Integer, nullable=False)

    #: The key representing the expander function which produced this
    #: ``ImplicitSpanMention``.
    expander_key = Column(String, nullable=False)

    #: The position of the ``ImplicitSpanMention`` where position=0 is the first
    #: ``ImplicitSpanMention`` produced by the expander.
    position = Column(Integer, nullable=False)

    #: The raw text of the ``ImplicitSpanMention``.
    text = Column(String)

    #: A list of the words in the ``ImplicitSpanMention``.
    words = Column(postgresql.ARRAY(String), nullable=False)

    #: A list of the lemmas for each word in the ``ImplicitSpanMention``.
    lemmas = Column(postgresql.ARRAY(String))

    #: A list of the POS tags for each word in the ``ImplicitSpanMention``.
    pos_tags = Column(postgresql.ARRAY(String))

    #: A list of the NER tags for each word in the ``ImplicitSpanMention``.
    ner_tags = Column(postgresql.ARRAY(String))

    #: A list of the dependency parents for each word in the ``ImplicitSpanMention``.
    dep_parents = Column(postgresql.ARRAY(Integer))

    #: A list of the dependency labels for each word in the ``ImplicitSpanMention``.
    dep_labels = Column(postgresql.ARRAY(String))

    #: A list of the page number each word in the ``ImplicitSpanMention``.
    page = Column(postgresql.ARRAY(Integer))

    #: A list of each word's TOP bounding box coordinate in the
    #: ``ImplicitSpanMention``.
    top = Column(postgresql.ARRAY(Integer))

    #: A list of each word's LEFT bounding box coordinate in the
    #: ``ImplicitSpanMention``.
    left = Column(postgresql.ARRAY(Integer))

    #: A list of each word's BOTTOM bounding box coordinate in the
    #: ``ImplicitSpanMention``.
    bottom = Column(postgresql.ARRAY(Integer))

    #: A list of each word's RIGHT bounding box coordinate in the
    #: ``ImplicitSpanMention``.
    right = Column(postgresql.ARRAY(Integer))

    #: Pickled metadata about the ``ImplicitSpanMention``.
    meta = Column(PickleType)

    # One row per unique (sentence, span, expander, position) combination.
    __table_args__ = (UniqueConstraint(sentence_id, char_start, char_end,
                                       expander_key, position), )

    __mapper_args__ = {
        "polymorphic_identity": "implicit_span_mention",
        "inherit_condition": (id == Context.id),
    }

    def _get_instance(self, **kwargs: Any) -> "ImplicitSpanMention":
        """Construct a new ``ImplicitSpanMention`` from keyword arguments."""
        return ImplicitSpanMention(**kwargs)

    # We redefine these to use default (identity-based) semantics, overriding
    # the operators inherited from the Temporary* base class.
    def __eq__(self, other: object) -> bool:
        if not isinstance(other, ImplicitSpanMention):
            return NotImplemented
        return self is other

    def __ne__(self, other: object) -> bool:
        if not isinstance(other, ImplicitSpanMention):
            return NotImplemented
        return self is not other

    def __hash__(self) -> int:
        # Identity hash, consistent with the identity-based __eq__ above.
        return id(self)
# Exemple #6
# 0
class TestResult(db.Model, BaseModel):
    """Stored outcome of running a test (evaluation) against a model."""

    LOG_TYPE = LogMessage.RUN_TEST

    STATUS_QUEUED = 'Queued'
    STATUS_IMPORTING = 'Importing'
    STATUS_IMPORTED = 'Imported'
    STATUS_IN_PROGRESS = 'In Progress'
    STATUS_STORING = 'Storing'
    STATUS_COMPLETED = 'Completed'
    STATUS_ERROR = 'Error'

    # Every status a test can be in.
    STATUSES = [
        STATUS_QUEUED, STATUS_IMPORTING, STATUS_IMPORTED, STATUS_IN_PROGRESS,
        STATUS_STORING, STATUS_COMPLETED, STATUS_ERROR
    ]

    # Statuses that mean the test is still running (not terminal).
    TEST_STATUSES = [
        STATUS_QUEUED, STATUS_IMPORTING, STATUS_IMPORTED, STATUS_IN_PROGRESS,
        STATUS_STORING
    ]

    __tablename__ = 'test_result'

    name = db.Column(db.String(200), nullable=False)
    status = db.Column(db.Enum(*STATUSES, name='test_statuses'))
    error = db.Column(db.String(300))

    model_id = db.Column(db.Integer, db.ForeignKey('model.id'))
    model = relationship(Model, backref=backref('tests', cascade='all,delete'))
    model_name = db.Column(db.String(200))

    data_set_id = db.Column(db.Integer,
                            db.ForeignKey('data_set.id', ondelete='SET NULL'))
    dataset = relationship(DataSet, foreign_keys=[data_set_id])

    examples_count = db.Column(db.Integer)
    examples_fields = db.Column(postgresql.ARRAY(db.String))
    examples_size = db.Column(db.Float)

    parameters = db.Column(JSONType)
    classes_set = db.Column(postgresql.ARRAY(db.String))
    accuracy = db.Column(db.Float)
    roc_auc = db.Column(JSONType)
    metrics = db.Column(JSONType)
    memory_usage = db.Column(db.Integer)

    # Pickled dict of per-segment vectorized data, stored on S3 and
    # loaded lazily (deferred).
    vect_data = deferred(db.Column(S3File))
    fill_weights = db.Column(db.Boolean, default=False)

    def __repr__(self):
        return '<TestResult {0}>'.format(self.name)

    def get_vect_data(self, num, segment):
        """Return row *num* (a global row index) from *segment*'s matrix.

        ``vect_data`` unpickles to a dict mapping segment name -> matrix.
        ``offset`` accumulates row counts up to *and including* the target
        segment, so ``num - offset`` is a negative index counted from the
        end of that segment's matrix.
        """
        from pickle import loads
        data = loads(self.vect_data)
        offset = 0
        for name, matrix in data.items():
            offset += matrix.shape[0]
            if name == segment:
                break
        import numpy
        if isinstance(data[segment], numpy.ndarray):
            # Bug fix: take the row from the segment's matrix; the previous
            # code indexed the dict itself with an integer (KeyError).
            return data[segment][num - offset]
        # Sparse matrix: extract one row as a flat Python list.
        return data[segment].getrow(num - offset).todense().tolist()[0]

    def set_error(self, error, commit=True):
        """Mark this test as failed, storing a truncated error message."""
        # The error column is String(300); fix the former off-by-one that
        # truncated to 299 characters.
        self.error = str(error)[:300]
        self.status = TestResult.STATUS_ERROR
        if commit:
            self.save()

    @property
    def exports(self):
        """CSV-export async tasks currently attached to this test."""
        from api.async_tasks.models import AsyncTask
        return AsyncTask.get_current_by_object(
            self,
            'api.model_tests.tasks.get_csv_results',
        )

    @property
    def db_exports(self):
        """DB-export async tasks currently attached to this test."""
        from api.async_tasks.models import AsyncTask
        return AsyncTask.get_current_by_object(
            self,
            'api.model_tests.tasks.export_results_to_db',
        )

    @property
    def confusion_matrix_calculations(self):
        """Confusion-matrix async tasks (in any status) for this test."""
        from api.async_tasks.models import AsyncTask
        return AsyncTask.get_current_by_object(
            self,
            'api.model_tests.tasks.calculate_confusion_matrix',
            statuses=AsyncTask.STATUSES)

    @property
    def can_edit(self):
        """Editable only when the parent model is editable."""
        if not self.model.can_edit:
            self.reason_msg = self.model.reason_msg
            return False
        return super(TestResult, self).can_edit

    @property
    def can_delete(self):
        """Deletable only when the parent model is deletable."""
        if not self.model.can_delete:
            self.reason_msg = self.model.reason_msg
            return False
        return super(TestResult, self).can_delete

    def delete(self):
        """Delete this test and release the lock held on its dataset."""
        ds = self.dataset
        super(TestResult, self).delete()
        # data_set_id uses ondelete='SET NULL', so the dataset may be gone;
        # guard against crashing on a missing relation.
        if ds is not None:
            ds.unlock()

    @property
    def test_in_progress(self):
        """True while the test is in a non-terminal status."""
        return self.status in self.TEST_STATUSES
# Exemple #7
# 0
class Item(Base):
    """A Wikidata item with a location, used for matching against OSM."""

    __tablename__ = 'item'

    #: Wikidata numeric id (the N in "QN"); assigned externally, not auto-incremented.
    item_id = Column(Integer, primary_key=True, autoincrement=False)
    #: PostGIS point location of the item.
    location = Column(Geography('POINT', spatial_index=True), nullable=False)
    #: Title of the English Wikipedia article, if any.
    enwiki = Column(String, index=True)
    #: Raw Wikidata entity JSON (labels, claims, sitelinks, ...).
    entity = Column(postgresql.JSON)
    categories = Column(postgresql.ARRAY(String))
    old_tags = Column(postgresql.ARRAY(String))
    #: Derived "Q<item_id>" string.
    qid = column_property('Q' + cast(item_id, String))
    #: Location as EWKT text, loaded lazily.
    ewkt = column_property(func.ST_AsEWKT(location), deferred=True)
    query_label = Column(String, index=True)
    # extract = Column(String)
    #: Names mined from the Wikipedia extract.
    extract_names = Column(postgresql.ARRAY(String))

    #: Tag rows attached to this item (see the ``tags`` proxy below).
    db_tags = relationship('ItemTag',
                           collection_class=set,
                           cascade='save-update, merge, delete, delete-orphan',
                           backref='item')

    #: Plain set of tag/key strings, proxied through ``db_tags``.
    tags = association_proxy('db_tags', 'tag_or_key')

    #: "Instance of" items, via the item_isa association table.
    isa = relationship('IsA', secondary='item_isa')
    #: Wiki extracts keyed by site code (see the ``extracts`` proxy below).
    wiki_extracts = relationship('Extract',
                                 collection_class=attribute_mapped_collection('site'),
                                 cascade='save-update, merge, delete, delete-orphan',
                                 backref='item')
    #: Mapping of site code -> extract text, proxied through ``wiki_extracts``.
    extracts = association_proxy('wiki_extracts', 'extract')

    @property
    def extract(self):
        """Return the English-Wikipedia extract text, or None when absent."""
        return self.extracts.get('enwiki')

    @extract.setter
    def extract(self, value):
        """Store *value* as the English-Wikipedia extract."""
        self.extracts['enwiki'] = value

    @property
    def labels(self):
        """Map language code -> label string from the entity, or None without an entity."""
        entity = self.entity
        if not entity:
            return None
        return {
            entry['language']: entry['value']
            for entry in entity['labels'].values()
        }

    def lang_text(self, field_name, lang='en'):
        """Return entity[field_name][lang], falling back to 'en', then any language.

        Returns None when the field is missing or empty.
        """
        values = self.entity.get(field_name)
        if not values:
            return None
        if lang in values:
            return values[lang]
        fallback = 'en' if 'en' in values else next(iter(values))
        return values[fallback]

    def label(self, lang='en'):
        """Best human-readable label in *lang*, or a stored fallback name."""
        if not self.entity:
            # No cached Wikidata entity: fall back to stored names.
            return self.enwiki or self.query_label or None

        entry = self.lang_text('labels', lang=lang)
        return entry['value'] if entry else None

    def label_detail(self, lang='en'):
        """Full label entry (language/value dict) for *lang*, with fallback."""
        return self.lang_text('labels', lang=lang)

    def description(self, lang='en'):
        """Entity description entry for *lang*; None when there is no entity."""
        if self.entity:
            return self.lang_text('descriptions', lang=lang)
        return None

    def label_and_description(self, languages):
        """Label and description in the first of *languages* that has a label.

        Returns a dict with 'lang', 'label' and 'description' keys, or None
        when no language matches.
        """
        labels = self.entity['labels']
        descriptions = self.entity['descriptions']
        for lang in languages:
            code = lang.wikimedia_language_code
            if code not in labels:
                continue

            desc_entry = descriptions.get(code)
            return {
                'lang': lang,
                'label': labels[code]['value'],
                'description': desc_entry['value'] if desc_entry else None,
            }
        return None

    def label_best_language(self, languages):
        """Label in the first matching language from *languages*, else the default label."""
        if languages:
            labels = self.entity['labels']
            for lang in languages:
                # Accept either plain language-code strings or language objects.
                code = lang if isinstance(lang, str) else lang.wikimedia_language_code
                if code in labels:
                    return labels[code]['value']
        return self.label()

    def languages(self):
        """Language codes present in either entity labels or *wiki sitelinks."""
        entity = self.entity
        # Skip regional variants like 'en-gb'.
        label_langs = {code for code in entity['labels'] if '-' not in code}
        # 'enwiki' -> 'en', etc.; ignore non-wiki sitelinks.
        wiki_langs = {site[:-4] for site in entity['sitelinks'] if site.endswith('wiki')}

        return label_langs | wiki_langs

    def more_endings_from_isa(self):
        """Label values of this item's 'instance of' items, restricted to this item's languages."""
        langs = self.languages()
        # avoid trimming "cottage", it produces too many mismatches
        skip_isa = {
            5783996,  # cottage
        }
        endings = set()
        for isa in self.isa:
            if isa.item_id in skip_isa:
                continue
            if not isa.entity or 'missing' in isa.entity:
                continue
            labels = isa.entity.get('labels', {})
            endings.update(
                label['value']
                for lang, label in labels.items()
                if lang in langs
            )
        return endings

    @classmethod
    def get_by_qid(cls, qid):
        """Look up an Item from a QID string like 'Q42'; None when malformed."""
        if not qid or len(qid) < 2:
            return None
        if qid[0].upper() != 'Q' or not qid[1:].isdigit():
            return None
        return cls.query.get(qid[1:])

    def label_and_qid(self, lang='en'):
        """'Label (QID)' when a label exists, otherwise just the QID."""
        label = self.label(lang=lang)
        if not label:
            return self.qid
        return '{label} ({item.qid})'.format(label=label, item=self)

    @property
    def wikidata_uri(self):
        """Canonical wikidata.org page URL for this item."""
        return 'https://www.wikidata.org/wiki/Q{}'.format(self.item_id)

    def get_lat_lon(self):
        """Query PostGIS for this item's (latitude, longitude) pair."""
        lat = func.ST_Y(self.location)
        lon = func.ST_X(self.location)
        return session.query(lat, lon).one()

    def get_osm_url(self, zoom=18):
        """openstreetmap.org URL centred on this item at the given *zoom*."""
        lat, lon = self.get_lat_lon()
        return f'https://www.openstreetmap.org/#map={zoom}/{lat}/{lon}'

    def get_extra_tags(self):
        """OSM tags implied by this item's 'instance of' QIDs.

        Drops the 4-character prefix of each configured key before adding it.
        """
        return {
            tag[4:]
            for qid in self.instanceof()
            for tag in wikidata.extra_keys.get(qid, [])
            if tag
        }

    @property
    def ref_keys(self):
        """'ref:nrhp=<id>' tag strings, one per NRHP reference number."""
        refs = self.ref_nrhp() or []
        return {f'ref:nrhp={v}' for v in refs}

    def disused_tags(self):
        """Lifecycle-prefixed variants (disused:/was:/...) of this item's tags.

        Only tags whose key appears in ``disused_prefix_key`` get prefixed.
        """
        prefixes = ('disused', 'was', 'abandoned', 'demolished',
                    'destroyed', 'ruins', 'historic')
        result = set()
        for tag in self.tags:
            if tag == 'amenity':  # too generic
                continue
            if tag == 'shop' and self.is_shopping_street():
                continue
            key, _, _ = tag.partition('=')
            if key in disused_prefix_key:
                for prefix in prefixes:
                    result.add(prefix + ':' + tag)
        return result

    def calculate_tags(self, ignore_tags=None):
        """Compute the set of OSM tags to match this item against.

        :param ignore_tags: optional iterable of tags to exclude from the result.
        :return: set of tag/key strings.
        """
        ignore_tags = set(ignore_tags or [])

        # Ignore some overly generic tags from Wikidata objects:
        # facility (Q13226383)            - osm tag: amenity
        # geographic location (Q2221906)  - osm tag: location
        # artificial entity (Q16686448)   - osm tag: man_made
        #
        # Bug fix: set.update() treats each positional argument as an
        # iterable, so passing bare strings added individual characters
        # ('a', 'm', 'e', ...) instead of the three tag names.  Pass one
        # iterable containing the tag names instead.
        ignore_tags.update({'amenity', 'location', 'man_made'})

        instanceof = self.instanceof()

        tags = (self.get_extra_tags() | set(self.tags)) - ignore_tags
        if matcher.could_be_building(tags, instanceof):
            tags.add('building')
            if any(n.lower().endswith(' church') for n in self.names().keys()):
                tags.update({'amenity=place_of_worship', 'building=church'})

        if 'shop' in tags and self.is_shopping_street():
            tags.discard('shop')

        tags |= self.ref_keys | self.disused_tags()
        # Re-apply the exclusions in case ref_keys/disused_tags added one back.
        tags -= ignore_tags
        return tags

    def instanceof(self):
        if self.entity and 'claims' not in self.entity:
            subject = f'missing claims: {self.qid}'
            body = f'''
Wikidata entity is missing claims

https://www.wikidata.org/wiki/{self.qid}
'''
            mail.send_mail(subject, body)

        if not self.entity or 'claims' not in self.entity:
            return []

        return [i['mainsnak']['datavalue']['value']['id']
                for i in self.entity['claims'].get('P31', [])
                if 'datavalue' in i['mainsnak']]

    def identifiers(self):
        """Flattened set of (values, label) pairs across all identifier OSM keys."""
        return {
            pair
            for pairs in self.get_item_identifiers().values()
            for pair in pairs
        }

    def identifier_values(self):
        """Map OSM key -> set of identifier values drawn from the entity claims."""
        values_by_key = defaultdict(set)
        for osm_key, pairs in self.get_item_identifiers().items():
            for values, _label in pairs:
                values_by_key[osm_key].update(values)
        return values_by_key

    def get_item_identifiers(self):
        """Collect external identifiers from the entity's claims.

        Returns a dict mapping OSM key -> list of ((values...), label) pairs,
        where *values* are the claim values for the Wikidata property and
        *label* is a human-readable name for that property.  Returns an empty
        dict when there is no cached entity.
        """
        if not self.entity:
            return {}

        # (Wikidata property, candidate OSM keys, human-readable label)
        property_map = [
            ('P238', ['iata'], 'IATA airport code'),
            ('P239', ['icao'], 'ICAO airport code'),
            ('P240', ['faa', 'ref'], 'FAA airport code'),
            # ('P281', ['addr:postcode', 'postal_code'], 'postal code'),
            ('P296', ['ref', 'ref:train', 'railway:ref'], 'station code'),
            ('P300', ['ISO3166-2'], 'ISO 3166-2 code'),
            ('P590', ['ref:gnis', 'GNISID', 'gnis:id', 'gnis:feature_id'], 'USGS GNIS ID'),
            ('P649', ['ref:nrhp'], 'NRHP reference number'),
            ('P722', ['uic_ref'], 'UIC station code'),
            ('P782', ['ref'], 'LAU (local administrative unit)'),
            ('P836', ['ref:gss'], 'UK Government Statistical Service code'),
            ('P856', ['website', 'contact:website', 'url'], 'website'),
            ('P882', ['nist:fips_code'], 'FIPS 6-4 (US counties)'),
            ('P883', ['state_code', 'ref', 'nist:fips_code'], 'FIPS 5-2 (code for US states)'),
            # A UIC id can be a IBNR, but not every IBNR is an UIC id
            ('P954', ['uic_ref'], 'IBNR ID'),
            ('P1216', ['HE_ref'], 'National Heritage List for England number'),
            ('P2253', ['ref:edubase'], 'EDUBase URN'),
            ('P2815', ['esr:user', 'ref', 'ref:train'], 'ESR station code'),
            ('P3425', ['ref', 'ref:SIC'], 'Natura 2000 site ID'),
            ('P3562', ['seamark:light:reference'], 'Admiralty number'),
            ('P4755', ['ref', 'ref:train', 'ref:crs', 'crs', 'nat_ref'], 'UK railway station code'),
            ('P4803', ['ref', 'ref:train'], 'Amtrak station code'),
            ('P6082', ['nycdoitt:bin'], 'NYC Building Identification Number'),
        ]

        tags = defaultdict(list)
        for claim, osm_keys, label in property_map:
            values = [i['mainsnak']['datavalue']['value']
                      for i in self.entity['claims'].get(claim, [])
                      if 'datavalue' in i['mainsnak']]
            if not values:
                continue
            if claim == 'P782':
                # Also accept the bare numeric part of LAU codes.
                values += [m.group(1) for m in (re_lau_code.match(v) for v in values) if m]
            for osm_key in osm_keys:
                tags[osm_key].append((tuple(values), label))
        return tags

    def ref_nrhp(self):
        """NRHP reference numbers (P649 claim values) for this item.

        Returns an empty list when there is no cached entity.
        """
        if not self.entity:
            return []
        # Guard 'claims' and 'datavalue' like instanceof() and
        # get_item_identifiers() do: entities returned by the Wikidata API
        # can omit either, and the unguarded lookups raised KeyError.
        return [i['mainsnak']['datavalue']['value']
                for i in self.entity.get('claims', {}).get('P649', [])
                if 'datavalue' in i['mainsnak']]

    def is_cricket_ground(self):
        """True when any known name for this item mentions cricket."""
        for name in self.names():
            if 'cricket' in name.lower():
                return True
        return False

    def names(self):
        """Gather candidate names for matching this item against OSM objects.

        Returns a dict mapping name -> list of (source, detail) pairs, or
        None when no usable names are found.  Names come from the Wikidata
        entity plus any mined extract names; names of 'part of' (P361)
        parents are used to derive prefix-stripped variants.
        """
        # Collect the names of any 'part of' parent items, so that e.g.
        # "<Parent> <Child>" can also be matched as plain "<Child>".
        part_of_names = set()
        if self.entity and 'claims' in self.entity:
            for p361 in self.entity['claims'].get('P361', []):
                try:
                    part_of_id = p361['mainsnak']['datavalue']['value']['numeric-id']
                except KeyError:
                    continue
                # TODO: download item if it doesn't exist
                part_of_item = Item.query.get(part_of_id)
                if part_of_item:
                    names = part_of_item.names()
                    if names:
                        part_of_names |= names.keys()

        d = wikidata.names_from_entity(self.entity) or defaultdict(list)
        for name in self.extract_names or []:
            d[name].append(('extract', 'enwiki'))

        # Add prefix-stripped variants of names that start with a parent name.
        # Names whose only source is an image filename are skipped here.
        for name, sources in list(d.items()):
            if len(sources) == 1 and sources[0][0] == 'image':
                continue
            for part_of_name in part_of_names:
                if not name.startswith(part_of_name):
                    continue
                prefix_removed = name[len(part_of_name):].strip()
                if prefix_removed not in d:
                    d[prefix_removed] = sources

        # A terrace of buildings can be illustrated with a photo of a single building.
        # We try to determine if this is the case and avoid using the filename of the
        # single building photo as a name for matching.

        def has_digit(s):
            return any(c.isdigit() for c in s)

        image_names = {name for name, sources in d.items()
                       if len(sources) == 1 and
                          sources[0][0] == 'image' and
                          has_digit(name)}
        if not image_names:
            return dict(d) or None

        # Drop image-only names whose number falls within the range of
        # another known (non-image) numbered name.
        other_names = {n for n in d.keys() if n not in image_names and has_digit(n)}
        for image_name in image_names:
            for other in other_names:
                if not utils.is_in_range(other, image_name):
                    continue
                del d[image_name]
                break

        return dict(d) or None

    def refresh_extract_names(self):
        """Re-derive the candidate names from the stored Wikipedia extract."""
        extracted = wikipedia.html_names(self.extract)
        self.extract_names = extracted

    def get_oql(self):
        """Build Overpass QL clauses for this item's tags, limited to a
        1 km radius around the item's location."""
        lat, lon = session.query(func.ST_Y(self.location),
                                 func.ST_X(self.location)).one()
        # The filter only depends on the location, so build it once.
        osm_filter = 'around:1000,{:f},{:f}'.format(lat, lon)
        clauses = []
        for tag in self.tags:
            clauses.extend(oql_from_tag(tag, False, osm_filter))
        return clauses

    def coords(self):
        """Return this item's (latitude, longitude) from the database."""
        query = session.query(func.ST_Y(self.location), func.ST_X(self.location))
        return query.one()

    def image_filenames(self):
        """Return the Commons filenames from this item's image (P18) claims."""
        claims = self.entity['claims'].get('P18', [])
        return [claim['mainsnak']['datavalue']['value'] for claim in claims]

    def defunct_cats(self):
        """Return the item's categories that suggest it no longer exists.

        Each matching category is reported at most once, even when it
        contains several of the keywords (the previous per-keyword list
        comprehension appended the same category multiple times, e.g.
        'Abandoned former railway lines' matched 'abandon', 'abandoned'
        and 'former').
        """
        words = {'demolish', 'disestablishment', 'defunct', 'abandon', 'mothballed',
                 'decommission', 'former', 'dismantled', 'disused', 'disassembled',
                 'abandoned', 'disband', 'scrapped', 'unused', 'closed', 'condemned',
                 'redundant'}

        # Categories that contain a keyword but describe things that
        # still exist and should be matched.
        exclude = {'Defunct baseball venues in the United States',
                   'Defunct National Football League venues',
                   'Enclosed roller coasters',
                   'Former civil parishes in England',
                   'Capitals of former nations',
                   'Former state capitals in the United States'}

        found = []
        for item_cat in self.categories or []:
            if item_cat in exclude:
                continue
            if item_cat.startswith('Former') and item_cat.endswith('Railway stations'):
                # Category:Railway stations in the United Kingdom by former operator
                # contains subcategories named 'Former ... Railway stations.'
                # Most of the stations in these subcategories still exist.
                # If a station doesn't exist it'll be in other defunct categories.
                continue
            lc_item_cat = item_cat.lower()
            if any(word in lc_item_cat for word in words):
                found.append(item_cat)
        return found

    def get_claim(self, pid):
        """Return every value claimed for the given property id (e.g. 'P31')."""
        statements = self.entity['claims'].get(pid, [])
        return [statement['mainsnak']['datavalue']['value']
                for statement in statements]

    @property
    def criteria(self):
        """The item's tags as 'Tag:k=v' / 'Key:k' criteria strings."""
        prefixed = set()
        for tag in self.tags or []:
            prefix = 'Tag:' if '=' in tag else 'Key:'
            prefixed.add(prefix + tag)
        return prefixed

    @property
    def category_map(self):
        """OSM tags implied by this item's categories, or None without any."""
        if not self.categories:
            return None
        return matcher.categories_to_tags_map(self.categories)

    def sitelinks(self):
        """The entity's sitelinks dict, or None when no entity is loaded."""
        entity = self.entity
        return entity.get('sitelinks') if entity else None

    def is_hamlet(self):
        """True when the item is a hamlet (Q5084) or in a 'Hamlets ' category."""
        if 'Q5084' in self.instanceof():
            return True
        return any(cat.startswith('Hamlets ')
                   for cat in self.categories or [])

    def is_shopping_street(self):
        """True when a category marks this item as a shopping street."""
        categories = self.categories or []
        return any(cat.startswith('Shopping street ') for cat in categories)

    def is_farm_house(self):
        """True when the item is an instance of farmhouse (Q489357)."""
        instances = self.instanceof()
        return 'Q489357' in instances

    def is_mountain_range(self):
        """True when the item is an instance of mountain range (Q46831)."""
        instances = self.instanceof()
        return 'Q46831' in instances

    def is_farmhouse(self):
        """True when the item is an instance of farmhouse (Q489357).

        Duplicate of is_farm_house(); kept for backward compatibility and
        delegated so the QID check lives in one place.
        """
        return self.is_farm_house()

    def is_church_building(self):
        """True when the item is an instance of church building (Q16970)."""
        instances = self.instanceof()
        return 'Q16970' in instances

    def is_reservoir(self):
        """True when the item is an instance of reservoir (Q131681)."""
        instances = self.instanceof()
        return 'Q131681' in instances

    def is_proposed(self):
        """Is this item a proposed building or structure?"""
        categories = self.categories or []
        for cat in categories:
            if cat.startswith('Disused '):
                # disused stations that might be reopened could be in OSM
                return False
        for cat in categories:
            if cat.startswith('Proposed '):
                return True
        # proposed building or structure (Q811683)
        return 'Q811683' in (self.instanceof() or [])

    def is_a_historic_district(self):
        """True for historic-district items, excluding contributing
        properties and categories that describe individual buildings."""
        cats = self.categories or []
        looks_like_hd = ('Q15243209' in (self.instanceof() or []) or
                         any(cat.startswith('Historic district') for cat in cats))
        if not looks_like_hd:
            return False
        building_prefixes = ('Historic district contributing properties',
                             'Churches ', 'Towers ', 'Educational institutions ',
                             'Schools ', 'Houses ', 'Historic house ', 'Museums ',
                             'Buildings and structures ')
        for cat in cats:
            if cat.startswith(building_prefixes) or ' buildings ' in cat:
                return False
        return True

    def is_a_station(self):
        """True when the item looks like a rail or metro station,
        judged by instance-of QIDs or category keywords."""
        station_qids = {
            'Q55488',    # railway station
            'Q928830',   # metro station
            'Q4663385',  # former railway station
        }
        if station_qids & set(self.instanceof()):
            return True

        keywords = {'railway stations', 'railroad stations', 'train stations',
                    'metro stations', 'subway stations'}

        for item_cat in self.categories or []:
            lc_cat = item_cat.lower()
            if any(keyword in lc_cat for keyword in keywords):
                return True
        return False

    def is_a_stadium(self):
        """True when the item looks like a stadium or sports venue,
        judged by instance-of QIDs or category keywords."""
        venue_qids = {
            'Q483110',   # stadium
            'Q641226',   # arena
            'Q1076486',  # sports venue
        }
        if venue_qids & set(self.instanceof()):
            return True

        keywords = {'football venues', 'ice rinks', 'stadiums', 'velodromes',
                    'cycling venues', 'grounds'}

        for item_cat in self.categories or []:
            lc_cat = item_cat.lower()
            if any(keyword in lc_cat for keyword in keywords):
                return True
        return False

    def is_a_school(self):
        """True when the item's OSM tags include amenity=school."""
        return any(tag == 'amenity=school' for tag in self.tags)

    def skip_item_during_match(self):
        """cebwiki and svwiki contain lots of poor quality stubs
        best to skip items that are only cebwiki or cebwiki + svwiki
        """
        if self.is_proposed():  # skip proposed building or structure
            return True
        if not self.entity:
            return False

        isa_qids = set(self.instanceof())

        # Local-government bodies: always skip these.
        always_skip = {
            'Q21561328',  # English unitary authority council
            'Q21451686',  # Scottish unitary authority council
            'Q21451695',  # Scottish local authority council
            'Q1160920',   # unitary authority
        }
        if isa_qids & always_skip:
            return True

        # Administrative areas that are always worth matching.
        never_skip = {
            'Q349084',    # district of England
            'Q1002812',   # metropolitan borough
            'Q1006876',   # borough in the United Kingdom
            'Q1187580',   # non-metropolitan district
            'Q1136601',   # unitary authority of England
        }
        if isa_qids & never_skip:
            return False

        sitelinks = self.entity.get('sitelinks')
        if not sitelinks:
            return False
        sites = set(sitelinks)
        return sites in ({'cebwiki'}, {'cebwiki', 'svwiki'})

    def get_names(self):
        """Return a dict of name -> [(source, lang_or_site), ...].

        Sources are labels, sitelinks and — when the item has few other
        names — aliases.  Languages in ``skip_lang`` are ignored.  Returns
        None when no entity is loaded.
        """
        item = self.entity
        if not item:
            return

        names = defaultdict(list)
        skip_lang = {'ar', 'arc', 'pl'}
        # only include aliases if there are less than 6 other names
        if len(item.get('sitelinks', {})) < 6 and len(item['labels']) < 6:
            for k, v in item.get('aliases', {}).items():
                if k in skip_lang:
                    continue
                if len(v) > 3:
                    continue
                for name in v:
                    names[name].append(('alias', k))
        for k, v in item['labels'].items():
            if k in skip_lang:
                continue
            names[v].append(('label', k))
        for k, v in item.get('sitelinks', {}).items():
            # Sitelink keys look like 'arwiki'; skip the ones whose language
            # code is in skip_lang.  (The previous check, `k + 'wiki' in
            # skip_lang`, could never match, so these sitelinks were never
            # skipped.)
            if k.endswith('wiki') and k[:-len('wiki')] in skip_lang:
                continue
            names[v].append(('sitelink', k))
        return names

    def first_paragraph_all(self, languages):
        """Yield {'lang', 'extract'} for each language with a first paragraph.

        Falsy entries in ``languages`` are ignored.
        """
        for lang in languages:
            if not lang:
                continue
            extract = self.first_paragraph_language(lang.site_name)
            if not extract:
                continue
            yield {'lang': lang, 'extract': extract}

    def first_paragraph(self, languages=None):
        """Return the first available {'lang', 'extract'}, or None.

        Defaults to English when no language list is given.
        """
        if languages is None:
            languages = [Language.get_by_code('en')]
        for lang in languages:
            extract = self.first_paragraph_language(lang.site_name)
            if not extract:
                continue
            return {'lang': lang, 'extract': extract}

    def first_paragraph_language(self, lang):
        """Return the first real paragraph of the stored extract for ``lang``.

        Leading empty-paragraph markup is stripped.  Returns None when no
        extract is stored, and the whole remaining text when no closing
        </p> tag can be found.
        """
        extract = self.extracts.get(lang)
        if not extract:
            return None

        empty_paragraphs = ['<p><span></span></p>',
                            '<p><span></span>\n</p>',
                            '<p><span></span>\n\n</p>',
                            '<p>\n<span></span>\n</p>',
                            '<p>\n\n<span></span>\n</p>',
                            '<p>.\n</p>',
                            '<p class="mw-empty-elt">\n</p>',
                            '<p class="mw-empty-elt">\n\n</p>',
                            '<p class="mw-empty-elt">\n\n\n</p>']

        # Repeatedly strip any known empty-paragraph prefix.
        text = extract.strip()
        stripped = True
        while stripped:
            stripped = False
            for empty in empty_paragraphs:
                if text.startswith(empty):
                    text = text[len(empty):].strip()
                    stripped = True

        close_tag = '</p>'
        end = text.find(close_tag)
        if end == -1:
            # FIXME: e-mail admin
            return text
        return text[:end + len(close_tag)]

    def place_names(self):
        """Names of the places containing this item, plus variants with a
        leading 'Isle of ' removed, as a set."""
        names = set()
        for place in self.places:
            address = place.address
            if not isinstance(address, list):
                continue
            names |= {part['name'] for part in address
                      if part['type'] != 'country_code'}
        start = 'Isle of '
        trimmed = {utils.drop_start(name, start)
                   for name in names if name.startswith(start)}
        return names | trimmed

    def set_country_code(self):
        """Copy the first available country code from this item's places
        into the request-global ``g``."""
        for place in self.places:
            code = place.country_code
            if not code:
                continue
            g.country_code = code
            return

    @property
    def is_nhle(self):
        '''Is this a National Heritage List for England item?'''
        entity = self.entity
        # `entity and ...` (not bool()) keeps the original falsy return
        # value (e.g. None) when no entity is loaded.
        return entity and 'P1216' in entity.get('claims', {})

    def is_instance_of(self, isa_filter):
        """True when any of the item's ISA entries, or one of their direct
        subclass-of (P279) targets, is in ``isa_filter``."""
        for isa in self.isa:
            if isa.qid in isa_filter:
                return True
            subclass_claims = isa.entity['claims'].get('P279', [])
            for claim in subclass_claims:
                claimed_id = claim['mainsnak']['datavalue']['value']['id']
                if claimed_id in isa_filter:
                    return True

    def place_languages(self):
        """Aggregate language usage over this item's places.

        Returns at most ten entries of {'wikidata', 'osm', 'code'},
        ordered by descending wikidata count.
        """
        totals = {}
        for place in self.places:
            for lang in place.languages():
                code = lang['code']
                entry = totals.get(code)
                if entry is None:
                    totals[code] = {
                        'wikidata': lang['wikidata'],
                        'osm': lang['osm'] or 0,
                        'code': code,
                    }
                else:
                    for key in ('wikidata', 'osm'):
                        entry[key] += lang[key] or 0

        ranked = sorted(totals.values(),
                        key=lambda entry: entry['wikidata'],
                        reverse=True)
        return ranked[:10]
Example #8
0
           None,
           ForeignKey(DATASET.c.id),
           nullable=False),
    PrimaryKeyConstraint('dataset_ref', 'classifier'),
    UniqueConstraint('source_dataset_ref', 'dataset_ref'),
)

# --- S3-driver specific Tables ---
# Table describing a single-band, N-dimensional array stored in S3.
S3_DATASET = Table(
    's3_dataset', _core.S3_METADATA,
    Column('id', postgres.UUID(as_uuid=True), primary_key=True),
    Column('base_name', String, nullable=False),
    Column('band', String, nullable=False),
    Column('bucket', String, nullable=False),  # S3 bucket holding the data
    # Per-dimension overall shape of the array.
    Column('macro_shape',
           postgres.ARRAY(Integer, dimensions=1),
           nullable=False),
    Column('chunk_size', postgres.ARRAY(Integer, dimensions=1),
           nullable=False), Column('numpy_type', String, nullable=False),
    Column('dimensions', postgres.ARRAY(String, dimensions=1), nullable=False),
    # Flags per dimension — presumably whether that dimension is regularly
    # spaced, selecting regular_index vs irregular_index; TODO confirm.
    Column('regular_dims',
           postgres.ARRAY(Boolean, dimensions=1),
           nullable=False),
    Column('regular_index',
           postgres.ARRAY(Float(), dimensions=2),
           nullable=False),
    Column('irregular_index',
           postgres.ARRAY(Float(), dimensions=2),
           nullable=False))
'''An S3 dataset: an N-dimensional single band
An example record may look like:
Example #9
0
from sqlalchemy.dialects import postgresql

from auth import models
from its_on.utils import AwareDateTime

metadata = sa.MetaData()

switches = sa.Table(
    'switches',
    metadata,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('is_active', sa.Boolean, default=True),
    sa.Column('is_hidden', sa.Boolean, default=False),
    sa.Column('name', sa.String(255), unique=True),
    sa.Column('group', sa.String(255)),
    sa.Column('groups', postgresql.ARRAY(sa.String(255))),
    sa.Column('version', sa.Integer, nullable=True),
    sa.Column('ttl',
              sa.Integer,
              nullable=False,
              default=lambda: settings.FLAG_TTL_DAYS),
    sa.Column('comment', sa.Text),
    sa.Column('created_at',
              AwareDateTime,
              default=lambda: datetime.datetime.utcnow(),
              nullable=True),
    sa.Column(
        'updated_at',
        AwareDateTime,
        default=lambda: datetime.datetime.utcnow(),
        onupdate=lambda: datetime.datetime.utcnow(),
Example #10
0
from builtins import object
from sqlalchemy import Column, String, Integer, Text, ForeignKey, UniqueConstraint
from sqlalchemy.dialects import postgresql
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import relationship, backref
from sqlalchemy.sql import text
from sqlalchemy.types import PickleType

from fonduer.snorkel.models.context import Context
from fonduer.snorkel.models.context import Document
from fonduer.snorkel.models.context import TemporaryContext
from fonduer.snorkel.models.context import TemporarySpan
from fonduer.snorkel.models.context import split_stable_id
from fonduer.snorkel.models.meta import snorkel_postgres

# Use native Postgres array columns when the backend supports them, and
# fall back to pickled Python lists otherwise so the same models still work.
INT_ARRAY_TYPE = postgresql.ARRAY(Integer) if snorkel_postgres else PickleType
STR_ARRAY_TYPE = postgresql.ARRAY(String) if snorkel_postgres else PickleType


class Webpage(Document):
    """A Document subtype for web pages, stored in the 'webpage' table.

    Shares its primary key with the parent document row; deleting the
    document cascades to this row.
    """
    __tablename__ = 'webpage'
    id = Column(Integer,
                ForeignKey('document.id', ondelete='CASCADE'),
                primary_key=True)
    # Page metadata for the crawled document.
    url = Column(String)
    host = Column(String)
    page_type = Column(String)
Example #11
0
class ImplicitSpan(Context, TemporaryImplicitSpan):
    """A span of characters that may not have appeared verbatim in the source text.

    It is identified by Context id, character-index start and end (inclusive),
    as well as a key representing what 'expander' function drew the ImplicitSpan
    from an existing Span, and a position (where position=0 corresponds to the
    first ImplicitSpan produced from the expander function).

    The character-index start and end point to the segment of text that was
    expanded to produce the ImplicitSpan.
    """
    __tablename__ = 'implicit_span'
    # Shares its primary key with the generic Context row; deleting the
    # context cascades to this row.
    id = Column(Integer,
                ForeignKey('context.id', ondelete='CASCADE'),
                primary_key=True)
    sentence_id = Column(Integer,
                         ForeignKey('context.id', ondelete='CASCADE'),
                         primary_key=True)
    char_start = Column(Integer, nullable=False)
    char_end = Column(Integer, nullable=False)
    expander_key = Column(String, nullable=False)
    position = Column(Integer, nullable=False)
    text = Column(String)
    # Token-level annotations: native Postgres arrays when available,
    # pickled Python lists otherwise.
    if snorkel_postgres:
        words = Column(postgresql.ARRAY(String), nullable=False)
        lemmas = Column(postgresql.ARRAY(String))
        pos_tags = Column(postgresql.ARRAY(String))
        ner_tags = Column(postgresql.ARRAY(String))
        dep_parents = Column(postgresql.ARRAY(Integer))
        dep_labels = Column(postgresql.ARRAY(String))
        page = Column(postgresql.ARRAY(Integer))
        top = Column(postgresql.ARRAY(Integer))
        left = Column(postgresql.ARRAY(Integer))
        bottom = Column(postgresql.ARRAY(Integer))
        right = Column(postgresql.ARRAY(Integer))
    else:
        words = Column(PickleType, nullable=False)
        lemmas = Column(PickleType)
        pos_tags = Column(PickleType)
        ner_tags = Column(PickleType)
        dep_parents = Column(PickleType)
        dep_labels = Column(PickleType)
        page = Column(PickleType)
        top = Column(PickleType)
        left = Column(PickleType)
        bottom = Column(PickleType)
        right = Column(PickleType)
    meta = Column(PickleType)

    # One row per (sentence, char range, expander, position).
    __table_args__ = (UniqueConstraint(sentence_id, char_start, char_end,
                                       expander_key, position), )

    __mapper_args__ = {
        'polymorphic_identity': 'implicit_span',
        'inherit_condition': (id == Context.id)
    }

    sentence = relationship('Context',
                            backref=backref('implicit_spans',
                                            cascade='all, delete-orphan'),
                            foreign_keys=sentence_id)

    def _get_instance(self, **kwargs):
        # Factory used by the temporary-span machinery to build persistent rows.
        return ImplicitSpan(**kwargs)

    # We redefine these to use default semantics, overriding the operators inherited from TemporarySpan
    def __eq__(self, other):
        return self is other

    def __ne__(self, other):
        return self is not other

    def __hash__(self):
        return id(self)
Example #12
0
def upgrade():
    """Create the core tables (datasets, geostore, userareas, versions,
    assets, tasks) with their indexes, install the latest-version trigger,
    and make userareas inherit from geostore."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "datasets",
        sa.Column("dataset", sa.String(), nullable=False),
        sa.Column(
            "metadata",
            postgresql.JSONB(astext_type=sa.Text()),
            nullable=True,
            server_default=sa.text("'{}'"),
        ),
        sa.Column("created_on",
                  sa.DateTime(),
                  server_default=sa.text("now()"),
                  nullable=True),
        sa.Column("updated_on",
                  sa.DateTime(),
                  server_default=sa.text("now()"),
                  nullable=True),
        sa.PrimaryKeyConstraint("dataset"),
    )
    op.create_table(
        "geostore",
        sa.Column("gfw_geostore_id", postgresql.UUID(), nullable=False),
        sa.Column("gfw_geojson", sa.TEXT(), nullable=True),
        sa.Column("gfw_area__ha", sa.Numeric(), nullable=True),
        sa.Column("gfw_bbox", postgresql.ARRAY(sa.Numeric()), nullable=True),
        sa.Column("created_on",
                  sa.DateTime(),
                  server_default=sa.text("now()"),
                  nullable=True),
        sa.Column("updated_on",
                  sa.DateTime(),
                  server_default=sa.text("now()"),
                  nullable=True),
        sa.PrimaryKeyConstraint("gfw_geostore_id"),
    )
    # Hash index: geostore rows are only ever looked up by exact id.
    op.create_index(
        "geostore_gfw_geostore_id_idx",
        "geostore",
        ["gfw_geostore_id"],
        unique=False,
        postgresql_using="hash",
    )
    op.create_table(
        "userareas",
        sa.Column("gfw_geostore_id", postgresql.UUID(), nullable=False),
        sa.Column("gfw_geojson", sa.TEXT(), nullable=True),
        sa.Column("gfw_area__ha", sa.Numeric(), nullable=True),
        sa.Column("gfw_bbox", postgresql.ARRAY(sa.Numeric()), nullable=True),
        sa.Column("created_on",
                  sa.DateTime(),
                  server_default=sa.text("now()"),
                  nullable=True),
        sa.Column("updated_on",
                  sa.DateTime(),
                  server_default=sa.text("now()"),
                  nullable=True),
        sa.PrimaryKeyConstraint("gfw_geostore_id"),
    )
    op.create_index(
        "userarea_gfw_geostore_id_idx",
        "userareas",
        ["gfw_geostore_id"],
        unique=False,
        postgresql_using="hash",
    )
    op.create_table(
        "versions",
        sa.Column("dataset", sa.String(), nullable=False),
        sa.Column("version", sa.String(), nullable=False),
        sa.Column("is_latest",
                  sa.Boolean(),
                  nullable=False,
                  server_default=sa.text("false")),
        sa.Column("is_mutable",
                  sa.Boolean(),
                  nullable=False,
                  server_default=sa.text("false")),
        sa.Column("status",
                  sa.String(),
                  nullable=False,
                  server_default=sa.text("'pending'")),
        sa.Column(
            "metadata",
            postgresql.JSONB(astext_type=sa.Text()),
            nullable=True,
            server_default=sa.text("'{}'"),
        ),
        sa.Column(
            "change_log",
            postgresql.ARRAY(postgresql.JSONB(astext_type=sa.Text())),
            nullable=True,
            server_default=sa.text("array[]::jsonb[]"),
        ),
        sa.Column("created_on",
                  sa.DateTime(),
                  server_default=sa.text("now()"),
                  nullable=True),
        sa.Column("updated_on",
                  sa.DateTime(),
                  server_default=sa.text("now()"),
                  nullable=True),
        sa.ForeignKeyConstraint(
            ["dataset"],
            ["datasets.dataset"],
            name="fk",
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("dataset", "version"),
    )
    op.create_table(
        "assets",
        # NOTE(review): uuid_generate_v4() requires the uuid-ossp extension —
        # presumably created in an earlier migration; confirm.
        sa.Column(
            "asset_id",
            postgresql.UUID(),
            nullable=False,
            server_default=sa.text("uuid_generate_v4()"),
        ),
        sa.Column("dataset", sa.String(), nullable=False),
        sa.Column("version", sa.String(), nullable=False),
        sa.Column("asset_type", sa.String(), nullable=False),
        sa.Column("asset_uri", sa.String(), nullable=False),
        sa.Column("status",
                  sa.String(),
                  nullable=False,
                  server_default=sa.text("'pending'")),
        sa.Column("is_managed",
                  sa.Boolean(),
                  nullable=False,
                  server_default=sa.text("true")),
        sa.Column("is_default",
                  sa.Boolean(),
                  nullable=False,
                  server_default=sa.text("false")),
        sa.Column(
            "creation_options",
            postgresql.JSONB(astext_type=sa.Text()),
            nullable=False,
            server_default=sa.text("'{}'"),
        ),
        sa.Column(
            "metadata",
            postgresql.JSONB(astext_type=sa.Text()),
            nullable=False,
            server_default=sa.text("'{}'"),
        ),
        sa.Column(
            "fields",
            postgresql.JSONB(astext_type=sa.Text()),
            nullable=False,
            server_default=sa.text("'[]'"),
        ),
        sa.Column("extent",
                  postgresql.JSONB(astext_type=sa.Text()),
                  nullable=True),
        sa.Column("stats",
                  postgresql.JSONB(astext_type=sa.Text()),
                  nullable=True),
        sa.Column(
            "change_log",
            postgresql.ARRAY(postgresql.JSONB(astext_type=sa.Text())),
            nullable=False,
            server_default=sa.text("array[]::jsonb[]"),
        ),
        sa.Column("created_on",
                  sa.DateTime(),
                  server_default=sa.text("now()"),
                  nullable=True),
        sa.Column("updated_on",
                  sa.DateTime(),
                  server_default=sa.text("now()"),
                  nullable=True),
        sa.ForeignKeyConstraint(
            ["dataset", "version"],
            ["versions.dataset", "versions.version"],
            name="fk",
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("asset_id"),
        sa.UniqueConstraint("asset_uri", name="uq_asset_uri"),
    )
    op.create_table(
        "tasks",
        sa.Column("task_id", postgresql.UUID(), nullable=False),
        sa.Column("asset_id", postgresql.UUID(), nullable=False),
        sa.Column("status", sa.String(), nullable=False),
        sa.Column(
            "change_log",
            postgresql.ARRAY(postgresql.JSONB(astext_type=sa.Text())),
            nullable=True,
        ),
        sa.Column("created_on",
                  sa.DateTime(),
                  server_default=sa.text("now()"),
                  nullable=True),
        sa.Column("updated_on",
                  sa.DateTime(),
                  server_default=sa.text("now()"),
                  nullable=True),
        sa.ForeignKeyConstraint(
            ["asset_id"],
            ["assets.asset_id"],
            name="fk",
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("task_id"),
    )

    # NOTE(review): assumes the reset_latest() function already exists in
    # the database (defined in an earlier migration) — confirm.
    op.execute("""
        CREATE TRIGGER latest_version
            BEFORE INSERT OR UPDATE
            ON public.versions
            FOR EACH ROW
            EXECUTE PROCEDURE public.reset_latest();
        """)

    # PostgreSQL table inheritance: userareas rows also show up in
    # queries against geostore.
    op.execute("""
        ALTER TABLE
            public.userareas
            INHERIT geostore
        """)
def upgrade():
    """Create the roombooking.room_principals ACL table with its partial
    unique indexes, and add a protection_mode column to rooms."""
    if context.is_offline_mode():
        # _upgrade_permissions() below needs a live connection.
        raise Exception('This upgrade is only possible in online mode')
    op.create_table(
        'room_principals',
        sa.Column('read_access', sa.Boolean(), nullable=False),
        sa.Column('full_access', sa.Boolean(), nullable=False),
        sa.Column('permissions', postgresql.ARRAY(sa.String()),
                  nullable=False),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('room_id', sa.Integer(), nullable=False, index=True),
        sa.Column('local_group_id', sa.Integer(), nullable=True, index=True),
        sa.Column('mp_group_provider', sa.String(), nullable=True),
        sa.Column('mp_group_name', sa.String(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True, index=True),
        sa.Column('type',
                  PyIntEnum(PrincipalType,
                            exclude_values={
                                PrincipalType.email, PrincipalType.network,
                                PrincipalType.event_role
                            }),
                  nullable=False),
        sa.CheckConstraint('NOT read_access', name='no_read_access'),
        sa.CheckConstraint(
            'read_access OR full_access OR array_length(permissions, 1) IS NOT NULL',
            name='has_privs'),
        # The per-type constraints ensure exactly the right reference
        # column is set for user / local group / multipass group rows.
        sa.CheckConstraint(
            'type != 1 OR (local_group_id IS NULL AND mp_group_name IS NULL AND '
            'mp_group_provider IS NULL AND user_id IS NOT NULL)',
            name='valid_user'),
        sa.CheckConstraint(
            'type != 2 OR (mp_group_name IS NULL AND mp_group_provider IS NULL AND user_id IS NULL AND '
            'local_group_id IS NOT NULL)',
            name='valid_local_group'),
        sa.CheckConstraint(
            'type != 3 OR (local_group_id IS NULL AND user_id IS NULL AND mp_group_name IS NOT NULL AND '
            'mp_group_provider IS NOT NULL)',
            name='valid_multipass_group'),
        sa.ForeignKeyConstraint(['local_group_id'], ['users.groups.id']),
        sa.ForeignKeyConstraint(['room_id'], ['roombooking.rooms.id']),
        sa.ForeignKeyConstraint(['user_id'], ['users.users.id']),
        sa.PrimaryKeyConstraint('id'),
        schema='roombooking')
    op.create_index(None,
                    'room_principals', ['mp_group_provider', 'mp_group_name'],
                    schema='roombooking')
    # Partial unique indexes: one ACL entry per user/group per room.
    op.create_index('ix_uq_room_principals_user',
                    'room_principals', ['user_id', 'room_id'],
                    unique=True,
                    schema='roombooking',
                    postgresql_where=sa.text('type = 1'))
    op.create_index('ix_uq_room_principals_local_group',
                    'room_principals', ['local_group_id', 'room_id'],
                    unique=True,
                    schema='roombooking',
                    postgresql_where=sa.text('type = 2'))
    op.create_index('ix_uq_room_principals_mp_group',
                    'room_principals',
                    ['mp_group_provider', 'mp_group_name', 'room_id'],
                    unique=True,
                    schema='roombooking',
                    postgresql_where=sa.text('type = 3'))
    # Temporary server default so existing rows get a value; cleared below.
    op.add_column('rooms',
                  sa.Column(
                      'protection_mode',
                      PyIntEnum(ProtectionMode,
                                exclude_values={ProtectionMode.inheriting}),
                      nullable=False,
                      server_default=str(ProtectionMode.protected.value)),
                  schema='roombooking')
    _upgrade_permissions()
    op.alter_column('rooms',
                    'protection_mode',
                    server_default=None,
                    schema='roombooking')
def upgrade():
    """Create the initial schema for the proposals application.

    Defines the tenant root (``companies``) and its dependents: clients,
    users, proposals and their content blocks, shared (snapshot) copies of
    proposals/blocks, comment threads/comments on both, and a full-text
    search table for importing sections.
    """
    ### commands auto generated by Alembic - please adjust! ###
    # Tenant root: every other table references companies.id.
    op.create_table(
        'companies', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=64), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('currency', sa.String(length=10), nullable=False),
        sa.Column('logo_url', sa.String(length=4096), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    # Client names are unique per company, not globally.
    op.create_table(
        'clients', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('company_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=64), nullable=False),
        sa.Column('contacts', postgresql.ARRAY(sa.String()), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ['company_id'],
            ['companies.id'],
        ), sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name', 'company_id'))
    # Emails are globally unique; password and tokens are raw BYTEA.
    op.create_table(
        'users', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('company_id', sa.Integer(), nullable=False),
        sa.Column('username', sa.String(length=64), nullable=False),
        sa.Column('email', sa.String(length=255), nullable=False),
        sa.Column('password', postgresql.BYTEA(length=255), nullable=False),
        sa.Column('is_admin', sa.Boolean(), nullable=False),
        sa.Column('activation_token', postgresql.BYTEA(), nullable=False),
        sa.Column('reset_password_token', postgresql.BYTEA(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ['company_id'],
            ['companies.id'],
        ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('email'))
    # share_uid is the short public identifier used for sharing; unique.
    op.create_table(
        'proposals', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('company_id', sa.Integer(), nullable=False),
        sa.Column('client_id', sa.Integer(), nullable=True),
        sa.Column('share_uid', sa.String(length=10), nullable=False),
        sa.Column('title', sa.String(length=100), nullable=False),
        sa.Column('tags', postgresql.ARRAY(sa.String()), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('changed_status_at', sa.DateTime(), nullable=False),
        sa.Column('status',
                  sa.Enum('draft',
                          'sent',
                          'won',
                          'lost',
                          'trash',
                          name='ProposalStatusEnum'),
                  nullable=False),
        sa.Column('cover_image_url', sa.String(length=2048), nullable=False),
        sa.ForeignKeyConstraint(
            ['client_id'],
            ['clients.id'],
        ), sa.ForeignKeyConstraint(
            ['company_id'],
            ['companies.id'],
        ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('share_uid'))
    # Proposal content units, keyed by client-generated UUID; 'data' holds
    # the type-specific payload as JSONB, 'ordering' the display position.
    op.create_table(
        'blocks', sa.Column('uid', postgresql.UUID(), nullable=False),
        sa.Column('proposal_id', sa.Integer(), nullable=True),
        sa.Column('type',
                  sa.Enum('section',
                          'paragraph',
                          'subtitle',
                          'uli',
                          'oli',
                          'image',
                          'cost_table',
                          'quote',
                          'embed',
                          name='BlockTypeEnum'),
                  nullable=False),
        sa.Column('data', postgresql.JSONB(), nullable=False),
        sa.Column('ordering', sa.Integer(), nullable=False),
        sa.Column('version', sa.Integer(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ['proposal_id'],
            ['proposals.id'],
        ), sa.PrimaryKeyConstraint('uid'))
    # Tracks the last time each user viewed a proposal; one row per pair.
    op.create_table(
        'proposal_last_seen', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('proposal_id', sa.Integer(), nullable=False),
        sa.Column('seen_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ['proposal_id'],
            ['proposals.id'],
        ), sa.ForeignKeyConstraint(
            ['user_id'],
            ['users.id'],
        ), sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('user_id', 'proposal_id'))
    # Versioned snapshot of a proposal taken when it is sent out.
    op.create_table(
        'shared_proposals', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('proposal_id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(), nullable=False),
        sa.Column('cover_image_url', sa.String(length=2048), nullable=False),
        sa.Column('version', sa.Integer(), nullable=False),
        sa.Column('sent_to', postgresql.ARRAY(sa.String()), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ['proposal_id'],
            ['proposals.id'],
        ), sa.PrimaryKeyConstraint('id'))
    # Comment threads are anchored to a block; the block FK is deferred so
    # blocks and threads can be (re)written in the same transaction.
    op.create_table(
        'comment_threads', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('proposal_id', sa.Integer(), nullable=False),
        sa.Column('block_uid', postgresql.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('resolved', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(['block_uid'], ['blocks.uid'],
                                ondelete='CASCADE',
                                initially='DEFERRED',
                                deferrable=True),
        sa.ForeignKeyConstraint(['proposal_id'], ['proposals.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    # Denormalized search table over section blocks; TSVECTOR columns back
    # full-text search on titles and content.
    op.create_table(
        'import_section_search',
        sa.Column('company_id', sa.Integer(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('section_uid', postgresql.UUID(), nullable=False),
        sa.Column('proposal_title', sa.Unicode(), nullable=False),
        sa.Column('title', sa.Unicode(), nullable=False),
        sa.Column('ts_title', postgresql.TSVECTOR(), nullable=False),
        sa.Column('client', sa.Unicode(), nullable=False),
        sa.Column('all_content', sa.Unicode(), nullable=False),
        sa.Column('ts_all_content', postgresql.TSVECTOR(), nullable=False),
        sa.ForeignKeyConstraint(
            ['company_id'],
            ['companies.id'],
        ),
        sa.ForeignKeyConstraint(['section_uid'], ['blocks.uid'],
                                ondelete='CASCADE',
                                initially='DEFERRED',
                                deferrable=True),
        sa.PrimaryKeyConstraint('section_uid'))
    # Snapshot copies of blocks belonging to a shared proposal.
    # NOTE(review): reuses the enum type name 'BlockTypeEnum' already
    # declared for 'blocks' above; on PostgreSQL a second CREATE TYPE with
    # the same name would fail — confirm SQLAlchemy deduplicates it here.
    op.create_table(
        'shared_blocks', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('uid', postgresql.UUID(), nullable=False),
        sa.Column('shared_proposal_id', sa.Integer(), nullable=False),
        sa.Column('type',
                  sa.Enum('section',
                          'paragraph',
                          'subtitle',
                          'uli',
                          'oli',
                          'image',
                          'cost_table',
                          'quote',
                          'embed',
                          name='BlockTypeEnum'),
                  nullable=False),
        sa.Column('data', postgresql.JSONB(), nullable=False),
        sa.Column('ordering', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['shared_proposal_id'],
            ['shared_proposals.id'],
        ), sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('shared_proposal_id', 'uid'))
    # Internal comments by authenticated users; deleting a thread cascades.
    op.create_table(
        'comments', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('thread_id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('comment', sa.Unicode(), nullable=False),
        sa.ForeignKeyConstraint(['thread_id'], ['comment_threads.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(
            ['user_id'],
            ['users.id'],
        ), sa.PrimaryKeyConstraint('id'))
    # Comment threads on the shared (public) copy of a proposal.
    op.create_table(
        'shared_comment_threads', sa.Column('id', sa.Integer(),
                                            nullable=False),
        sa.Column('shared_proposal_id', sa.Integer(), nullable=False),
        sa.Column('block_id', sa.Integer(), nullable=False),
        sa.Column('block_uid', postgresql.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('resolved', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(
            ['block_id'],
            ['shared_blocks.id'],
        ),
        sa.ForeignKeyConstraint(
            ['shared_proposal_id'],
            ['shared_proposals.id'],
        ), sa.PrimaryKeyConstraint('id'))
    # Comments on shared proposals; 'from_client' distinguishes external
    # commenters (identified only by 'username') from internal ones.
    op.create_table(
        'shared_comments', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('username', sa.String(), nullable=False),
        sa.Column('from_client', sa.Boolean(), nullable=False),
        sa.Column('thread_id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('comment', sa.Unicode(), nullable=False),
        sa.ForeignKeyConstraint(
            ['thread_id'],
            ['shared_comment_threads.id'],
        ), sa.PrimaryKeyConstraint('id'))
def upgrade():
    """Create the formal-offer / payment schema.

    Core tables are ``formal_offer`` (a priced offer), ``payment_order``
    (a payer's order against an offer) and ``payment_proof`` (evidence of
    a completed payment).  The ``*_signal`` tables are outgoing-message
    buffers keyed by the same identifiers.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Signal emitted when a formal offer is canceled.
    op.create_table(
        'canceled_formal_offer_signal',
        sa.Column('inserted_at_ts',
                  sa.TIMESTAMP(timezone=True),
                  nullable=False),
        sa.Column('payee_creditor_id', sa.BigInteger(), nullable=False),
        sa.Column('offer_id', sa.BigInteger(), nullable=False),
        sa.PrimaryKeyConstraint('payee_creditor_id', 'offer_id'))
    # Signal emitted when a formal offer is created; carries the secret
    # needed to view the offer.
    op.create_table(
        'created_formal_offer_signal',
        sa.Column('inserted_at_ts',
                  sa.TIMESTAMP(timezone=True),
                  nullable=False),
        sa.Column('payee_creditor_id', sa.BigInteger(), nullable=False),
        sa.Column('offer_id', sa.BigInteger(), nullable=False),
        sa.Column('offer_announcement_id', sa.BigInteger(), nullable=False),
        sa.Column('offer_secret', postgresql.BYTEA(), nullable=False),
        sa.Column('offer_created_at_ts',
                  sa.TIMESTAMP(timezone=True),
                  nullable=False),
        sa.PrimaryKeyConstraint('payee_creditor_id', 'offer_id'))
    # Signal emitted when a specific payment order fails; PK mirrors the
    # payment_order table's composite key.
    op.create_table(
        'failed_payment_signal',
        sa.Column('inserted_at_ts',
                  sa.TIMESTAMP(timezone=True),
                  nullable=False),
        sa.Column('payee_creditor_id', sa.BigInteger(), nullable=False),
        sa.Column('offer_id', sa.BigInteger(), nullable=False),
        sa.Column('payer_creditor_id', sa.BigInteger(), nullable=False),
        sa.Column('payer_payment_order_seqnum', sa.Integer(), nullable=False),
        sa.Column('details',
                  postgresql.JSON(astext_type=sa.Text()),
                  nullable=False),
        sa.PrimaryKeyConstraint('payee_creditor_id', 'offer_id',
                                'payer_creditor_id',
                                'payer_payment_order_seqnum'))
    # Signal emitted when a reciprocal payment (payee -> payer) fails.
    op.create_table(
        'failed_reciprocal_payment_signal',
        sa.Column('inserted_at_ts',
                  sa.TIMESTAMP(timezone=True),
                  nullable=False),
        sa.Column('payee_creditor_id', sa.BigInteger(), nullable=False),
        sa.Column('offer_id', sa.BigInteger(), nullable=False),
        sa.Column('details',
                  postgresql.JSON(astext_type=sa.Text()),
                  nullable=False),
        sa.PrimaryKeyConstraint('payee_creditor_id', 'offer_id'))
    # Signal instructing the transfer subsystem to commit (or dismiss,
    # when committed_amount is 0) a previously prepared transfer.
    op.create_table(
        'finalize_prepared_transfer_signal',
        sa.Column('inserted_at_ts',
                  sa.TIMESTAMP(timezone=True),
                  nullable=False),
        sa.Column('payee_creditor_id', sa.BigInteger(), nullable=False),
        sa.Column('signal_id',
                  sa.BigInteger(),
                  autoincrement=True,
                  nullable=False),
        sa.Column('debtor_id', sa.BigInteger(), nullable=False),
        sa.Column('sender_creditor_id', sa.BigInteger(), nullable=False),
        sa.Column('transfer_id', sa.BigInteger(), nullable=False),
        sa.Column('committed_amount', sa.BigInteger(), nullable=False),
        sa.Column('transfer_info',
                  postgresql.JSON(astext_type=sa.Text()),
                  nullable=False), sa.CheckConstraint('committed_amount >= 0'),
        sa.PrimaryKeyConstraint('payee_creditor_id', 'signal_id'))
    # The offer itself; column comments below document the invariants
    # enforced (and deliberately NOT enforced) by the schema.
    op.create_table(
        'formal_offer',
        sa.Column(
            'payee_creditor_id',
            sa.BigInteger(),
            nullable=False,
            comment=
            'The payee, also the one that is responsible to supply the goods or services.'
        ),
        sa.Column('offer_id',
                  sa.BigInteger(),
                  autoincrement=True,
                  nullable=False),
        sa.Column(
            'offer_secret',
            postgresql.BYTEA(),
            nullable=False,
            comment=
            'A random sequence of bytes that the potential payer should know in order to view the offer or make a payment.'
        ),
        sa.Column(
            'debtor_ids',
            postgresql.ARRAY(sa.BigInteger(), dimensions=1),
            nullable=False,
            comment=
            'The payment should go through one of these debtors. Each element in this array must have a corresponding element in the `debtor_amounts` array. Note thatthe database schema allows some or all of the elements to be `NULL`, which should be handled with care.'
        ),
        sa.Column(
            'debtor_amounts',
            postgresql.ARRAY(sa.BigInteger(), dimensions=1),
            nullable=False,
            comment=
            'Each element in this array must have a corresponding element in the `debtor_ids` array. Note that the database schema allows one debtor ID to occur more than once in the `debtor_ids` array, each time with a different corresponding amount. The payer is expected to transfer one of the amounts corresponding to the chosen debtor. Also note that the database schema allows some or all of the `debtor_amounts` elements to be `NULL` or negative numbers, which should be handled as if they were zeros.'
        ),
        sa.Column(
            'description',
            postgresql.JSON(astext_type=sa.Text()),
            nullable=True,
            comment=
            'A more or less detailed description of the goods or services that will be supplied if a payment is made to the offer. `NULL` means that the payee has no responsibilities whatsoever.'
        ),
        sa.Column(
            'reciprocal_payment_debtor_id',
            sa.BigInteger(),
            nullable=True,
            comment=
            'The ID of the debtor through which the reciprocal payment will go.If this is not NULL, when a payment is made to the offer, an automatic reciprocal payment will be made from the payee to the payer.'
        ),
        sa.Column(
            'reciprocal_payment_amount',
            sa.BigInteger(),
            server_default=sa.text('0'),
            nullable=False,
            comment='The amount to be transferred in the reciprocal payment.'),
        sa.Column('valid_until_ts',
                  sa.TIMESTAMP(timezone=True),
                  nullable=False,
                  comment='The offer will not be valid after this deadline.'),
        sa.Column('created_at_ts', sa.TIMESTAMP(timezone=True),
                  nullable=False),
        sa.CheckConstraint('array_ndims(debtor_amounts) = 1'),
        sa.CheckConstraint('array_ndims(debtor_ids) = 1'),
        sa.CheckConstraint(
            'cardinality(debtor_ids) = cardinality(debtor_amounts)'),
        sa.CheckConstraint('reciprocal_payment_amount >= 0'),
        sa.CheckConstraint(
            'reciprocal_payment_debtor_id IS NOT NULL OR reciprocal_payment_amount = 0'
        ),
        sa.PrimaryKeyConstraint('payee_creditor_id', 'offer_id'),
        comment=
        'Represents an offer to supply some goods or services for a stated price.'
    )
    # A payer's order against an offer.  NOTE(review): the server default
    # below pulls from 'payment_coordinator_request_id_seq', which is not
    # created in this migration — presumably created elsewhere; confirm.
    op.create_table(
        'payment_order',
        sa.Column('payee_creditor_id', sa.BigInteger(), nullable=False),
        sa.Column('offer_id', sa.BigInteger(), nullable=False),
        sa.Column('payer_creditor_id',
                  sa.BigInteger(),
                  nullable=False,
                  comment='The payer.'),
        sa.Column(
            'payer_payment_order_seqnum',
            sa.Integer(),
            nullable=False,
            comment=
            'A number generated by the payer. It is used to distinguish between several payment orders issued against one offer.'
        ),
        sa.Column(
            'debtor_id',
            sa.BigInteger(),
            nullable=False,
            comment=
            'The ID of the debtor through which the payment should go. Must be one of the values in the `formal_offer.debtor_ids` array.'
        ),
        sa.Column(
            'amount',
            sa.BigInteger(),
            nullable=False,
            comment=
            'The amount to be transferred in the payment. Must be equal to the corresponding value in the `formal_offer.debtor_amounts` array.'
        ),
        sa.Column(
            'reciprocal_payment_debtor_id',
            sa.BigInteger(),
            nullable=True,
            comment=
            'A copy of the corresponding `formal_offer.reciprocal_payment_debtor_id`.'
        ),
        sa.Column(
            'reciprocal_payment_amount',
            sa.BigInteger(),
            nullable=False,
            comment=
            'A copy of the corresponding `formal_offer.reciprocal_payment_amount`.'
        ),
        sa.Column(
            'payer_note',
            postgresql.JSON(astext_type=sa.Text()),
            nullable=True,
            comment=
            'A note from the payer. Can be anything that the payer wants the payee to see.If the payment is successful, the content of this column will be copied over to the `payment_proof.payer_note` column. After that, the value can be set to NULL.'
        ),
        sa.Column(
            'proof_secret',
            postgresql.BYTEA(),
            nullable=True,
            comment=
            'A random sequence of bytes that the interested party should know in order to view the payment proof. If the payment is successful, the content of this column will be copied over to the `payment_proof.proof_secret` column. After that, the value can be set to NULL.'
        ),
        sa.Column(
            'payment_coordinator_request_id',
            sa.BigInteger(),
            server_default=sa.text(
                "nextval('payment_coordinator_request_id_seq')"),
            nullable=False,
            comment=
            'This is the value of the `coordinator_request_id` parameter, which has been sent with the `prepare_transfer` message for the payment. The value of `payee_creditor_id` is sent as the `coordinator_id` parameter. `coordinator_type` is "payment".'
        ),
        sa.Column(
            'payment_transfer_id',
            sa.BigInteger(),
            nullable=True,
            comment=
            'This value, along with `debtor_id` and `payer_creditor_id` uniquely identifies the prepared transfer for the payment.'
        ),
        sa.Column(
            'reciprocal_payment_transfer_id',
            sa.BigInteger(),
            nullable=True,
            comment=
            'When a reciprocal payment is required, this value along with `reciprocal_payment_debtor_id` and `payee_creditor_id` uniquely identifiesthe prepared transfer for the reciprocal payment. The reciprocal payment should be initiated only after the primary payment has been prepared successfully. The value of the `coordinator_request_id` parameter for the reciprocal payment should be `-payment_coordinator_request_id` (always a negative number). `coordinator_id` should be `payee_creditor_id`. `coordinator_type` should be "payment".'
        ),
        sa.Column(
            'finalized_at_ts',
            sa.TIMESTAMP(timezone=True),
            nullable=True,
            comment=
            'The moment at which the payment order was finalized. NULL means that the payment order has not been finalized yet.'
        ),
        sa.CheckConstraint('amount >= 0'),
        sa.CheckConstraint(
            'finalized_at_ts IS NOT NULL OR payer_note IS NOT NULL AND proof_secret IS NOT NULL'
        ),
        sa.CheckConstraint('payment_coordinator_request_id > 0'),
        sa.CheckConstraint('reciprocal_payment_amount >= 0'),
        sa.CheckConstraint(
            'reciprocal_payment_debtor_id IS NOT NULL OR reciprocal_payment_amount = 0'
        ),
        sa.PrimaryKeyConstraint('payee_creditor_id', 'offer_id',
                                'payer_creditor_id',
                                'payer_payment_order_seqnum'),
        comment=
        'Represents a recent order from a payer to make a payment to an offer. Note that finalized payment orders (failed or successful) must not be deleted right away. Instead, after they have been finalized, they should stay in the database for at least few days. This is necessary in order to prevent problems caused by message re-delivery.'
    )
    # Unique lookup from (payee, coordinator_request_id) to its order.
    op.create_index('idx_payment_coordinator_request_id',
                    'payment_order',
                    ['payee_creditor_id', 'payment_coordinator_request_id'],
                    unique=True)
    # Durable evidence of a completed payment; outlives the offer.
    op.create_table(
        'payment_proof',
        sa.Column('payee_creditor_id', sa.BigInteger(), nullable=False),
        sa.Column('proof_id',
                  sa.BigInteger(),
                  autoincrement=True,
                  nullable=False),
        sa.Column('proof_secret', postgresql.BYTEA(), nullable=False),
        sa.Column('payer_creditor_id', sa.BigInteger(), nullable=False),
        sa.Column(
            'debtor_id',
            sa.BigInteger(),
            nullable=False,
            comment='The ID of the debtor through which the payment went.'),
        sa.Column('amount', sa.BigInteger(), nullable=False),
        sa.Column('payer_note',
                  postgresql.JSON(astext_type=sa.Text()),
                  nullable=False),
        sa.Column('paid_at_ts', sa.TIMESTAMP(timezone=True), nullable=False),
        sa.Column('reciprocal_payment_debtor_id',
                  sa.BigInteger(),
                  nullable=True),
        sa.Column('reciprocal_payment_amount', sa.BigInteger(),
                  nullable=False),
        sa.Column('offer_id', sa.BigInteger(), nullable=False),
        sa.Column('offer_created_at_ts',
                  sa.TIMESTAMP(timezone=True),
                  nullable=False),
        sa.Column('offer_description',
                  postgresql.JSON(astext_type=sa.Text()),
                  nullable=True),
        sa.CheckConstraint('amount >= 0'),
        sa.CheckConstraint('reciprocal_payment_amount >= 0'),
        sa.CheckConstraint(
            'reciprocal_payment_debtor_id IS NOT NULL OR reciprocal_payment_amount = 0'
        ),
        sa.PrimaryKeyConstraint('payee_creditor_id', 'proof_id'),
        comment=
        'Represents an evidence that a payment has been made to an offer. (The corresponding offer has been deleted.)'
    )
    # Signal asking the transfer subsystem to prepare a transfer within
    # the [min_amount, max_amount] range.
    op.create_table(
        'prepare_transfer_signal',
        sa.Column('inserted_at_ts',
                  sa.TIMESTAMP(timezone=True),
                  nullable=False),
        sa.Column('payee_creditor_id', sa.BigInteger(), nullable=False),
        sa.Column('coordinator_request_id', sa.BigInteger(), nullable=False),
        sa.Column('min_amount', sa.BigInteger(), nullable=False),
        sa.Column('max_amount', sa.BigInteger(), nullable=False),
        sa.Column('debtor_id', sa.BigInteger(), nullable=False),
        sa.Column('sender_creditor_id', sa.BigInteger(), nullable=False),
        sa.Column('recipient_creditor_id', sa.BigInteger(), nullable=False),
        sa.CheckConstraint('max_amount >= min_amount'),
        sa.CheckConstraint('min_amount > 0'),
        sa.PrimaryKeyConstraint('payee_creditor_id', 'coordinator_request_id'))
    # Signal emitted when a payment completes; references the proof row.
    op.create_table(
        'successful_payment_signal',
        sa.Column('inserted_at_ts',
                  sa.TIMESTAMP(timezone=True),
                  nullable=False),
        sa.Column('payee_creditor_id', sa.BigInteger(), nullable=False),
        sa.Column('offer_id', sa.BigInteger(), nullable=False),
        sa.Column('payer_creditor_id', sa.BigInteger(), nullable=False),
        sa.Column('payer_payment_order_seqnum', sa.Integer(), nullable=False),
        sa.Column('debtor_id', sa.BigInteger(), nullable=False),
        sa.Column('amount', sa.BigInteger(), nullable=False),
        sa.Column('payer_note',
                  postgresql.JSON(astext_type=sa.Text()),
                  nullable=False),
        sa.Column('paid_at_ts', sa.TIMESTAMP(timezone=True), nullable=False),
        sa.Column('reciprocal_payment_debtor_id',
                  sa.BigInteger(),
                  nullable=True),
        sa.Column('reciprocal_payment_amount', sa.BigInteger(),
                  nullable=False),
        sa.Column('proof_id', sa.BigInteger(), nullable=False),
        sa.CheckConstraint('amount >= 0'),
        sa.CheckConstraint('reciprocal_payment_amount >= 0'),
        sa.CheckConstraint(
            'reciprocal_payment_debtor_id IS NOT NULL OR reciprocal_payment_amount = 0'
        ),
        sa.PrimaryKeyConstraint('payee_creditor_id', 'offer_id',
                                'payer_creditor_id',
                                'payer_payment_order_seqnum'))
Exemple #16
0
class Filterset(BaseScopedNameMixin, db.Model):
    """
    Store filters to display a filtered set of jobs scoped by a board on SEO friendly URLs

    Eg: `https://hasjob.co/f/machine-learning-jobs-in-bangalore`
    """

    __tablename__ = 'filterset'

    board_id = db.Column(None,
                         db.ForeignKey('board.id'),
                         nullable=False,
                         index=True)
    board = db.relationship(Board)
    parent = db.synonym('board')

    #: Welcome text
    description = db.Column(db.UnicodeText, nullable=False, default=u'')

    #: Associated job types
    types = db.relationship(JobType, secondary=filterset_jobtype_table)
    #: Associated job categories
    categories = db.relationship(JobCategory,
                                 secondary=filterset_jobcategory_table)
    #: Associated tags; auto_tags accepts plain titles and creates Tag
    #: rows on demand
    tags = db.relationship(Tag, secondary=filterset_tag_table)
    auto_tags = association_proxy('tags',
                                  'title',
                                  creator=lambda t: Tag.get(t, create=True))
    #: Associated domains; auto_domains accepts plain names
    domains = db.relationship(Domain, secondary=filterset_domain_table)
    auto_domains = association_proxy('domains',
                                     'name',
                                     creator=lambda d: Domain.get(d))
    #: Location geoname ids. `default=list` (a callable) rather than
    #: `default=[]`, so every new row gets a fresh list instead of all
    #: rows sharing one mutable default object.
    geonameids = db.Column(postgresql.ARRAY(db.Integer(), dimensions=1),
                           default=list,
                           nullable=False)
    remote_location = db.Column(db.Boolean,
                                default=False,
                                nullable=False,
                                index=True)
    pay_currency = db.Column(db.CHAR(3), nullable=True, index=True)
    pay_cash = db.Column(db.Integer, nullable=True, index=True)
    equity = db.Column(db.Boolean, nullable=False, default=False, index=True)
    keywords = db.Column(db.Unicode(250),
                         nullable=False,
                         default=u'',
                         index=True)

    def __repr__(self):
        return '<Filterset %s "%s">' % (self.board.title, self.title)

    @classmethod
    def get(cls, board, name):
        """Return the filterset with this name on this board, or None."""
        return cls.query.filter(cls.board == board,
                                cls.name == name).one_or_none()

    def url_for(self, action='view', _external=True, **kwargs):
        """Build a URL for this filterset, on the board's subdomain when
        the board is not the root board."""
        kwargs.setdefault('subdomain',
                          self.board.name if self.board.not_root else None)
        return super(Filterset, self).url_for(action,
                                              name=self.name,
                                              _external=_external,
                                              **kwargs)

    def to_filters(self, translate_geonameids=False):
        """Serialize this filterset into the filter dict used by search
        views. With `translate_geonameids`, the 'l' key carries location
        names instead of geoname ids."""
        location_names = []
        if translate_geonameids and self.geonameids:
            location_dict = location_geodata(self.geonameids)
            for geonameid in self.geonameids:
                # location_geodata returns related geonames as well
                # so we prune it down to our original list
                location_names.append(location_dict[geonameid]['name'])

        return {
            't': [jobtype.name for jobtype in self.types],
            'c': [jobcategory.name for jobcategory in self.categories],
            'k': [tag.name for tag in self.tags],
            'd': [domain.name for domain in self.domains],
            'l': location_names if translate_geonameids else self.geonameids,
            'currency': self.pay_currency,
            'pay': self.pay_cash,
            'equity': self.equity,
            'anywhere': self.remote_location,
            'q': self.keywords
        }

    @classmethod
    def from_filters(cls, board, filters):
        """Return the filterset on `board` matching `filters`, or None.

        For each facet: when the filter supplies values, the filterset
        must link to all of them (join + count check); when it does not,
        the filterset must have that facet empty/unset.
        """
        basequery = cls.query.filter(cls.board == board)

        if filters.get('t'):
            # Filterset must link to every requested job type name.
            basequery = basequery.join(filterset_jobtype_table).join(
                JobType).filter(JobType.name.in_(filters['t'])).group_by(
                    Filterset.id).having(
                        db.func.count(filterset_jobtype_table.c.filterset_id)
                        == len(filters['t']))
        else:
            # No types requested: filterset must have no type links.
            basequery = basequery.filter(~db.exists(
                db.select([1]).where(
                    Filterset.id == filterset_jobtype_table.c.filterset_id)))

        if filters.get('c'):
            basequery = basequery.join(filterset_jobcategory_table).join(
                JobCategory).filter(JobCategory.name.in_(
                    filters['c'])).group_by(Filterset.id).having(
                        db.func.count(filterset_jobcategory_table.c.
                                      filterset_id) == len(filters['c']))
        else:
            basequery = basequery.filter(~db.exists(
                db.select([1]).where(
                    Filterset.id ==
                    filterset_jobcategory_table.c.filterset_id)))

        if filters.get('k'):
            basequery = basequery.join(filterset_tag_table).join(Tag).filter(
                Tag.name.in_(filters['k'])).group_by(Filterset.id).having(
                    db.func.count(filterset_tag_table.c.filterset_id) == len(
                        filters['k']))
        else:
            basequery = basequery.filter(~db.exists(
                db.select([1]).where(
                    Filterset.id == filterset_tag_table.c.filterset_id)))

        if filters.get('d'):
            basequery = basequery.join(filterset_domain_table).join(
                Domain).filter(Domain.name.in_(filters['d'])).group_by(
                    Filterset.id).having(
                        db.func.count(filterset_domain_table.c.filterset_id) ==
                        len(filters['d']))
        else:
            basequery = basequery.filter(~db.exists(
                db.select([1]).where(
                    Filterset.id == filterset_domain_table.c.filterset_id)))

        if filters.get('l'):
            # Stored geonameids are compared against the sorted filter list.
            basequery = basequery.filter(
                cls.geonameids == sorted(filters['l']))
        else:
            basequery = basequery.filter(cls.geonameids == [])

        if filters.get('equity'):
            basequery = basequery.filter(cls.equity == True)  # NOQA
        else:
            basequery = basequery.filter(cls.equity == False)  # NOQA

        if filters.get('pay') and filters.get('currency'):
            basequery = basequery.filter(
                cls.pay_cash == filters['pay'],
                cls.pay_currency == filters['currency'])
        else:
            basequery = basequery.filter(cls.pay_cash == None,
                                         cls.pay_currency == None)  # NOQA

        if filters.get('q'):
            basequery = basequery.filter(cls.keywords == filters['q'])
        else:
            basequery = basequery.filter(cls.keywords == '')

        if filters.get('anywhere'):
            basequery = basequery.filter(cls.remote_location == True)  # NOQA
        else:
            basequery = basequery.filter(cls.remote_location == False)  # NOQA

        return basequery.one_or_none()
Exemple #17
0
class TestExample(db.Model, BaseModel):
    """A single example evaluated during a model test run.

    Stores the raw input, the true and predicted labels, the class
    probabilities, and (optionally) per-feature weighted input data used
    to explain the prediction.
    """

    __tablename__ = 'test_example'

    NONAME = 'noname'
    NOT_FILED_ID = '-1'

    example_id = db.Column(db.String(100))
    name = db.Column(db.String(100))
    # True label vs. the model's predicted label for this example.
    label = db.Column(db.String(100))
    pred_label = db.Column(db.String(100))
    num = db.Column(db.Integer)
    # Per-class probabilities produced by the model.
    prob = db.Column(postgresql.ARRAY(db.Float))

    data_input = db.Column(JSONType)
    weighted_data_input = db.Column(JSONType)

    test_result_id = db.Column(db.Integer, db.ForeignKey('test_result.id'))
    test_result = relationship('TestResult',
                               backref=backref('examples',
                                               cascade='all,delete'))
    test_name = db.Column(db.String(200))

    model_id = db.Column(db.Integer, db.ForeignKey('model.id'))
    model = relationship('Model')
    model_name = db.Column(db.String(200))

    def __repr__(self):
        return '<TestExample {0}>'.format(self.name)

    @property
    def parameters_weights(self):
        """Flatten ``weighted_data_input`` into a list of dicts with a
        non-zero ``weight``, sorted by descending weight. Nested parameter
        names are joined with ``->``."""
        res = []

        def sort_by_weight(val):
            # Negate so list.sort() orders by descending weight.
            return -val['weight']

        def go_tree(params, prefix=''):
            for name, val in params.iteritems():
                if 'weight' in val and val['weight'] != 0:
                    if prefix:
                        val['name'] = '{0}->{1}'.format(prefix, name)
                    else:
                        val['name'] = name
                    res.append(val)
                if 'weights' in val:
                    go_tree(val['weights'], prefix=name)
            return res

        go_tree(self.weighted_data_input)

        res.sort(key=sort_by_weight)
        return res

    @property
    def is_weights_calculated(self):
        """Whether weighted input data has been computed and stored."""
        return self.weighted_data_input and self.weighted_data_input != {}

    def calc_weighted_data(self):
        """Compute per-feature weighted input data for this example and
        persist it to ``weighted_data_input``.

        Returns None (and does nothing) when there is no ``data_input``.
        """
        if not self.data_input:
            return None

        from api.ml_models.helpers.features import get_features_vect_data
        model = self.model
        trainer = model.get_trainer()
        feature_model = trainer._feature_model
        segment = 'default'
        # Input keys are stored with '->' separators; the trainer expects '.'
        # (computed once here instead of duplicating it per branch).
        ndata = dict([(key.replace('->', '.'), val)
                      for key, val in self.data_input.iteritems()])
        if len(trainer.with_segmentation) > 0:
            data = trainer._apply_feature_types(ndata)
            # Segment name is the joined values of the group-by features.
            segment = "_".join([
                str(data[feature_name])
                for feature_name in trainer._feature_model.group_by
            ])
            features = trainer.features[segment]
            for feature_name in trainer._feature_model.group_by:
                features.pop(feature_name)
        else:
            try:
                features = trainer.features[segment]
            except Exception:
                # Trainer may not expose per-segment features; fall back to
                # the feature model's own definitions.
                features = feature_model.features

        trainer._prepare_data(iter([
            ndata,
        ]),
                              callback=None,
                              save_raw=False,
                              is_predict=True)
        vect_data1 = trainer._get_vectorized_data(
            segment, trainer._test_prepare_feature)

        vect = scipy.sparse.hstack(vect_data1)
        vect_data = vect.todense().tolist()[0]

        data = get_features_vect_data(vect_data, features.items(),
                                      feature_model.target_variable)

        from api.ml_models.helpers.weights import get_example_params
        segment = Segment.query.filter(Segment.name == segment,
                                       Segment.model == model)[0]
        model_weights = Weight.query.with_entities(
            Weight.name, Weight.value).filter(Weight.segment_id == segment.id)
        weighted_data = dict(
            get_example_params(model_weights, self.data_input, data))
        self.weighted_data_input = weighted_data
        self.save()
        # Trainers can hold large in-memory state; drop and collect eagerly.
        del trainer
        gc.collect()

    @classmethod
    def get_grouped(cls, field, model_id, test_result_id):
        """Group the run's examples by ``data_input[field]``.

        Returns a list of ``{field: group_value, 'list': [...]}`` dicts,
        where each inner entry carries the true label, predicted label and
        probabilities of one example.
        """
        # NOTE(review): `field` is interpolated into raw SQL below; callers
        # must not pass untrusted input here (SQL-injection risk).
        cursor = cls.query.filter_by(
            model_id=model_id, test_result_id=test_result_id
        ).with_entities(
            cls.pred_label,
            cls.label,
            cls.prob,
            # Selecting field from json object isn't supported by alchemy,
            # using literal column instead
            expression.literal_column("data_input->>'{!s}'".format(field)
                                      ).label('group'))

        groups = defaultdict(list)
        for row in cursor.all():
            # Row order matches with_entities: (pred_label, label, prob,
            # group). Previous code swapped 'label' and 'pred'.
            groups[row[3]].append({
                'label': row[1],
                'pred': row[0],
                'prob': row[2],
            })

        return [{
            field: key,
            'list': value
        } for key, value in groups.iteritems()]

    @classmethod
    def get_data(cls, test_result_id, fields):
        """Yield one dict per example with the requested ``fields``.

        ``'id'`` is mapped to ``example_id``; unknown fields are read from
        the ``data_input`` JSON column.
        """
        db_fields = []
        for field in fields:
            if field == 'id':
                field = 'example_id'
            db_field = getattr(cls, field, None)
            if db_field:
                db_fields.append(db_field)
            else:
                # Selecting field from json object isn't supported by alchemy,
                # using literal column instead
                db_fields.append(
                    expression.literal_column("data_input->>'{!s}'".format(
                        field.replace('data_input.', ''))).label(field))

        cursor = cls.query.filter_by(
            test_result_id=test_result_id).with_entities(*db_fields)

        for row in cursor.all():
            yield dict(zip(row.keys(), row))
 def __init__(self, arg, default=None, **kw):
     """Wrap *arg* in a generic SQL function whose type is an array.

     :param arg: column/expression; its type becomes the array element type
     :param default: stored on the instance (NOTE(review): presumably used
         by callers as a fallback result value — confirm)
     """
     # Result type is a Postgres ARRAY of the wrapped expression's type.
     self.type = postgresql.ARRAY(arg.type)
     self.default = default
     GenericFunction.__init__(self, arg, **kw)
Exemple #19
0
class _HStoreValsFunction(safunc.GenericFunction):
    """SQL ``avals(hstore)`` function: yields the store's values as text[]."""

    __return_type__ = pgdialect.ARRAY(satypes.Text)

    def __init__(self, store, **kw):
        # Hand the hstore expression to GenericFunction as the single
        # positional argument, then pin the SQL-level function name.
        super(_HStoreValsFunction, self).__init__(args=[store], **kw)
        self.name = 'avals'
def upgrade():
    """Schema migration: harden ``failed_logins_counter``, extend the
    executions/deployments/events/logs tables, and add the ``agents``,
    ``tasks_graphs`` and ``operations`` tables.

    NOTE(review): statement order matters — the explicit COMMIT below is
    required so the enum-value addition runs outside the migration
    transaction.
    """
    # In snapshots < 4.5.5  failed_logins_counter may be null, from 4.5.5
    # we want to make sure all null values will be replaced with zeros.
    op.execute("""
      UPDATE users
      SET failed_logins_counter = 0
      WHERE failed_logins_counter IS NULL;
    """)

    # Restore the NOT NULL constraint on the `failed_logins_counter` column.
    op.alter_column('users', 'failed_logins_counter', nullable=False)
    # server_default accepts string or SQL element only
    op.add_column(
        'executions',
        sa.Column('is_dry_run',
                  sa.Boolean(),
                  nullable=False,
                  server_default='f'))
    op.add_column('executions',
                  sa.Column('scheduled_for', UTCDateTime(), nullable=True))

    # Postgres does not allow ALTER TYPE ... ADD VALUE inside a transaction
    # block, so the migration transaction is committed first.
    op.execute('COMMIT')
    # Add new execution status
    op.execute("alter type execution_status add value 'scheduled'")
    op.add_column(
        'deployments',
        sa.Column('capabilities', sa.PickleType(comparator=lambda *a: False)))
    op.add_column('events', sa.Column('source_id', sa.Text(), nullable=True))
    op.add_column('events', sa.Column('target_id', sa.Text(), nullable=True))
    op.add_column('logs', sa.Column('source_id', sa.Text(), nullable=True))
    op.add_column('logs', sa.Column('target_id', sa.Text(), nullable=True))

    # Create the agents table
    # visibility_states is created elsewhere (create_type=False skips the
    # CREATE TYPE statement); agent_states is created by this migration.
    visibility_enum = postgresql.ENUM(*VisibilityState.STATES,
                                      name='visibility_states',
                                      create_type=False)
    agent_states_enum = postgresql.ENUM(*AgentState.STATES,
                                        name='agent_states')
    op.create_table(
        'agents', sa.Column('_storage_id', sa.Integer(), nullable=False),
        sa.Column('id', sa.Text(), nullable=True),
        sa.Column('name', sa.Text(), nullable=False),
        sa.Column('ip', sa.Text(), nullable=True),
        sa.Column('install_method', sa.Text(), nullable=False),
        sa.Column('system', sa.Text(), nullable=True),
        sa.Column('version', sa.Text(), nullable=False),
        sa.Column('state', agent_states_enum, nullable=False),
        sa.Column('visibility', visibility_enum, nullable=True),
        sa.Column('rabbitmq_username', sa.Text(), nullable=True),
        sa.Column('rabbitmq_password', sa.Text(), nullable=True),
        sa.Column('rabbitmq_exchange', sa.Text(), nullable=False),
        sa.Column('created_at', UTCDateTime(), nullable=False),
        sa.Column('updated_at', UTCDateTime(), nullable=True),
        sa.Column('_node_instance_fk', sa.Integer(), nullable=False),
        sa.Column('_tenant_id', sa.Integer(), nullable=False),
        sa.Column('_creator_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['_creator_id'], [u'users.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['_node_instance_fk'],
                                [u'node_instances._storage_id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['_tenant_id'], [u'tenants.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('_storage_id'))
    op.create_index(op.f('agents__tenant_id_idx'),
                    'agents', ['_tenant_id'],
                    unique=False)
    op.create_index(op.f('agents_created_at_idx'),
                    'agents', ['created_at'],
                    unique=False)
    op.create_index(op.f('agents_id_idx'), 'agents', ['id'], unique=False)

    # Remove the deprecated column private_resource from all the
    # resources tables
    for table_name in resource_tables:
        op.drop_column(table_name, 'private_resource')

    # Tasks-graph tables: a graph per execution, plus its operations
    # (operations reference tasks_graphs via _tasks_graph_fk below).
    op.create_table(
        'tasks_graphs',
        sa.Column('_storage_id',
                  sa.Integer(),
                  autoincrement=True,
                  nullable=False), sa.Column('id', sa.Text(), nullable=True),
        sa.Column('visibility', visibility_enum, nullable=True),
        sa.Column('name', sa.Text(), nullable=True),
        sa.Column('created_at', UTCDateTime(), nullable=False),
        sa.Column('_execution_fk', sa.Integer(), nullable=False),
        sa.Column('_tenant_id', sa.Integer(), nullable=False),
        sa.Column('_creator_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['_creator_id'], [u'users.id'],
                                name=op.f('tasks_graphs__creator_id_fkey'),
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['_execution_fk'], [u'executions._storage_id'],
                                name=op.f('tasks_graphs__execution_fk_fkey'),
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['_tenant_id'], [u'tenants.id'],
                                name=op.f('tasks_graphs__tenant_id_fkey'),
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('_storage_id', name=op.f('tasks_graphs_pkey')))
    op.create_index(op.f('tasks_graphs__tenant_id_idx'),
                    'tasks_graphs', ['_tenant_id'],
                    unique=False)
    op.create_index(op.f('tasks_graphs_created_at_idx'),
                    'tasks_graphs', ['created_at'],
                    unique=False)
    op.create_index(op.f('tasks_graphs_id_idx'),
                    'tasks_graphs', ['id'],
                    unique=False)
    op.create_table(
        'operations',
        sa.Column('_storage_id',
                  sa.Integer(),
                  autoincrement=True,
                  nullable=False), sa.Column('id', sa.Text(), nullable=True),
        sa.Column('visibility', visibility_enum, nullable=True),
        sa.Column('name', sa.Text(), nullable=True),
        sa.Column('state', sa.Text(), nullable=False),
        sa.Column('created_at', UTCDateTime(), nullable=False),
        sa.Column('dependencies', postgresql.ARRAY(sa.Text()), nullable=True),
        sa.Column('type', sa.Text(), nullable=True),
        sa.Column('parameters', JSONString(), nullable=True),
        sa.Column('_tasks_graph_fk', sa.Integer(), nullable=False),
        sa.Column('_tenant_id', sa.Integer(), nullable=False),
        sa.Column('_creator_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['_creator_id'], [u'users.id'],
                                name=op.f('operations__creator_id_fkey'),
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['_tasks_graph_fk'],
                                [u'tasks_graphs._storage_id'],
                                name=op.f('operations__tasks_graph_fk_fkey'),
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['_tenant_id'], [u'tenants.id'],
                                name=op.f('operations__tenant_id_fkey'),
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('_storage_id', name=op.f('operations_pkey')))
    op.create_index(op.f('operations__tenant_id_idx'),
                    'operations', ['_tenant_id'],
                    unique=False)
    op.create_index(op.f('operations_created_at_idx'),
                    'operations', ['created_at'],
                    unique=False)
    op.create_index(op.f('operations_id_idx'),
                    'operations', ['id'],
                    unique=False)
Exemple #21
0
def upgrade():
    """Add a nullable ``genres`` string-array column to the ``Venue`` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    genres_column = sa.Column('genres', postgresql.ARRAY(sa.String()),
                              nullable=True)
    op.add_column('Venue', genres_column)
Exemple #22
0
class GoldLabel(AnnotationMixin, _meta.Base):
    """A separate class for labels from human annotators or other gold standards."""

    # Integer label values for this annotation; the array column itself
    # may not be NULL.
    values = Column(postgresql.ARRAY(Integer), nullable=False)
Exemple #23
0
class Annotation(Base):
    """Model class representing a single annotation."""

    __tablename__ = "annotation"
    __table_args__ = (
        # Tags are stored in an array-type column, and indexed using a
        # generalised inverted index. For more information on the use of GIN
        # indices for array columns, see:
        #
        #   http://www.databasesoup.com/2015/01/tag-all-things.html
        #   http://www.postgresql.org/docs/9.5/static/gin-intro.html
        #
        sa.Index("ix__annotation_tags", "tags", postgresql_using="gin"),
        sa.Index("ix__annotation_updated", "updated"),
        # This is a functional index on the *first* of the annotation's
        # references, pointing to the top-level annotation it refers to. We're
        # using 1 here because Postgres uses 1-based array indexing.
        sa.Index("ix__annotation_thread_root", sa.text('("references"[1])')),
    )

    #: Annotation ID: these are stored as UUIDs in the database, and mapped
    #: transparently to a URL-safe Base64-encoded string.
    id = sa.Column(types.URLSafeUUID,
                   server_default=sa.func.uuid_generate_v1mc(),
                   primary_key=True)

    #: The timestamp when the annotation was created.
    created = sa.Column(
        sa.DateTime,
        default=datetime.datetime.utcnow,
        server_default=sa.func.now(),
        nullable=False,
    )

    #: The timestamp when the user edited the annotation last.
    updated = sa.Column(
        sa.DateTime,
        server_default=sa.func.now(),
        default=datetime.datetime.utcnow,
        nullable=False,
    )

    #: The full userid (e.g. 'acct:[email protected]') of the owner of this
    #: annotation.
    userid = sa.Column(sa.UnicodeText, nullable=False, index=True)
    #: The string id of the group in which this annotation is published.
    #: Defaults to the global public group, "__world__".
    groupid = sa.Column(
        sa.UnicodeText,
        default="__world__",
        server_default="__world__",
        nullable=False,
        index=True,
    )

    group = sa.orm.relationship(
        Group,
        primaryjoin=(Group.pubid == groupid),
        foreign_keys=[groupid],
        lazy="select",
    )

    #: The textual body of the annotation.
    _text = sa.Column("text", sa.UnicodeText)
    #: The Markdown-rendered and HTML-sanitized textual body of the annotation.
    _text_rendered = sa.Column("text_rendered", sa.UnicodeText)

    #: The tags associated with the annotation.
    tags = sa.Column(
        MutableList.as_mutable(pg.ARRAY(sa.UnicodeText, zero_indexes=True)))

    #: A boolean indicating whether this annotation is shared with members of
    #: the group it is published in. "Private"/"Only me" annotations have
    #: shared=False.
    shared = sa.Column(
        sa.Boolean,
        nullable=False,
        default=False,
        server_default=sa.sql.expression.false(),
    )

    #: The URI of the annotated page, as provided by the client.
    _target_uri = sa.Column("target_uri", sa.UnicodeText)
    #: The URI of the annotated page in normalized form.
    _target_uri_normalized = sa.Column("target_uri_normalized", sa.UnicodeText)
    #: The serialized selectors for the annotation on the annotated page.
    target_selectors = sa.Column(types.AnnotationSelectorJSONB,
                                 default=list,
                                 server_default=sa.func.jsonb("[]"))

    #: An array of annotation IDs which are ancestors of this annotation.
    references = sa.Column(
        pg.ARRAY(types.URLSafeUUID, zero_indexes=True),
        default=list,
        server_default=sa.text("ARRAY[]::uuid[]"),
    )

    #: Any additional serialisable data provided by the client.
    extra = sa.Column(
        MutableDict.as_mutable(pg.JSONB),
        default=dict,
        server_default=sa.func.jsonb("{}"),
        nullable=False,
    )

    #: Has the annotation been deleted?
    deleted = sa.Column(
        sa.Boolean,
        nullable=False,
        default=False,
        server_default=sa.sql.expression.false(),
    )

    document_id = sa.Column(sa.Integer,
                            sa.ForeignKey("document.id"),
                            nullable=False)

    document = sa.orm.relationship("Document", backref="annotations")

    #: All annotations that belong to this annotation's thread (those whose
    #: *first* reference is this annotation's id).
    thread = sa.orm.relationship(
        "Annotation",
        primaryjoin=(sa.orm.foreign(id) == sa.orm.remote(references[0])),
        viewonly=True,
        uselist=True,
    )

    @hybrid_property
    def target_uri(self):
        """The URI of the annotated page, as provided by the client."""
        return self._target_uri

    @target_uri.setter
    def target_uri(self, value):
        # Keep the normalized form in sync with the raw URI.
        self._target_uri = value
        self._target_uri_normalized = uri.normalize(value)

    @hybrid_property
    def target_uri_normalized(self):
        """The annotated page's URI in normalized form (read-only; set via
        the ``target_uri`` setter)."""
        return self._target_uri_normalized

    @property
    def target(self):
        """Return the annotation target as a one-element list of dicts,
        including the selectors when present."""
        target = {"source": self.target_uri}
        if self.target_selectors:
            target["selector"] = self.target_selectors

        return [target]

    @hybrid_property
    def text(self):
        """The textual (Markdown) body of the annotation."""
        return self._text

    @text.setter
    def text(self, value):
        self._text = value
        # N.B. We MUST take care here of appropriately escaping the user
        # input. Code elsewhere will assume that the content of the
        # `text_rendered` field is safe for printing without further escaping.
        #
        # `markdown.render` does the hard work for now.
        self._text_rendered = markdown_render.render(value)

    @hybrid_property
    def text_rendered(self):
        """The sanitized HTML rendering of the body (read-only; kept in sync
        by the ``text`` setter)."""
        return self._text_rendered

    @property
    def thread_ids(self):
        """The IDs of all annotations in this annotation's reply thread."""
        return [thread_annotation.id for thread_annotation in self.thread]

    @property
    def is_reply(self):
        """True when this annotation is a reply (has at least one ancestor)."""
        return bool(self.references)

    @property
    def parent_id(self):
        """
        Return the ID of the annotation that this annotation is a reply to.

        Return None if this annotation is not a reply.

        """
        if not self.references:
            return None

        return self.references[-1]

    @property
    def thread_root_id(self):
        """
        Return the ID of the root annotation of this annotation's thread.

        Return the ID of the root annotation of the thread to which this
        annotation belongs. May be this annotation's own ID if it is the root
        annotation of its thread.

        """
        if self.references:
            return self.references[0]
        return self.id

    @property
    def authority(self):
        """
        Return the authority of the user and group this annotation belongs to.

        For example, returns "hypothes.is" for Hypothesis first-party
        annotations, or "elifesciences.org" for eLife third-party annotations.

        If this annotation doesn't have a userid (which is possible for
        annotations that haven't been saved to the DB yet) then return None.

        :raises ValueError: if the annotation's userid is invalid

        """
        if self.userid is None:
            return None
        return split_user(self.userid)["domain"]

    @property
    def is_hidden(self):
        """Check if this annotation id is hidden."""

        # NOTE(review): ``moderation`` is presumably a relationship/backref
        # defined on a moderation model elsewhere — confirm.
        return self.moderation is not None

    def __repr__(self):
        return f"<Annotation {self.id}>"
Exemple #24
0
def downgrade():
    """Revert: mark the ``venue.genres`` varchar-array column as nullable."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        'venue', 'genres',
        existing_type=postgresql.ARRAY(sa.VARCHAR()),
        nullable=True,
    )
Exemple #25
0
def SQLARRAY(item_type):
    """Postgres ARRAY of *item_type*, with a DefaultARRAY fallback on SQLite."""
    pg_array = postgresql.ARRAY(item_type)
    return pg_array.with_variant(DefaultARRAY(item_type), "sqlite")
#   (breaking, eg, product deletion scripts)
# - they may be in a separate database.
# Summary row per product: dataset counts, refresh time and time bounds.
PRODUCT = Table(
    "product",
    METADATA,
    Column("id", SmallInteger, primary_key=True),
    Column("name", String, unique=True, nullable=False),
    Column("dataset_count", Integer, nullable=False),
    Column(
        "last_refresh",
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
        comment="Last refresh of this product in the dataset_spatial table",
    ),
    # NOTE(review): presumably ids of source/derived products (refs into this
    # same table, matching the SmallInteger primary key) — confirm.
    Column("source_product_refs", postgres.ARRAY(SmallInteger)),
    Column("derived_product_refs", postgres.ARRAY(SmallInteger)),
    # Observed time bounds across the product's datasets, if any.
    Column("time_earliest", DateTime(timezone=True)),
    Column("time_latest", DateTime(timezone=True)),
)
TIME_OVERVIEW = Table(
    "time_overview",
    METADATA,
    # Uniquely identified by three values:
    Column("product_ref", None, ForeignKey(PRODUCT.c.id)),
    Column("period_type",
           Enum("all", "year", "month", "day", name="overviewperiod")),
    Column("start_day", Date),
    Column("dataset_count", Integer, nullable=False),
    # Time range (if there's at least one dataset)
    Column("time_earliest", DateTime(timezone=True)),
Exemple #27
0
import loguru
from collections import defaultdict
from itertools import chain
import argparse
from fonduer import Meta
from fonduer.parser.models import Document, Sentence
import json

from fonduer.meta import Meta as Mt
from sqlalchemy import Column, Integer, String, Text, ForeignKey
from sqlalchemy.dialects import postgresql
from os.path import join

import re

# Convenience alias: Postgres array-of-string column type.
STR_ARRAY_TYPE = postgresql.ARRAY(String)
# Fonduer metadata singleton; the declarative Base below attaches to it.
_meta = Mt.init()


class Latex(_meta.Base):
    """Latex representation of sentences"""
    __tablename__ = "latexsentence"

    id = Column(Integer, primary_key=True)

    name = Column(String, unique=False, nullable=True)

    #: The id of the parent ``Document``.
    document_id = Column(Integer)

    #: The id of the parent ``Section``.
Exemple #28
0
class NumpyArray(sa.types.TypeDecorator):
    """Column type storing a float array, returned as ``numpy.ndarray``.

    Persistence uses the Postgres ``ARRAY(float)`` implementation; values
    read back from the database are converted to NumPy arrays.
    """

    impl = psql.ARRAY(sa.Float)

    def process_result_value(self, value, dialect):
        """Convert the raw DB value to ``np.ndarray``; pass SQL NULL through.

        ``TypeDecorator`` hands NULL to this hook as ``None``; previously
        ``np.array(None)`` produced a surprising 0-d object array instead of
        preserving ``None``.
        """
        if value is None:
            return None
        return np.array(value)
Exemple #29
0
              sa.DateTime(timezone=True),
              server_default=sa.func.now(),
              onupdate=sa.func.current_timestamp()),
    #: Field for synchronization with external services.
    sa.Column('integration_id', sa.String(length=512)),
    sa.Column('domain_name',
              sa.String(length=64),
              sa.ForeignKey('domains.name',
                            onupdate='CASCADE',
                            ondelete='CASCADE'),
              nullable=False,
              index=True),
    # TODO: separate resource-related fields with new domain resource policy table when needed.
    sa.Column('total_resource_slots', ResourceSlotColumn(), default='{}'),
    sa.Column('allowed_vfolder_hosts',
              pgsql.ARRAY(sa.String),
              nullable=False,
              default='{}'),
    sa.UniqueConstraint('name',
                        'domain_name',
                        name='uq_groups_name_domain_name'),
    # dotfiles column, \x90 means empty list in msgpack
    sa.Column('dotfiles',
              sa.LargeBinary(length=MAXIMUM_DOTFILE_SIZE),
              nullable=False,
              default=b'\x90'),
)


async def resolve_group_name_or_id(
        db_conn: SAConnection, domain_name: str,
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('ratings',
    sa.Column('creation_date', sa.Integer(), nullable=False),
    sa.Column('modification_date', sa.Integer(), nullable=True),
    sa.Column('id', postgresql.UUID(), nullable=False),
    sa.Column('version', sa.Integer(), nullable=True),
    sa.Column('tags', postgresql.ARRAY(sa.String()), nullable=True),
    sa.Column('creator', sa.String(), nullable=True),
    sa.Column('modifier', sa.String(), nullable=True),
    sa.Column('movie_id', postgresql.UUID(), nullable=False),
    sa.Column('person_id', postgresql.UUID(), nullable=False),
    sa.Column('overall_rate', sa.Float(), nullable=True),
    sa.Column('comment', sa.String(), nullable=True),
    sa.Column('novel', sa.Float(), nullable=True),
    sa.Column('character', sa.Float(), nullable=True),
    sa.Column('reason', sa.Float(), nullable=True),
    sa.Column('directing', sa.Float(), nullable=True),
    sa.Column('acting', sa.Float(), nullable=True),
    sa.Column('editing', sa.Float(), nullable=True),
    sa.Column('visualization', sa.Float(), nullable=True),
    sa.Column('sound', sa.Float(), nullable=True),
    sa.Column('music', sa.Float(), nullable=True),
    sa.Column('violence_range', sa.Float(), nullable=True),
    sa.Column('insulting_range', sa.Float(), nullable=True),
    sa.Column('sexual_content', sa.Float(), nullable=True),
    sa.Column('unsuitable_wearing', sa.Float(), nullable=True),
    sa.Column('addiction_promotion', sa.Float(), nullable=True),
    sa.Column('horror_content', sa.Float(), nullable=True),
    sa.Column('suicide_encouragement', sa.Float(), nullable=True),
    sa.Column('breaking_law_encouragement', sa.Float(), nullable=True),
    sa.Column('gambling_promotion', sa.Float(), nullable=True),
    sa.Column('alcoholic_promotion', sa.Float(), nullable=True),
    sa.Column('family_subject', sa.Float(), nullable=True),
    sa.Column('individual_social_behavior', sa.Float(), nullable=True),
    sa.Column('feminism_exposure', sa.Float(), nullable=True),
    sa.Column('justice_seeking', sa.Float(), nullable=True),
    sa.Column('theism', sa.Float(), nullable=True),
    sa.Column('bright_future_exposure', sa.Float(), nullable=True),
    sa.Column('hope', sa.Float(), nullable=True),
    sa.Column('iranian_life_style', sa.Float(), nullable=True),
    sa.Column('true_vision_of_enemy', sa.Float(), nullable=True),
    sa.Column('true_historiography', sa.Float(), nullable=True),
    sa.Column('question_1', sa.String(), nullable=True),
    sa.Column('question_2', sa.String(), nullable=True),
    sa.Column('question_3', sa.String(), nullable=True),
    sa.Column('question_4', sa.String(), nullable=True),
    sa.Column('question_5', sa.String(), nullable=True),
    sa.Column('question_6', sa.String(), nullable=True),
    sa.Column('question_7', sa.String(), nullable=True),
    sa.Column('question_8', sa.String(), nullable=True),
    sa.Column('question_9', sa.String(), nullable=True),
    sa.Column('question_10', sa.String(), nullable=True),
    sa.ForeignKeyConstraint(['movie_id'], ['movies.id'], name=op.f('fk_tb_ratings_col_movie_id')),
    sa.ForeignKeyConstraint(['person_id'], ['persons.id'], name=op.f('fk_tb_ratings_col_person_id')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_tb_ratings')),
    sa.UniqueConstraint('id', name=op.f('ratings_id_key'))
    )
    op.create_unique_constraint(op.f('movies_id_key'), 'movies', ['id'])
    op.drop_constraint('users_id_key', 'users', type_='unique')
    op.create_unique_constraint(op.f('users_id_key'), 'users', ['id'])