def test_fail_create_sql_statement_create():
    trig = PGTrigger(
        schema="public",
        signature="lower_account_email",
        on_entity="public.sometab",
        definition="INVALID DEF",
    )

    with pytest.raises(SQLParseFailure):
        trig.to_sql_statement_create()
Example #2
def test_fail_create_sql_statement_create():
    trig = PGTrigger(
        schema=TRIG.schema,
        signature=TRIG.signature,
        on_entity=TRIG.on_entity,
        definition="INVALID DEF",
    )

    with pytest.raises(SQLParseFailure):
        trig.to_sql_statement_create()
Example #3
def test_on_entity_schema_not_qualified() -> None:
    SQL = """create trigger lower_account_email
    AFTER INSERT ON account
    FOR EACH ROW EXECUTE PROCEDURE public.downcase_email()
    """
    trigger = PGTrigger.from_sql(SQL)
    assert trigger.schema == "public"
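A minimal usage sketch building on the test above (assuming the same PGTrigger import): a fully qualified statement parses the same way, and the parsed entity can be rendered back into a CREATE statement.

SQL = """create trigger lower_account_email
AFTER INSERT ON public.account
FOR EACH ROW EXECUTE PROCEDURE public.downcase_email()
"""
trigger = PGTrigger.from_sql(SQL)
# The parsed entity can be emitted back as executable SQL,
# e.g. engine.execute(trigger.to_sql_statement_create())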
Example #4
    def collect_all_db_entities(sess: Session) -> List[ReplaceableEntity]:
        """Collect all entities from the database"""

        return [
            *PGFunction.from_database(sess, "%"),
            *PGTrigger.from_database(sess, "%"),
            *PGView.from_database(sess, "%"),
            *PGMaterializedView.from_database(sess, "%"),
        ]
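A hedged usage sketch for the helper above; the Session construction is illustrative and assumes an engine like the fixture used in these tests.

from sqlalchemy.orm import Session

def count_db_entities(engine) -> int:
    # Illustrative only: open a session and count every entity the helper
    # above reflects from the database.
    with Session(bind=engine) as sess:
        return len(collect_all_db_entities(sess))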
Example #5
def test_trig_update_revision(sql_setup, engine) -> None:
    engine.execute(FUNC.to_sql_statement_create())
    engine.execute(TRIG.to_sql_statement_create())

    UPDATED_TRIG = PGTrigger(
        schema="public",
        signature="lower_account_email",
        on_entity="public.account",
        is_constraint=True,
        definition="""
            AFTER INSERT OR UPDATE ON public.account
            FOR EACH ROW EXECUTE PROCEDURE public.downcase_email()
        """,
    )

    register_entities([FUNC, UPDATED_TRIG], entity_types=[PGTrigger])

    # Autogenerate a new migration
    # It should detect the change we made and produce an "op.replace_entity" statement
    run_alembic_command(
        engine=engine,
        command="revision",
        command_kwargs={
            "autogenerate": True,
            "rev_id": "2",
            "message": "replace"
        },
    )

    migration_replace_path = TEST_VERSIONS_ROOT / "2_replace.py"

    with migration_replace_path.open() as migration_file:
        migration_contents = migration_file.read()

    assert "op.replace_entity" in migration_contents
    assert "op.create_entity" not in migration_contents
    assert "op.drop_entity" not in migration_contents
    assert "from alembic_utils.pg_trigger import PGTrigger" in migration_contents

    # Execute upgrade
    run_alembic_command(engine=engine,
                        command="upgrade",
                        command_kwargs={"revision": "head"})

    # Execute Downgrade
    run_alembic_command(engine=engine,
                        command="downgrade",
                        command_kwargs={"revision": "base"})
Example #6
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index("idx_40750_similarity_opinion_b_id", table_name="similarity")
    op.drop_index("idx_40750_similarity_opinion_a_id", table_name="similarity")
    op.drop_table("similarity")
    op.drop_index("opinionparenthetical_citing_opinion_id",
                  table_name="opinionparenthetical")
    op.drop_index("opinionparenthetical_cited_opinion_id",
                  table_name="opinionparenthetical")
    op.drop_table("opinionparenthetical")
    op.drop_index("idx_40705_opinion_cluster_id", table_name="opinion")
    op.drop_table("opinion")
    op.drop_index("idx_40753_clustercitation_citing_cluster_id",
                  table_name="clustercitation")
    op.drop_index("idx_40753_clustercitation_cited_cluster_id",
                  table_name="clustercitation")
    op.drop_table("clustercitation")
    op.drop_index("searchable_case_name_idx", table_name="cluster")
    op.drop_table("cluster")
    op.drop_index("idx_40711_citation_citing_opinion_id",
                  table_name="citation")
    op.drop_index("idx_40711_citation_cited_opinion_id", table_name="citation")
    op.drop_table("citation")
    # ### end Alembic commands ###
    public_pg_trgm = PGExtension(schema="public", signature="pg_trgm")
    op.drop_entity(public_pg_trgm)
    public_cluster_update_searchable_case_name = PGTrigger(
        schema="public",
        signature="update_searchable_case_name",
        on_entity="public.cluster",
        is_constraint=False,
        definition=
        "BEFORE INSERT OR UPDATE ON public.cluster\n      FOR EACH ROW EXECUTE PROCEDURE public.update_searchable_case_name_trigger()",
    )
    op.drop_entity(public_cluster_update_searchable_case_name)

    public_update_searchable_case_name_trigger = PGFunction(
        schema="public",
        signature="update_searchable_case_name_trigger()",
        definition=
        "RETURNS trigger\n      LANGUAGE plpgsql\n      AS $$\n      begin\n          new.searchable_case_name := \n              to_tsvector('pg_catalog.english', new.case_name || ' ' || coalesce(new.reporter, '') || ' ' || new.year);\n          return new;\n      end\n      $$",
    )
    op.drop_entity(public_update_searchable_case_name_trigger)
Example #7
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.execute(CreateSequence(Sequence("citation_seq")))
    op.create_table(
        "citation",
        sa.Column(
            "id",
            sa.BigInteger(),
            server_default=sa.text("nextval('citation_seq')"),
            nullable=False,
        ),
        sa.Column("citing_opinion_id", sa.BigInteger(), nullable=True),
        sa.Column("cited_opinion_id", sa.BigInteger(), nullable=True),
        sa.Column("depth", sa.BigInteger(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        "idx_40711_citation_cited_opinion_id",
        "citation",
        ["cited_opinion_id"],
        unique=False,
    )
    op.create_index(
        "idx_40711_citation_citing_opinion_id",
        "citation",
        ["citing_opinion_id"],
        unique=False,
    )
    op.execute(CreateSequence(Sequence("cluster_seq")))
    op.create_table(
        "cluster",
        sa.Column(
            "id",
            sa.BigInteger(),
            server_default=sa.text("nextval('cluster_seq')"),
            nullable=False,
        ),
        sa.Column("resource_id", sa.BigInteger(), nullable=True),
        sa.Column("case_name", sa.Text(), nullable=True),
        sa.Column("reporter", sa.Text(), nullable=True),
        sa.Column("citation_count", sa.BigInteger(), nullable=True),
        sa.Column("cluster_uri", sa.Text(), nullable=True),
        sa.Column("docket_uri", sa.Text(), nullable=True),
        sa.Column("year", sa.BigInteger(), nullable=True),
        sa.Column("time", sa.BigInteger(), nullable=True),
        sa.Column("searchable_case_name", postgresql.TSVECTOR(),
                  nullable=True),
        sa.Column("court", sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.execute(CreateSequence(Sequence("clustercitation_seq")))
    op.create_table(
        "clustercitation",
        sa.Column(
            "id",
            sa.BigInteger(),
            server_default=sa.text("nextval('clustercitation_seq')"),
            nullable=False,
        ),
        sa.Column("citing_cluster_id", sa.BigInteger(), nullable=True),
        sa.Column("cited_cluster_id", sa.BigInteger(), nullable=True),
        sa.Column("depth", sa.BigInteger(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        "idx_40753_clustercitation_cited_cluster_id",
        "clustercitation",
        ["cited_cluster_id"],
        unique=False,
    )
    op.create_index(
        "idx_40753_clustercitation_citing_cluster_id",
        "clustercitation",
        ["citing_cluster_id"],
        unique=False,
    )
    op.execute(CreateSequence(Sequence("opinion_seq")))
    op.create_table(
        "opinion",
        sa.Column(
            "id",
            sa.BigInteger(),
            server_default=sa.text("nextval('opinion_seq')"),
            nullable=False,
        ),
        sa.Column("resource_id", sa.BigInteger(), nullable=True),
        sa.Column("opinion_uri", sa.Text(), nullable=True),
        sa.Column("cluster_uri", sa.Text(), nullable=True),
        sa.Column("cluster_id", sa.BigInteger(), nullable=True),
        sa.Column("html_text", sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("idx_40705_opinion_cluster_id",
                    "opinion", ["cluster_id"],
                    unique=False)
    op.execute(CreateSequence(Sequence("opinionparenthetical_id_seq")))
    op.create_table(
        "opinionparenthetical",
        sa.Column(
            "id",
            sa.Integer(),
            server_default=sa.text("nextval('opinionparenthetical_id_seq')"),
            nullable=False,
        ),
        sa.Column("citing_opinion_id", sa.Integer(), nullable=False),
        sa.Column("cited_opinion_id", sa.Integer(), nullable=False),
        sa.Column("text", sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        "opinionparenthetical_cited_opinion_id",
        "opinionparenthetical",
        ["cited_opinion_id"],
        unique=False,
    )
    op.create_index(
        "opinionparenthetical_citing_opinion_id",
        "opinionparenthetical",
        ["citing_opinion_id"],
        unique=False,
    )
    op.execute(CreateSequence(Sequence("similarity_seq")))
    op.create_table(
        "similarity",
        sa.Column(
            "id",
            sa.BigInteger(),
            server_default=sa.text("nextval('similarity_seq')"),
            nullable=False,
        ),
        sa.Column("opinion_a_id", sa.BigInteger(), nullable=True),
        sa.Column("opinion_b_id", sa.BigInteger(), nullable=True),
        sa.Column("similarity_index", sa.Float(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        "idx_40750_similarity_opinion_a_id",
        "similarity",
        ["opinion_a_id"],
        unique=False,
    )
    op.create_index(
        "idx_40750_similarity_opinion_b_id",
        "similarity",
        ["opinion_b_id"],
        unique=False,
    )
    # ### end Alembic commands ###

    op.create_index(
        "searchable_case_name_idx",
        "cluster",
        ["searchable_case_name"],
        unique=False,
        postgresql_using="gin",
    )

    public_pg_trgm = PGExtension(schema="public", signature="pg_trgm")
    op.create_entity(public_pg_trgm)

    public_update_searchable_case_name_trigger = PGFunction(
        schema="public",
        signature="update_searchable_case_name_trigger()",
        definition=
        "RETURNS trigger\n      LANGUAGE plpgsql\n      AS $$\n      begin\n          new.searchable_case_name := \n              to_tsvector('pg_catalog.english', new.case_name || ' ' || coalesce(new.reporter, '') || ' ' || new.year);\n          return new;\n      end\n      $$",
    )
    op.create_entity(public_update_searchable_case_name_trigger)

    public_cluster_update_searchable_case_name = PGTrigger(
        schema="public",
        signature="update_searchable_case_name",
        on_entity="public.cluster",
        is_constraint=False,
        definition=
        "BEFORE INSERT OR UPDATE ON public.cluster\n      FOR EACH ROW EXECUTE PROCEDURE public.update_searchable_case_name_trigger()",
    )
    op.create_entity(public_cluster_update_searchable_case_name)
Example #8
    schema="public",
    signature="update_searchable_case_name_trigger()",
    definition="""
      RETURNS trigger
      LANGUAGE plpgsql
      AS $$
      begin
          new.searchable_case_name := 
              to_tsvector('pg_catalog.english', new.case_name || ' ' || coalesce(new.reporter, '') || ' ' || new.year);
          return new;
      end
      $$;
  """,
)

update_searchable_case_name_trigger = PGTrigger(
    schema="public",
    signature="update_searchable_case_name",
    on_entity="public.cluster",
    definition="""
      BEFORE INSERT OR UPDATE ON public.cluster
      FOR EACH ROW EXECUTE PROCEDURE public.update_searchable_case_name_trigger()
  """,
)

PG_ENTITY_LIST = [
    pg_trgm_extension,
    update_searchable_case_name_func,
    update_searchable_case_name_trigger,
]
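These module-level entities are typically handed to register_entities from the Alembic env.py so autogenerate tracks them; a minimal sketch (the import path is assumed, the helper itself appears in the test examples above):

from alembic_utils.replaceable_entity import register_entities

register_entities(PG_ENTITY_LIST)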
Example #9
def test_unparsable() -> None:
    SQL = "create trigger lower_account_email faile fail fail"
    with pytest.raises(SQLParseFailure):
        PGTrigger.from_sql(SQL)
Example #10
    conn.execute("drop table public.account cascade")


FUNC = PGFunction.from_sql(
    """create function public.downcase_email() returns trigger as $$
begin
    return new;
end;
$$ language plpgsql;
""")

TRIG = PGTrigger(
    schema="public",
    signature="lower_account_EMAIL",
    on_entity="public.account",
    definition="""
        BEFORE INSERT ON public.account
        FOR EACH ROW EXECUTE PROCEDURE public.downcase_email()
    """,
)


def test_create_revision(sql_setup, engine) -> None:
    engine.execute(FUNC.to_sql_statement_create())

    register_entities([FUNC, TRIG], entity_types=[PGTrigger])
    run_alembic_command(
        engine=engine,
        command="revision",
        command_kwargs={
            "autogenerate": True,
    conn.execute("drop table public.account cascade")


FUNC = PGFunction.from_sql(
    """create function public.downcase_email() returns trigger as $$
begin
    return new;
end;
$$ language plpgsql;
""")

TRIG = PGTrigger(
    schema="public",
    signature="lower_account_email",
    on_entity="public.account",
    is_constraint=True,
    definition="""
        AFTER INSERT ON public.account
        FOR EACH ROW EXECUTE PROCEDURE public.downcase_email()
    """,
)


def test_create_revision(sql_setup, engine) -> None:
    engine.execute(FUNC.to_sql_statement_create())

    register_entities([FUNC, TRIG], entity_types=[PGTrigger])
    run_alembic_command(
        engine=engine,
        command="revision",
        command_kwargs={
            "autogenerate": True,
Example #12
def sql_trigger_entities():
    sql_trigger_entities = []
    for p in Path("./triggers").glob("*.sql"):
        pg_trigger_entity = PGTrigger.from_sql(p.read_text())
        sql_trigger_entities.append(pg_trigger_entity)
    return sql_trigger_entities
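A hedged usage sketch: the triggers collected from the *.sql files can be registered the same way as the hand-written entities in the tests above, so autogenerate picks them up.

register_entities(sql_trigger_entities(), entity_types=[PGTrigger])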