def test_archive_table_collision_fails_1(session, user_table, p1):
    """
    Insert two archive records with the same version and foreign key inside
    one transaction and verify the write fails, proving the unique constraint
    is enforced on the archive table.
    """
    # Seed the archive table with one versioned row.
    add_and_return_version(p1, session)

    # Two archive rows that share the same product_id (and hence collide on
    # the archive table's unique constraint).
    for is_deleted, user in ((False, "bar"), (True, "foo")):
        session.add(
            user_table.ArchiveTable(
                deleted=is_deleted,
                user_id=user,
                updated_at=datetime.now(),
                data={},
                product_id=p1.product_id,
            )
        )

    with pytest.raises(IntegrityError):
        session.flush()
def test_unique_constraint(session, user_table, p1):
    """A second row that collides on the table's unique constraint is rejected."""
    add_and_return_version(p1, session)

    # Presumably these column values clash with p1's unique key — the row
    # must fail to insert.
    colliding_row = user_table(
        product_id_1=11, product_id_2="foo", col1="bar", col2=100
    )
    with pytest.raises(IntegrityError):
        add_and_return_version(colliding_row, session)
def test_update(session, user_table, p1_dict, p1):
    """An update produces a new version visible in both live and archive tables."""
    add_and_return_version(p1, session)

    p1.col1 = "bar"
    p1.col2 = 300
    second_version = add_and_return_version(p1, session)

    expected = dict(p1_dict, col1="bar", col2=300)
    verify_row(expected, second_version, session, user_table=user_table)
    verify_archive(expected, second_version, session, user_table=user_table)
def test_product_update(session, p1_dict, p1):
    """Both pre-update and post-update states are retained in the archive."""
    first_version = add_and_return_version(p1, session)

    p1.col1 = "new"
    p1.col2 = -1
    second_version = add_and_return_version(p1, session)

    expected = dict(p1_dict, col1="new", col2=-1)
    verify_row(expected, second_version, session)
    verify_archive(p1_dict, first_version, session)
    verify_archive(expected, second_version, session)
def test_product_update_with_json(session, p1_dict, p1):
    """Updating a JSONB column versions and archives like any other column."""
    first_version = add_and_return_version(p1, session)

    payload = {"foo": "bar"}
    # Copy so the expected dict cannot be mutated through the ORM object.
    p1.jsonb_col = payload.copy()
    second_version = add_and_return_version(p1, session)

    expected = dict(p1_dict, jsonb_col=payload)
    verify_row(expected, second_version, session)
    verify_archive(p1_dict, first_version, session)
    verify_archive(expected, second_version, session)
def delete_api_test_setup(session, p1, p2):
    """Seed a sequence of versioned changes on p1 and p2 for delete-API tests."""
    add_multiple_and_return_versions([p1, p2], session)

    # Two successive single-row changes to p1.
    for new_col1 in ("change1", "change2"):
        p1.col1 = new_col1
        add_and_return_version(p1, session)

    # One final change touching both rows together.
    p1.col2 = 15
    p2.col2 = 12
    add_multiple_and_return_versions([p1, p2], session)
def test_product_update_fails(session, user_table, p1):
    """
    Insert a product, then build a brand-new ORM object reusing the same id
    and check that inserting it fails.
    """
    # Initial product insert.
    add_and_return_version(p1, session)

    # A distinct object carrying the already-used primary key must be rejected.
    conflicting = user_table(col1="newcol", col2=5, col3=False, product_id=10)
    with pytest.raises(IntegrityError):
        add_and_return_version(conflicting, session)
def test_product_update_with_user(session, p1_dict, p1):
    """The archive records the acting user for both the insert and the update."""
    p1.updated_by("test_user1")
    first_version = add_and_return_version(p1, session)

    p1.col1 = "new"
    p1.col2 = -1
    p1.updated_by("test_user2")
    second_version = add_and_return_version(p1, session)

    expected = dict(p1_dict, col1="new", col2=-1)
    verify_row(expected, second_version, session)
    verify_archive(p1_dict, first_version, session, user="******")
    verify_archive(expected, second_version, session, user="******")
def get_api_test_setup(mocker, session, p1, p2, p3, t1, t2, t3, t4):
    """Build versioned histories for p1/p2/p3 at four mocked timestamps.

    Patches the ``datetime`` used by ``savage.models`` so each batch of
    changes is stamped with a fixed time (t1..t4), and records the expected
    history entry for each change via ``_history``.

    Returns a list ``[p1_history, p2_history, p3_history]`` where each item
    is the ordered list of expected history entries for that product.
    """
    mock_datetime = mocker.patch("savage.models.datetime")
    p1_history, p2_history, p3_history = [], [], []

    # t1: initial insert of p1 and p3 (p2 does not exist yet).
    mock_datetime.utcnow.return_value = t1
    versions = add_multiple_and_return_versions([p1, p3], session)
    p1_history.append(_history(p1, t1, versions[0], session))
    p3_history.append(_history(p3, t1, versions[1], session))

    # t2: first change to p1, plus the initial insert of p2.
    p1.col1 = "change1"
    mock_datetime.utcnow.return_value = t2
    versions = add_multiple_and_return_versions([p1, p2], session)
    p1_history.append(_history(p1, t2, versions[0], session))
    p2_history.append(_history(p2, t2, versions[1], session))

    # t3: second change, touching only p1.
    p1.col3 = False
    p1.col1 = "change2"
    mock_datetime.utcnow.return_value = t3
    version = add_and_return_version(p1, session)
    p1_history.append(_history(p1, t3, version, session))

    # t4: final batch changing both p1 and p2.
    p1.col2 = 15
    p2.col2 = 12
    mock_datetime.utcnow.return_value = t4
    versions = add_multiple_and_return_versions([p1, p2], session)
    p1_history.append(_history(p1, t4, versions[0], session))
    p2_history.append(_history(p2, t4, versions[1], session))

    return [p1_history, p2_history, p3_history]
def test_insert_new_product_with_json(session, p1_dict, p1):
    """A fresh insert carrying a JSONB value round-trips to row and archive."""
    payload = {"foo": "bar"}
    # Copy so the expected dict cannot be mutated through the ORM object.
    p1.jsonb_col = payload.copy()
    version = add_and_return_version(p1, session)

    expected = dict(jsonb_col=payload, **p1_dict)
    verify_row(expected, version, session)
    verify_archive(expected, version, session)
def test_insert_new_product(session, p1_dict, p1):
    """A fresh insert is versioned and mirrored into the archive table."""
    assert savage.is_initialized()

    # NOTE(review): col4 appears under the key "other_name" in the expected
    # dict — presumably a column alias; confirm against the model definition.
    p1.col4 = 11
    version = add_and_return_version(p1, session)

    expected = dict(other_name=11, **p1_dict)
    verify_row(expected, version, session)
    verify_archive(expected, version, session)
def test_update_version_column(session, user_table, p1_dict, p1):
    """Changing a versioned key column archives the old identity as deleted."""
    first_version = add_and_return_version(p1, session)
    verify_row(p1_dict, first_version, session, user_table=user_table)
    verify_archive(p1_dict, first_version, session, user_table=user_table)

    p1.product_id_1 = 12
    second_version = add_and_return_version(p1, session)

    expected = dict(p1_dict, product_id_1=12)
    verify_row(expected, second_version, session, user_table=user_table)
    verify_archive(expected, second_version, session, user_table=user_table)
    # The row under the old key is archived as a deletion at the new version.
    verify_archive(
        p1_dict, second_version, session, deleted=True, user_table=user_table
    )
def test_insert_after_delete(session, user_table, p1_dict, p1):
    """After deleting a row, inserting a replacement continues the versioning.

    NOTE(review): another ``test_insert_after_delete`` appears later in this
    view (presumably a different test module) — if both live in one module,
    the later definition shadows this one under pytest; confirm file layout.
    """
    first_version = add_and_return_version(p1, session)
    session.delete(p1)
    session.commit()

    replacement_dict = dict(
        p1_dict, product_id_1=11, product_id_2="foo", col1="new", col2=101
    )
    replacement = user_table(**replacement_dict)
    new_version = add_and_return_version(replacement, session)

    verify_row(replacement_dict, new_version, session, user_table=user_table)
    verify_archive(p1_dict, first_version, session, user_table=user_table)
    deleted_version = verify_deleted_archive(
        p1_dict, p1, first_version, session, user_table
    )
    verify_archive(replacement_dict, new_version, session, user_table=user_table)
    # Versioning must continue past the deletion marker, not restart.
    assert new_version > deleted_version
def test_delete(session, user_table, p1_dict, p1):
    """Deleting a row removes it from the live table and archives the delete.

    NOTE(review): another ``test_delete`` appears later in this view
    (presumably a different test module) — if both live in one module, the
    later definition shadows this one under pytest; confirm file layout.
    """
    version = add_and_return_version(p1, session)
    session.delete(p1)
    session.commit()

    key = dict(product_id_1=p1.product_id_1, product_id_2=p1.product_id_2)
    assert session.query(user_table).filter_by(**key).count() == 0

    verify_archive(p1_dict, version, session, user_table=user_table)
    verify_deleted_archive(p1_dict, p1, version, session, user_table)
def test_delete(session, user_table, p1_dict, p1):
    """Deleting a row removes it from the live table and archives the delete.

    NOTE(review): ``verify_archive`` here omits the ``user_table=`` keyword
    that the sibling single-key variant passes — presumably this module's
    helper defaults to the right table; confirm.
    """
    version = add_and_return_version(p1, session)
    session.delete(p1)
    session.commit()

    remaining = session.query(user_table).filter_by(product_id=p1.product_id)
    assert remaining.count() == 0

    verify_archive(p1_dict, version, session)
    verify_deleted_archive(p1_dict, p1, version, session, user_table)
def test_insert_after_delete(session, user_table, p1_dict, p1):
    """Inserting a row that has already been deleted should version where it
    left off (not at 0).
    """
    first_version = add_and_return_version(p1, session)
    session.delete(p1)
    session.commit()

    replacement_dict = dict(p1_dict, col1="changed", col2=139)
    replacement = user_table(**replacement_dict)
    new_version = add_and_return_version(replacement, session)

    verify_row(replacement_dict, new_version, session)
    verify_archive(p1_dict, first_version, session)
    deleted_version = verify_deleted_archive(
        p1_dict, p1, first_version, session, user_table
    )
    verify_archive(replacement_dict, new_version, session)
    # Versioning must continue past the deletion marker, not restart.
    assert new_version > deleted_version
def test_update_no_changes(session, user_table, p1_dict, p1):
    """
    Add an unchanged row and make sure the version does not get bumped.
    """
    version = add_and_return_version(p1, session)

    # Re-assign the same value: no net change, so no new archive row.
    p1.col1 = p1_dict["col1"]
    session.add(p1)
    session.commit()

    verify_archive(p1_dict, version, session)
    archive_rows = session.query(user_table.ArchiveTable).all()
    assert len(archive_rows) == 1
def test_multiple_product_updates(session, p1_dict, p1):
    """
    Update a product multiple times and ensure each one gets correctly
    versioned.
    """
    first_version = add_and_return_version(p1, session)

    p1.col1 = "new"
    p1.col2 = -1
    second_version = add_and_return_version(p1, session)
    second_expected = dict(p1_dict, col1="new", col2=-1)

    p1.col1 = "third change"
    p1.col2 = 139
    p1.col3 = False
    third_version = add_and_return_version(p1, session)
    third_expected = dict(p1_dict, col1="third change", col2=139, col3=False)

    # Live table holds only the latest state; the archive holds all three.
    verify_row(third_expected, third_version, session)
    verify_archive(p1_dict, first_version, session)
    verify_archive(second_expected, second_version, session)
    verify_archive(third_expected, third_version, session)
def test_concurrent_product_updates(engine_1, engine_2, user_table, p1_dict, p1):
    """
    Assert that if two separate sessions try to update a product row, one
    succeeds and the other fails.
    """
    # Two independent sessions bound to two independent engine connections,
    # so their transactions genuinely compete.
    session_1 = Session(bind=engine_1)
    session_2 = Session(bind=engine_2)
    try:
        # Add the initial row and flush it to the table
        version = add_and_return_version(p1, session_1)
        # Update 1 in session
        p1.col1 = "changed col 1"
        session_1.add(p1)
        # Update 2 in session 2
        p2 = session_2.query(user_table).first()
        p2.col2 = 1245600
        # this flush should succeed
        version_2 = add_and_return_version(p2, session_2)
        # this flush should fail
        # (session_2 already bumped the version, so session_1's pending
        # update is now stale)
        with pytest.raises(StaleDataError):
            session_1.commit()
        # Session 2's state is the winner: verify row and archive through it.
        final = dict(p1_dict, **dict(col2=1245600))
        verify_row(final, version_2, session_2)
        verify_archive(p1_dict, version, session_2)
        verify_archive(final, version_2, session_2)
    finally:
        # Clean up
        session_1.close()
        session_2.close()
        for t in (user_table, user_table.ArchiveTable):
            engine_1.execute("delete from {}".format(t.__tablename__))
def test_insert(session, user_table, p1_dict, p1):
    """A plain insert shows up in both the live table and the archive."""
    version = add_and_return_version(p1, session)

    for check in (verify_row, verify_archive):
        check(p1_dict, version, session, user_table=user_table)
def test_insert_new_product_with_user(session, p1_dict, p1):
    """The archive row for an insert records the acting user."""
    p1.updated_by("test_user")
    version = add_and_return_version(p1, session)

    verify_row(p1_dict, version, session)
    verify_archive(p1_dict, version, session, user="******")
def test_paging_results(mocker, session, user_table, p1_dict, p1):
    """Paging through 501 versions returns the expected slices of history."""
    frozen_now = datetime.utcfromtimestamp(10000)
    mock_datetime = mocker.patch("savage.models.datetime")
    mock_datetime.utcnow.return_value = frozen_now

    history = []
    p1.col2 = 0
    version = add_and_return_version(p1, session)
    history.append(_history(p1, frozen_now, version, session))

    # Make 500 changes; col1 only changes every 10th iteration, col3 flips
    # halfway through.
    for i in range(500):
        p1.col1 = "foobar" + "1" * ((i + 1) // 10)
        p1.col2 += 1
        p1.col3 = i < 250
        version = add_and_return_version(p1, session)
        history.append(_history(p1, frozen_now, version, session))

    window_start = datetime.utcfromtimestamp(0)
    window_end = datetime.utcfromtimestamp(10000000000)

    # (page, page_size, expected slice, fields filter) for each query.
    cases = [
        (1, 1000, history, None),
        (1, 100, history[:100], None),
        (3, 100, history[200:300], None),
        (5, 100, history[400:500], None),
        # Restricting to col1 collapses runs of identical values, so only
        # every 10th entry survives.
        (1, 80, history[0:80:10], ["col1"]),
    ]
    for page, page_size, expected, fields in cases:
        kwargs = dict(
            t1=window_start,
            t2=window_end,
            page=page,
            page_size=page_size,
        )
        if fields is None:
            result = get(user_table, session, **kwargs)
            assert_result(result, expected)
        else:
            result = get(user_table, session, fields=fields, **kwargs)
            assert_result(result, expected, fields=fields)