Example #1
def test_view_query_performance(setup_client_syncgateway_test):
    """
    @summary
    Run this test when a new iOS version arrives to make sure CBL view query performance is not degrading
    1. Add 100000 docs to the client with content
    2. Create design doc version 1 to fetch doc._id, doc._rev for docs with content
    3. Update docs 3 times, which brings each doc to revision 4
    4. Run a query and check for 100000 expected docs with design doc version 1
    5. Verify the view query finishes in less than 5 seconds
    """

    log_info("Running 'test_design_doc_update'")

    ls_url = setup_client_syncgateway_test["ls_url"]
    log_info("ls_url: {}".format(ls_url))

    client = MobileRestClient()

    num_content_docs_per_db = 100000
    d_doc_name = "dd"
    ls_db = client.create_database(ls_url, name="ls_db")

    # Add 100000 docs to the client with content
    bulk_docs_content = create_docs("doc_content_",
                                    num_content_docs_per_db,
                                    content={"hi": "I should be in the view"})
    ls_db_docs1 = client.add_bulk_docs(url=ls_url,
                                       db=ls_db,
                                       docs=bulk_docs_content)
    assert len(ls_db_docs1) == num_content_docs_per_db

    client.update_docs(url=ls_url,
                       db=ls_db,
                       docs=ls_db_docs1,
                       number_updates=3,
                       delay=0.1)
    # Design doc to fetch doc._id, doc._rev for docs with content
    view = """{
    "language" : "javascript",
    "views" : {
        "content_view" : {
            "map" : "function(doc, meta) { if (doc.content) { emit(doc._id, doc._rev); } }"
        }
    }
}"""

    client.add_design_doc(url=ls_url, db=ls_db, name=d_doc_name, doc=view)
    start = time.time()
    content_view_rows_1 = client.get_view(url=ls_url,
                                          db=ls_db,
                                          design_doc_name=d_doc_name,
                                          view_name="content_view")
    finish = time.time()
    assert finish - start < 5
    client.verify_view_row_num(view_response=content_view_rows_1,
                               expected_num_rows=num_content_docs_per_db)
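
For reference, a minimal sketch of the raw LiteServ call that MobileRestClient.get_view() is assumed to wrap; the view endpoint follows the CouchDB-style REST API, and the time_view_query helper below is hypothetical, not part of the test framework:

import time

import requests


def time_view_query(ls_url, ls_db, design_doc_name, view_name):
    """Return (elapsed_seconds, row_count) for a single view query."""
    start = time.time()
    resp = requests.get("{}/{}/_design/{}/_view/{}".format(
        ls_url, ls_db, design_doc_name, view_name))
    resp.raise_for_status()
    elapsed = time.time() - start
    return elapsed, len(resp.json()["rows"])

# Usage sketch: elapsed, rows = time_view_query(ls_url, "ls_db", "dd", "content_view")
# followed by: assert elapsed < 5 and rows == 100000
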
Example #2
def test_multiple_replications_created_with_unique_properties(setup_client_syncgateway_test):
    """Regression test for couchbase/couchbase-lite-java-core#1386
    1. Set up SGW with a remote database named db
    2. Create a local database such as ls_db
    3. Send POST /_replicate with source = ls_db, target = http://localhost:4985/db, continuous = true
    4. Send POST /_replicate with source = ls_db, target = http://localhost:4985/db, continuous = true, doc_ids=["doc1", "doc2"]
    5. Send POST /_replicate with source = ls_db, target = http://localhost:4985/db, continuous = true, filter="filter1"
    6. Make sure that the session_id returned by each POST /_replicate is different.
    7. Send GET /_active_tasks to make sure that there are 3 tasks created.
    8. Send 3 POST /_replicate with the same parameters as Steps 3-5 plus cancel=true to stop those replicators
    9. Repeat Steps 3-8 with source = http://localhost:4985/db and target = ls_db to test the pull replicator.
    """

    sg_db = "db"
    ls_db = "ls_db"

    cluster_config = setup_client_syncgateway_test["cluster_config"]
    ls_url = setup_client_syncgateway_test["ls_url"]
    sg_one_admin = setup_client_syncgateway_test["sg_admin_url"]
    sg_one_public = setup_client_syncgateway_test["sg_url"]

    sg_helper = SyncGateway()
    sg_helper.start_sync_gateway(
        cluster_config=cluster_config,
        url=sg_one_public,
        config="{}/walrus.json".format(SYNC_GATEWAY_CONFIGS)
    )

    log_info("Running 'test_multiple_replications_created_with_unique_properties'")
    log_info("ls_url: {}".format(ls_url))
    log_info("sg_one_admin: {}".format(sg_one_admin))
    log_info("sg_one_public: {}".format(sg_one_public))

    client = MobileRestClient()
    client.create_database(url=ls_url, name=ls_db)

    ########
    # PUSH #
    ########
    # Start 3 unique push replication requests
    repl_one = client.start_replication(
        url=ls_url,
        continuous=True,
        from_db=ls_db,
        to_url=sg_one_admin,
        to_db=sg_db
    )
    client.wait_for_replication_status_idle(ls_url, repl_one)

    repl_two = client.start_replication(
        url=ls_url,
        continuous=True,
        from_db=ls_db,
        to_url=sg_one_admin,
        to_db=sg_db,
        doc_ids=["doc_1", "doc_2"]
    )
    client.wait_for_replication_status_idle(ls_url, repl_two)

    # Create doc filter and add to the design doc
    filters = {
        "language": "javascript",
        "filters": {
            "sample_filter": "function(doc, req) { if (doc.type && doc.type === \"skip\") { return false; } return true; }"
        }
    }
    client.add_design_doc(url=ls_url, db=ls_db, name="by_type", doc=json.dumps(filters))

    repl_three = client.start_replication(
        url=ls_url,
        continuous=True,
        from_db=ls_db,
        to_url=sg_one_admin,
        to_db=sg_db,
        repl_filter="by_type/sample_filter"
    )
    client.wait_for_replication_status_idle(ls_url, repl_three)

    # Verify 3 replications are running
    replications = client.get_replications(ls_url)
    log_info(replications)
    assert len(replications) == 3, "Number of replications, Expected: {} Actual: {}".format(
        3,
        len(replications)
    )

    # Stop repl_one
    client.stop_replication(
        url=ls_url,
        continuous=True,
        from_db=ls_db,
        to_url=sg_one_admin,
        to_db=sg_db
    )

    # Stop repl_two
    client.stop_replication(
        url=ls_url,
        continuous=True,
        from_db=ls_db,
        to_url=sg_one_admin,
        to_db=sg_db,
        doc_ids=["doc_1", "doc_2"]
    )

    # Stop repl_three
    client.stop_replication(
        url=ls_url,
        continuous=True,
        from_db=ls_db,
        to_url=sg_one_admin,
        to_db=sg_db,
        repl_filter="by_type/sample_filter"
    )

    # Verify no replications are running
    client.wait_for_no_replications(ls_url)
    replications = client.get_replications(ls_url)
    log_info(replications)
    assert len(replications) == 0, "Number of replications, Expected: {} Actual: {}".format(
        0,
        len(replications)
    )

    ########
    # PULL #
    ########
    # Start 2 unique pull replication requests
    repl_four = client.start_replication(
        url=ls_url,
        continuous=True,
        from_url=sg_one_admin,
        from_db=sg_db,
        to_db=ls_db
    )
    client.wait_for_replication_status_idle(ls_url, repl_four)

    # Start filtered pull from sync gateway to LiteServ
    repl_five = client.start_replication(
        url=ls_url,
        continuous=True,
        from_url=sg_one_admin,
        from_db=sg_db,
        to_db=ls_db,
        channels_filter=["ABC", "CBS"]
    )
    client.wait_for_replication_status_idle(ls_url, repl_five)

    # Verify 2 replications are running
    replications = client.get_replications(ls_url)
    log_info(replications)
    assert len(replications) == 2, "Number of replications, Expected: {} Actual: {}".format(
        2,
        len(replications)
    )

    # Stop repl_four
    client.stop_replication(
        url=ls_url,
        continuous=True,
        from_url=sg_one_admin,
        from_db=sg_db,
        to_db=ls_db
    )

    # Stop repl_five
    client.stop_replication(
        url=ls_url,
        continuous=True,
        from_url=sg_one_admin,
        from_db=sg_db,
        to_db=ls_db,
        channels_filter=["ABC", "CBS"]
    )

    # Verify no replications are running
    client.wait_for_no_replications(ls_url)
    replications = client.get_replications(ls_url)
    log_info(replications)
    assert len(replications) == 0, "Number of replications, Expected: {} Actual: {}".format(
        0,
        len(replications)
    )
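
The docstring above describes raw POST /_replicate calls that client.start_replication() and client.stop_replication() are assumed to wrap. A hedged sketch against the CouchDB-style replication API of the Couchbase Lite Listener (helper names are hypothetical; only the payload keys named in the steps are used):

import requests


def start_push_replication(ls_url, ls_db, sg_url, sg_db, **extra):
    """POST /_replicate and return the session_id of the new replication."""
    payload = {
        "source": ls_db,
        "target": "{}/{}".format(sg_url, sg_db),
        "continuous": True
    }
    payload.update(extra)  # e.g. doc_ids=["doc1", "doc2"] or filter="by_type/sample_filter"
    resp = requests.post("{}/_replicate".format(ls_url), json=payload)
    resp.raise_for_status()
    return resp.json().get("session_id")


def cancel_push_replication(ls_url, ls_db, sg_url, sg_db, **extra):
    """Stop a replication by re-sending the same payload with cancel=true."""
    payload = {
        "source": ls_db,
        "target": "{}/{}".format(sg_url, sg_db),
        "continuous": True,
        "cancel": True
    }
    payload.update(extra)
    requests.post("{}/_replicate".format(ls_url), json=payload).raise_for_status()
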
Example #3
def test_design_doc_update(setup_client_syncgateway_test):
    """
    Ref: https://github.com/couchbase/couchbase-lite-android/issues/1139
    https://github.com/couchbaselabs/mobile-testkit/issues/1155
    1. Add 10 docs to the client with content
    2. Add 5 docs to the client without content
    3. Create design doc version 1 to fetch doc._id, doc._rev for docs with content
    4. Run a query and check for 10 expected docs with design doc version 1
    5. Update design doc to version 2 to fetch doc._id, doc._rev for docs with no content
    6. Run a query and check for 5 expected docs with design doc version 2
    -> With CBL 1.4.0 or earlier, even though design doc is updated to version 2,
       the second query will use design doc 1 and return 10 docs instead of 5
    """

    log_info("Running 'test_design_doc_update'")

    ls_url = setup_client_syncgateway_test["ls_url"]
    log_info("ls_url: {}".format(ls_url))

    client = MobileRestClient()

    num_content_docs_per_db = 10
    num_no_content_docs_per_db = 5
    d_doc_name = "dd"
    ls_db = client.create_database(ls_url, name="ls_db")

    # Add 10 docs to the client with content
    bulk_docs_content = create_docs("doc_content_",
                                    num_content_docs_per_db,
                                    content={"hi": "I should be in the view"})
    ls_db_docs1 = client.add_bulk_docs(url=ls_url,
                                       db=ls_db,
                                       docs=bulk_docs_content)
    assert len(ls_db_docs1) == num_content_docs_per_db

    # Add 5 docs to the client without content
    bulk_docs_no_content = create_docs("doc_no_content_",
                                       num_no_content_docs_per_db)
    ls_db_docs2 = client.add_bulk_docs(url=ls_url,
                                       db=ls_db,
                                       docs=bulk_docs_no_content)
    assert len(ls_db_docs2) == num_no_content_docs_per_db

    # Design doc to fetch doc._id, doc._rev for docs with content
    view = """{
    "language" : "javascript",
    "views" : {
        "content_view" : {
            "map" : "function(doc, meta) { if (doc.content) { emit(doc._id, doc._rev); } }"
        }
    }
}"""

    client.add_design_doc(url=ls_url, db=ls_db, name=d_doc_name, doc=view)
    content_view_rows_1 = client.get_view(url=ls_url,
                                          db=ls_db,
                                          design_doc_name=d_doc_name,
                                          view_name="content_view")
    client.verify_view_row_num(view_response=content_view_rows_1,
                               expected_num_rows=10)

    # Design doc to fetch doc._id, doc._rev for docs with no content
    view = """{
    "language" : "javascript",
    "views" : {
        "content_view" : {
            "map" : "function(doc, meta) { if (!(doc.content)) { emit(doc._id, doc._rev); } }"
        }
    }
}"""

    dd_rev = client.get_design_doc_rev(url=ls_url, db=ls_db, name=d_doc_name)
    assert dd_rev and dd_rev.startswith("1-")
    log_info("dd_rev: {}".format(dd_rev))

    client.update_design_doc(url=ls_url,
                             db=ls_db,
                             name=d_doc_name,
                             doc=view,
                             rev=dd_rev)
    dd_rev_new = client.get_design_doc_rev(url=ls_url,
                                           db=ls_db,
                                           name=d_doc_name)
    assert dd_rev_new != dd_rev and dd_rev_new.startswith("2-")
    log_info("dd_rev_new: {}".format(dd_rev_new))

    content_view_rows_2 = client.get_view(url=ls_url,
                                          db=ls_db,
                                          design_doc_name=d_doc_name,
                                          view_name="content_view")
    client.verify_view_row_num(view_response=content_view_rows_2,
                               expected_num_rows=5)
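
A minimal sketch of the design document update flow that get_design_doc_rev() and update_design_doc() are assumed to wrap, using the CouchDB-style document API (the helper below is hypothetical):

import json

import requests


def update_design_doc_rest(ls_url, ls_db, ddoc_name, view_json):
    """Fetch the current _rev of the design doc and PUT the new version on top of it."""
    ddoc_url = "{}/{}/_design/{}".format(ls_url, ls_db, ddoc_name)
    current = requests.get(ddoc_url)
    current.raise_for_status()
    body = json.loads(view_json)
    body["_rev"] = current.json()["_rev"]  # e.g. "1-...", update on top of version 1
    resp = requests.put(ddoc_url, json=body)
    resp.raise_for_status()
    return resp.json()["rev"]              # new revision, expected to start with "2-"
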
Example #4
def test_user_views_sanity(params_from_base_test_setup, sg_conf_name):
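    """Sanity test for Sync Gateway user views.
    1. Verify the users, roles, and channels defined in the sg config
    2. Add docs with attachments to the Download, Upload, Create, and Edit channels
    3. Add a design doc with two views and verify that each user only sees
       rows for the channels they have access to
    """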

    cluster_conf = params_from_base_test_setup["cluster_config"]
    mode = params_from_base_test_setup["mode"]

    sg_conf = sync_gateway_config_path_for_mode(sg_conf_name, mode)

    log_info("Running 'single_user_multiple_channels'")
    log_info("cluster_conf: {}".format(cluster_conf))
    log_info("conf: {}".format(sg_conf))

    cluster = Cluster(config=cluster_conf)
    cluster.reset(sg_config_path=sg_conf)

    sg_db = "db"
    number_docs_per_channel = 100

    topology = params_from_base_test_setup["cluster_topology"]
    cbs_url = topology["couchbase_servers"][0]
    bucket = "data-bucket"
    sg_admin_url = topology["sync_gateways"][0]["admin"]
    sg_public_url = topology["sync_gateways"][0]["public"]

    client = MobileRestClient()

    # These are defined in the sg config

    # Scientist role has channels ["Download"]
    # Researcher role has channels ["Upload"]

    # "seth" has "Scientist" role and ["Create"] channel
    # "raghu" has "Researcher" role and ["Edit"] channel

    # Issue GET /_user to exercise principal views
    users = client.get_users(url=sg_admin_url, db=sg_db)

    # These are defined in the config
    assert len(users) == 2 and "seth" in users and "raghu" in users

    # Issue GET /_role to exercise principal views
    roles = client.get_roles(url=sg_admin_url, db=sg_db)

    # These are defined in the config
    assert len(roles) == 2 and "Scientist" in roles and "Researcher" in roles

    # Verify channels on each role
    scientist_role = client.get_role(url=sg_admin_url,
                                     db=sg_db,
                                     name="Scientist")
    researcher_role = client.get_role(url=sg_admin_url,
                                      db=sg_db,
                                      name="Researcher")

    assert len(scientist_role["all_channels"]) == 2
    assert "Download" in scientist_role["all_channels"]
    assert "!" in scientist_role["all_channels"]

    assert scientist_role["name"] == "Scientist"
    assert len(scientist_role["admin_channels"]) == 1
    assert "Download" in scientist_role["admin_channels"]

    assert len(researcher_role["all_channels"]) == 2
    assert "Upload" in researcher_role["all_channels"]
    assert "!" in researcher_role["all_channels"]

    assert researcher_role["name"] == "Researcher"
    assert len(researcher_role["admin_channels"]) == 1
    assert "Upload" in researcher_role["admin_channels"]

    # Verify roles are assigned to the user
    seth_user = client.get_user(url=sg_admin_url, db=sg_db, name="seth")
    raghu_user = client.get_user(url=sg_admin_url, db=sg_db, name="raghu")

    assert len(seth_user["all_channels"]) == 3
    assert "!" in seth_user["all_channels"] and "Create" in seth_user[
        "all_channels"] and "Download" in seth_user["all_channels"]
    assert seth_user["admin_roles"] == ["Scientist"]
    assert seth_user["roles"] == ["Scientist"]

    assert len(raghu_user["all_channels"]) == 3
    assert "!" in raghu_user["all_channels"] and "Edit" in raghu_user[
        "all_channels"] and "Upload" in raghu_user["all_channels"]
    assert raghu_user["admin_roles"] == ["Researcher"]
    assert raghu_user["roles"] == ["Researcher"]

    seth_session = client.create_session(url=sg_admin_url,
                                         db=sg_db,
                                         name="seth",
                                         password="******")
    raghu_session = client.create_session(url=sg_admin_url,
                                          db=sg_db,
                                          name="raghu",
                                          password="******")

    start = time.time()

    download_doc_bodies = document.create_docs(
        doc_id_prefix="download_doc",
        number=number_docs_per_channel,
        attachments_generator=attachment.generate_2_png_100_100,
        channels=["Download"])
    upload_doc_bodies = document.create_docs(
        doc_id_prefix="upload_doc",
        number=number_docs_per_channel,
        attachments_generator=attachment.generate_png_100_100,
        channels=["Upload"])
    create_doc_bodies = document.create_docs(
        doc_id_prefix="create_doc",
        number=number_docs_per_channel,
        attachments_generator=attachment.generate_2_png_100_100,
        channels=["Create"])
    edit_doc_bodies = document.create_docs(
        doc_id_prefix="edit_doc",
        number=number_docs_per_channel,
        attachments_generator=attachment.generate_png_100_100,
        channels=["Edit"])

    end = time.time() - start
    log_info("Time to create docs: {}s".format(end))

    download_docs = client.add_bulk_docs(url=sg_public_url,
                                         db=sg_db,
                                         docs=download_doc_bodies,
                                         auth=seth_session)
    assert len(download_docs) == number_docs_per_channel

    upload_docs = client.add_bulk_docs(url=sg_public_url,
                                       db=sg_db,
                                       docs=upload_doc_bodies,
                                       auth=raghu_session)
    assert len(upload_docs) == number_docs_per_channel

    create_docs = client.add_bulk_docs(url=sg_public_url,
                                       db=sg_db,
                                       docs=create_doc_bodies,
                                       auth=seth_session)
    assert len(create_docs) == number_docs_per_channel

    edit_docs = client.add_bulk_docs(url=sg_public_url,
                                     db=sg_db,
                                     docs=edit_doc_bodies,
                                     auth=raghu_session)
    assert len(edit_docs) == number_docs_per_channel

    # Assert that the attachment docs get written to Couchbase Server
    server = couchbaseserver.CouchbaseServer(cbs_url)
    server_att_docs = server.get_server_docs_with_prefix(bucket=bucket,
                                                         prefix="_sync:att:")
    expected_num_attachments = (number_docs_per_channel * 2) + \
        number_docs_per_channel + \
        (number_docs_per_channel * 2) + \
        number_docs_per_channel
    assert len(server_att_docs) == expected_num_attachments

    design_doc = {
        "views": {
            "filtered": {
                "map": 'function(doc, meta) {emit(meta._id, doc.channels);}'
            },
            "filtered_more": {
                "map":
                'function(doc, meta) { if (doc.channels.indexOf("Create") != -1 || doc.channels.indexOf("Edit") != -1) {emit(meta._id, doc.channels);}}'
            }
        }
    }

    client.add_design_doc(url=sg_admin_url,
                          db=sg_db,
                          name="test_views",
                          doc=json.dumps(design_doc))

    # "seth" should see docs for channels ["Create", "Download"]
    seth_filtered = client.get_view(url=sg_public_url,
                                    db=sg_db,
                                    design_doc_name="test_views",
                                    view_name="filtered",
                                    auth=seth_session)
    seth_filtered_rows = seth_filtered["rows"]
    validate_rows(rows=seth_filtered_rows,
                  num_expected_rows=2 * number_docs_per_channel,
                  expected_id_prefixes=["create_doc", "download_doc"],
                  number_of_prefixed_docs=number_docs_per_channel)

    # "seth" should only see docs with "Create" channel through this view
    seth_filtered_more = client.get_view(url=sg_public_url,
                                         db=sg_db,
                                         design_doc_name="test_views",
                                         view_name="filtered_more",
                                         auth=seth_session)
    seth_filtered_more_rows = seth_filtered_more["rows"]
    validate_rows(rows=seth_filtered_more_rows,
                  num_expected_rows=number_docs_per_channel,
                  expected_id_prefixes=["create_doc"],
                  number_of_prefixed_docs=number_docs_per_channel)

    # "raghu" should see docs for channels ["Upload", "Edit"]
    raghu_filtered = client.get_view(url=sg_public_url,
                                     db=sg_db,
                                     design_doc_name="test_views",
                                     view_name="filtered",
                                     auth=raghu_session)
    raghu_rows = raghu_filtered["rows"]
    validate_rows(rows=raghu_rows,
                  num_expected_rows=2 * number_docs_per_channel,
                  expected_id_prefixes=["upload_doc", "edit_doc"],
                  number_of_prefixed_docs=number_docs_per_channel)

    # "raghu" should only see docs with "Edit" channel through this view
    raghu_filtered_more = client.get_view(url=sg_public_url,
                                          db=sg_db,
                                          design_doc_name="test_views",
                                          view_name="filtered_more",
                                          auth=raghu_session)
    raghu_filtered_more_rows = raghu_filtered_more["rows"]
    validate_rows(rows=raghu_filtered_more_rows,
                  num_expected_rows=number_docs_per_channel,
                  expected_id_prefixes=["edit_doc"],
                  number_of_prefixed_docs=number_docs_per_channel)
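
validate_rows() is defined elsewhere in the test module; a hedged sketch of the checks it is assumed to perform, based only on how it is called above:

def validate_rows(rows, num_expected_rows, expected_id_prefixes, number_of_prefixed_docs):
    """Check the total row count and that each id prefix appears the expected number of times."""
    assert len(rows) == num_expected_rows, "Expected {} rows, got {}".format(
        num_expected_rows, len(rows))
    ids = [row["id"] for row in rows]
    for prefix in expected_id_prefixes:
        matching = [doc_id for doc_id in ids if doc_id.startswith(prefix)]
        assert len(matching) == number_of_prefixed_docs
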
Example #5
def test_stale_revision_should_not_be_in_the_index(setup_client_syncgateway_test):
    """original ticket: https://github.com/couchbase/couchbase-lite-android/issues/855

    scenario:
    1. Run sync_gateway
    2. Create a database and start both push and pull replicators through the client REST API
    3. Create two or more views through the client REST API
    4. Add a doc and verify the doc is indexed with the current revision through the client REST API
    5. Make sure the document is pushed to sync gateway through the sync gateway REST API
    6. Update the doc with sync gateway (not client side) through the sync gateway REST API
    7. Make sure the updated document is pull replicated to the client through the client REST API
    8. Make sure the updated document is indexed through the client REST API
    9. Make sure the stale revision is deleted from the index through the client REST API
    10. Pass criteria
    """

    cluster_config = setup_client_syncgateway_test["cluster_config"]
    sg_mode = setup_client_syncgateway_test["sg_mode"]
    ls_url = setup_client_syncgateway_test["ls_url"]
    sg_url = setup_client_syncgateway_test["sg_url"]
    sg_admin_url = setup_client_syncgateway_test["sg_admin_url"]

    num_docs = 10
    num_revs = 100

    d_doc_name = "dd"
    sg_db = "db"
    sg_user_name = "sg_user"

    sg_config = sync_gateway_config_path_for_mode("listener_tests/listener_tests", sg_mode)
    c = cluster.Cluster(config=cluster_config)
    c.reset(sg_config_path=sg_config)

    log_info("Running 'test_stale_revision_should_not_be_in_the_index'")
    log_info("ls_url: {}".format(ls_url))
    log_info("sg_admin_url: {}".format(sg_admin_url))
    log_info("sg_url: {}".format(sg_url))
    log_info("num_docs: {}".format(num_docs))
    log_info("num_revs: {}".format(num_revs))

    client = MobileRestClient()

    sg_user_channels = ["NBC"]
    client.create_user(url=sg_admin_url, db=sg_db, name=sg_user_name, password="******", channels=sg_user_channels)
    sg_session = client.create_session(url=sg_admin_url, db=sg_db, name=sg_user_name)

    view = """{
    "language" : "javascript",
    "views" : {
        "content_view" : {
            "map" : "function(doc, meta) { if (doc.content) { emit(doc._id, doc._rev); } }"
        },
        "update_view" : {
            "map" : "function(doc, meta) { emit(doc.updates, null); }"
        }
    }
}"""

    ls_db = client.create_database(url=ls_url, name="ls_db")

    # Set up continuous push / pull replication between ls_db and sg_db
    client.start_replication(
        url=ls_url,
        continuous=True,
        from_db=ls_db,
        to_url=sg_admin_url, to_db=sg_db
    )

    client.start_replication(
        url=ls_url,
        continuous=True,
        from_url=sg_admin_url, from_db=sg_db,
        to_db=ls_db
    )

    design_doc_id = client.add_design_doc(url=ls_url, db=ls_db, name=d_doc_name, doc=view)
    client.get_doc(url=ls_url, db=ls_db, doc_id=design_doc_id)

    doc_body = document.create_doc(doc_id="doc_1", content={"hi": "I should be in the view"}, channels=sg_user_channels)

    log_info(doc_body)

    doc_body_2 = document.create_doc(doc_id="doc_2", channels=sg_user_channels)

    doc = client.add_doc(url=ls_url, db=ls_db, doc=doc_body)
    doc_2 = client.add_doc(url=ls_url, db=ls_db, doc=doc_body_2)

    content_view_rows = client.get_view(url=ls_url, db=ls_db, design_doc_name=d_doc_name, view_name="content_view")
    client.verify_view_row_num(view_response=content_view_rows, expected_num_rows=1)

    update_view_rows = client.get_view(url=ls_url, db=ls_db, design_doc_name=d_doc_name, view_name="update_view")
    client.verify_view_row_num(view_response=update_view_rows, expected_num_rows=2)

    expected_docs_list = [doc, doc_2]
    client.verify_docs_present(url=sg_url, db=sg_db, expected_docs=expected_docs_list, auth=sg_session)

    updated_doc = client.update_doc(url=sg_url, db=sg_db, doc_id=doc["id"], number_updates=10, auth=sg_session)

    client.verify_docs_present(url=ls_url, db=ls_db, expected_docs=updated_doc)

    content_view_rows_2 = client.get_view(url=ls_url, db=ls_db, design_doc_name=d_doc_name, view_name="content_view")
    client.verify_view_row_num(view_response=content_view_rows_2, expected_num_rows=1)

    client.verify_view_contains_keys(view_response=content_view_rows_2, keys=doc["id"])
    client.verify_view_contains_values(view_response=content_view_rows_2, values=updated_doc["rev"])
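
The final two assertions check that only the latest revision of doc_1 is left in the index. Because the pull replication and re-indexing are asynchronous, a polling helper (hypothetical, not part of MobileRestClient) can make that check less timing sensitive:

import time


def wait_for_indexed_rev(client, ls_url, ls_db, d_doc_name, doc_id, expected_rev, timeout=30):
    """Poll content_view until doc_id is emitted with expected_rev, or time out."""
    start = time.time()
    while time.time() - start < timeout:
        rows = client.get_view(url=ls_url, db=ls_db,
                               design_doc_name=d_doc_name, view_name="content_view")
        keys = [row["key"] for row in rows["rows"]]
        values = [row["value"] for row in rows["rows"]]
        if doc_id in keys and expected_rev in values:
            return rows
        time.sleep(1)
    raise AssertionError("doc {} was not indexed at rev {} within {}s".format(
        doc_id, expected_rev, timeout))
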
Example #6
def test_stale_revision_should_not_be_in_the_index(setup_client_syncgateway_test):
    """original ticket: https://github.com/couchbase/couchbase-lite-android/issues/855

    scenario:
    1. Run sync_gateway
    2. Create a database and start both push and pull replicators through the client REST API
    3. Create two or more views through the client REST API
    4. Add a doc and verify the doc is indexed with the current revision through the client REST API
    5. Make sure the document is pushed to sync gateway through the sync gateway REST API
    6. Update the doc with sync gateway (not client side) through the sync gateway REST API
    7. Make sure the updated document is pull replicated to the client through the client REST API
    8. Make sure the updated document is indexed through the client REST API
    9. Make sure the stale revision is deleted from the index through the client REST API
    10. Pass criteria
    """

    cluster_config = setup_client_syncgateway_test["cluster_config"]
    ls_url = setup_client_syncgateway_test["ls_url"]
    sg_url = setup_client_syncgateway_test["sg_url"]
    sg_admin_url = setup_client_syncgateway_test["sg_admin_url"]

    num_docs = 10
    num_revs = 100

    d_doc_name = "dd"
    sg_db = "db"
    sg_user_name = "sg_user"

    sg_helper = SyncGateway()
    sg_helper.start_sync_gateway(
        cluster_config=cluster_config,
        url=sg_url,
        config="{}/walrus.json".format(SYNC_GATEWAY_CONFIGS)
    )

    log_info("Running 'test_stale_revision_should_not_be_in_the_index'")
    log_info("ls_url: {}".format(ls_url))
    log_info("sg_admin_url: {}".format(sg_admin_url))
    log_info("sg_url: {}".format(sg_url))
    log_info("num_docs: {}".format(num_docs))
    log_info("num_revs: {}".format(num_revs))

    client = MobileRestClient()

    sg_user_channels = ["NBC"]
    client.create_user(url=sg_admin_url, db=sg_db, name=sg_user_name, password="******", channels=sg_user_channels)
    sg_session = client.create_session(url=sg_admin_url, db=sg_db, name=sg_user_name)

    view = """{
    "language" : "javascript",
    "views" : {
        "content_view" : {
            "map" : "function(doc, meta) { if (doc.content) { emit(doc._id, doc._rev); } }"
        },
        "update_view" : {
            "map" : "function(doc, meta) { emit(doc.updates, null); }"
        }
    }
}"""

    ls_db = client.create_database(url=ls_url, name="ls_db")

    # Set up continuous push / pull replication between ls_db and sg_db
    client.start_replication(
        url=ls_url,
        continuous=True,
        from_db=ls_db,
        to_url=sg_admin_url, to_db=sg_db
    )

    client.start_replication(
        url=ls_url,
        continuous=True,
        from_url=sg_admin_url, from_db=sg_db,
        to_db=ls_db
    )

    design_doc_id = client.add_design_doc(url=ls_url, db=ls_db, name=d_doc_name, doc=view)
    client.get_doc(url=ls_url, db=ls_db, doc_id=design_doc_id)

    doc_body = document.create_doc(doc_id="doc_1", content={"hi": "I should be in the view"}, channels=sg_user_channels)

    log_info(doc_body)

    doc_body_2 = document.create_doc(doc_id="doc_2", channels=sg_user_channels)

    doc = client.add_doc(url=ls_url, db=ls_db, doc=doc_body)
    doc_2 = client.add_doc(url=ls_url, db=ls_db, doc=doc_body_2)

    content_view_rows = client.get_view(url=ls_url, db=ls_db, design_doc_id=design_doc_id, view_name="content_view")
    client.verify_view_row_num(view_response=content_view_rows, expected_num_rows=1)

    update_view_rows = client.get_view(url=ls_url, db=ls_db, design_doc_id=design_doc_id, view_name="update_view")
    client.verify_view_row_num(view_response=update_view_rows, expected_num_rows=2)

    expected_docs_list = [doc, doc_2]
    client.verify_docs_present(url=sg_url, db=sg_db, expected_docs=expected_docs_list, auth=sg_session)

    updated_doc = client.update_doc(url=sg_url, db=sg_db, doc_id=doc["id"], number_updates=10, auth=sg_session)

    client.verify_docs_present(url=ls_url, db=ls_db, expected_docs=updated_doc)

    content_view_rows_2 = client.get_view(url=ls_url, db=ls_db, design_doc_id=design_doc_id, view_name="content_view")
    client.verify_view_row_num(view_response=content_view_rows_2, expected_num_rows=1)

    client.verify_view_contains_keys(view_response=content_view_rows_2, keys=doc["id"])
    client.verify_view_contains_values(view_response=content_view_rows_2, values=updated_doc["rev"])
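
Both variants of this test start continuous push and pull replications and never stop them. A possible teardown sketch, reusing the stop_replication() and wait_for_no_replications() calls shown in test_multiple_replications_created_with_unique_properties (the helper itself is hypothetical):

def stop_test_replications(client, ls_url, ls_db, sg_admin_url, sg_db):
    """Cancel the continuous push and pull replications started by the test."""
    client.stop_replication(
        url=ls_url,
        continuous=True,
        from_db=ls_db,
        to_url=sg_admin_url, to_db=sg_db
    )
    client.stop_replication(
        url=ls_url,
        continuous=True,
        from_url=sg_admin_url, from_db=sg_db,
        to_db=ls_db
    )
    client.wait_for_no_replications(ls_url)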