Example #1
def upgrade_sg_accel(sg_accels, sync_gateway_version,
                     sync_gateway_upgraded_version, sg_conf, cluster_config):
    log_info('------------------------------------------')
    log_info('START SG Accel cluster upgrade')
    log_info('------------------------------------------')

    ac_obj = SyncGateway()

    for ac in sg_accels:
        ac_ip = host_for_url(ac)
        log_info("Checking for sg_accel version before upgrade: {}".format(
            sync_gateway_version))
        verify_sg_accel_version(ac_ip, sync_gateway_version)
        log_info("Upgrading sg_accel: {}".format(ac_ip))
        ac_obj.upgrade_sync_gateways(
            cluster_config=cluster_config,
            sg_conf=sg_conf,
            sync_gateway_version=sync_gateway_upgraded_version,
            url=ac_ip)
        time.sleep(10)

        log_info("Checking for sg accel product info after upgrade")
        verify_sg_accel_product_info(ac_ip)
        log_info("Checking for sg accel version after upgrade: {}".format(
            sync_gateway_upgraded_version))
        verify_sg_accel_version(ac_ip, sync_gateway_upgraded_version)

    log_info("Upgraded all the sg accel nodes in the cluster")
    log_info('------------------------------------------')
    log_info('END SG Accel cluster upgrade')
    log_info('------------------------------------------')
Example #2
def upgrade_sync_gateway(sync_gateways, sync_gateway_version,
                         sync_gateway_upgraded_version, sg_conf,
                         cluster_config):
    log_info('------------------------------------------')
    log_info('START Sync Gateway cluster upgrade')
    log_info('------------------------------------------')

    sg_obj = SyncGateway()

    for sg in sync_gateways:
        sg_ip = host_for_url(sg["admin"])
        log_info("Checking for sync gateway product info before upgrade")
        verify_sync_gateway_product_info(sg_ip)
        log_info("Checking for sync gateway version: {}".format(
            sync_gateway_version))
        verify_sync_gateway_version(sg_ip, sync_gateway_version)
        log_info("Upgrading sync gateway: {}".format(sg_ip))
        sg_obj.upgrade_sync_gateways(
            cluster_config=cluster_config,
            sg_conf=sg_conf,
            sync_gateway_version=sync_gateway_upgraded_version,
            url=sg_ip)

        time.sleep(10)
        log_info("Checking for sync gateway product info after upgrade")
        verify_sync_gateway_product_info(sg_ip)
        log_info("Checking for sync gateway version after upgrade: {}".format(
            sync_gateway_upgraded_version))
        verify_sync_gateway_version(sg_ip, sync_gateway_upgraded_version)

    log_info("Upgraded all the sync gateway nodes in the cluster")
    log_info('------------------------------------------')
    log_info('END Sync Gateway cluster upgrade')
    log_info('------------------------------------------')
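Both helpers share the same signature, so a driver can run them back to back once it has the cluster topology. A minimal sketch, mirroring the call pattern used in Example #10 below (cluster_config, sync_gateway_version, sync_gateway_upgraded_version, sg_conf, and mode are assumed to be in scope):

# Hypothetical driver for the two upgrade helpers above.
cluster_util = ClusterKeywords()
topology = cluster_util.get_cluster_topology(cluster_config, lb_enable=False)

upgrade_sync_gateway(topology["sync_gateways"], sync_gateway_version,
                     sync_gateway_upgraded_version, sg_conf, cluster_config)

if mode == "di":
    # sg_accel nodes only exist in distributed index (di) mode
    upgrade_sg_accel(topology["sg_accels"], sync_gateway_version,
                     sync_gateway_upgraded_version, sg_conf, cluster_config)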
Example #3
def test_setting_expiry_in_bulk_docs(params_from_base_test_setup,
                                     sg_conf_name):
    """
    1. PUT /db/_bulk_docs with 10 documents.  Set the "_exp":3 on 5 of these documents
    2. Wait five seconds
    3. POST /db/_bulk_get for the 10 documents.  Validate that only the 5 documents that have not yet expired are returned
    """

    cluster_config = params_from_base_test_setup["cluster_config"]
    mode = params_from_base_test_setup["mode"]
    xattrs_enabled = params_from_base_test_setup['xattrs_enabled']

    sg_conf = sync_gateway_config_path_for_mode(sg_conf_name, mode)

    cluster_helper = ClusterKeywords()
    topology = cluster_helper.get_cluster_topology(cluster_config)

    cluster_helper.reset_cluster(cluster_config=cluster_config,
                                 sync_gateway_config=sg_conf)

    cbs_url = topology["couchbase_servers"][0]
    sg_url = topology["sync_gateways"][0]["public"]
    sg_url_admin = topology["sync_gateways"][0]["admin"]

    log_info("Running 'test_setting_expiry_in_bulk_docs'")
    log_info("cbs_url: {}".format(cbs_url))
    log_info("sg_url: {}".format(sg_url))
    log_info("sg_url_admin: {}".format(sg_url_admin))

    sg_db = "db"
    sg_user_name = "sg_user"
    sg_user_password = "******"
    sg_user_channels = ["NBC", "ABC"]
    bucket_name = "data-bucket"
    cbs_ip = host_for_url(cbs_url)

    sdk_client = Bucket('couchbase://{}/{}'.format(cbs_ip, bucket_name),
                        password='******')

    client = MobileRestClient()

    client.create_user(url=sg_url_admin,
                       db=sg_db,
                       name=sg_user_name,
                       password=sg_user_password,
                       channels=sg_user_channels)
    sg_user_session = client.create_session(url=sg_url_admin,
                                            db=sg_db,
                                            name=sg_user_name)

    doc_exp_3_bodies = document.create_docs(doc_id_prefix="exp_3",
                                            number=5,
                                            expiry=3,
                                            channels=sg_user_channels)
    doc_exp_10_bodies = document.create_docs(doc_id_prefix="exp_10",
                                             number=5,
                                             expiry=10,
                                             channels=sg_user_channels)

    bulk_bodies = doc_exp_3_bodies + doc_exp_10_bodies

    bulk_docs = client.add_bulk_docs(url=sg_url,
                                     db=sg_db,
                                     docs=bulk_bodies,
                                     auth=sg_user_session)

    # Allow exp_3 docs to expire
    time.sleep(5)

    bulk_docs_ids = [doc["id"] for doc in bulk_docs]

    expected_ids = ["exp_10_0", "exp_10_1", "exp_10_2", "exp_10_3", "exp_10_4"]
    expected_missing_ids = [
        "exp_3_0", "exp_3_1", "exp_3_2", "exp_3_3", "exp_3_4"
    ]

    bulk_get_docs, errors = client.get_bulk_docs(url=sg_url,
                                                 db=sg_db,
                                                 doc_ids=bulk_docs_ids,
                                                 auth=sg_user_session,
                                                 validate=False)
    assert len(bulk_get_docs) == len(expected_ids)
    assert len(errors) == len(expected_missing_ids)

    bulk_get_doc_ids = [doc["_id"] for doc in bulk_get_docs]
    error_ids = [doc["id"] for doc in errors]

    assert bulk_get_doc_ids == expected_ids
    assert error_ids == expected_missing_ids

    client.verify_doc_ids_found_in_response(response=bulk_get_docs,
                                            expected_doc_ids=expected_ids)
    client.verify_doc_ids_not_found_in_response(
        response=errors, expected_missing_doc_ids=expected_missing_ids)

    for expired_doc in error_ids:
        verify_doc_deletion_on_server(doc_id=expired_doc,
                                      sdk_client=sdk_client,
                                      sg_client=client,
                                      sg_admin_url=sg_url_admin,
                                      sg_db=sg_db,
                                      xattrs_enabled=xattrs_enabled)
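For reference, document.create_docs builds plain dict bodies, so the _bulk_docs payload the test sends looks roughly like the sketch below; everything except _id, _exp, and channels is illustrative:

# Hypothetical shape of the _bulk_docs request body used above.
bulk_docs_payload = {
    "docs": [
        {
            "_id": "exp_3_0",
            "_exp": 3,                   # expiry in seconds, interpreted by Sync Gateway
            "channels": ["NBC", "ABC"],
        },
        # ... nine more bodies: five with "_exp": 3, five with "_exp": 10
    ]
}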
Example #4
def test_rolling_ttl_remove_expirary(params_from_base_test_setup,
                                     sg_conf_name):
    """
    1. PUT /db/doc1 via SG with property "_exp":3
    2. Once per second for 10 seconds, update /db/doc1 with a new revision (also with "_exp":3)
    3. Update /db/doc1 with a revision with no expiry
    4. Get /db/doc1.  Assert response is 200
    """

    cluster_config = params_from_base_test_setup["cluster_config"]
    mode = params_from_base_test_setup["mode"]
    xattrs_enabled = params_from_base_test_setup['xattrs_enabled']

    sg_conf = sync_gateway_config_path_for_mode(sg_conf_name, mode)

    cluster_helper = ClusterKeywords()
    topology = cluster_helper.get_cluster_topology(cluster_config)

    cluster_helper.reset_cluster(cluster_config=cluster_config,
                                 sync_gateway_config=sg_conf)

    cbs_url = topology["couchbase_servers"][0]
    sg_url = topology["sync_gateways"][0]["public"]
    sg_url_admin = topology["sync_gateways"][0]["admin"]

    log_info("Running 'test_rolling_ttl_remove_expirary'")
    log_info("cbs_url: {}".format(cbs_url))
    log_info("sg_url: {}".format(sg_url))
    log_info("sg_url_admin: {}".format(sg_url_admin))

    sg_db = "db"
    sg_user_name = "sg_user"
    sg_user_password = "******"
    sg_user_channels = ["NBC", "ABC"]
    bucket_name = "data-bucket"
    cbs_ip = host_for_url(cbs_url)

    sdk_client = Bucket('couchbase://{}/{}'.format(cbs_ip, bucket_name),
                        password='******')

    client = MobileRestClient()

    client.create_user(url=sg_url_admin,
                       db=sg_db,
                       name=sg_user_name,
                       password=sg_user_password,
                       channels=sg_user_channels)
    sg_user_session = client.create_session(url=sg_url_admin,
                                            db=sg_db,
                                            name=sg_user_name)

    doc_exp_3_body = document.create_doc(doc_id="exp_3",
                                         expiry=3,
                                         channels=sg_user_channels)
    doc_exp_10_body = document.create_doc(doc_id="exp_10",
                                          expiry=10,
                                          channels=sg_user_channels)

    doc_exp_3 = client.add_doc(url=sg_url,
                               db=sg_db,
                               doc=doc_exp_3_body,
                               auth=sg_user_session)
    doc_exp_10 = client.add_doc(url=sg_url,
                                db=sg_db,
                                doc=doc_exp_10_body,
                                auth=sg_user_session)

    client.update_doc(url=sg_url,
                      db=sg_db,
                      doc_id=doc_exp_3["id"],
                      number_updates=10,
                      expiry=3,
                      delay=1,
                      auth=sg_user_session)
    client.update_doc(url=sg_url,
                      db=sg_db,
                      doc_id=doc_exp_3["id"],
                      number_updates=1,
                      auth=sg_user_session)

    # If expiry was not removed in the last update, this would expire doc_exp_3
    time.sleep(5)

    # doc_exp_3 should still be around due to removal of expiry
    doc_exp_3 = client.get_doc(url=sg_url,
                               db=sg_db,
                               doc_id=doc_exp_3["id"],
                               auth=sg_user_session)
    assert doc_exp_3["_id"] == "exp_3"

    # doc_exp_10 should be expired due to the updates (10s) + sleep (5s)
    with pytest.raises(HTTPError) as he:
        client.get_doc(url=sg_url,
                       db=sg_db,
                       doc_id=doc_exp_10["id"],
                       auth=sg_user_session)

    # In XATTR mode, the expiry results in a tombstone
    # In Doc Meta mode, the expiry results in a purge
    if xattrs_enabled:
        assert he.value[0].startswith("403 Client Error: Forbidden for url:")
    else:
        assert he.value[0].startswith("404 Client Error: Not Found for url:")

    verify_doc_deletion_on_server(doc_id=doc_exp_10["id"],
                                  sdk_client=sdk_client,
                                  sg_client=client,
                                  sg_admin_url=sg_url_admin,
                                  sg_db=sg_db,
                                  xattrs_enabled=xattrs_enabled)
Example #5
def test_numeric_expiry_as_ttl(params_from_base_test_setup, sg_conf_name):
    """
    1. PUT /db/doc1 via SG with property "_exp":3
       PUT /db/doc2 via SG with property "_exp":10
    2. Wait five seconds
    3. Get /db/doc1.  Assert response is 404
       Get /db/doc2.  Assert response is 200
    """

    cluster_config = params_from_base_test_setup["cluster_config"]
    mode = params_from_base_test_setup["mode"]
    xattrs_enabled = params_from_base_test_setup['xattrs_enabled']

    sg_conf = sync_gateway_config_path_for_mode(sg_conf_name, mode)

    cluster_helper = ClusterKeywords()
    topology = cluster_helper.get_cluster_topology(cluster_config)

    cluster_helper.reset_cluster(cluster_config=cluster_config,
                                 sync_gateway_config=sg_conf)

    cbs_url = topology["couchbase_servers"][0]
    sg_url = topology["sync_gateways"][0]["public"]
    sg_url_admin = topology["sync_gateways"][0]["admin"]

    log_info("Running 'test_numeric_expiry_as_ttl'")
    log_info("cbs_url: {}".format(cbs_url))
    log_info("sg_url: {}".format(sg_url))
    log_info("sg_url_admin: {}".format(sg_url_admin))

    sg_db = "db"
    sg_user_name = "sg_user"
    sg_user_password = "******"
    sg_user_channels = ["NBC", "ABC"]
    bucket_name = "data-bucket"
    cbs_ip = host_for_url(cbs_url)

    sdk_client = Bucket('couchbase://{}/{}'.format(cbs_ip, bucket_name),
                        password='******')
    client = MobileRestClient()

    client.create_user(url=sg_url_admin,
                       db=sg_db,
                       name=sg_user_name,
                       password=sg_user_password,
                       channels=sg_user_channels)
    sg_user_session = client.create_session(url=sg_url_admin,
                                            db=sg_db,
                                            name=sg_user_name)

    doc_exp_3_body = document.create_doc(doc_id="exp_3",
                                         expiry=3,
                                         channels=sg_user_channels)
    doc_exp_10_body = document.create_doc(doc_id="exp_10",
                                          expiry=10,
                                          channels=sg_user_channels)

    doc_exp_3 = client.add_doc(url=sg_url,
                               db=sg_db,
                               doc=doc_exp_3_body,
                               auth=sg_user_session)
    doc_exp_10 = client.add_doc(url=sg_url,
                                db=sg_db,
                                doc=doc_exp_10_body,
                                auth=sg_user_session)

    # Sleep should allow doc_exp_3 to expire, but still be in the window to get doc_exp_10
    time.sleep(5)

    # doc_exp_3 should be expired
    with pytest.raises(HTTPError) as he:
        client.get_doc(url=sg_url,
                       db=sg_db,
                       doc_id=doc_exp_3["id"],
                       auth=sg_user_session)

    # In XATTR mode, the expiry results in a tombstone
    # In Doc Meta mode, the expiry results in a purge
    if xattrs_enabled:
        assert he.value[0].startswith("403 Client Error: Forbidden for url:")
    else:
        assert he.value[0].startswith("404 Client Error: Not Found for url:")

    verify_doc_deletion_on_server(doc_id=doc_exp_3["id"],
                                  sdk_client=sdk_client,
                                  sg_client=client,
                                  sg_admin_url=sg_url_admin,
                                  sg_db=sg_db,
                                  xattrs_enabled=xattrs_enabled)

    # doc_exp_10 should be available still
    doc_exp_10_result = client.get_doc(url=sg_url,
                                       db=sg_db,
                                       doc_id=doc_exp_10["id"],
                                       auth=sg_user_session)
    assert doc_exp_10_result["_id"] == "exp_10"
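One caveat when picking numeric _exp values: Couchbase Server treats an expiry of up to 30 days (2592000 seconds) as a relative offset and anything larger as an absolute Unix timestamp. A small normalization helper, as a sketch:

import time

THIRTY_DAYS = 30 * 24 * 60 * 60  # 2592000 seconds

def absolute_expiry(ttl_seconds):
    # Values above 30 days are interpreted as absolute epoch seconds,
    # so convert explicitly to avoid ambiguity at the boundary.
    if ttl_seconds <= THIRTY_DAYS:
        return int(time.time()) + ttl_seconds
    return ttl_seconds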
Example #6
def test_string_expiry_as_iso_8601_date(params_from_base_test_setup,
                                        sg_conf_name):
    """
    1. Calculate (server time + 3 seconds) as ISO-8601 date (e.g. 2016-01-01T00:00:00.000+00:00)
    2. PUT /db/doc1 via SG with property "_exp":"[date]"
       PUT /db/doc2 via SG with property "_exp":"2026-01-01T00:00:00.000+00:00"
    3. Wait five seconds
    4. Get /db/doc1.  Assert response is 404
       Get /db/doc2.  Assert response is 200
    """

    cluster_config = params_from_base_test_setup["cluster_config"]
    mode = params_from_base_test_setup["mode"]
    xattrs_enabled = params_from_base_test_setup['xattrs_enabled']

    sg_conf = sync_gateway_config_path_for_mode(sg_conf_name, mode)

    cluster_helper = ClusterKeywords()
    topology = cluster_helper.get_cluster_topology(cluster_config)

    cluster_helper.reset_cluster(cluster_config=cluster_config,
                                 sync_gateway_config=sg_conf)

    cbs_url = topology["couchbase_servers"][0]
    sg_url = topology["sync_gateways"][0]["public"]
    sg_url_admin = topology["sync_gateways"][0]["admin"]

    log_info("Running 'test_string_expiry_as_ISO_8601_Date'")
    log_info("cbs_url: {}".format(cbs_url))
    log_info("sg_url: {}".format(sg_url))
    log_info("sg_url_admin: {}".format(sg_url_admin))

    sg_db = "db"
    sg_user_name = "sg_user"
    sg_user_password = "******"
    sg_user_channels = ["NBC", "ABC"]
    bucket_name = "data-bucket"
    cbs_ip = host_for_url(cbs_url)

    sdk_client = Bucket('couchbase://{}/{}'.format(cbs_ip, bucket_name),
                        password='******')

    client = MobileRestClient()

    client.create_user(url=sg_url_admin,
                       db=sg_db,
                       name=sg_user_name,
                       password=sg_user_password,
                       channels=sg_user_channels)
    sg_user_session = client.create_session(url=sg_url_admin,
                                            db=sg_db,
                                            name=sg_user_name)

    time_util = Time()
    iso_datetime = time_util.get_iso_datetime(delta=3)

    doc_exp_3_body = document.create_doc(doc_id="exp_3",
                                         expiry=iso_datetime,
                                         channels=sg_user_channels)
    doc_exp_years_body = document.create_doc(
        doc_id="exp_years",
        expiry="2026-01-01T00:00:00.000+00:00",
        channels=sg_user_channels)

    doc_exp_3 = client.add_doc(url=sg_url,
                               db=sg_db,
                               doc=doc_exp_3_body,
                               auth=sg_user_session)
    doc_exp_years = client.add_doc(url=sg_url,
                                   db=sg_db,
                                   doc=doc_exp_years_body,
                                   auth=sg_user_session)

    # Sleep should allow doc_exp_3 to expire
    time.sleep(10)

    # doc_exp_3 should be expired
    with pytest.raises(HTTPError) as he:
        client.get_doc(url=sg_url,
                       db=sg_db,
                       doc_id=doc_exp_3["id"],
                       auth=sg_user_session)

    # In XATTR mode, the expiry results in a tombstone
    # In Doc Meta mode, the expiry results in a purge
    if xattrs_enabled:
        assert he.value[0].startswith("403 Client Error: Forbidden for url:")
    else:
        assert he.value[0].startswith("404 Client Error: Not Found for url:")

    verify_doc_deletion_on_server(doc_id=doc_exp_3["id"],
                                  sdk_client=sdk_client,
                                  sg_client=client,
                                  sg_admin_url=sg_url_admin,
                                  sg_db=sg_db,
                                  xattrs_enabled=xattrs_enabled)

    # doc_exp_years should be available still
    doc_exp_years_result = client.get_doc(url=sg_url,
                                          db=sg_db,
                                          doc_id=doc_exp_years["id"],
                                          auth=sg_user_session)
    assert doc_exp_years_result["_id"] == "exp_years"
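Time().get_iso_datetime(delta=3) comes from the test framework; outside of it, an equivalent value can be built with the standard library. A sketch, assuming UTC and the same +00:00 offset format as the hard-coded expiry above:

from datetime import datetime, timedelta

def iso_datetime_from_now(delta_seconds):
    # Render e.g. 2016-01-01T00:00:03.000+00:00 (milliseconds truncated to .000),
    # matching the format used for the "exp_years" document.
    expiry = datetime.utcnow() + timedelta(seconds=delta_seconds)
    return expiry.strftime('%Y-%m-%dT%H:%M:%S.000+00:00')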
Example #7
def test_string_expiry_as_unix_date(params_from_base_test_setup, sg_conf_name):
    """
    1. Calculate (server time + 3 seconds) as unix time (i.e. Epoch time, e.g. 1466465122)
    2. PUT /db/doc1 via SG with property "_exp":"[unix time]"
       PUT /db/doc2 via SG with property "_exp":"1767225600"  (Jan 1 2026) Note: the maximum epoch time supported by CBS is maxUint32, or Sun 07 Feb 2106, in case you want to move it out further than 2026.
    3. Wait five seconds
    4. Get /db/doc1.  Assert response is 404
       Get /db/doc2.  Assert response is 200
    """

    cluster_config = params_from_base_test_setup["cluster_config"]
    mode = params_from_base_test_setup["mode"]
    xattrs_enabled = params_from_base_test_setup['xattrs_enabled']

    sg_conf = sync_gateway_config_path_for_mode(sg_conf_name, mode)

    cluster_helper = ClusterKeywords()
    topology = cluster_helper.get_cluster_topology(cluster_config)

    cluster_helper.reset_cluster(cluster_config=cluster_config,
                                 sync_gateway_config=sg_conf)

    cbs_url = topology["couchbase_servers"][0]
    sg_url = topology["sync_gateways"][0]["public"]
    sg_url_admin = topology["sync_gateways"][0]["admin"]

    log_info("Running 'test_string_expiry_as_unix_date'")
    log_info("cbs_url: {}".format(cbs_url))
    log_info("sg_url: {}".format(sg_url))
    log_info("sg_url_admin: {}".format(sg_url_admin))

    sg_db = "db"
    sg_user_name = "sg_user"
    sg_user_password = "******"
    sg_user_channels = ["NBC", "ABC"]
    bucket_name = "data-bucket"
    cbs_ip = host_for_url(cbs_url)

    sdk_client = Bucket('couchbase://{}/{}'.format(cbs_ip, bucket_name),
                        password='******')

    client = MobileRestClient()

    client.create_user(url=sg_url_admin,
                       db=sg_db,
                       name=sg_user_name,
                       password=sg_user_password,
                       channels=sg_user_channels)
    sg_user_session = client.create_session(url=sg_url_admin,
                                            db=sg_db,
                                            name=sg_user_name)

    time_util = Time()
    unix_time_3s_ahead = time_util.get_unix_timestamp(delta=3)

    # Convert unix timestamp to string
    unix_time_3s_ahead_string = str(unix_time_3s_ahead)

    # Using string representation for unix time
    doc_exp_3_body = document.create_doc(doc_id="exp_3",
                                         expiry=unix_time_3s_ahead_string,
                                         channels=sg_user_channels)
    doc_exp_years_body = document.create_doc(doc_id="exp_years",
                                             expiry="1767225600",
                                             channels=sg_user_channels)

    doc_exp_3 = client.add_doc(url=sg_url,
                               db=sg_db,
                               doc=doc_exp_3_body,
                               auth=sg_user_session)
    doc_exp_years = client.add_doc(url=sg_url,
                                   db=sg_db,
                                   doc=doc_exp_years_body,
                                   auth=sg_user_session)

    # Sleep should allow doc_exp_3 to expire
    time.sleep(10)

    # doc_exp_3 should be expired
    with pytest.raises(HTTPError) as he:
        client.get_doc(url=sg_url,
                       db=sg_db,
                       doc_id=doc_exp_3["id"],
                       auth=sg_user_session)

    # In XATTR mode, the expiry results in a tombstone
    # In Doc Meta mode, the expiry results in a purge
    if xattrs_enabled:
        assert he.value[0].startswith("403 Client Error: Forbidden for url:")
    else:
        assert he.value[0].startswith("404 Client Error: Not Found for url:")

    verify_doc_deletion_on_server(doc_id=doc_exp_3["id"],
                                  sdk_client=sdk_client,
                                  sg_client=client,
                                  sg_admin_url=sg_url_admin,
                                  sg_db=sg_db,
                                  xattrs_enabled=xattrs_enabled)

    # doc_exp_years should be available still
    doc_exp_years_result = client.get_doc(url=sg_url,
                                          db=sg_db,
                                          doc_id=doc_exp_years["id"],
                                          auth=sg_user_session)
    assert doc_exp_years_result["_id"] == "exp_years"
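The unix-time variant can likewise be reproduced without the framework's Time helper. A sketch that also checks the maxUint32 ceiling mentioned in the docstring:

import time

MAX_CBS_EXPIRY = 2 ** 32 - 1  # maxUint32, i.e. Sun 07 Feb 2106

def unix_expiry_string(delta_seconds):
    expiry = int(time.time()) + delta_seconds
    assert expiry <= MAX_CBS_EXPIRY, "expiry beyond what Couchbase Server supports"
    return str(expiry)  # Sync Gateway also accepts the string form of _exp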
Example #8
def test_mobile_opt_in(params_from_base_test_setup, sg_conf_name):
    """
    Scenario: Enable mobile opt in sync function in sync-gateway configuration file
    - Check xattrs/mobile-opt-in_cc or di json files
    - 9 cases covered
    - doc : https://docs.google.com/document/d/1XxLIBsjuj_UxTTJs4Iu7C7uZdos8ZEzeckrVc17y3sw/edit
    - #1 Create doc via sdk with mobile opt in and verify doc is imported
    - #2 Create doc via sdk with mobile opt out and verify doc is not imported
    - #3 Create doc via sg with mobile opt in and update via sdk and verify doc is imported
    - #4 Create doc via sg with mobile opt out and update via sdk and verify doc is not imported
         - Try to update same doc via sg and verify 409 conflict error is thrown
         - Create a doc with same doc id and verify doc is created successfully
    - #5 Create doc via sg, then delete it so that no active revisions remain
         - A subsequent sdk create with mobile opt in should be imported
    - #6 Create doc via sg with mobile opt out and update via sdk with opt in
         - Verify the type is overridden and the doc is imported
    - #7 Create doc via sg with mobile opt in and update via sdk with opt out
         - Verify the type is overridden and the doc is not imported
    - #8 Disable import in the sg config and keep the mobile opt in sync function
         Create doc via sdk with the mobile property and verify the sg update succeeds
    - #9 Same config as #8, with the mobile opt in sync function in the config
         Create doc via sdk without the mobile property, create a new doc via sg with the
         same doc id, and verify it succeeds
    """

    bucket_name = 'data-bucket'
    sg_db = 'db'

    cluster_conf = params_from_base_test_setup['cluster_config']
    cluster_topology = params_from_base_test_setup['cluster_topology']
    mode = params_from_base_test_setup['mode']
    xattrs_enabled = params_from_base_test_setup['xattrs_enabled']

    # This test should only run when using xattr meta storage
    if not xattrs_enabled:
        pytest.skip('XATTR tests require --xattrs flag')

    sg_conf = sync_gateway_config_path_for_mode(sg_conf_name, mode)
    sg_admin_url = cluster_topology['sync_gateways'][0]['admin']
    sg_url = cluster_topology['sync_gateways'][0]['public']
    cbs_url = cluster_topology['couchbase_servers'][0]

    log_info('sg_conf: {}'.format(sg_conf))
    log_info('sg_admin_url: {}'.format(sg_admin_url))
    log_info('sg_url: {}'.format(sg_url))
    log_info('cbs_url: {}'.format(cbs_url))

    cluster = Cluster(config=cluster_conf)
    cluster.reset(sg_config_path=sg_conf)

    # Create clients
    sg_client = MobileRestClient()
    cbs_ip = host_for_url(cbs_url)
    sdk_client = Bucket('couchbase://{}/{}'.format(cbs_ip, bucket_name), password='******', timeout=SDK_TIMEOUT)

    # Create user / session
    auto_user_info = UserInfo(name='autotest', password='******', channels=['mobileOptIn'], roles=[])
    sg_client.create_user(
        url=sg_admin_url,
        db=sg_db,
        name=auto_user_info.name,
        password=auto_user_info.password,
        channels=auto_user_info.channels
    )

    test_auth_session = sg_client.create_session(
        url=sg_admin_url,
        db=sg_db,
        name=auto_user_info.name,
        password=auto_user_info.password
    )

    def update_mobile_prop():
        return {
            'updates': 0,
            'type': 'mobile',
        }

    def update_non_mobile_prop():
        return {
            'updates': 0,
            'test': 'true',
            'type': 'mobile opt out',
        }

    # Create first doc via SDK with type mobile. Case #1
    doc_id1 = 'mobile_opt_in_sdk_doc'
    doc = document.create_doc(doc_id=doc_id1, channels=['mobileOptIn'], prop_generator=update_mobile_prop)
    sdk_client.upsert(doc_id1, doc)
    sg_get_doc1 = sg_client.get_doc(url=sg_url, db=sg_db, doc_id=doc_id1, auth=test_auth_session)
    assert sg_get_doc1['_rev'].startswith('1-') and sg_get_doc1['_id'] == doc_id1
    # Additional coverage for case #1
    sg_client.update_doc(url=sg_url, db=sg_db, doc_id=doc_id1, number_updates=1, auth=test_auth_session)
    sg_get_doc1 = sg_client.get_doc(url=sg_url, db=sg_db, doc_id=doc_id1, auth=test_auth_session)
    assert sg_get_doc1['_rev'].startswith('2-') and sg_get_doc1['_id'] == doc_id1

    # Create second doc via SDK with type non mobile. Case #2
    doc_id2 = 'mobile_opt_out_sdk_doc'
    doc = document.create_doc(doc_id=doc_id2, channels=['mobileOptIn'], prop_generator=update_non_mobile_prop)
    sdk_client.upsert(doc_id2, doc)
    with pytest.raises(HTTPError) as he:
        sg_client.get_doc(url=sg_url, db=sg_db, doc_id=doc_id2, auth=test_auth_session)
    log_info(he.value)
    assert he.value.message.startswith('404 Client Error: Not Found for url:')

    # Create third sg doc with mobile opt in  and update via sdk. Case #3
    doc_id3 = 'mobile_opt_in_sg_doc'
    doc_body = document.create_doc(doc_id=doc_id3, channels=['mobileOptIn'], prop_generator=update_mobile_prop)
    doc = sg_client.add_doc(url=sg_url, db=sg_db, doc=doc_body, auth=test_auth_session)
    sg_get_doc3 = sg_client.get_doc(url=sg_url, db=sg_db, doc_id=doc_id3, auth=test_auth_session)
    sg_get_doc3["updated_sdk_via_sg"] = "1"
    sdk_client.upsert(doc_id3, sg_get_doc3)
    sg_get_doc3 = sg_client.get_doc(url=sg_url, db=sg_db, doc_id=doc_id3, auth=test_auth_session)
    assert sg_get_doc3['_rev'].startswith('2-') and sg_get_doc3['_id'] == doc_id3
    log_info("sg get doc3 is {}".format(sg_get_doc3))

    # Create fourth sg doc with mobile opt out and update via sdk. Case #4
    doc_id4 = 'mobile_opt_out_sg_doc'
    doc_body = document.create_doc(doc_id=doc_id4, channels=['mobileOptIn'], prop_generator=update_non_mobile_prop)
    doc = sg_client.add_doc(url=sg_url, db=sg_db, doc=doc_body, auth=test_auth_session)
    # update via SDK
    sg_get_doc4 = sg_client.get_doc(url=sg_url, db=sg_db, doc_id=doc_id4, auth=test_auth_session)
    rev = sg_get_doc4['_rev']
    sg_get_doc4["updated_sdk_via_sg"] = "1"
    sdk_client.upsert(doc_id4, sg_get_doc4)
    with pytest.raises(HTTPError) as he:
        sg_client.get_doc(url=sg_url, db=sg_db, doc_id=doc_id4, auth=test_auth_session)
    log_info(he.value)
    assert he.value.message.startswith('404 Client Error: Not Found for url:')
    # update via SG
    with pytest.raises(HTTPError) as he:
        sg_client.put_doc(url=sg_url, db=sg_db, doc_id=doc_id4, doc_body={'sg_rewrite': 'True'}, rev=rev, auth=test_auth_session)
    log_info(he.value)
    assert he.value.message.startswith('409 Client Error: Conflict for url:')
    # Create the same doc again to verify there is no existing-key error (also part of case #4)
    doc_body = document.create_doc(doc_id=doc_id4, channels=['mobileOptIn'], prop_generator=update_non_mobile_prop)
    sg_get_doc4_1 = sg_client.add_doc(url=sg_url, db=sg_db, doc=doc_body, auth=test_auth_session)
    log_info("4th doc after recreate vis sg is {}".format(sg_get_doc4_1))
    assert sg_get_doc4_1['rev'].startswith('1-') and sg_get_doc4_1['id'] == doc_id4

    # Create fifth sg doc with mobile opt in and delete it, leaving no active revisions, i.e. a tombstone doc
    # A subsequent sdk create with mobile opt in should then be imported. Case #5
    doc_id5 = 'mobile_sdk_recreate_no_activerev'
    doc_body = document.create_doc(doc_id=doc_id5, channels=['mobileOptIn'], prop_generator=update_mobile_prop)
    doc = sg_client.add_doc(url=sg_url, db=sg_db, doc=doc_body, auth=test_auth_session)
    rev = doc['rev']
    sg_client.delete_doc(url=sg_url, db=sg_db, doc_id=doc_id5, rev=rev, auth=test_auth_session)
    # At this point there are no active revisions for this doc, so an sdk create with
    # mobile opt in should succeed and the doc should be retrievable
    doc = document.create_doc(doc_id=doc_id5, channels=['mobileOptIn'], prop_generator=update_mobile_prop)
    sdk_client.upsert(doc_id5, doc)
    sg_get_doc5 = sg_client.get_doc(url=sg_url, db=sg_db, doc_id=doc_id5, auth=test_auth_session)
    log_info("sg get doc 5 is {}".format(sg_get_doc5))
    assert sg_get_doc5['_rev'].startswith('1-') and sg_get_doc5['_id'] == doc_id5

    # Create sixth sg doc with mobile opt out and update via sdk with opt in. Case #6
    doc_id6 = 'mobileoptout_sg_doc_sdkupdate_optin'
    doc_body = document.create_doc(doc_id=doc_id6, channels=['mobileOptIn'], prop_generator=update_non_mobile_prop)
    doc = sg_client.add_doc(url=sg_url, db=sg_db, doc=doc_body, auth=test_auth_session)
    sg_get_doc6 = sg_client.get_doc(url=sg_url, db=sg_db, doc_id=doc_id6, auth=test_auth_session)
    log_info("Sg sixth doc is {}".format(sg_get_doc6))
    sg_get_doc6["type"] = "mobile"
    sdk_client.upsert(doc_id6, sg_get_doc6)
    sg_get_doc6 = sg_client.get_doc(url=sg_url, db=sg_db, doc_id=doc_id6, auth=test_auth_session)
    assert sg_get_doc6['_rev'].startswith('2-') and sg_get_doc6['_id'] == doc_id6

    # Create seventh sg doc with mobile opt in and update via sdk with opt out. Case #7
    doc_id7 = 'mobileoptin_sg_doc_sdkupdate_optout'
    doc_body = document.create_doc(doc_id=doc_id7, channels=['mobileOptIn'], prop_generator=update_mobile_prop)
    doc = sg_client.add_doc(url=sg_url, db=sg_db, doc=doc_body, auth=test_auth_session)
    sg_get_doc7 = sg_client.get_doc(url=sg_url, db=sg_db, doc_id=doc_id7, auth=test_auth_session)
    log_info("Sg sixth doc is {}".format(sg_get_doc7))
    sg_get_doc7["type"] = "mobile opt out"
    sdk_client.upsert(doc_id7, sg_get_doc7)
    with pytest.raises(HTTPError) as he:
        sg_get_doc7 = sg_client.get_doc(url=sg_url, db=sg_db, doc_id=doc_id7, auth=test_auth_session)
    log_info(he.value)
    assert he.value.message.startswith('404 Client Error: Not Found for url:')
    # TODO : verify _changes that it shows tombstone revisions -> it will happen on 2.0

    # Create eighth doc via sdk with import disabled, add the mobile property, and update via sg. Case #8
    sg_conf_name = "xattrs/mobile_opt_in_no_import"
    sg_no_import_conf = sync_gateway_config_path_for_mode(sg_conf_name, mode)
    sg_util = SyncGateway()
    sg_util.start_sync_gateways(cluster_config=cluster_conf, url=sg_url, config=sg_no_import_conf)

    doc_id8 = 'mobile_opt_in_sg_rewrite_with_importdisabled'
    doc_body = document.create_doc(doc_id=doc_id8, channels=['mobileOptIn'], prop_generator=update_mobile_prop)
    sdk_client.upsert(doc_id8, doc_body)
    with pytest.raises(HTTPError) as he:
        sg_client.add_doc(url=sg_url, db=sg_db, doc=doc_body, auth=test_auth_session)
    log_info(he.value)
    assert he.value.message.startswith('409 Client Error: Conflict for url:')
    sg_client.update_doc(url=sg_url, db=sg_db, doc_id=doc_id8, number_updates=1, auth=test_auth_session)
    sg_get_doc8 = sg_client.get_doc(url=sg_url, db=sg_db, doc_id=doc_id8, auth=test_auth_session)
    assert sg_get_doc8['_rev'].startswith('2-') and sg_get_doc8['_id'] == doc_id8

    # Create ninth doc via sdk without the mobile property and recreate it via sg with the same doc id. Case #9
    doc_id9 = 'mobile_opt_out_sg_rewrite_with_importdisabled'
    doc_body = document.create_doc(doc_id=doc_id9, channels=['mobileOptIn'], prop_generator=update_non_mobile_prop)
    sdk_client.upsert(doc_id9, doc_body)
    sg_client.add_doc(url=sg_url, db=sg_db, doc=doc_body, auth=test_auth_session)
    sg_get_doc9 = sg_client.get_doc(url=sg_url, db=sg_db, doc_id=doc_id9, auth=test_auth_session)
    assert sg_get_doc9['_rev'].startswith('1-') and sg_get_doc9['_id'] == doc_id9
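The assertions above parse the error text out of he.value, which relies on the old exception interface; with requests, the status code is also available in structured form on the exception. An alternative sketch, reusing names from the example above:

from requests.exceptions import HTTPError
import pytest

# Sketch: assert on the response status code instead of parsing the message.
with pytest.raises(HTTPError) as he:
    sg_client.get_doc(url=sg_url, db=sg_db, doc_id=doc_id2, auth=test_auth_session)
assert he.value.response.status_code == 404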
Example #9
def test_webhooks_crud(params_from_base_test_setup, sg_conf_name, filtered):
    """ Tests for webhook notification on import

    xattr mode
    1. Start sync gateway with autoimport

    1. Write 'num_docs_per_client' docs via SDK
    1. Write 'num_docs_per_client' docs via SG
    1. Verify 'num_docs_per_client' * 2 webhook events (id, rev, body)

    1. Update SG docs once each via SDK
    1. Update SDK docs once each via SG
    1. Verify 'num_docs_per_client' * 2 webhook events (id, rev, body)

    1. Delete SG docs via SDK
    1. Delete SDK docs via SG
    1. Verify 'num_docs_per_client' * 2 webhook events (id, rev, body)

    To verify there are no duplicates, wait 10s after receiving the expected webhooks

    docmeta mode
    1. Write 'num_docs_per_client' docs via SG
    1. Verify 'num_docs_per_client' webhook events (id, rev, body)

    1. Update SG docs once each via SG
    1. Verify 'num_docs_per_client' webhook events (id, rev, body)

    1. Delete SG docs via SG
    1. Verify 'num_docs_per_client' webhook events (id, rev, body)

    if filtered, the scenario will add a 'filtered' property to every other doc.
    The webhook validation will only look for the filtered docs

    """
    xattrs_enabled = params_from_base_test_setup['xattrs_enabled']

    log_info('Webhooks filtered?: {}'.format(filtered))

    cluster_conf = params_from_base_test_setup['cluster_config']
    cluster_topology = params_from_base_test_setup['cluster_topology']
    mode = params_from_base_test_setup['mode']
    sg_admin_url = cluster_topology['sync_gateways'][0]['admin']
    sg_url = cluster_topology['sync_gateways'][0]['public']
    cbs_url = cluster_topology['couchbase_servers'][0]

    sg_db = 'db'
    bucket_name = 'data-bucket'
    num_docs_per_client = 100

    sg_conf = sync_gateway_config_path_for_mode(sg_conf_name, mode)

    cluster = Cluster(config=cluster_conf)
    cluster.reset(sg_conf)

    # Start webhook server on test runner
    webhook_server = WebServer()
    webhook_server.start()

    sg_client = MobileRestClient()
    cbs_ip = host_for_url(cbs_url)
    sdk_client = Bucket('couchbase://{}/{}'.format(cbs_ip, bucket_name),
                        password='******')

    sg_info = UserInfo('sg_user', 'pass', channels=['shared'], roles=[])
    sdk_info = UserInfo('sdk_user', 'pass', channels=['shared'], roles=[])
    sg_client.create_user(url=sg_admin_url,
                          db=sg_db,
                          name=sg_info.name,
                          password=sg_info.password,
                          channels=sg_info.channels)
    sg_auth = sg_client.create_session(url=sg_admin_url,
                                       db=sg_db,
                                       name=sg_info.name,
                                       password=sg_info.password)

    # Create sg docs
    doc_content = {'aphex': 'twin'}
    sg_docs = document.create_docs(doc_id_prefix='sg_user_doc',
                                   number=num_docs_per_client,
                                   content=doc_content,
                                   channels=sg_info.channels)

    # Add filtered property to every other doc
    count = 0
    for sg_doc in sg_docs:
        if count % 2 == 0:
            sg_doc['filtered'] = True
        count += 1

    sg_doc_ids = [doc['_id'] for doc in sg_docs]
    sg_filtered_doc_ids = [doc['_id'] for doc in sg_docs if 'filtered' in doc]
    assert len(sg_doc_ids) == num_docs_per_client
    assert len(sg_filtered_doc_ids) == num_docs_per_client / 2

    # Create sdk docs
    sdk_docs = {
        'sdk_user_doc_{}'.format(i): {
            'channels': sdk_info.channels,
            'content': doc_content
        }
        for i in range(num_docs_per_client)
    }

    # Add filtered property to every other doc
    count = 0
    for _, doc_val in sdk_docs.items():
        if count % 2 == 0:
            doc_val['filtered'] = True
        count += 1

    sdk_doc_ids = [doc for doc in sdk_docs]
    sdk_filtered_doc_ids = [k for k, v in sdk_docs.items() if 'filtered' in v]
    assert len(sdk_doc_ids) == num_docs_per_client
    assert len(sdk_filtered_doc_ids) == num_docs_per_client / 2

    all_docs = sg_doc_ids + sdk_doc_ids
    all_filtered_docs = sg_filtered_doc_ids + sdk_filtered_doc_ids
    assert len(all_docs) == num_docs_per_client * 2

    # If xattr mode, add sg + sdk docs
    # If non xattr mode, add sg docs
    add_docs(sg_client=sg_client,
             sg_url=sg_url,
             sg_db=sg_db,
             sg_docs=sg_docs,
             sg_auth=sg_auth,
             sdk_client=sdk_client,
             sdk_docs=sdk_docs,
             num_docs_per_client=num_docs_per_client,
             xattrs=xattrs_enabled)

    # Wait for added docs to trigger webhooks
    if xattrs_enabled and filtered:
        poll_for_webhook_data(webhook_server, all_filtered_docs, 1,
                              doc_content)
    elif xattrs_enabled and not filtered:
        poll_for_webhook_data(webhook_server, all_docs, 1, doc_content)
    elif not xattrs_enabled and filtered:
        poll_for_webhook_data(webhook_server, sg_filtered_doc_ids, 1,
                              doc_content)
    else:
        poll_for_webhook_data(webhook_server, sg_doc_ids, 1, doc_content)
    webhook_server.clear_data()

    # Update sdk docs from sg
    # If xattr mode, update sdk docs from sg, update sg docs from SDK
    # If non xattr mode, update sg docs from sg
    updated_doc_content = {'brian': 'eno'}
    update_docs(sg_client=sg_client,
                sg_url=sg_url,
                sg_db=sg_db,
                sg_doc_ids=sg_doc_ids,
                sg_auth=sg_auth,
                sdk_client=sdk_client,
                sdk_doc_ids=sdk_doc_ids,
                updated_doc_content=updated_doc_content,
                xattrs=xattrs_enabled)

    # Wait for updates to trigger webhooks
    if xattrs_enabled and filtered:
        poll_for_webhook_data(webhook_server, all_filtered_docs, 2,
                              updated_doc_content)
    elif xattrs_enabled and not filtered:
        poll_for_webhook_data(webhook_server, all_docs, 2, updated_doc_content)
    elif not xattrs_enabled and filtered:
        poll_for_webhook_data(webhook_server, sg_filtered_doc_ids, 2,
                              updated_doc_content)
    else:
        poll_for_webhook_data(webhook_server, sg_doc_ids, 2,
                              updated_doc_content)
    webhook_server.clear_data()

    delete_docs(sg_client=sg_client,
                sg_url=sg_url,
                sg_db=sg_db,
                sg_doc_ids=sg_doc_ids,
                sg_auth=sg_auth,
                sdk_client=sdk_client,
                sdk_doc_ids=sdk_doc_ids,
                xattrs=xattrs_enabled)

    # Wait for deletes to trigger webhook events, filter includes all deleted docs
    if xattrs_enabled:
        poll_for_webhook_data(webhook_server,
                              all_docs,
                              3,
                              updated_doc_content,
                              deleted=True)
    else:
        poll_for_webhook_data(webhook_server,
                              sg_doc_ids,
                              3,
                              updated_doc_content,
                              deleted=True)
    webhook_server.clear_data()

    # Stop webhook server
    webhook_server.stop()
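WebServer is a helper from the test framework. As a rough stand-in for what it records, a minimal receiver using only Python 3's standard library might look like this (a sketch, not the framework's implementation):

import json
import threading
from http.server import BaseHTTPRequestHandler, HTTPServer

received_events = []

class WebhookHandler(BaseHTTPRequestHandler):
    def do_POST(self):
        # Sync Gateway POSTs the doc body for each webhook event.
        length = int(self.headers.get('Content-Length', 0))
        received_events.append(json.loads(self.rfile.read(length)))
        self.send_response(200)
        self.end_headers()

server = HTTPServer(('0.0.0.0', 8080), WebhookHandler)
threading.Thread(target=server.serve_forever, daemon=True).start()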
Example #10
def test_upgrade(params_from_base_test_setup):
    """
    @summary
        The initial versions of SG and CBS have already been provisioned at this point.
        We now upgrade them to the upgraded versions.
    """
    cluster_config = params_from_base_test_setup['cluster_config']
    mode = params_from_base_test_setup['mode']
    xattrs_enabled = params_from_base_test_setup['xattrs_enabled']
    ls_url = params_from_base_test_setup["ls_url"]
    server_version = params_from_base_test_setup['server_version']
    sync_gateway_version = params_from_base_test_setup['sync_gateway_version']
    server_upgraded_version = params_from_base_test_setup[
        'server_upgraded_version']
    sync_gateway_upgraded_version = params_from_base_test_setup[
        'sync_gateway_upgraded_version']
    sg_url = params_from_base_test_setup['sg_url']
    sg_admin_url = params_from_base_test_setup['sg_admin_url']
    num_docs = int(params_from_base_test_setup['num_docs'])
    cbs_platform = params_from_base_test_setup['cbs_platform']
    cbs_toy_build = params_from_base_test_setup['cbs_toy_build']
    sg_conf = "{}/resources/sync_gateway_configs/sync_gateway_default_functional_tests_{}.json".format(
        os.getcwd(), mode)

    # Add data to liteserv
    client = MobileRestClient()
    log_info("ls_url: {}".format(ls_url))
    ls_db = client.create_database(ls_url, name="ls_db")

    # Create user and session on SG
    sg_user_channels = ["sg_user_channel"]
    sg_db = "db"
    sg_user_name = "sg_user"
    sg_user_password = "******"
    client.create_user(url=sg_admin_url,
                       db=sg_db,
                       name=sg_user_name,
                       password=sg_user_password,
                       channels=sg_user_channels)
    sg_session = client.create_session(url=sg_admin_url,
                                       db=sg_db,
                                       name=sg_user_name,
                                       password=sg_user_password)

    log_info(
        "Starting continuous push pull replication from liteserv to sync gateway"
    )
    repl_one = client.start_replication(url=ls_url,
                                        continuous=True,
                                        from_db=ls_db,
                                        to_url=sg_url,
                                        to_db=sg_db,
                                        to_auth=sg_session)
    client.wait_for_replication_status_idle(ls_url, repl_one)

    log_info("Starting replication from sync gateway to liteserv")
    client.start_replication(url=ls_url,
                             continuous=True,
                             from_url=sg_url,
                             from_db=sg_db,
                             from_auth=sg_session,
                             to_db=ls_db)

    # Add docs to liteserv
    added_docs = add_docs_to_client_task(client=client,
                                         url=ls_url,
                                         db=ls_db,
                                         channels=sg_user_channels,
                                         num_docs=num_docs)
    log_info("Added {} docs".format(len(added_docs)))

    # start updating docs
    terminator_doc_id = 'terminator'
    with ProcessPoolExecutor() as up:
        # Start updates in background process
        updates_future = up.submit(update_docs, client, ls_url, ls_db,
                                   added_docs, sg_session, terminator_doc_id)

        # Supported upgrade process
        # 1. Upgrade SGs first docmeta -> docmeta - CBS 5.0.0 does not support TAP.
        # 2. Upgrade the CBS cluster.
        # 3. Enable import/xattrs on SGs

        # Upgrade SG docmeta -> docmeta
        cluster_util = ClusterKeywords()
        topology = cluster_util.get_cluster_topology(cluster_config,
                                                     lb_enable=False)
        sync_gateways = topology["sync_gateways"]
        sg_accels = topology["sg_accels"]

        upgrade_sync_gateway(sync_gateways, sync_gateway_version,
                             sync_gateway_upgraded_version, sg_conf,
                             cluster_config)

        if mode == "di":
            upgrade_sg_accel(sg_accels, sync_gateway_version,
                             sync_gateway_upgraded_version, sg_conf,
                             cluster_config)

        # Upgrade CBS
        cluster = Cluster(config=cluster_config)
        if len(cluster.servers) < 2:
            raise Exception("Please provide at least 2 servers")

        server_urls = []
        for server in cluster.servers:
            server_urls.append(server.url)

        primary_server = cluster.servers[0]
        secondary_server = cluster.servers[1]
        servers = cluster.servers[1:]

        upgrade_server_cluster(servers,
                               primary_server,
                               secondary_server,
                               server_version,
                               server_upgraded_version,
                               server_urls,
                               cluster_config,
                               cbs_platform,
                               toy_build=cbs_toy_build)

        # Restart SGs after the server upgrade
        sg_obj = SyncGateway()
        for sg in sync_gateways:
            sg_ip = host_for_url(sg["admin"])
            log_info("Restarting sync gateway {}".format(sg_ip))
            sg_obj.restart_sync_gateways(cluster_config=cluster_config,
                                         url=sg_ip)
            time.sleep(5)

        if mode == "di":
            ac_obj = SyncGateway()
            for ac in sg_accels:
                ac_ip = host_for_url(ac)
                log_info("Restarting sg accel {}".format(ac_ip))
                ac_obj.restart_sync_gateways(cluster_config=cluster_config,
                                             url=ac_ip)
                time.sleep(5)

        if xattrs_enabled:
            # Enable xattrs on all SG/SGAccel nodes
            # cc - Start 1 SG with import enabled, all with XATTRs enabled
            # di - All SGs/SGAccels with xattrs enabled - this will also enable import on SGAccel
            #    - Do not enable import in SG.
            if mode == "cc":
                enable_import = True
            elif mode == "di":
                enable_import = False

            if mode == "di":
                ac_obj = SyncGateway()
                for ac in sg_accels:
                    ac_ip = host_for_url(ac)
                    ac_obj.enable_import_xattrs(cluster_config=cluster_config,
                                                sg_conf=sg_conf,
                                                url=ac_ip,
                                                enable_import=False)

            sg_obj = SyncGateway()
            for sg in sync_gateways:
                sg_ip = host_for_url(sg["admin"])
                sg_obj.enable_import_xattrs(cluster_config=cluster_config,
                                            sg_conf=sg_conf,
                                            url=sg_ip,
                                            enable_import=enable_import)
                enable_import = False
                # Check Import showing up on all nodes

        send_changes_termination_doc(auth=sg_session,
                                     terminator_doc_id=terminator_doc_id,
                                     terminator_channel=sg_user_channels,
                                     ls_url=ls_url,
                                     ls_db=ls_db)
        log_info("Waiting for doc updates to complete")
        updated_doc_revs = updates_future.result()

        log_info("Stopping replication from liteserv to sync gateway")
        # Stop repl_one
        client.stop_replication(url=ls_url,
                                continuous=True,
                                from_db=ls_db,
                                to_url=sg_url,
                                to_db=sg_db,
                                to_auth=sg_session)

        log_info("Stopping replication from sync gateway to liteserv")
        # Stop repl_two
        client.stop_replication(url=ls_url,
                                continuous=True,
                                from_url=sg_url,
                                from_db=sg_db,
                                from_auth=sg_session,
                                to_db=ls_db)
        # Gather the new revs for verification
        log_info("Gathering the updated revs for verification")
        doc_ids = []
        for i in range(len(added_docs)):
            doc_ids.append(added_docs[i]["id"])
            if added_docs[i]["id"] in updated_doc_revs:
                added_docs[i]["rev"] = updated_doc_revs[added_docs[i]["id"]]

        # Verify rev, doc body and revision history of all docs
        verify_sg_docs_revision_history(url=sg_admin_url,
                                        db=sg_db,
                                        added_docs=added_docs)

        if xattrs_enabled:
            # Verify through SDK that there is no _sync property in the doc body
            bucket_name = 'data-bucket'
            sdk_client = Bucket('couchbase://{}/{}'.format(
                primary_server.host, bucket_name),
                                password='******',
                                timeout=SDK_TIMEOUT)
            log_info("Fetching docs from SDK")
            docs_from_sdk = sdk_client.get_multi(doc_ids)

            log_info("Verifying that there is no _sync property in the docs")
            for i in docs_from_sdk:
                if "_sync" in docs_from_sdk[i].value:
                    raise Exception(
                        "_sync section found in docs after upgrade")
Example #11
def test_document_resurrection(params_from_base_test_setup, sg_conf_name, deletion_type):
    """
    Scenarios:

    Doc meta mode / tombstone
    - Create docs (set A) via Sync Gateway
    - Delete docs (set A) via Sync Gateway
    - Verify docs (set A) are deleted via Sync Gateway
    - Create docs (set A) via Sync Gateway
    - Verify revs (set A) are generation 3 via Sync Gateway

    Doc meta mode / purge
    - Create docs (set A) via Sync Gateway
    - Purge docs (set A) via Sync Gateway
    - Verify docs (set A) are deleted via Sync Gateway
    - Create docs (set A) via Sync Gateway
    - Verify revs (set A) are generation 1 via Sync Gateway

    XATTRs / tombstone
    - Create docs (set A) via Sync Gateway
    - Create docs (set B) via SDK
    - Delete SDK docs (set B) via Sync Gateway
    - Delete SG docs (set A) via SDK
    - Verify docs (set B) are deleted via Sync Gateway
    - Verify docs (set B) are deleted via SDK
    - Verify docs (set A) are deleted via Sync Gateway
    - Verify docs (set A) are deleted via SDK
    - Create docs (set A) via Sync Gateway
    - Create docs (set B) via SDK
    - Verify revs (set A, B) are generation 3 via Sync Gateway

    XATTRs / purge
    - Create docs (set A) via Sync Gateway
    - Create docs (set B) via SDK
    - Purge SDK docs (set B) via Sync Gateway
    - Delete SG docs (set A) via SDK
    - Verify docs (set B) are deleted via Sync Gateway
    - Verify docs (set B) are deleted via SDK
    - Verify docs (set A) are deleted via Sync Gateway
    - Verify docs (set A) are deleted via SDK
    - Create docs (set A) via Sync Gateway
    - Create docs (set B) via SDK
    - Verify revs (set A, B) are generation 1 via Sync Gateway

    """
    cluster_conf = params_from_base_test_setup['cluster_config']
    cluster_topology = params_from_base_test_setup['cluster_topology']
    mode = params_from_base_test_setup['mode']
    xattrs_enabled = params_from_base_test_setup['xattrs_enabled']

    cbs_url = cluster_topology['couchbase_servers'][0]
    sg_admin_url = cluster_topology['sync_gateways'][0]['admin']
    sg_url = cluster_topology['sync_gateways'][0]['public']

    bucket_name = 'data-bucket'
    sg_db = 'db'
    cbs_host = host_for_url(cbs_url)

    num_docs_per_client = 10

    # Reset cluster
    sg_conf = sync_gateway_config_path_for_mode(sg_conf_name, mode)
    cluster = Cluster(config=cluster_conf)
    cluster.reset(sg_config_path=sg_conf)

    # Initialize clients
    sg_client = MobileRestClient()
    sdk_client = Bucket('couchbase://{}/{}'.format(cbs_host, bucket_name), password='******')

    # Create Sync Gateway user
    sg_user_channels = ['NASA', 'NATGEO']
    sg_client.create_user(url=sg_admin_url, db=sg_db, name='seth', password='******', channels=sg_user_channels)
    sg_user_auth = sg_client.create_session(url=sg_admin_url, db=sg_db, name='seth', password='******')

    # Create / Add docs from SG
    sg_doc_bodies = document.create_docs(
        doc_id_prefix='sg_doc',
        number=num_docs_per_client,
        content={'foo': 'bar'},
        channels=sg_user_channels,
        attachments_generator=attachment.generate_2_png_10_10
    )
    sg_doc_ids = [doc['_id'] for doc in sg_doc_bodies]

    sg_bulk_docs_resp = sg_client.add_bulk_docs(url=sg_url, db=sg_db, docs=sg_doc_bodies, auth=sg_user_auth)
    assert len(sg_bulk_docs_resp) == num_docs_per_client

    all_doc_ids = sg_doc_ids
    assert len(all_doc_ids) == num_docs_per_client

    if xattrs_enabled:
        #  Create / Add docs from sdk
        log_info('Adding docs via SDK')
        sdk_doc_bodies = document.create_docs(
            doc_id_prefix='sdk_doc',
            number=num_docs_per_client,
            content={'foo': 'bar'},
            channels=sg_user_channels,
        )
        sdk_docs = {doc['_id']: doc for doc in sdk_doc_bodies}
        sdk_doc_ids = [doc['_id'] for doc in sdk_doc_bodies]

        log_info('Creating SDK docs')
        sdk_client.upsert_multi(sdk_docs)

        all_doc_ids = sg_doc_ids + sdk_doc_ids
        assert len(all_doc_ids) == num_docs_per_client * 2

    if deletion_type == 'tombstone':
        # Set the target docs.
        # Doc meta mode: Delete Sync Gateway docs via Sync Gateway
        # XATTR mode: Delete SDK docs via Sync Gateway
        sg_doc_ids_to_delete = sg_doc_ids
        if xattrs_enabled:
            sg_doc_ids_to_delete = sdk_doc_ids

        # SG delete target docs
        for doc_id in sg_doc_ids_to_delete:
            doc = sg_client.get_doc(url=sg_url, db=sg_db, doc_id=doc_id, auth=sg_user_auth)
            deleted = sg_client.delete_doc(url=sg_url, db=sg_db, doc_id=doc_id, rev=doc['_rev'], auth=sg_user_auth)
            log_info(deleted)

        if xattrs_enabled:
            log_info('Deleting SG docs via SDK')
            sdk_client.remove_multi(sg_doc_ids)

    elif deletion_type == 'purge':
        # SG Purge all docs
        all_docs, errors = sg_client.get_bulk_docs(url=sg_url, db=sg_db, doc_ids=all_doc_ids, auth=sg_user_auth)
        if xattrs_enabled:
            assert len(all_docs) == num_docs_per_client * 2
            assert len(errors) == 0
        else:
            assert len(all_docs) == num_docs_per_client
            assert len(errors) == 0
        log_info('Purging docs via Sync Gateway')
        sg_client.purge_docs(url=sg_admin_url, db=sg_db, docs=all_docs)
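        # Unlike a delete, purge is an admin-only operation that removes the
        # body, the revision tree, and any tombstone, so a later recreate
        # starts a brand-new revision history.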

    else:
        raise ValueError('Invalid test parameters')

    # Verify deletes via Sync Gateway
    deleted_docs_to_verify = sg_doc_ids
    assert len(deleted_docs_to_verify) == num_docs_per_client

    # If running in xattr mode, verify both SG and SDK docs
    if xattrs_enabled:
        deleted_docs_to_verify = sg_doc_ids + sdk_doc_ids
        assert len(deleted_docs_to_verify) == num_docs_per_client * 2

    if xattrs_enabled and deletion_type == 'tombstone':

        # Verify SDK + SG docs are deleted from Sync Gateway
        verify_sg_deletes(sg_client, sg_url, sg_db, deleted_docs_to_verify, sg_user_auth)

        # Verify SDK + SG docs are deleted from SDK
        verify_sdk_deletes(sdk_client, deleted_docs_to_verify)

    elif xattrs_enabled and deletion_type == 'purge':

        # Verify SDK + SG docs are purged from Sync Gateway
        verify_sg_purges(sg_client, sg_url, sg_db, deleted_docs_to_verify, sg_user_auth)

        # Verify SDK + SG docs are deleted from SDK
        verify_sdk_deletes(sdk_client, deleted_docs_to_verify)

    elif not xattrs_enabled and deletion_type == 'tombstone':

        # Doc meta: Verify SG docs are all deleted via SG
        verify_sg_deletes(sg_client, sg_url, sg_db, deleted_docs_to_verify, sg_user_auth)

    elif not xattrs_enabled and deletion_type == 'purge':

        # Doc meta: Verify SG docs are all deleted via SG
        verify_sg_purges(sg_client, sg_url, sg_db, deleted_docs_to_verify, sg_user_auth)

    else:
        raise ValueError('Invalid test parameters')

    # Recreate deleted docs from Sync Gateway
    sg_bulk_docs_resp = sg_client.add_bulk_docs(url=sg_url, db=sg_db, docs=sg_doc_bodies, auth=sg_user_auth)
    assert len(sg_bulk_docs_resp) == num_docs_per_client

    if xattrs_enabled:
        log_info('Recreating SDK docs')
        # Recreate deleted docs from SDK
        sdk_client.upsert_multi(sdk_docs)

    # Get docs via Sync Gateway
    doc_ids_to_get = sg_doc_ids
    if xattrs_enabled:
        doc_ids_to_get = sg_doc_ids + sdk_doc_ids
    docs, errors = sg_client.get_bulk_docs(
        url=sg_url,
        db=sg_db,
        doc_ids=doc_ids_to_get,
        auth=sg_user_auth,
        validate=False
    )
    if xattrs_enabled:
        assert len(docs) == num_docs_per_client * 2
        assert len(errors) == 0
    else:
        assert len(docs) == num_docs_per_client
        assert len(errors) == 0

    if xattrs_enabled:

        # Get SDK docs and make sure all docs were recreated
        all_docs_from_sdk = sdk_client.get_multi(doc_ids_to_get)
        assert len(all_docs_from_sdk) == num_docs_per_client * 2
        log_info('Found: {} recreated docs via SDK'.format(len(all_docs_from_sdk)))

        # Make sure we are able to get recreated docs via SDK
        doc_ids_to_get_scratch = list(doc_ids_to_get)
        assert len(doc_ids_to_get_scratch) == num_docs_per_client * 2
        for doc_id in all_docs_from_sdk:
            doc_ids_to_get_scratch.remove(doc_id)
        assert len(doc_ids_to_get_scratch) == 0

    # Make sure all recreated docs are returned via Sync Gateway
    doc_ids_to_get_scratch = list(doc_ids_to_get)
    if xattrs_enabled:
        # SG + SDK docs
        assert len(doc_ids_to_get_scratch) == num_docs_per_client * 2
    else:
        # SG docs
        assert len(doc_ids_to_get_scratch) == num_docs_per_client

    for doc in docs:
        # Verify expected document revisions
        if xattrs_enabled:
            if deletion_type == 'purge':
                # SG purged both the SG and SDK docs; a purge erases the rev
                # tree, so every recreated doc starts over with a 1- rev
                assert doc['_rev'].startswith('1-')
            else:
                # Docs recreated through SG continue the rev tree past the
                # tombstone, so sg_ docs get a 3- rev; docs recreated through
                # the SDK start a fresh history, so sdk_ docs get a 1- rev
                if doc['_id'].startswith('sg_'):
                    assert doc['_rev'].startswith('3-')
                else:
                    assert doc['_rev'].startswith('1-')
        else:
            if deletion_type == 'purge':
                # SG purges SG docs and recreates them, expecting 1- rev
                assert doc['_rev'].startswith('1-')
            else:
                # SG tombstones SG docs and recreates them, expecting 3- rev
                assert doc['_rev'].startswith('3-')

        doc_ids_to_get_scratch.remove(doc['_id'])

    # Make sure all docs were found
    assert len(doc_ids_to_get_scratch) == 0
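
# The verify_* helpers used above are defined elsewhere in the test suite. As a
# minimal sketch of what verify_sdk_deletes might look like -- assuming the
# Python SDK 2.x API this test already uses (Bucket.get raises NotFoundError
# for missing keys) -- each deleted doc should be gone from the SDK's view:
def verify_sdk_deletes_sketch(sdk_client, doc_ids):
    from couchbase.exceptions import NotFoundError
    for doc_id in doc_ids:
        try:
            sdk_client.get(doc_id)
            raise AssertionError('Doc {} should have been deleted'.format(doc_id))
        except NotFoundError:
            # Expected: deleted / purged docs raise NotFoundError on direct get
            pass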
Ejemplo n.º 12
0
def test_concurrent_updates_no_conflicts(params_from_base_test_setup, sg_conf_name, num_of_docs, revs_limit):
    """@summary Test with concurrent updates with no conflicts enabled
    Test case link : https://docs.google.com/spreadsheets/d/1YwI_gCeoBebQKBybkzoAEoXSc0XLReszDA-mPFQapgk/edit#gid=0
    covered #15
    Steps:
    1. Start sg with some revs_limit specified
    2. Add docs to SG.
    3. Update docs few times via sg .
    4. Update docs few times vis sdk concurrently with sg.
        -> There are chances of getting conflict errors on both, handled the error appropriately
    5. update docs few number of times.
    6. Verify it can maintain default revisions.
    7. Verify previous revisions deleted and revisions maintained based on revs_limit
    """

    # Setup
    cluster_config = params_from_base_test_setup["cluster_config"]
    topology = params_from_base_test_setup["cluster_topology"]
    no_conflicts_enabled = params_from_base_test_setup["no_conflicts_enabled"]
    mode = params_from_base_test_setup["mode"]
    sg_url = topology["sync_gateways"][0]["public"]
    sg_admin_url = topology["sync_gateways"][0]["admin"]
    sg_db = "db"
    if revs_limit is None:
        revs_limit = 1000
    additional_updates = revs_limit
    total_updates = revs_limit + additional_updates
    if not no_conflicts_enabled:
        pytest.skip('--no-conflicts is not enabled, so skipping the test')
    sg_conf = sync_gateway_config_path_for_mode(sg_conf_name, mode)

    # 1. Start sg
    c = cluster.Cluster(cluster_config)
    c.reset(sg_conf)

    sg_client = MobileRestClient()
    channels = ["no-conflicts"]
    sg_client.create_user(url=sg_admin_url, db=sg_db, name='autotest', password='******', channels=channels)
    autouser_session = sg_client.create_session(url=sg_admin_url, db=sg_db, name='autotest', password='******')

    temp_cluster_config = copy_to_temp_conf(cluster_config, mode)
    persist_cluster_config_environment_prop(temp_cluster_config, 'revs_limit', revs_limit, property_name_check=False)
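    # The temp copy keeps the shared cluster config pristine; restarting the
    # node below makes the new revs_limit take effect.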
    status = c.sync_gateways[0].restart(config=sg_conf, cluster_config=temp_cluster_config)
    assert status == 0, "Sync Gateway did not restart after no-conflicts was enabled"
    # end of Set up

    # 2. Add docs to SG.
    sgdoc_bodies = document.create_docs(doc_id_prefix='sg_docs', number=num_of_docs,
                                        attachments_generator=attachment.generate_2_png_10_10, channels=channels)
    sg_docs = sg_client.add_bulk_docs(url=sg_url, db=sg_db, docs=sgdoc_bodies, auth=autouser_session)
    assert len(sgdoc_bodies) == num_of_docs

    # Connect to server via SDK
    log_info('Connecting to bucket ...')
    bucket_name = 'data-bucket'
    cbs_url = topology['couchbase_servers'][0]
    cbs_ip = host_for_url(cbs_url)
    sdk_client = Bucket('couchbase://{}/{}'.format(cbs_ip, bucket_name), password='******', timeout=SDK_TIMEOUT)
    sg_doc_ids = [doc['id'] for doc in sg_docs]
    sdk_docs_resp = sdk_client.get_multi(sg_doc_ids)
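    # get_multi returns a dict-like MultiResult mapping each doc ID to its
    # ValueResult; these seed the SDK-side concurrent updates below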

    # 3, 4. Update the same documents concurrently from a Sync Gateway client and an SDK client
    with ThreadPoolExecutor(max_workers=9) as tpe:

        update_from_sdk_task = tpe.submit(sdk_bulk_update, sdk_client, sdk_docs_resp, 10)
        update_from_sg_task = tpe.submit(sg_doc_updates, sg_client, sg_url=sg_url, sg_db=sg_db, sg_docs=sg_docs, number_updates=10,
                                         auth=autouser_session, channels=channels)

        update_from_sg_task.result()
        update_from_sdk_task.result()
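        # Note: result() re-raises any exception raised in a worker thread, so
        # an unhandled update failure in either client fails the test here.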

    # 5. Update the docs a few more times via SG
    prev_revs = []
    for i in xrange(total_updates):
        update_sg_docs = sg_client.update_docs(url=sg_url, db=sg_db, docs=sg_docs, number_updates=1, delay=None,
                                               auth=autouser_session, channels=channels)
        rev = update_sg_docs[0]['rev'].split('-')[1]
        prev_revs.append(rev)

    # 6. Verify the expected number of revisions is maintained.
    # 7. Verify previous revisions were pruned per revs_limit.
    for doc in sg_docs:
        num_of_revs = sg_client.get_revs_num_in_history(url=sg_url, db=sg_db, doc_id=doc["id"], auth=autouser_session)
        assert len(num_of_revs) == revs_limit, "Number of revisions in history does not match revs_limit set in sg config"
        for i in xrange(additional_updates):
            assert prev_revs[i] not in num_of_revs