def test_db_online_offline_webhooks_offline_two(params_from_base_test_setup, sg_conf_name, num_users, num_channels, num_docs, num_revisions):

    start = time.time()

    cluster_conf = params_from_base_test_setup["cluster_config"]
    mode = params_from_base_test_setup["mode"]

    sg_conf = sync_gateway_config_path_for_mode(sg_conf_name, mode)

    log_info("Running 'test_db_online_offline_webhooks_offline_two'")
    log_info("Using cluster_conf: {}".format(cluster_conf))
    log_info("Using num_users: {}".format(num_users))
    log_info("Using num_channels: {}".format(num_channels))
    log_info("Using num_docs: {}".format(num_docs))
    log_info("Using num_revisions: {}".format(num_revisions))

    cluster = Cluster(config=cluster_conf)
    cluster.reset(sg_conf)

    init_completed = time.time()
    log_info("Initialization completed. Time taken:{}s".format(init_completed - start))

    channels = ["channel-" + str(i) for i in range(num_channels)]
    password = "******"
    ws = WebServer()
    ws.start()

    sgs = cluster.sync_gateways

    admin = Admin(sgs[0])

    # Register User
    log_info("Register User")
    user_objects = admin.register_bulk_users(target=sgs[0], db="db", name_prefix="User",
                                             number=num_users, password=password, channels=channels)

    # Add docs
    log_info("Add docs")
    in_parallel(user_objects, 'add_docs', num_docs)

    # Update docs
    log_info("Update docs")
    in_parallel(user_objects, 'update_docs', num_revisions)
    time.sleep(10)

    status = cluster.servers[0].delete_bucket("data-bucket")
    assert status == 0

    log_info("Sleeping for 120 seconds...")
    time.sleep(120)

    webhook_events = ws.get_data()
    time.sleep(5)
    log_info("webhook event {}".format(webhook_events))
    last_event = webhook_events[-1]
    assert last_event['state'] == 'offline'

    ws.stop()
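
The tests in this file drive a WebServer helper that exposes start(), get_data() and stop(). As a point of reference, below is a minimal, hypothetical stand-in (not the suite's actual implementation) that collects webhook POST bodies from Sync Gateway so they can be inspected later; the port, class and handler names are assumptions for illustration, and it uses Python 3's http.server.

import json
import threading
from http.server import BaseHTTPRequestHandler, HTTPServer


class SimpleWebhookServer(object):
    """Minimal webhook sink: stores each POSTed JSON body for later inspection."""

    def __init__(self, port=8080):
        self._events = []
        events = self._events

        class _Handler(BaseHTTPRequestHandler):
            def do_POST(self):
                # Read the POSTed body and keep the parsed JSON for later inspection
                length = int(self.headers.get('Content-Length', 0))
                body = self.rfile.read(length)
                try:
                    events.append(json.loads(body))
                except ValueError:
                    events.append({'raw': body.decode('utf-8', 'replace')})
                self.send_response(200)
                self.end_headers()

            def log_message(self, fmt, *args):
                pass  # keep test output quiet

        self._httpd = HTTPServer(('', port), _Handler)
        self._thread = threading.Thread(target=self._httpd.serve_forever)
        self._thread.daemon = True

    def start(self):
        self._thread.start()

    def get_data(self):
        return list(self._events)

    def stop(self):
        self._httpd.shutdown()
        self._thread.join()
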
def test_webhooks(params_from_base_test_setup, sg_conf_name, num_users, num_channels, num_docs, num_revisions):

    start = time.time()

    cluster_conf = params_from_base_test_setup["cluster_config"]
    mode = params_from_base_test_setup["mode"]

    sg_conf = sync_gateway_config_path_for_mode(sg_conf_name, mode)

    log_info("Running 'test_webhooks'")
    log_info("Using cluster_conf: {}".format(cluster_conf))
    log_info("Using num_users: {}".format(num_users))
    log_info("Using num_channels: {}".format(num_channels))
    log_info("Using num_docs: {}".format(num_docs))
    log_info("Using num_revisions: {}".format(num_revisions))

    cluster = Cluster(config=cluster_conf)
    cluster.reset(sg_conf)

    init_completed = time.time()
    log_info("Initialization completed. Time taken:{}s".format(init_completed - start))

    channels = ["channel-" + str(i) for i in range(num_channels)]
    password = "******"
    ws = WebServer()
    ws.start()

    sgs = cluster.sync_gateways

    admin = Admin(sgs[0])

    # Register User
    log_info("Register User")
    user_objects = admin.register_bulk_users(target=sgs[0], db="db", name_prefix="User",
                                             number=num_users, password=password, channels=channels)

    # Add docs
    log_info("Add docs")
    in_parallel(user_objects, 'add_docs', num_docs)

    # Update docs
    log_info("Update docs")
    in_parallel(user_objects, 'update_docs', num_revisions)
    time.sleep(30)
    ws.stop()
    expected_events = (num_users * num_docs * num_revisions) + (num_users * num_docs)
    received_events = len(ws.get_data())
    log_info("expected_events: {} received_events {}".format(expected_events, received_events))
    assert expected_events == received_events
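
A quick worked example of the expected_events arithmetic above (the numbers are illustrative, not the suite's actual parametrization): every initial doc add fires one webhook event and every extra revision fires one more, so:

num_users, num_docs, num_revisions = 5, 10, 2
expected_events = (num_users * num_docs * num_revisions) + (num_users * num_docs)
assert expected_events == 150  # 100 update events + 50 add events
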
def test_db_online_offline_webhooks_offline(params_from_base_test_setup, sg_conf_name, num_users, num_channels, num_docs, num_revisions):

    start = time.time()

    cluster_conf = params_from_base_test_setup["cluster_config"]
    mode = params_from_base_test_setup["mode"]

    if mode == "di":
        pytest.skip("Offline tests not supported in Di mode -- see https://github.com/couchbase/sync_gateway/issues/2423#issuecomment-300841425")

    sg_conf = sync_gateway_config_path_for_mode(sg_conf_name, mode)

    log_info("Running 'test_db_online_offline_webhooks_offline'")
    log_info("Using cluster_conf: {}".format(cluster_conf))
    log_info("Using num_users: {}".format(num_users))
    log_info("Using num_channels: {}".format(num_channels))
    log_info("Using num_docs: {}".format(num_docs))
    log_info("Using num_revisions: {}".format(num_revisions))

    cluster = Cluster(config=cluster_conf)
    cluster.reset(sg_conf)

    init_completed = time.time()
    log_info("Initialization completed. Time taken:{}s".format(init_completed - start))

    channels = ["channel-" + str(i) for i in range(num_channels)]
    password = "******"
    ws = WebServer()
    ws.start()

    sgs = cluster.sync_gateways

    admin = Admin(sgs[0])

    # Register User
    log_info("Register User")
    user_objects = admin.register_bulk_users(target=sgs[0], db="db", name_prefix="User",
                                             number=num_users, password=password, channels=channels)

    # Add docs
    log_info("Add docs")
    in_parallel(user_objects, 'add_docs', num_docs)

    # Update docs
    log_info("Update docs")
    in_parallel(user_objects, 'update_docs', num_revisions)
    time.sleep(10)

    # Take db offline
    sg_client = MobileRestClient()
    status = sg_client.take_db_offline(cluster_conf=cluster_conf, db="db")
    assert status == 0

    time.sleep(5)
    db_info = admin.get_db_info("db")
    log_info("Expecting db state {} found db state {}".format("Offline", db_info['state']))
    assert db_info["state"] == "Offline"

    webhook_events = ws.get_data()
    time.sleep(5)
    log_info("webhook event {}".format(webhook_events))

    try:
        last_event = webhook_events[-1]
        assert last_event['state'] == 'offline'

        # Bring db online
        status = sg_client.bring_db_online(cluster_conf=cluster_conf, db="db")
        assert status == 0

        time.sleep(5)
        db_info = admin.get_db_info("db")
        log_info("Expecting db state {} found db state {}".format("Online", db_info['state']))
        assert db_info["state"] == "Online"
        time.sleep(5)
        webhook_events = ws.get_data()
        last_event = webhook_events[-1]
        assert last_event['state'] == 'online'
        time.sleep(10)
        log_info("webhook event {}".format(webhook_events))
    except IndexError:
        log_info("Received index error")
        raise
    finally:
        ws.stop()
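
The offline/online assertions above depend on fixed time.sleep() calls before reading the captured events. A hedged alternative, assuming only that ws exposes the same get_data() used above, is to poll until an event with the expected db state shows up or a timeout expires:

import time


def wait_for_db_state_event(ws, expected_state, timeout=120, interval=2):
    """Sketch: poll the captured webhook events until one reports expected_state.

    Returns True if such an event is seen before the timeout, otherwise False.
    """
    deadline = time.time() + timeout
    while time.time() < deadline:
        if any(ev.get('state') == expected_state for ev in ws.get_data()):
            return True
        time.sleep(interval)
    return False

# Usage sketch: assert wait_for_db_state_event(ws, 'offline')
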
Example #4
def test_webhooks(params_from_base_test_setup, sg_conf_name, num_users,
                  num_channels, num_docs, num_revisions):
    """
    Scenario:
    - Start a webserver on the machine running the test to receive webhook events
    - Create users
    - Add docs to Sync Gateway
    - Update docs on Sync Gateway
    - Verify the webserver received all expected webhook events
      (a small event-filtering illustration follows this test)
    """

    start = time.time()

    cluster_conf = params_from_base_test_setup["cluster_config"]
    mode = params_from_base_test_setup["mode"]

    sg_conf = sync_gateway_config_path_for_mode(sg_conf_name, mode)

    log_info("Running 'test_webhooks'")
    log_info("Using cluster_conf: {}".format(cluster_conf))
    log_info("Using num_users: {}".format(num_users))
    log_info("Using num_channels: {}".format(num_channels))
    log_info("Using num_docs: {}".format(num_docs))
    log_info("Using num_revisions: {}".format(num_revisions))

    cluster = Cluster(config=cluster_conf)
    cluster.reset(sg_conf)

    init_completed = time.time()
    log_info("Initialization completed. Time taken:{}s".format(init_completed -
                                                               start))

    channels = ["channel-" + str(i) for i in range(num_channels)]
    password = "******"
    ws = WebServer()
    ws.start()

    sgs = cluster.sync_gateways

    admin = Admin(sgs[0])

    # Register User
    log_info("Register User")
    user_objects = admin.register_bulk_users(target=sgs[0],
                                             db="db",
                                             name_prefix="User",
                                             number=num_users,
                                             password=password,
                                             channels=channels)

    # Add docs
    log_info("Add docs")
    in_parallel(user_objects, 'add_docs', num_docs)

    # Update docs
    log_info("Update docs")
    in_parallel(user_objects, 'update_docs', num_revisions)
    time.sleep(30)
    expected_events = (num_users * num_docs * num_revisions) + (num_users *
                                                                num_docs)
    received_events = ws.get_data()
    # Keep only the events that correspond to document changes (they carry an "_id")
    received_doc_events = [ev for ev in received_events if "_id" in ev]

    log_info("expected_events: {} received_doc_events: {}".format(
        expected_events, len(received_doc_events)))
    # Stop ws before asserting
    # Else successive tests will fail to start ws
    ws.stop()
    assert expected_events == len(received_doc_events)
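
The filtering step above keeps only payloads that carry an "_id", on the assumption that document change events include the doc id while other notifications (such as the db state changes seen in the offline tests) do not. A small self-contained illustration with fabricated sample payloads:

sample_events = [
    {'_id': 'User_0_doc_0', '_rev': '1-abc', 'channels': ['channel-0']},  # doc event (assumed shape)
    {'state': 'online'},                                                  # non-doc notification
    {'_id': 'User_0_doc_1', '_rev': '2-def', 'channels': ['channel-0']},  # doc event (assumed shape)
]
doc_events = [ev for ev in sample_events if '_id' in ev]
assert len(doc_events) == 2
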
Example #5
def test_webhooks_crud(params_from_base_test_setup, sg_conf_name, filtered):
    """ Tests for webhook notification on import

    xattr mode
    1. Start sync gateway with autoimport

    1. Write 'num_docs_per_client' docs via SDK
    1. Write 'num_docs_per_client' docs via SG
    1. Verify 'num_docs_per_client' * 2 webhook events (id, rev, body)

    1. Update SG docs once each via SDK
    1. Update SDK docs once each via SG
    1. Verify 'num_docs_per_client' * 2 webhook events (id, rev, body)

    1. Delete SG docs via SDK
    1. Delete SDK docs via SG
    1. Verify 'num_docs_per_client' * 2 webhook events (id, rev, body)

    To verify there are no duplicates, wait 10s after receiving the expected webhooks

    docmeta mode
    1. Write 'num_docs_per_client' docs via SG
    1. Verify 'num_docs_per_client' webhook events (id, rev, body)

    1. Update SG docs once each via SG
    1. Verify 'num_docs_per_client' webhook events (id, rev, body)

    1. Delete SG docs via SG
    1. Verify 'num_docs_per_client' webhook events (id, rev, body)

    If filtered, the scenario will add a 'filtered' property to every other doc.
    The webhook validation will then only look for the filtered docs (a hedged
    sketch of a polling helper in the style of poll_for_webhook_data follows
    this test).

    """
    xattrs_enabled = params_from_base_test_setup['xattrs_enabled']

    log_info('Webhooks filtered?: {}'.format(filtered))

    cluster_conf = params_from_base_test_setup['cluster_config']
    cluster_topology = params_from_base_test_setup['cluster_topology']
    mode = params_from_base_test_setup['mode']
    sg_admin_url = cluster_topology['sync_gateways'][0]['admin']
    sg_url = cluster_topology['sync_gateways'][0]['public']
    cbs_url = cluster_topology['couchbase_servers'][0]

    sg_db = 'db'
    bucket_name = 'data-bucket'
    num_docs_per_client = 100

    sg_conf = sync_gateway_config_path_for_mode(sg_conf_name, mode)

    cluster = Cluster(config=cluster_conf)
    cluster.reset(sg_conf)

    # Start webhook server on test runner
    webhook_server = WebServer()
    webhook_server.start()

    sg_client = MobileRestClient()
    cbs_ip = host_for_url(cbs_url)
    sdk_client = Bucket('couchbase://{}/{}'.format(cbs_ip, bucket_name),
                        password='******')

    sg_info = UserInfo('sg_user', 'pass', channels=['shared'], roles=[])
    sdk_info = UserInfo('sdk_user', 'pass', channels=['shared'], roles=[])
    sg_client.create_user(url=sg_admin_url,
                          db=sg_db,
                          name=sg_info.name,
                          password=sg_info.password,
                          channels=sg_info.channels)
    sg_auth = sg_client.create_session(url=sg_admin_url,
                                       db=sg_db,
                                       name=sg_info.name,
                                       password=sg_info.password)

    # Create sg docs
    doc_content = {'aphex': 'twin'}
    sg_docs = document.create_docs(doc_id_prefix='sg_user_doc',
                                   number=num_docs_per_client,
                                   content=doc_content,
                                   channels=sg_info.channels)

    # Add filtered property to every other doc
    for count, sg_doc in enumerate(sg_docs):
        if count % 2 == 0:
            sg_doc['filtered'] = True

    sg_doc_ids = [doc['_id'] for doc in sg_docs]
    sg_filtered_doc_ids = [doc['_id'] for doc in sg_docs if 'filtered' in doc]
    assert len(sg_doc_ids) == num_docs_per_client
    assert len(sg_filtered_doc_ids) == num_docs_per_client // 2

    # Create sdk docs
    sdk_docs = {
        'sdk_user_doc_{}'.format(i): {
            'channels': sdk_info.channels,
            'content': doc_content
        }
        for i in range(num_docs_per_client)
    }

    # Add filtered property to every other doc
    for count, doc_val in enumerate(sdk_docs.values()):
        if count % 2 == 0:
            doc_val['filtered'] = True

    sdk_doc_ids = list(sdk_docs)
    sdk_filtered_doc_ids = [k for k, v in sdk_docs.items() if 'filtered' in v]
    assert len(sdk_doc_ids) == num_docs_per_client
    assert len(sdk_filtered_doc_ids) == num_docs_per_client // 2

    all_docs = sg_doc_ids + sdk_doc_ids
    all_filtered_docs = sg_filtered_doc_ids + sdk_filtered_doc_ids
    assert len(all_docs) == num_docs_per_client * 2

    # If xattr mode, add sg + sdk docs
    # If non xattr mode, add sg docs
    add_docs(sg_client=sg_client,
             sg_url=sg_url,
             sg_db=sg_db,
             sg_docs=sg_docs,
             sg_auth=sg_auth,
             sdk_client=sdk_client,
             sdk_docs=sdk_docs,
             num_docs_per_client=num_docs_per_client,
             xattrs=xattrs_enabled)

    # Wait for added docs to trigger webhooks
    if xattrs_enabled and filtered:
        poll_for_webhook_data(webhook_server, all_filtered_docs, 1,
                              doc_content)
    elif xattrs_enabled and not filtered:
        poll_for_webhook_data(webhook_server, all_docs, 1, doc_content)
    elif not xattrs_enabled and filtered:
        poll_for_webhook_data(webhook_server, sg_filtered_doc_ids, 1,
                              doc_content)
    else:
        poll_for_webhook_data(webhook_server, sg_doc_ids, 1, doc_content)
    webhook_server.clear_data()

    # Update sdk docs from sg
    # If xattr mode, update sdk docs from sg, update sg docs from SDK
    # If non xattr mode, update sg docs from sg
    updated_doc_content = {'brian': 'eno'}
    update_docs(sg_client=sg_client,
                sg_url=sg_url,
                sg_db=sg_db,
                sg_doc_ids=sg_doc_ids,
                sg_auth=sg_auth,
                sdk_client=sdk_client,
                sdk_doc_ids=sdk_doc_ids,
                updated_doc_content=updated_doc_content,
                xattrs=xattrs_enabled)

    # Wait for updates to trigger webhooks
    if xattrs_enabled and filtered:
        poll_for_webhook_data(webhook_server, all_filtered_docs, 2,
                              updated_doc_content)
    elif xattrs_enabled and not filtered:
        poll_for_webhook_data(webhook_server, all_docs, 2, updated_doc_content)
    elif not xattrs_enabled and filtered:
        poll_for_webhook_data(webhook_server, sg_filtered_doc_ids, 2,
                              updated_doc_content)
    else:
        poll_for_webhook_data(webhook_server, sg_doc_ids, 2,
                              updated_doc_content)
    webhook_server.clear_data()

    delete_docs(sg_client=sg_client,
                sg_url=sg_url,
                sg_db=sg_db,
                sg_doc_ids=sg_doc_ids,
                sg_auth=sg_auth,
                sdk_client=sdk_client,
                sdk_doc_ids=sdk_doc_ids,
                xattrs=xattrs_enabled)

    # Wait for deletes to trigger webhook events, filter includes all deleted docs
    if xattrs_enabled:
        poll_for_webhook_data(webhook_server,
                              all_docs,
                              3,
                              updated_doc_content,
                              deleted=True)
    else:
        poll_for_webhook_data(webhook_server,
                              sg_doc_ids,
                              3,
                              updated_doc_content,
                              deleted=True)
    webhook_server.clear_data()

    # Stop webhook server
    webhook_server.stop()
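
poll_for_webhook_data comes from the suite's keyword helpers and its implementation is not shown here. A minimal sketch of the behaviour the calls above appear to assume (block until every doc id has an event at the expected revision generation with the expected body, or fail on timeout) might look like the following; the signature, timeout and payload field names are assumptions, not the real helper:

import time


def poll_for_webhook_data_sketch(webhook_server, doc_ids, expected_generation,
                                 expected_content, deleted=False, timeout=300):
    """Hypothetical stand-in for poll_for_webhook_data."""
    deadline = time.time() + timeout
    remaining = set(doc_ids)
    while remaining and time.time() < deadline:
        for event in webhook_server.get_data():
            doc_id = event.get('_id')
            if doc_id not in remaining:
                continue
            # Revision generation is the integer before the '-' in '_rev'
            generation = int(event.get('_rev', '0-x').split('-')[0])
            if generation < expected_generation:
                continue
            if deleted and not event.get('_deleted', False):
                continue
            if not deleted and not all(event.get(k) == v
                                       for k, v in expected_content.items()):
                continue
            remaining.discard(doc_id)
        time.sleep(2)
    assert not remaining, 'Timed out waiting for webhook events: {}'.format(sorted(remaining))
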