Code Example #1
def fetch_ldap_data():
    """
        Call to get entries for all (Users | Groups) in Active Directory, saves the time of the sync,
        and inserts data into RethinkDB.
    """
    LOGGER.debug("Connecting to RethinkDB...")
    connect_to_db()
    LOGGER.debug("Successfully connected to RethinkDB")

    last_sync = (r.table("sync_tracker").filter({
        "provider_id": LDAP_DC
    }).max("timestamp").coerce_to("object").run())

    last_sync_time = last_sync["timestamp"]
    last_sync_time_formatted = to_date_ldap_query(
        rethink_timestamp=last_sync_time)
    search_filter = (
        "(&(|(objectClass=person)(objectClass=group))(whenChanged>=%s))" %
        last_sync_time_formatted)
    server = Server(LDAP_SERVER, get_info=ALL)
    ldap_conn = Connection(server, user=LDAP_USER, password=LDAP_PASS)
    if not ldap_conn.bind():
        LOGGER.error("Error connecting to LDAP server %s : %s", LDAP_SERVER,
                     ldap_conn.result)
        return
    ldap_conn.search(
        search_base=LDAP_DC,
        search_filter=search_filter,
        attributes=ldap3.ALL_ATTRIBUTES,
    )

    parsed_last_sync_time = datetime.strptime(
        last_sync_time.split("+")[0],
        "%Y-%m-%dT%H:%M:%S.%f").replace(tzinfo=timezone.utc)
    insert_to_db(data_dict=ldap_conn.entries,
                 when_changed=parsed_last_sync_time)
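These snippets come from a larger codebase, so names such as LOGGER, r, connect_to_db, and the LDAP_* constants are defined elsewhere. A minimal sketch of the shared context they appear to assume; the values and the helper body below are placeholders, not the project's actual configuration:

import logging
import os

import ldap3
from ldap3 import ALL, Connection, Server
import rethinkdb as r  # older driver API; 2.4+ uses RethinkDB() instead

LOGGER = logging.getLogger(__name__)

# Placeholder connection settings -- assumptions, not from the source.
LDAP_SERVER = os.getenv("LDAP_SERVER", "ldap://localhost:389")
LDAP_USER = os.getenv("LDAP_USER", "cn=admin,dc=example,dc=com")
LDAP_PASS = os.getenv("LDAP_PASS", "")
LDAP_DC = os.getenv("LDAP_DC", "dc=example,dc=com")


def connect_to_db():
    """Open a RethinkDB connection (a stand-in for the project's helper)."""
    return r.connect(host=os.getenv("DB_HOST", "localhost"), port=28015, db="rbac")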
Code Example #2
def fetch_ldap_data(data_type):
    """
        Call to get entries for all (Users | Groups) in Active Directory, saves the time of the sync,
        inserts data into RethinkDB, and initiates a new thread for a delta sync for data_type.
    """
    connect_to_db()

    if data_type == "user":
        search_filter = "(objectClass=person)"
    elif data_type == "group":
        search_filter = "(objectClass=group)"

    server = Server(LDAP_SERVER, get_info=ALL)
    conn = Connection(server, user=LDAP_USER, password=LDAP_PASS)
    if not conn.bind():
        LOGGER.error("Error connecting to LDAP server %s : %s", LDAP_SERVER,
                     conn.result)
        return
    conn.search(
        search_base=LDAP_DC,
        search_filter=search_filter,
        attributes=ldap3.ALL_ATTRIBUTES,
    )

    insert_to_db(data_dict=conn.entries, data_type=data_type)
    sync_source = "ldap-" + data_type
    provider_id = LDAP_DC
    save_sync_time(provider_id, sync_source, "initial")
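A usage sketch, assuming the environment above is configured: the initial import runs once per supported data type.

# Hypothetical driver code, not from the source.
for data_type in ("user", "group"):
    fetch_ldap_data(data_type=data_type)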
Code Example #3
def inbound_sync_listener():
    """Initialize a delta inbound sync between the inbound queue and sawtooth."""
    LOGGER.info("Starting inbound sync listener...")

    LOGGER.info("Connecting to RethinkDB...")
    connect_to_db()
    LOGGER.info("Successfully connected to RethinkDB!")

    while True:
        try:
            queue_entry = peek_at_queue(INBOUND_QUEUE)
            LOGGER.info("Received queue entry %s from outbound queue...",
                        queue_entry["id"])

            data_type = queue_entry["data_type"]
            LOGGER.info("Putting %s into Sawtooth...", data_type)
            # TODO: Validate queue_entry.
            # TODO: Transform or reject invalid entries.
            # TODO: Get queue_entry object from NEXT state table.
            # TODO: Update object or create if it doesn't exist.
            LOGGER.debug(queue_entry)

            LOGGER.info("Putting queue entry into changelog...")
            put_entry_changelog(queue_entry, DIRECTION)

            LOGGER.info("Deleting queue entry from outbound queue...")
            entry_id = queue_entry["id"]
            delete_entry_queue(entry_id, INBOUND_QUEUE)
        except ExpectedError:
            # The queue is empty or the entry is not ready yet; poll again.
            time.sleep(DELAY)
        except Exception as err:
            LOGGER.exception(err)
            raise
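The listener assumes a particular document shape on the queue. Reconstructed from the insertion code later in this collection (Examples #7, #14, and #15), an inbound_queue entry looks roughly like this; the values are illustrative:

example_queue_entry = {
    "id": "generated-by-rethinkdb",      # primary key assigned on insert
    "data": {"remote_id": "CN=jdoe,OU=Users,DC=example,DC=com"},
    "data_type": "user",                 # "user", "group", or "*_deleted"
    "sync_type": "initial",              # or "delta"
    "timestamp": "2019-01-01T00:00:00.000000+00:00",
    "provider_id": "dc=example,dc=com",  # LDAP_DC, or TENANT_ID for Azure
}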
Code Example #4
def initialize_aad_sync():
    """Initialize a sync with Azure Active Directory."""
    LOGGER.info("connecting to RethinkDB...")
    connect_to_db()
    LOGGER.info("Successfully connected to RethinkDB!")
    provider_id = TENANT_ID

    db_user_payload = check_last_sync("azure-user", "initial")
    if not db_user_payload:
        LOGGER.info(
            "No initial AAD user sync was found. Starting initial AAD user sync now."
        )

        LOGGER.info("Getting Users...")
        users = fetch_users()
        if users:
            insert_user_to_db(users)
            while "@odata.nextLink" in users:
                users = fetch_next_payload(users["@odata.nextLink"])
                if users:
                    insert_user_to_db(users)
                else:
                    break
            save_sync_time(provider_id, "azure-user", "initial")
            LOGGER.info("Initial user upload complete :)")
        else:
            LOGGER.error(
                "An error occurred when fetching users. Please check the logs."
            )

    db_group_payload = check_last_sync("azure-group", "initial")
    if not db_group_payload:
        LOGGER.info(
            "No initial AAD group sync was found. Starting initial AAD group sync now."
        )
        LOGGER.info("Getting Groups with Members...")
        groups = fetch_groups_with_members()
        if groups:
            insert_group_to_db(groups)
            while "@odata.nextLink" in groups:
                groups = fetch_next_payload(groups["@odata.nextLink"])
                if groups:
                    insert_group_to_db(groups)
                else:
                    break
            save_sync_time(provider_id, "azure-group", "initial")
            LOGGER.info("Initial group upload complete :)")
        else:
            LOGGER.error(
                "An error occurred when fetching groups. Please check the logs."
            )

    if db_group_payload and db_user_payload:
        LOGGER.info("The initial sync has already been run.")
Code Example #5
def fetch_ldap_data(data_type):
    """
        Call to get entries for all (Users | Groups) in Active Directory, saves the time of the sync,
        inserts data into RethinkDB, and initiates a new thread for a delta sync for data_type.
    """
    connect_to_db()

    if data_type == "user":
        search_filter = "(objectClass=person)"
        search_base = USER_BASE_DN
    elif data_type == "group":
        search_filter = "(objectClass=group)"
        search_base = GROUP_BASE_DN

    ldap_connection = ldap_connector.await_connection(LDAP_SERVER, LDAP_USER,
                                                      LDAP_PASS)

    search_parameters = {
        "search_base": search_base,
        "search_filter": search_filter,
        "attributes": ldap3.ALL_ATTRIBUTES,
        "paged_size": LDAP_SEARCH_PAGE_SIZE,
    }

    entry_count = 0
    LOGGER.info("Importing %s entries...", data_type)

    while True:
        # Use a monotonic timer (time.clock() was removed in Python 3.8).
        start_time = time.perf_counter()
        ldap_connection.search(**search_parameters)
        record_count = len(ldap_connection.entries)
        LOGGER.info(
            "Got %s entries in %.3f seconds.",
            record_count,
            time.perf_counter() - start_time,
        )
        for entry in ldap_connection.entries:
            entry_count += 1
            insert_to_db(entry, data_type=data_type)

        # 1.2.840.113556.1.4.319 is the OID/extended control for PagedResults
        cookie = ldap_connection.result["controls"]["1.2.840.113556.1.4.319"][
            "value"]["cookie"]

        if cookie:
            search_parameters["paged_cookie"] = cookie
        else:
            LOGGER.info("Imported %s entries from Active Directory",
                        entry_count)
            break

    sync_source = "ldap-" + data_type
    save_sync_time(LDAP_DC, sync_source, "initial")
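Example #5 drives the PagedResults cookie by hand. ldap3 also ships a helper that manages the same control internally; a minimal sketch of the equivalent loop, reusing the constants assumed above. Note the generator yields plain response dicts rather than the Entry objects passed to insert_to_db, so the insert path would need adapting:

# Sketch: ldap3's paged_search handles the 1.2.840.113556.1.4.319
# cookie internally and yields one response dict per result.
for entry in ldap_connection.extend.standard.paged_search(
        search_base=search_base,
        search_filter=search_filter,
        attributes=ldap3.ALL_ATTRIBUTES,
        paged_size=LDAP_SEARCH_PAGE_SIZE,
        generator=True):
    if entry["type"] == "searchResEntry":  # skip referrals and controls
        LOGGER.debug("Fetched %s", entry["dn"])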
Code Example #6
def ldap_outbound_listener():
    """Initialize LDAP delta outbound sync with Active Directory."""
    LOGGER.info("Starting outbound sync listener...")

    LOGGER.info("Connecting to RethinkDb...")
    connect_to_db()
    LOGGER.info("..connected to RethinkDb")

    ldap_connection = ldap_connector.await_connection(LDAP_SERVER, LDAP_USER, LDAP_PASS)

    while True:
        try:
            queue_entry = peek_at_queue("outbound_queue", LDAP_DC)

            # Poll until an entry appears on the outbound queue.
            while queue_entry is None:
                time.sleep(LISTENER_POLLING_DELAY)
                queue_entry = peek_at_queue("outbound_queue", LDAP_DC)

            LOGGER.info(
                "Received queue entry %s from outbound queue...", queue_entry["id"]
            )

            LOGGER.debug("Putting queue entry into changelog...")
            put_entry_changelog(queue_entry, "outbound")

            data_type = queue_entry["data_type"]
            LOGGER.debug("Putting %s into ad...", data_type)

            try:
                if is_entry_in_ad(queue_entry, ldap_connection):
                    update_entry_ldap(queue_entry, ldap_connection)
                else:
                    create_entry_ldap(queue_entry, ldap_connection)

            except ValidationException as err:
                LOGGER.warning("Outbound payload failed validation")
                LOGGER.warning(err)

            LOGGER.debug("Deleting queue entry from outbound queue...")
            delete_entry_queue(queue_entry["id"], "outbound_queue")

        except LDAPSessionTerminatedByServerError:
            LOGGER.warning(
                "Ldap connection was terminated by the server. Attempting to reconnect..."
            )
            ldap_connection = ldap_connector.await_connection(
                LDAP_SERVER, LDAP_USER, LDAP_PASS
            )
Code Example #7
def insert_updated_entries(data_dict, when_changed, data_type):
    """Insert (Users | Groups) individually to RethinkDB from dict of data and begins delta sync timer."""
    insertion_counter = 0
    conn = connect_to_db()
    for entry in data_dict:
        if entry.whenChanged.value > when_changed:
            if data_type == "user":
                standardized_entry = inbound_user_filter(entry, "ldap")
            else:
                standardized_entry = inbound_group_filter(entry, "ldap")
            entry_modified_timestamp = entry.whenChanged.value.strftime(
                "%Y-%m-%dT%H:%M:%S.%f+00:00")
            inbound_entry = {
                "data": standardized_entry,
                "data_type": data_type,
                "sync_type": "delta",
                "timestamp": entry_modified_timestamp,
                "provider_id": LDAP_DC,
            }
            LOGGER.debug(
                "Inserting LDAP %s into inbound queue: %s",
                data_type,
                standardized_entry["remote_id"],
            )
            r.table("inbound_queue").insert(inbound_entry).run(conn)

            sync_source = "ldap-" + data_type
            provider_id = LDAP_DC
            save_sync_time(provider_id, sync_source, "delta",
                           entry_modified_timestamp)
            insertion_counter += 1
    conn.close()
    LOGGER.info("Inserted %s records into inbound_queue.", insertion_counter)
Code Example #8
def get_outbound_queue_entry(data, max_attempts=10, delay=0.5):
    """ Gets an entry from outbound_queue table that matches the passed in
    data dictionary.

    Args:
        data: (dict) Entry from users/roles table without created_date field.
        max_attempts: (int) The number of times to attempt to find the given
            outbound_queue entry. Default: 10.
        delay: (float) The number of seconds to wait between query attempts.
            Default: 0.5.
    Returns:
        outbound_queue_entry: (list) The entries whose data field matches the
            data parameter. Each entry contains the following fields: data,
            data_type, provider_id, sync_type, timestamp. Empty if no match
            was found within max_attempts.
    """
    query_count = 0
    outbound_queue_entry = []
    with connect_to_db() as conn:
        while not outbound_queue_entry and query_count < max_attempts:
            outbound_queue_entry = (r.table("outbound_queue").filter({
                "data":
                data
            }).coerce_to("array").run(conn))
            if not outbound_queue_entry:
                query_count += 1
                sleep(delay)
    return outbound_queue_entry
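A hypothetical usage sketch from a test's perspective; the payload follows the queue-entry shape shown earlier, and the values are illustrative:

# Wait up to ~5 s (10 attempts x 0.5 s) for the outbound write to land.
expected_data = {"remote_id": "CN=jdoe,OU=Users,DC=example,DC=com"}
entries = get_outbound_queue_entry(expected_data)
assert entries, "entry never appeared in outbound_queue"
assert entries[0]["data"] == expected_data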
Code Example #9
def test_outbound_queue_check(entry_data, outbound_status, expected_result):
    """ Tests that any inbound queue entries are checked against the outbound
    queue before insertion. If a duplicate entry exists in the outbound queue
    the function should return `True`, indicating the entry has already been
    written to sawtooth and that both entries should be deleted. Otherwise,
    the function should return `False`, indicating that the entry should be
    inserted.

    Args:
        entry_data:
            obj:    A dict containing a valid NEXT role object. Only
                    the `whenChanged` key is directly called in this obj, so
                    the rest may be arbitrary for this test (update if needed).
        outbound_status:
            str:    A string containing a valid NEXT `status` (
                    `CONFIRMED` or `UNCONFIRMED`). May be an empty string.
        expected_result:
            bool:   The boolean value that is expected to be returned by
                    put_in_outbound_queue().
    """
    with connect_to_db() as conn:
        if expected_result:
            outbound_entry = {**entry_data}
            if outbound_status:
                outbound_entry["status"] = outbound_status
            # Insert the entry (with or without a status) so the duplicate
            # check below has something to find.
            result = (r.table("outbound_queue").insert(
                outbound_entry).coerce_to("object").run(conn))
            assert result["inserted"] > 0
        result = remove_outbound_duplicates(entry_data["data"], conn)
        assert result == expected_result
Code Example #10
def get_deleted_user_entries(next_id):
    """Returns a list of entries from tables relating to a
    user's deletion. Tables include: users, metadata, auth and
    user_mapping. After a successful deletion, this function
    should return an empty list.

    Args:
        next_id:
            str: a user's unique id.
    Returns:
        related_entries:
            list: Contains entries from tables: users,
                metadata, user_mapping, and auth for a
                given user
    """
    with connect_to_db() as db_connection:
        return (
            r.table("users")
            .union(r.table("metadata"))
            .union(r.table("user_mapping"))
            .union(r.table("auth"))
            .filter({"next_id": next_id})
            .coerce_to("array")
            .run(db_connection)
        )
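A hypothetical usage sketch of the post-deletion assertion the docstring describes:

# After a successful deletion, no related rows should remain in
# users, metadata, user_mapping, or auth.
assert get_deleted_user_entries("some-next-id") == []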
Code Example #11
def insert_to_user_mapping(user_record):
    """Inset a user to the user_mapping table if it hasn't inserted before.
    user_record: dict - user object
    """
    conn = connect_to_db()
    # Check whether the user already exists in the user_mapping table
    existing_rec = (
        r.table("user_mapping")
        .filter(
            {
                "provider_id": user_record["provider_id"],
                "remote_id": user_record["data"]["remote_id"],
            }
        )
        .coerce_to("array")
        .run(conn)
    )

    # If the user does not exist, insert into the user_mapping table
    if not existing_rec:
        data = {
            "next_id": user_record["next_id"],
            "provider_id": user_record["provider_id"],
            "remote_id": user_record["data"]["remote_id"],
            "public_key": user_record["public_key"],
            "encrypted_key": user_record["private_key"],
            "active": True,
        }

        # Insert into user_mapping; the connection is closed below
        r.table("user_mapping").insert(data).run(conn)
    conn.close()
Code Example #12
def teardown_module():
    """actions to be performed to clear configurations after tests are run.
    """
    with connect_to_db() as db_connection:
        # remove the index we created
        r.table("roles").index_drop("start_block_num").run(db_connection)
        for user in TEST_USERS:
            # remove any users, role members, and role owners that we created
            r.table("users").filter({
                "cn": user["common_name"]
            }).delete().run(db_connection)
            user_distinct_name = (
                "CN=%s,OU=Users,OU=Accounts,DC=AD2012,DC=LAB" %
                user["common_name"])
            r.table("role_members").filter({
                "related_id": user_distinct_name
            }).delete().run(db_connection)
            r.table("role_owners").filter({
                "related_id": user_distinct_name
            }).delete().run(db_connection)
        for group in TEST_GROUPS:
            # remove any roles we created
            r.table("roles").filter({
                "cn": group["common_name"]
            }).delete().run(db_connection)
Code Example #13
def listener():
    """ Listener for Sawtooth State changes
    """
    try:
        conn = connect_to_db()

        LOGGER.info("Reading queued Sawtooth transactions")
        while True:
            feed = r.table("inbound_queue").order_by(index=r.asc("timestamp")).run(conn)
            count = 0
            for rec in feed:
                process(rec, conn)
                count = count + 1
            if count == 0:
                break
            LOGGER.info("Processed %s records in the inbound queue", count)
        LOGGER.info("Listening for incoming Sawtooth transactions")
        feed = r.table("inbound_queue").changes().run(conn)
        for rec in feed:
            if rec["new_val"] and not rec["old_val"]:  # only insertions
                process(rec["new_val"], conn)

    except Exception as err:  # pylint: disable=broad-except
        LOGGER.exception("Inbound listener %s exception: %s",
                         type(err).__name__, err)

    finally:
        try:
            conn.close()
        except UnboundLocalError:
            pass
Code Example #14
def insert_deleted_entries(deleted_entries, data_type):
    """ Inserts every entry in deleted_entries dict into inbound_queue table.

    Args:
        deleted_entries: An array containing the remote_ids/distinguished names
            of the users/groups that were deleted.
        data_type: A string with the value of either user_deleted or group_deleted.
            This value will be used in the data_type field when we insert our data
            into the inbound_queue.

    Raises:
        ValueError: If parameter data_type does not have the value of "user_deleted"
            or "group_deleted".
    """

    if data_type not in ["user_deleted", "group_deleted"]:
        raise ValueError(
            "For deletions, data_type field must be either "
            "user_deleted or group_deleted. Found {}".format(data_type))

    conn = connect_to_db()
    for remote_id in deleted_entries:
        data = {"remote_id": remote_id}
        inbound_entry = {
            "data": data,
            "data_type": data_type,
            "sync_type": "delta",
            "timestamp":
            datetime.now().replace(tzinfo=timezone.utc).isoformat(),
            "provider_id": LDAP_DC,
        }
        LOGGER.debug("Inserted deleted LDAP %s into inbound queue: %s",
                     data_type, remote_id)
        r.table("inbound_queue").insert(inbound_entry).run(conn)
    conn.close()
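A hypothetical usage sketch covering both the happy path and the guard, assuming a reachable RethinkDB:

# Distinguished names of entries that disappeared from AD, tagged with
# one of the two accepted *_deleted types.
insert_deleted_entries(
    ["CN=jdoe,OU=Users,DC=example,DC=com"], "user_deleted")

# Any other data_type is rejected before touching the database.
try:
    insert_deleted_entries([], "user")
except ValueError as err:
    print(err)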
Code Example #15
def insert_to_db(entry, data_type):
    """Insert user or group individually to RethinkDB from dict of data and begins delta sync timer."""
    if data_type == "user":
        standard_entry = inbound_user_filter(entry, "ldap")
    elif data_type == "group":
        standard_entry = inbound_group_filter(entry, "ldap")
    else:
        LOGGER.warning("Unsupported data type: %s", data_type)
        return

    inbound_entry = {
        "data": standard_entry,
        "data_type": data_type,
        "sync_type": "initial",
        "timestamp": r.now(),
        "provider_id": LDAP_DC,
    }
    conn = connect_to_db()
    LOGGER.debug(
        "Inserting LDAP %s into inbound queue: %s",
        data_type,
        standard_entry["remote_id"],
    )
    r.table("inbound_queue").insert(inbound_entry).run(conn)
    conn.close()
Code Example #16
def set_sync_direction(next_id, direction):
    """Sets the sync_direction of next_id.
    Args:
        next_id: The next_id of the user.
        direction: str with value of "INBOUND" or "OUTBOUND"

    Returns:
        RethinkDB output of the update query.
    """
    conn = connect_to_db()
    response = None  # Returned as-is if every retry fails.
    retry = 0
    while retry < 3:
        try:
            response = (
                r.table("users")
                .get_all(next_id, index="next_id")
                .update({"metadata": {"sync_direction": direction}})
                .run(conn)
            )
        except r.errors.ReqlOpFailedError:
            # Transient RethinkDB failure; back off briefly and retry.
            time.sleep(3)
            retry += 1
        else:
            break
    else:
        # Runs only if the loop exhausted its retries without a break.
        LOGGER.warning(
            "Max retries reached when setting sync_direction. Not set.")
    conn.close()
    return response
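A hypothetical usage sketch; the result dict follows RethinkDB's standard update-result shape (replaced/unchanged/skipped counters):

result = set_sync_direction("some-next-id", "INBOUND")
if result is None:
    LOGGER.warning("sync_direction was not set")
else:
    LOGGER.info("Updated %s user record(s)", result.get("replaced", 0))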
Code Example #17
def fetch_ldap_deletions():
    """ Searches LDAP provider for users & groups that were deleted from LDAP.
        If any were deleted, inserts distinguished names of deleted into the
        inbound_queue table.
    """
    LOGGER.info("Fetching LDAP deleted entries...")
    conn = connect_to_db()
    for data_type in ["user", "group"]:
        if data_type == "user":
            search_filter = "(objectClass=person)"
            search_base = USER_BASE_DN
            existing_records = list(
                r.table("users").get_field("remote_id").run(conn))
        else:
            search_filter = "(objectClass=group)"
            search_base = GROUP_BASE_DN
            existing_records = list(
                r.table("roles").get_field("remote_id").run(conn))

        ldap_connection = ldap_connector.await_connection(
            LDAP_SERVER, LDAP_USER, LDAP_PASS)

        search_parameters = {
            "search_base": search_base,
            "search_filter": search_filter,
            "attributes": ["distinguishedName"],
            "paged_size": LDAP_SEARCH_PAGE_SIZE,
        }

        while True:
            ldap_connection.search(**search_parameters)

            # For each user/group in AD, remove the user/group from existing_records.
            # Remaining entries in existing_records were deleted from AD.

            for entry in ldap_connection.entries:
                if entry.distinguishedName.value in existing_records:
                    existing_records.remove(entry.distinguishedName.value)

            # 1.2.840.113556.1.4.319 is the OID/extended control for PagedResults

            cookie = ldap_connection.result["controls"][
                "1.2.840.113556.1.4.319"]["value"]["cookie"]
            if cookie:
                search_parameters["paged_cookie"] = cookie
            else:
                break

        if existing_records:
            LOGGER.info(
                "Found %s deleted entries. Inserting deleted "
                "AD %s(s) into inbound queue.",
                str(len(existing_records)),
                data_type,
            )
            LOGGER.debug(existing_records)
            insert_deleted_entries(existing_records, data_type + "_deleted")
    conn.close()
    LOGGER.info("Fetching LDAP deleted entries completed...")
Code Example #18
def ldap_outbound_listener():
    """Initialize LDAP delta outbound sync with Active Directory."""
    LOGGER.info("Starting outbound sync listener...")

    LOGGER.info("Connecting to RethinkDB...")
    connect_to_db()
    LOGGER.info("Successfully connected to RethinkDB!")

    LOGGER.info("Connecting to LDAP...")
    ldap_conn = connect_to_ldap()
    LOGGER.info("Successfully connected to LDAP!")

    while True:
        try:
            queue_entry = peek_at_queue("outbound_queue", LDAP_DC)
            LOGGER.info("Received queue entry %s from outbound queue...",
                        queue_entry["id"])

            data_type = queue_entry["data_type"]
            LOGGER.info("Putting %s into ad...", data_type)
            if is_entry_in_ad(queue_entry, ldap_conn):
                update_entry_ldap(queue_entry, ldap_conn)
            else:
                create_entry_ldap(queue_entry, ldap_conn)

            LOGGER.info("Putting queue entry into changelog...")
            put_entry_changelog(queue_entry, "outbound")

            LOGGER.info("Deleting queue entry from outbound queue...")
            entry_id = queue_entry["id"]
            delete_entry_queue(entry_id, "outbound_queue")
        except ValidationException as err:
            LOGGER.info(err)
            LOGGER.info("No oubound payload possible.  Deleting entry %s",
                        queue_entry)
            delete_entry_queue(queue_entry["id"], "outbound_queue")
        except ExpectedError as err:
            LOGGER.debug("%s Repolling after %s seconds...", err,
                         LISTENER_POLLING_DELAY)
            time.sleep(LISTENER_POLLING_DELAY)
        except Exception as err:
            LOGGER.exception(err)
            raise
Code Example #19
def test_reject_users_proposals():
    """Test that a user's proposals are rejected when they are deleted."""
    user_to_delete = {
        "name": "nadia two",
        "username": "******",
        "password": "******",
        "email": "*****@*****.**",
    }

    user = {
        "name": "nadia three",
        "username": "******",
        "password": "******",
        "email": "*****@*****.**",
    }
    with requests.Session() as session:
        response1 = create_test_user(session, user_to_delete)
        response2 = create_test_user(session, user)
        role_payload_1 = {
            "name": "NadiaRole1",
            "owners": response1.json()["data"]["user"]["id"],
            "administrators": response1.json()["data"]["user"]["id"],
            "description": "Nadia Role 1",
        }

        role_response1 = create_test_role(session, role_payload_1)
        proposal_1 = add_role_member(
            session,
            role_response1.json()["data"]["id"],
            {"id": response2.json()["data"]["user"]["id"]},
        )
        next_id = response1.json()["data"]["user"]["id"]
        conn = connect_to_db()
        user_exists = (r.db("rbac").table("users").filter({
            "next_id": next_id
        }).coerce_to("array").run(conn))
        assert user_exists

        deletion = session.delete("http://rbac-server:8000/api/users/" +
                                  next_id)
        time.sleep(5)
        assert deletion.json() == {
            "message": "User {} successfully deleted".format(next_id),
            "deleted": 1,
        }

        user_exists = (r.db("rbac").table("users").filter({
            "next_id": next_id
        }).coerce_to("array").run(conn))
        assert not user_exists

        proposal_1_result = (r.db("rbac").table("proposals").filter({
            "proposal_id":
            proposal_1.json()["proposal_id"]
        }).coerce_to("array").run(conn))
        conn.close()
        assert proposal_1_result[0]["status"] == "REJECTED"
Code Example #20
def get_next_object(table, remote_id, provider_id):
    """Check if object already exists in NEXT and return it."""
    query_filter = {"remote_id": remote_id}
    if table == "user_mapping":
        query_filter["provider_id"] = provider_id
    conn = connect_to_db()
    result = r.table(table).filter(query_filter).coerce_to("array").run(conn)
    conn.close()
    return result
Code Example #21
def initialize_ldap_sync():
    """
        Checks if LDAP initial syncs has been ran. If not, run initial sync for both ldap users
        and groups. If initial syncs have been completed, restart the inbound delta syncs.
    """

    if not LDAP_DC:
        LOGGER.info(
            "Ldap Domain Controller is not provided, skipping Ldap sync.")
    elif not ldap_connector.can_connect_to_ldap(LDAP_SERVER, LDAP_USER,
                                                LDAP_PASS):
        LOGGER.info("Ldap Connection failed. Skipping Ldap sync.")
    else:
        connect_to_db()

        # Check whether the user sync has occurred. If not, sync.
        db_user_payload = check_last_sync("ldap-user", "initial")
        if not db_user_payload:
            LOGGER.info(
                "No initial AD user sync was found. Starting initial AD user sync now."
            )

            LOGGER.info("Getting AD Users...")
            fetch_ldap_data(data_type="user")

            LOGGER.debug("Initial AD user upload completed.")

        # Check whether the group sync has occurred. If not, sync.
        db_group_payload = check_last_sync("ldap-group", "initial")
        if not db_group_payload:
            LOGGER.debug(
                "No initial AD group sync was found. Starting initial AD group sync now."
            )
            LOGGER.debug("Getting Groups with Members...")
            fetch_ldap_data(data_type="group")

            LOGGER.debug("Initial AD group upload completed.")

        if db_user_payload and db_group_payload:
            LOGGER.debug("The LDAP initial sync has already been run.")

        # Start the inbound delta sync
        initiate_delta_sync()
Code Example #22
def is_user_in_db(email):
    """Returns the number of users in rethinkdb with the given email.

    Args:
        email:
            str: an email address.
    """
    with connect_to_db() as db_connection:
        result = r.table("users").filter({"email": email}).count().run(db_connection)
        return result > 0
Code Example #23
def delete_task_by_name(name):
    """Delete a task and its related owner/role rows from the db by name."""
    conn = connect_to_db()
    tasks = r.table("tasks").filter({"name": name}).coerce_to("array").run(conn)
    r.table("tasks").filter({"name": name}).delete().run(conn)
    r.table("task_owners").filter({"task_id": tasks[0]["task_id"]}).delete().run(conn)
    r.table("role_tasks").filter({"identifiers": [tasks[0]["task_id"]]}).delete().run(
        conn
    )
    conn.close()
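delete_task_by_name raises an IndexError when no task matches the given name. A defensive variant (my assumption, not the project's code) that is a no-op when the task is absent:

def delete_task_by_name_safe(name):
    """Like delete_task_by_name, but a no-op when no task matches."""
    with connect_to_db() as conn:
        tasks = r.table("tasks").filter({"name": name}).coerce_to("array").run(conn)
        if not tasks:
            return
        task_id = tasks[0]["task_id"]
        r.table("tasks").filter({"name": name}).delete().run(conn)
        r.table("task_owners").filter({"task_id": task_id}).delete().run(conn)
        r.table("role_tasks").filter({"identifiers": [task_id]}).delete().run(conn)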
Code Example #24
def is_group_in_db(name):
    """Returns the number of groups from the roles table in rethinkdb with
    the given name.

    Args:
        name:
            str: The name of a fake group.
    """
    with connect_to_db() as db_connection:
        result = r.table("roles").filter({"name": name}).count().run(db_connection)
        return result > 0
Code Example #25
def insert_change_to_db(data, record_timestamp):
    """Insert change individually to rethinkdb from changelog eventhub of azure"""
    inbound_entry = {
        "data": data,
        "data_type": VALID_OPERATIONS[data["operation_name"]],
        "sync_type": "delta",
        "timestamp": record_timestamp,
        "provider_id": TENANT_ID,
    }
    conn = connect_to_db()
    r.table("inbound_queue").insert(inbound_entry).run(conn)
    conn.close()
Code Example #26
def get_pack_by_pack_id(pack_id):
    """Returns pack by pack_id

    Args:
        pack_id:
            str: pack_id of pack to query
    """
    with connect_to_db() as db_connection:
        pack = (r.table("packs").filter({
            "pack_id": pack_id
        }).coerce_to("array").run(db_connection))
    return pack
Code Example #27
def get_user_in_db_by_email(email):
    """Returns the user in rethinkdb with the given email.

    Args:
        email:
            str: an email address.
    """
    with connect_to_db() as db_connection:
        result = (r.table("users").filter({
            "email": email
        }).coerce_to("array").run(db_connection))
        return result
Code Example #28
def get_role(name):
    """Returns a role in rethinkDB via name.

    Args:
        name:
            str: a name of a role in rethinkDB.
    """
    with connect_to_db() as db_connection:
        role = (r.table("roles").filter({
            "name": name
        }).coerce_to("array").run(db_connection))
    return role
Code Example #29
def get_role_members(role_id):
    """Returns a list of member user_ids from a role in rethinkDB.

    Args:
        role_id:
            str: a NEXT role_id from rethinkDB.
    """
    with connect_to_db() as db_connection:
        role_members = (r.table("role_members").filter({
            "role_id": role_id
        }).pluck("related_id").coerce_to("array").run(db_connection))
    return role_members
Code Example #30
def fetch_ldap_data():
    """
        Call to get entries for (Users | Groups) in Active Directory, saves the time of the sync,
        and inserts data into RethinkDB.
    """
    LOGGER.debug("Connecting to RethinkDB...")
    conn = connect_to_db()
    LOGGER.debug("Successfully connected to RethinkDB")

    for data_type in ["user", "group"]:
        if data_type == "user":
            last_sync = (r.table("sync_tracker").filter({
                "provider_id": LDAP_DC,
                "source": "ldap-user"
            }).max("timestamp").coerce_to("object").run(conn))
            conn.close()
            last_sync_time = last_sync["timestamp"]
            last_sync_time_formatted = to_date_ldap_query(
                rethink_timestamp=last_sync_time)
            search_filter = ("(&(objectClass=person)(whenChanged>=%s))" %
                             last_sync_time_formatted)
            search_base = USER_BASE_DN

        else:
            last_sync = (r.table("sync_tracker").filter({
                "provider_id": LDAP_DC,
                "source": "ldap-group"
            }).max("timestamp").coerce_to("object").run())

            last_sync_time = last_sync["timestamp"]
            last_sync_time_formatted = to_date_ldap_query(
                rethink_timestamp=last_sync_time)
            search_filter = ("(&(objectClass=group)(whenChanged>=%s))" %
                             last_sync_time_formatted)
            search_base = GROUP_BASE_DN
        ldap_connection = ldap_connector.await_connection(
            LDAP_SERVER, LDAP_USER, LDAP_PASS)

        ldap_connection.search(
            search_base=search_base,
            search_filter=search_filter,
            attributes=ldap3.ALL_ATTRIBUTES,
        )

        parsed_last_sync_time = datetime.strptime(
            last_sync_time.split("+")[0],
            "%Y-%m-%dT%H:%M:%S.%f").replace(tzinfo=timezone.utc)
        insert_to_db(
            data_dict=ldap_connection.entries,
            when_changed=parsed_last_sync_time,
            data_type=data_type,
        )
    conn.close()