def inbound_sync_listener():
    """Initialize a delta inbound sync between the inbound queue and sawtooth."""
    LOGGER.info("Starting inbound sync listener...")

    LOGGER.info("Connecting to RethinkDB...")
    connect_to_db()
    LOGGER.info("Successfully connected to RethinkDB!")

    while True:
        try:
            queue_entry = peek_at_queue(INBOUND_QUEUE)
            LOGGER.info(
                "Received queue entry %s from inbound queue...", queue_entry["id"]
            )

            data_type = queue_entry["data_type"]
            LOGGER.info("Putting %s into Sawtooth...", data_type)
            # TODO: Validate queue_entry.
            # TODO: Transform or reject invalid entries.
            # TODO: Get queue_entry object from NEXT state table.
            # TODO: Update object or create if it doesn't exist.
            LOGGER.debug(queue_entry)

            LOGGER.info("Putting queue entry into changelog...")
            put_entry_changelog(queue_entry, DIRECTION)

            LOGGER.info("Deleting queue entry from outbound queue...")
            entry_id = queue_entry["id"]
            delete_entry_queue(entry_id, INBOUND_QUEUE)
        except ExpectedError as err:
            LOGGER.debug("%s Repolling after %s seconds...", err, DELAY)
            time.sleep(DELAY)
        except Exception as err:
            LOGGER.exception(err)
            raise
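
The queue helpers used by the listeners are defined elsewhere in the module. A minimal sketch of how they might sit on top of RethinkDB follows, assuming the legacy import rethinkdb as r driver, a repl-mode connection opened by connect_to_db(), and a "timestamp" ordering key; table and field names are illustrative.

import rethinkdb as r  # legacy driver; connect_to_db() is assumed to open a repl connection


def peek_at_queue(table_name, provider_id=None):
    """Return the oldest entry in a queue table, or None if the queue is empty."""
    query = r.table(table_name)
    if provider_id is not None:
        query = query.filter({"provider_id": provider_id})
    entries = query.order_by("timestamp").limit(1).coerce_to("array").run()
    return entries[0] if entries else None


def put_entry_changelog(queue_entry, direction):
    """Record a processed entry, tagged with its sync direction, in the changelog."""
    queue_entry["direction"] = direction
    r.table("changelog").insert(queue_entry).run()


def delete_entry_queue(entry_id, table_name):
    """Remove a processed entry from its queue table."""
    r.table(table_name).get(entry_id).delete().run()
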
def fetch_ldap_data(data_type):
    """
        Call to get entries for all (Users | Groups) in Active Directory, saves the time of the sync,
        inserts data into RethinkDB, and initiates a new thread for a delta sync for data_type.
    """
    connect_to_db()

    if data_type == "user":
        search_filter = LDAP_FILTER_USER
    elif data_type == "group":
        search_filter = LDAP_FILTER_GROUP

    server = Server(LDAP_SERVER, get_info=ALL)
    conn = Connection(server, user=LDAP_USER, password=LDAP_PASS)
    if not conn.bind():
        LOGGER.error(
            "Error connecting to LDAP server %s : %s", LDAP_SERVER, conn.result
        )
        return
    conn.search(
        search_base=LDAP_DC,
        search_filter=search_filter,
        attributes=ldap3.ALL_ATTRIBUTES,
    )

    insert_to_db(data_dict=conn.entries, data_type=data_type)
    sync_source = "ldap-" + data_type
    provider_id = LDAP_DC
    save_sync_time(provider_id, sync_source, "initial")
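
This function leans on module-level LDAP settings that are not shown. The sketch below illustrates roughly what they might look like; every value here is a placeholder, not taken from the source.

# Illustrative configuration only; real deployments read these from the environment.
LDAP_SERVER = "ldap://dc1.example.com"             # hypothetical domain controller URL
LDAP_USER = "CN=admin,CN=Users,DC=example,DC=com"  # hypothetical bind DN
LDAP_PASS = "change-me"                            # hypothetical credential
LDAP_DC = "DC=example,DC=com"                      # search base, doubles as provider_id
LDAP_FILTER_USER = "(objectClass=person)"          # plausible filter for user entries
LDAP_FILTER_GROUP = "(objectClass=group)"          # plausible filter for group entries
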
def outbound_sync_listener():
    """Initialize a delta outbound sync with Azure Active Directory."""
    LOGGER.info("Starting outbound sync listener...")

    LOGGER.info("Connecting to RethinkDB...")
    connect_to_db()
    LOGGER.info("Successfully connected to RethinkDB!")

    while True:
        try:
            queue_entry = peek_at_queue(OUTBOUND_QUEUE, TENANT_ID)
            LOGGER.info("Received queue entry %s from outbound queue...",
                        queue_entry["id"])

            data_type = queue_entry["data_type"]
            LOGGER.info("Putting %s into aad...", data_type)
            if is_entry_in_aad(queue_entry):
                update_entry_aad(queue_entry)
            else:
                create_entry_aad(queue_entry)

            LOGGER.info("Putting queue entry into changelog...")
            put_entry_changelog(queue_entry, "outbound")

            LOGGER.info("Deleting queue entry from outbound queue...")
            entry_id = queue_entry["id"]
            delete_entry_queue(entry_id, OUTBOUND_QUEUE)
        except ExpectedError as err:
            LOGGER.debug("%s Repolling after %s seconds...", err, DELAY)
            time.sleep(DELAY)
        except Exception as err:
            LOGGER.exception(err)
            raise
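
The aad helpers (is_entry_in_aad, update_entry_aad, create_entry_aad) are defined elsewhere. As a rough illustration, an existence check against Microsoft Graph could look like the sketch below; the explicit token parameter and the queue-entry shape (data["id"]) are assumptions, and the source's helper takes only the entry.

import requests

GRAPH_URL = "https://graph.microsoft.com/v1.0"


def is_entry_in_aad(queue_entry, token):
    """Return True if the queued object already exists in Azure AD.

    Assumes the AAD object id is stored under queue_entry["data"]["id"];
    token is a valid Microsoft Graph access token (hypothetical parameter).
    """
    resource = "users" if queue_entry["data_type"] == "user" else "groups"
    object_id = queue_entry["data"]["id"]
    response = requests.get(
        "%s/%s/%s" % (GRAPH_URL, resource, object_id),
        headers={"Authorization": "Bearer " + token},
    )
    return response.status_code == 200
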
Example 4
def fetch_ldap_data():
    """
        Call to get entries for all (Users | Groups) in Active Directory, saves the time of the sync,
        and inserts data into RethinkDB.
    """
    LOGGER.debug("Connecting to RethinkDB...")
    connect_to_db()
    LOGGER.debug("Successfully connected to RethinkDB")

    last_sync = (r.table("sync_tracker").filter({
        "provider_id": LDAP_DC
    }).max("timestamp").coerce_to("object").run())

    last_sync_time = last_sync["timestamp"]
    last_sync_time_formatted = to_date_ldap_query(
        rethink_timestamp=last_sync_time)
    search_filter = (
        "(&(|(objectClass=person)(objectClass=group))(whenChanged>=%s))" %
        last_sync_time_formatted)
    server = Server(LDAP_SERVER, get_info=ALL)
    ldap_conn = Connection(server, user=LDAP_USER, password=LDAP_PASS)
    if not ldap_conn.bind():
        LOGGER.error("Error connecting to LDAP server %s : %s", LDAP_SERVER,
                     ldap_conn.result)
        return
    ldap_conn.search(
        search_base=LDAP_DC,
        search_filter=search_filter,
        attributes=ldap3.ALL_ATTRIBUTES,
    )

    parsed_last_sync_time = datetime.strptime(
        last_sync_time.split("+")[0],
        "%Y-%m-%dT%H:%M:%S.%f").replace(tzinfo=timezone.utc)
    insert_to_db(data_dict=ldap_conn.entries,
                 when_changed=parsed_last_sync_time)
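
to_date_ldap_query converts a RethinkDB ISO-8601 timestamp into the LDAP generalized-time form that whenChanged filters expect. A minimal sketch, assuming the "YYYYMMDDHHMMSS.0Z" generalized-time format used by Active Directory:

from datetime import datetime


def to_date_ldap_query(rethink_timestamp):
    """Convert an ISO-8601 timestamp into LDAP generalized time.

    Example: "2019-01-01T12:30:00.000000+00:00" -> "20190101123000.0Z"
    """
    parsed = datetime.strptime(
        rethink_timestamp.split("+")[0], "%Y-%m-%dT%H:%M:%S.%f"
    )
    return parsed.strftime("%Y%m%d%H%M%S") + ".0Z"
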
Example 5
def initialize_aad_sync():
    """Initialize a sync with Azure Active Directory."""
    LOGGER.info("connecting to RethinkDB...")
    connect_to_db()
    LOGGER.info("Successfully connected to RethinkDB!")
    provider_id = TENANT_ID

    db_user_payload = check_last_sync("azure-user", "initial")
    if not db_user_payload:
        LOGGER.info(
            "No initial AAD user sync was found. Starting initial AAD user sync now."
        )

        LOGGER.info("Getting Users...")
        users = fetch_users()
        if users:
            insert_user_to_db(users)
            while "@odata.nextLink" in users:
                users = fetch_next_payload(users["@odata.nextLink"])
                if users:
                    insert_user_to_db(users)
                else:
                    break
            save_sync_time(provider_id, "azure-user", "initial")
            LOGGER.info("Initial user upload complete :)")
        else:
            LOGGER.error(
                "An error occurred when fetching users. Please check the logs."
            )

    db_group_payload = check_last_sync("azure-group", "initial")
    if not db_group_payload:
        LOGGER.info(
            "No initial AAD group sync was found. Starting initial AAD group sync now."
        )
        LOGGER.info("Getting Groups with Members...")
        groups = fetch_groups_with_members()
        if groups:
            insert_group_to_db(groups)
            while "@odata.nextLink" in groups:
                groups = fetch_next_payload(groups["@odata.nextLink"])
                if groups:
                    insert_group_to_db(groups)
                else:
                    break
            save_sync_time(provider_id, "azure-group", "initial")
            LOGGER.info("Initial group upload complete :)")
        else:
            LOGGER.error(
                "An error occurred when fetching groups. Please check the logs."
            )

    if db_group_payload and db_user_payload:
        LOGGER.info("The initial sync has already been run.")
def initialize_ldap_sync():
    """
        Checks whether the initial LDAP syncs have been run. If not, runs the initial
        sync for both LDAP users and groups. If the initial syncs have already
        completed, restarts the inbound delta syncs.
    """

    if LDAP_DC:
        connect_to_db()

        user_sync_completed = False
        group_sync_completed = False
        db_user_payload = check_last_sync("ldap-user", "initial")
        if not db_user_payload:
            LOGGER.info(
                "No initial AD user sync was found. Starting initial AD user sync now."
            )

            LOGGER.info("Getting AD Users...")
            fetch_ldap_data(data_type="user")

            LOGGER.info("Initial AD user upload completed.")
            user_sync_completed = True

        db_group_payload = check_last_sync("ldap-group", "initial")
        if not db_group_payload:
            LOGGER.info(
                "No initial AD group sync was found. Starting initial AD group sync now."
            )
            LOGGER.info("Getting Groups with Members...")
            fetch_ldap_data(data_type="group")

            LOGGER.info("Initial AD group upload completed.")
            group_sync_completed = True

        if user_sync_completed and group_sync_completed:
            initiate_delta_sync()
        else:
            LOGGER.info(
                "Initial syncs did not complete successfully; the LDAP delta sync will not start."
            )

        if db_user_payload and db_group_payload:
            LOGGER.info("The LDAP initial sync has already been run.")
            initiate_delta_sync()
    else:
        LOGGER.info("LDAP Domain Controller is not provided, skipping LDAP sync.")
Example 7
def process_ldap_outbound():
    """
        While there are items in the OUTBOUND_QUEUE table, gets the earliest entry,
        checks whether the entry exists in LDAP, and processes it accordingly. Once
        processed, records the entry in the changelog and deletes it from the outbound queue.
    """
    connect_to_db()
    ldap_conn = connect_to_ldap()

    while not r.table(OUTBOUND_QUEUE).filter({"provider_id": LDAP_DC}).is_empty().run():
        queue_entry = peek_at_queue(table_name=OUTBOUND_QUEUE, provider_id=LDAP_DC)
        if is_entry_in_ad(queue_entry, ldap_conn):
            update_entry_ldap(queue_entry, ldap_conn)
        else:
            create_entry_ldap(queue_entry, ldap_conn)
        put_entry_changelog(queue_entry, DIRECTION)
        queue_entry_id = queue_entry["id"]
        delete_entry_queue(queue_entry_id, OUTBOUND_QUEUE)
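
connect_to_ldap is the bind step factored out of the fetch functions. A minimal sketch built from the same ldap3 pattern the other examples use (raising on a failed bind is a choice of this sketch, not the source):

from ldap3 import ALL, Connection, Server


def connect_to_ldap():
    """Bind to the domain controller and return a live ldap3 Connection."""
    server = Server(LDAP_SERVER, get_info=ALL)
    conn = Connection(server, user=LDAP_USER, password=LDAP_PASS)
    if not conn.bind():
        # Fail loudly rather than searching on an unbound connection.
        raise RuntimeError("LDAP bind failed: %s" % conn.result)
    return conn
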
Example 8
def fetch_ldap_data(sync_type, data_type):
    """
        Fetches (Users | Groups) entries from Active Directory for either an initial
        or a delta sync, inserts the data into RethinkDB, and saves the sync time.
    """
    connect_to_db()

    if sync_type == "delta":
        last_sync = (
            r.table("sync_tracker")
            .filter({"source": "ldap-" + data_type})
            .coerce_to("array")
            .run()
        )
        last_sync_time = ldap_payload_transformer.to_date_ldap_query(
            rethink_timestamp=last_sync[0]["timestamp"]
        )
        if data_type == "user":
            search_filter = LDAP_FILTER_USER_DELTA % last_sync_time
        elif data_type == "group":
            search_filter = LDAP_FILTER_GROUP_DELTA % last_sync_time

    elif sync_type == "initial":
        if data_type == "user":
            search_filter = LDAP_FILTER_USER
        elif data_type == "group":
            search_filter = LDAP_FILTER_GROUP

    server = Server(LDAP_SERVER, get_info=ALL)
    conn = Connection(server, user=LDAP_USER, password=LDAP_PASS)
    if not conn.bind():
        LOGGER.error(
            "Error connecting to LDAP server %s : %s", LDAP_SERVER, conn.result
        )
        return
    conn.search(
        search_base=LDAP_DC,
        search_filter=search_filter,
        attributes=ldap3.ALL_ATTRIBUTES,
    )

    insert_to_db(data_dict=conn.entries, data_type=data_type)
    sync_source = "ldap-" + data_type
    provider_id = LDAP_DC
    save_sync_time(provider_id, sync_source, sync_type)
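
The delta filter constants mirror the combined filter built inline in the earlier delta example: the %s placeholder receives the LDAP-formatted timestamp of the last sync. Plausible definitions (illustrative, not from the source):

# Only entries changed since the last sync match; %s is filled with the
# generalized-time string produced by to_date_ldap_query.
LDAP_FILTER_USER_DELTA = "(&(objectClass=person)(whenChanged>=%s))"
LDAP_FILTER_GROUP_DELTA = "(&(objectClass=group)(whenChanged>=%s))"
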
Example 9
def ldap_sync():
    """Fetches (Users | Groups) from Active Directory and inserts them into RethinkDB."""

    if LDAP_DC:
        connect_to_db()

        db_user_payload = check_last_sync("ldap-user", "initial")
        if not db_user_payload:
            LOGGER.info(
                "No initial AD user sync was found. Starting initial AD user sync now."
            )
            LOGGER.debug("Inserting AD data...")

            LOGGER.debug("Getting Users...")
            fetch_ldap_data(sync_type="initial", data_type="user")

            LOGGER.debug(
                "Initial AD user upload completed. User delta sync will occur in %s seconds.",
                str(int(DELTA_SYNC_INTERVAL_SECONDS)),
            )

        db_group_payload = check_last_sync("ldap-group", "initial")
        if not db_group_payload:
            LOGGER.info(
                "No initial AD group sync was found. Starting initial AD group sync now."
            )
            LOGGER.debug("Getting Groups with Members...")
            fetch_ldap_data(sync_type="initial", data_type="group")

            LOGGER.debug(
                "Initial AD group upload completed. Group delta sync will occur in %s seconds.",
                str(int(DELTA_SYNC_INTERVAL_SECONDS)),
            )

        if db_user_payload and db_group_payload:
            LOGGER.info("The initial sync has already been run.")
            # TODO: Recreate threads for delta syncs
    else:
        LOGGER.debug("LDAP Domain Controller is not provided, skipping LDAP sync.")
def inbound_sync_listener():
    """Initialize a delta inbound sync with Azure Active Directory."""
    while True:  # pylint: disable=too-many-nested-blocks
        provider_id = TENANT_ID
        try:
            LOGGER.info("Connecting to RethinkDB...")
            connect_to_db()
            LOGGER.info("Successfully connected to RethinkDB!")

            initial_sync_time = check_last_sync("azure-user", "initial")
            initial_sync_time = initial_sync_time[0]["timestamp"][:26]
            latest_delta_sync_time = get_last_delta_sync(provider_id, "delta")
            if latest_delta_sync_time:
                latest_delta_sync_time = latest_delta_sync_time[
                    "timestamp"][:26]
                previous_sync_datetime = datetime.strptime(
                    latest_delta_sync_time, "%Y-%m-%dT%H:%M:%S.%f")
            else:
                previous_sync_datetime = datetime.strptime(
                    initial_sync_time, "%Y-%m-%dT%H:%M:%S.%f")
            # Create an eventhub client.
            client = EventHubClient(ADDRESS,
                                    debug=False,
                                    username=USER,
                                    password=KEY)
            try:
                LOGGER.info("Opening connection to EventHub...")
                # Set prefetch to 1, we only want one event at a time.
                receiver = client.add_receiver(CONSUMER_GROUP,
                                               PARTITION,
                                               prefetch=1,
                                               offset=OFFSET)
                # Open the connection to the EventHub.
                client.run()
                # Get one event from EventHub.
                batch = receiver.receive(timeout=5000)
                while batch:
                    for event_data in batch:
                        # Get the event as a json record from the batch of events.
                        event_json = event_data.body_as_json()
                        record = event_json["records"][0]
                        operation_name = record["operationName"]
                        time = record["time"][:26]
                        record_timestamp = datetime.strptime(
                            time, "%Y-%m-%dT%H:%M:%S.%f")
                        # Only process events logged after the previous initial/delta sync.
                        # Only grab events concerning User or Group objects.
                        if (operation_name in VALID_OPERATIONS
                                and record_timestamp > previous_sync_datetime):
                            data = {
                                "initiated_by": record["properties"]["initiatedBy"],
                                "target_resources": record["properties"]["targetResources"],
                                "operation_name": operation_name,
                                "result_type": record["resultType"],
                            }
                            LOGGER.info("Operation name: %s", operation_name)
                            LOGGER.info("Record to Change: %s", record)
                            record_timestamp_utc = record_timestamp.isoformat()
                            insert_change_to_db(data, record_timestamp_utc)
                            sync_source = "azure-" + VALID_OPERATIONS[
                                operation_name]
                            provider_id = TENANT_ID
                            save_sync_time(provider_id, sync_source, "delta",
                                           record_timestamp_utc)
                            previous_sync_datetime = record_timestamp
                    batch = receiver.receive(timeout=50)
                LOGGER.info("Closing connection to EventHub...")
                # Close the connection to the EventHub.
                client.stop()
            except KeyboardInterrupt:
                pass
            finally:
                client.stop()
        except ExpectedError as err:
            LOGGER.debug("%s Repolling after %s seconds...", err, DELAY)
            time.sleep(DELAY)
        except Exception as err:
            LOGGER.exception(err)
            raise
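
VALID_OPERATIONS maps Azure AD audit-log operation names to the local data type appended to the "azure-" sync source. A plausible shape (the exact operation names covered are an assumption):

# Hypothetical mapping; the real table may track additional operations.
VALID_OPERATIONS = {
    "Add user": "user",
    "Update user": "user",
    "Delete user": "user",
    "Add group": "group",
    "Update group": "group",
    "Delete group": "group",
}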