def ldap_outbound_listener():
    """Initialize LDAP delta outbound sync with Active Directory.

    Runs forever: polls the RethinkDB ``outbound_queue`` for entries
    scoped to ``LDAP_DC``, writes each entry to AD via a fresh LDAP
    connection, and records the outcome.  Never returns.
    """
    LOGGER.info("Starting LDAP outbound sync listener...")
    while True:
        try:
            # Busy-poll the queue, sleeping between empty peeks, until an
            # entry appears.  Note the sleep happens only after a re-peek.
            queue_entry = peek_at_queue("outbound_queue", LDAP_DC)
            while queue_entry is None:
                queue_entry = peek_at_queue("outbound_queue", LDAP_DC)
                time.sleep(LISTENER_POLLING_DELAY)
            LOGGER.info(
                "Received queue entry %s from outbound queue...", queue_entry["id"]
            )
            data_type = queue_entry["data_type"]
            LOGGER.debug("Putting %s into ad...", data_type)
            try:
                LOGGER.debug(
                    "Processing LDAP outbound_queue entry: %s", str(queue_entry)
                )
                # A fresh connection per entry; await_connection presumably
                # blocks/retries until the server is reachable — TODO confirm.
                ldap_connection = ldap_connector.await_connection(
                    LDAP_SERVER, LDAP_USER, LDAP_PASS
                )
                successful_ldap_write = process_outbound_entry(
                    queue_entry, ldap_connection
                )
                ldap_connection.unbind()
                if successful_ldap_write:
                    # Only a successful AD write is recorded in the changelog;
                    # the queue entry itself is marked done, not deleted.
                    update_outbound_entry_status(queue_entry["id"])
                    LOGGER.debug("Putting queue entry into changelog...")
                    put_entry_changelog(queue_entry, "outbound")
                else:
                    # AD reported no change: drop the entry so it is not retried.
                    LOGGER.error(
                        "No changes were made in AD - deleting entry from outbound queue..."
                    )
                    delete_entry_queue(queue_entry["id"], "outbound_queue")
            except ValidationException as err:
                # Invalid payloads can never succeed; discard them.
                LOGGER.warning(
                    "Outbound payload failed validation, deleting entry from outbound queue..."
                )
                LOGGER.warning(err)
                delete_entry_queue(queue_entry["id"], "outbound_queue")
        except LDAPSessionTerminatedByServerError:
            # NOTE(review): the reconnect result assigned here is never used —
            # the inner loop opens a fresh connection per entry anyway.  The
            # current entry is NOT reprocessed; it stays on the queue and is
            # picked up by the next peek.
            LOGGER.warning(
                "Ldap connection was terminated by the server. Attempting to reconnect..."
            )
            ldap_connection = ldap_connector.await_connection(
                LDAP_SERVER, LDAP_USER, LDAP_PASS
            )
def ldap_outbound_listener():
    """Initialize LDAP delta outbound sync with Active Directory.

    Endless loop: waits for outbound queue entries, logs each one to the
    changelog, pushes the change into AD (update if present, create if
    not), then removes the entry from the queue.  Reconnects whenever the
    LDAP server drops the session.
    """
    LOGGER.info("Starting outbound sync listener...")
    LOGGER.info("Connecting to RethinkDb...")
    connect_to_db()
    LOGGER.info("..connected to RethinkDb")

    # One long-lived connection, re-established on server-side termination.
    ldap_connection = ldap_connector.await_connection(
        LDAP_SERVER, LDAP_USER, LDAP_PASS
    )

    def _poll_for_entry():
        # Block until the outbound queue yields an entry for this DC.
        pending = peek_at_queue("outbound_queue", LDAP_DC)
        while pending is None:
            pending = peek_at_queue("outbound_queue", LDAP_DC)
            time.sleep(LISTENER_POLLING_DELAY)
        return pending

    while True:
        try:
            entry = _poll_for_entry()
            LOGGER.info(
                "Received queue entry %s from outbound queue...", entry["id"]
            )
            LOGGER.debug("Putting queue entry into changelog...")
            put_entry_changelog(entry, "outbound")
            data_type = entry["data_type"]
            LOGGER.debug("Putting %s into ad...", data_type)
            try:
                # Dispatch on whether the object already exists in AD.
                writer = (
                    update_entry_ldap
                    if is_entry_in_ad(entry, ldap_connection)
                    else create_entry_ldap
                )
                writer(entry, ldap_connection)
            except ValidationException as err:
                LOGGER.warning("Outbound payload failed validation")
                LOGGER.warning(err)
            # Entry is removed from the queue whether the write succeeded
            # or failed validation.
            LOGGER.debug("Deleting queue entry from outbound queue...")
            delete_entry_queue(entry["id"], "outbound_queue")
        except LDAPSessionTerminatedByServerError:
            LOGGER.warning(
                "Ldap connection was terminated by the server. Attempting to reconnect..."
            )
            ldap_connection = ldap_connector.await_connection(
                LDAP_SERVER, LDAP_USER, LDAP_PASS
            )
def fetch_ldap_data():
    """
    Call to get entries for all (Users | Groups) in Active Directory,
    saves the time of the sync, and inserts data into RethinkDB.
    """
    LOGGER.debug("Connecting to RethinkDB...")
    connect_to_db()
    LOGGER.debug("Successfully connected to RethinkDB")

    # Most recent sync record for this DC.  NOTE(review): .run() is called
    # without an explicit connection — presumably connect_to_db() installs a
    # repl/default connection; verify.
    sync_record = (
        r.table("sync_tracker")
        .filter({"provider_id": LDAP_DC})
        .max("timestamp")
        .coerce_to("object")
        .run()
    )
    sync_timestamp = sync_record["timestamp"]
    ldap_timestamp = to_date_ldap_query(rethink_timestamp=sync_timestamp)

    # Only persons/groups changed at or after the last sync.
    search_filter = (
        "(&(|(objectClass=person)(objectClass=group))(whenChanged>=%s))"
        % ldap_timestamp
    )

    ldap_connection = ldap_connector.await_connection(
        LDAP_SERVER, LDAP_USER, LDAP_PASS
    )
    ldap_connection.search(
        search_base=LDAP_DC,
        search_filter=search_filter,
        attributes=ldap3.ALL_ATTRIBUTES,
    )

    # Strip the "+HH:MM" offset, parse, and pin the result to UTC.
    naive_timestamp = sync_timestamp.split("+")[0]
    parsed_sync_time = datetime.strptime(
        naive_timestamp, "%Y-%m-%dT%H:%M:%S.%f"
    ).replace(tzinfo=timezone.utc)

    insert_to_db(
        data_dict=ldap_connection.entries, when_changed=parsed_sync_time
    )
def fetch_ldap_deletions():
    """
    Searches LDAP provider for users & groups that were deleted from LDAP.
    If any were deleted, inserts distinguished names of deleted into the
    inbound_queue table.

    For each data type the set of DNs currently in AD is collected via a
    paged search, then every RethinkDB ``remote_id`` that is absent from AD
    is treated as deleted.
    """
    LOGGER.info("Fetching LDAP deleted entries...")
    conn = connect_to_db()
    for data_type in ["user", "group"]:
        if data_type == "user":
            search_filter = "(objectClass=person)"
            search_base = USER_BASE_DN
            existing_records = list(
                r.table("users").get_field("remote_id").run(conn))
        else:
            search_filter = "(objectClass=group)"
            search_base = GROUP_BASE_DN
            existing_records = list(
                r.table("roles").get_field("remote_id").run(conn))
        ldap_connection = ldap_connector.await_connection(
            LDAP_SERVER, LDAP_USER, LDAP_PASS)
        search_parameters = {
            "search_base": search_base,
            "search_filter": search_filter,
            "attributes": ["distinguishedName"],
            "paged_size": LDAP_SEARCH_PAGE_SIZE,
        }
        # Collect every DN present in AD into a set so that membership tests
        # below are O(1).  (The original list.remove() per entry made this
        # loop accidentally O(n*m).)
        ad_distinguished_names = set()
        while True:
            ldap_connection.search(**search_parameters)
            for entry in ldap_connection.entries:
                ad_distinguished_names.add(entry.distinguishedName.value)
            # 1.2.840.113556.1.4.319 is the OID/extended control for PagedResults
            cookie = ldap_connection.result["controls"][
                "1.2.840.113556.1.4.319"]["value"]["cookie"]
            if cookie:
                search_parameters["paged_cookie"] = cookie
            else:
                break
        # Anything known to RethinkDB but absent from AD was deleted there.
        # Comprehension preserves the original DB ordering of the records.
        deleted_records = [
            remote_id for remote_id in existing_records
            if remote_id not in ad_distinguished_names
        ]
        if deleted_records:
            LOGGER.info(
                "Found %s deleted entries. Inserting deleted "
                "AD %s(s) into inbound queue.",
                str(len(deleted_records)),
                data_type,
            )
            LOGGER.debug(deleted_records)
            insert_deleted_entries(deleted_records, data_type + "_deleted")
    conn.close()
    LOGGER.info("Fetching LDAP deleted entries completed...")
def fetch_ldap_data(data_type):
    """
    Call to get entries for all (Users | Groups) in Active Directory,
    saves the time of the sync, inserts data into RethinkDB, and
    initiates a new thread for a delta sync for data_type.

    Args:
        data_type: either "user" or "group"; selects the search base and
            object-class filter.

    Raises:
        ValueError: if data_type is not "user" or "group".
    """
    if data_type == "user":
        search_filter = "(objectClass=person)"
        search_base = USER_BASE_DN
    elif data_type == "group":
        search_filter = "(objectClass=group)"
        search_base = GROUP_BASE_DN
    else:
        # Previously an unknown data_type fell through to a NameError on
        # search_filter below; fail fast with a clear message instead.
        raise ValueError(
            "data_type must be 'user' or 'group', got %r" % (data_type,))
    ldap_connection = ldap_connector.await_connection(LDAP_SERVER, LDAP_USER,
                                                      LDAP_PASS)
    search_parameters = {
        "search_base": search_base,
        "search_filter": search_filter,
        "attributes": ldap3.ALL_ATTRIBUTES,
        "paged_size": LDAP_SEARCH_PAGE_SIZE,
    }
    entry_count = 0
    LOGGER.info("Importing %ss..", data_type)
    conn = connect_to_db()
    while True:
        # time.clock() was removed in Python 3.8; perf_counter() is the
        # documented replacement for elapsed-time measurement.
        start_time = time.perf_counter()
        ldap_connection.search(**search_parameters)
        record_count = len(ldap_connection.entries)
        LOGGER.info(
            "Got %s entries in %s seconds.",
            record_count,
            "%.3f" % (time.perf_counter() - start_time),
        )
        for entry in ldap_connection.entries:
            entry_count += 1
            insert_to_db(entry=entry, data_type=data_type, conn=conn)
        # 1.2.840.113556.1.4.319 is the OID/extended control for PagedResults
        cookie = ldap_connection.result["controls"]["1.2.840.113556.1.4.319"][
            "value"]["cookie"]
        if cookie:
            search_parameters["paged_cookie"] = cookie
        else:
            LOGGER.info("Imported %s entries from Active Directory", entry_count)
            break
    sync_source = "ldap-" + data_type
    save_sync_time(LDAP_DC, sync_source, "initial", conn)
    conn.close()
def fetch_ldap_changes():
    """
    Call to get entries for (Users & Groups) in Active Directory, saves the
    time of the sync, and inserts data into RethinkDB.

    For each data type, queries AD for objects whose whenChanged is strictly
    after the last recorded sync for that source, then upserts them.
    """
    LOGGER.debug("Connecting to RethinkDB...")
    conn = connect_to_db()
    LOGGER.debug("Successfully connected to RethinkDB")
    for data_type in ["user", "group"]:
        if data_type == "user":
            ldap_source = "ldap-user"
            search_base = USER_BASE_DN
            object_class = "person"
        else:
            ldap_source = "ldap-group"
            search_base = GROUP_BASE_DN
            object_class = "group"
        # Most recent sync timestamp for this provider/source.
        last_sync = (r.table("sync_tracker").filter({
            "provider_id": LDAP_DC,
            "source": ldap_source
        }).max("timestamp").coerce_to("object").run(conn))
        last_sync_time = last_sync["timestamp"]
        last_sync_time_formatted = to_date_ldap_query(
            rethink_timestamp=last_sync_time)
        # whenChanged >= T excluding == T, i.e. strictly newer than last sync.
        search_filter = "(&(objectClass=%s)(whenChanged>=%s)(!(whenChanged=%s)))" % (
            object_class,
            last_sync_time_formatted,
            last_sync_time_formatted,
        )
        ldap_connection = ldap_connector.await_connection(
            LDAP_SERVER, LDAP_USER, LDAP_PASS)
        # Strip the UTC offset suffix, parse, and pin to UTC.
        parsed_last_sync_time = datetime.strptime(
            last_sync_time.split("+")[0],
            "%Y-%m-%dT%H:%M:%S.%f").replace(tzinfo=timezone.utc)
        search_parameters = {
            "search_base": search_base,
            "search_filter": search_filter,
            "attributes": ldap3.ALL_ATTRIBUTES,
            "paged_size": LDAP_SEARCH_PAGE_SIZE,
        }
        entry_count = 0
        LOGGER.info("Importing %ss..", data_type)
        while True:
            # time.clock() was removed in Python 3.8; perf_counter() is the
            # documented replacement for elapsed-time measurement.
            start_time = time.perf_counter()
            ldap_connection.search(**search_parameters)
            record_count = len(ldap_connection.entries)
            LOGGER.info(
                "Got %s entries in %s seconds.",
                record_count,
                "%.3f" % (time.perf_counter() - start_time),
            )
            entry_count = entry_count + len(ldap_connection.entries)
            insert_updated_entries(
                data_dict=ldap_connection.entries,
                when_changed=parsed_last_sync_time,
                data_type=data_type,
            )
            # 1.2.840.113556.1.4.319 is the OID/extended control for PagedResults
            cookie = ldap_connection.result["controls"][
                "1.2.840.113556.1.4.319"]["value"]["cookie"]
            if cookie:
                search_parameters["paged_cookie"] = cookie
            else:
                # Original source had this literal physically split across two
                # lines ("Active \n Directory"), which is a syntax error;
                # rejoined here.
                LOGGER.info("Imported %s entries from Active Directory",
                            entry_count)
                break
    conn.close()