def initialize_aad_sync():
    """Initialize a sync with Azure Active Directory.

    Runs the initial user sync and the initial group sync, each at most once
    (skipped when a previous "initial" sync record exists). Each sync pages
    through the Graph API via @odata.nextLink and records a sync time on
    success. Refactor: the duplicated user/group pagination loop is extracted
    into _paginate_and_insert; all log messages and control flow are unchanged.
    """
    LOGGER.info("connecting to RethinkDB...")
    connect_to_db()
    LOGGER.info("Successfully connected to RethinkDB!")
    provider_id = TENANT_ID

    db_user_payload = check_last_sync("azure-user", "initial")
    if not db_user_payload:
        LOGGER.info(
            "No initial AAD user sync was found. Starting initial AAD user sync now."
        )
        LOGGER.info("Getting Users...")
        if _paginate_and_insert(fetch_users(), insert_user_to_db):
            save_sync_time(provider_id, "azure-user", "initial")
            LOGGER.info("Initial user upload complete :)")
        else:
            LOGGER.info(
                "An error occurred when uploading users. Please check the logs."
            )

    db_group_payload = check_last_sync("azure-group", "initial")
    if not db_group_payload:
        LOGGER.info(
            "No initial AAD group sync was found. Starting initial AAD group sync now."
        )
        LOGGER.info("Getting Groups with Members...")
        if _paginate_and_insert(fetch_groups_with_members(), insert_group_to_db):
            save_sync_time(provider_id, "azure-group", "initial")
            LOGGER.info("Initial group upload complete :)")
        else:
            LOGGER.info(
                "An error occurred when uploading groups. Please check the logs."
            )

    if db_group_payload and db_user_payload:
        LOGGER.info("The initial sync has already been run.")


def _paginate_and_insert(payload, insert_to_db):
    """Insert the first page, then follow @odata.nextLink pages.

    Returns True when the first page was truthy (the original success
    criterion). A falsy follow-up page simply stops pagination, exactly as
    the original `break` did.
    """
    if not payload:
        return False
    insert_to_db(payload)
    while "@odata.nextLink" in payload:
        payload = fetch_next_payload(payload["@odata.nextLink"])
        if not payload:
            break
        insert_to_db(payload)
    return True
def initialize_ldap_sync():
    """
    Checks if LDAP initial syncs have been run. If not, run initial sync for
    both LDAP users and groups. If initial syncs have been completed, restart
    the inbound delta syncs.
    """
    if LDAP_DC:
        connect_to_db()

        db_user_payload = check_last_sync("ldap-user", "initial")
        # Bug fix: a sync recorded on a previous start-up counts as completed.
        # Previously both flags started False, so an earlier user sync combined
        # with a fresh group sync (or vice versa) could never satisfy
        # `user_sync_completed and group_sync_completed` and the delta sync
        # silently never started.
        user_sync_completed = bool(db_user_payload)
        if not db_user_payload:
            LOGGER.info(
                "No initial AD user sync was found. Starting initial AD user sync now."
            )
            LOGGER.info("Getting AD Users...")
            fetch_ldap_data(data_type="user")
            LOGGER.info("Initial AD user upload completed.")
            user_sync_completed = True

        db_group_payload = check_last_sync("ldap-group", "initial")
        group_sync_completed = bool(db_group_payload)
        if not db_group_payload:
            LOGGER.info(
                "No initial AD group sync was found. Starting initial AD group sync now."
            )
            LOGGER.info("Getting Groups with Members...")
            fetch_ldap_data(data_type="group")
            LOGGER.info("Initial AD group upload completed.")
            group_sync_completed = True

        if db_user_payload and db_group_payload:
            # Both initial syncs were done on a previous run: just restart delta.
            LOGGER.info("The LDAP initial sync has already been run.")
            initiate_delta_sync()
        elif user_sync_completed and group_sync_completed:
            # At least one initial sync ran just now and both are complete.
            initiate_delta_sync()
        else:
            LOGGER.info(
                "Initial syncs did not complete successfully, LDAP delta sync will not start."
            )
    else:
        LOGGER.info("LDAP Domain Controller is not provided, skipping LDAP sync.")
def initialize_ldap_sync():
    """
    Checks if LDAP initial syncs have been run. If not, run initial sync for
    both LDAP users and groups. If initial syncs have been completed, restart
    the inbound delta syncs.
    """
    if not LDAP_DC:
        LOGGER.info(
            "Ldap Domain Controller is not provided, skipping Ldap sync.")
    elif not ldap_connector.can_connect_to_ldap(LDAP_SERVER, LDAP_USER, LDAP_PASS):
        LOGGER.info("Ldap Connection failed. Skipping Ldap sync.")
    else:
        connect_to_db()

        # Check to see if User Sync has occurred. If not - Sync
        db_user_payload = check_last_sync("ldap-user", "initial")
        if not db_user_payload:
            LOGGER.info(
                "No initial AD user sync was found. Starting initial AD user sync now."
            )
            LOGGER.info("Getting AD Users...")
            fetch_ldap_data(data_type="user")
            LOGGER.debug("Initial AD user upload completed.")

        # Check to see if Group Sync has occurred. If not - Sync
        db_group_payload = check_last_sync("ldap-group", "initial")
        if not db_group_payload:
            # Consistency fix: log at the same levels as the user branch above
            # (start/progress at info, completion at debug) — previously the
            # whole group branch logged at debug.
            LOGGER.info(
                "No initial AD group sync was found. Starting initial AD group sync now."
            )
            LOGGER.info("Getting Groups with Members...")
            fetch_ldap_data(data_type="group")
            LOGGER.debug("Initial AD group upload completed.")

        if db_user_payload and db_group_payload:
            LOGGER.info("The LDAP initial sync has already been run.")

        # Start the inbound delta sync
        initiate_delta_sync()
def ldap_sync():
    """Fetches (Users | Groups) from Active Directory and inserts them into RethinkDB."""
    # Guard clause: without a domain controller there is nothing to sync.
    if not LDAP_DC:
        LOGGER.debug("LDAP Domain Controller is not provided, skipping LDAP sync.")
        return

    connect_to_db()
    # The delta interval is logged the same way for both entity types.
    delta_interval = str(int(DELTA_SYNC_INTERVAL_SECONDS))

    db_user_payload = check_last_sync("ldap-user", "initial")
    if not db_user_payload:
        LOGGER.info(
            "No initial AD user sync was found. Starting initial AD user sync now."
        )
        LOGGER.debug("Inserting AD data...")
        LOGGER.debug("Getting Users...")
        fetch_ldap_data(sync_type="initial", data_type="user")
        LOGGER.debug(
            "Initial AD user upload completed. User delta sync will occur in %s seconds.",
            delta_interval,
        )

    db_group_payload = check_last_sync("ldap-group", "initial")
    if not db_group_payload:
        LOGGER.info(
            "No initial AD group sync was found. Starting initial AD group sync now."
        )
        LOGGER.debug("Getting Groups with Members...")
        fetch_ldap_data(sync_type="initial", data_type="group")
        LOGGER.debug(
            "Initial AD group upload completed. Group delta sync will occur in %s seconds.",
            delta_interval,
        )

    if db_user_payload and db_group_payload:
        LOGGER.info("The initial sync has already been run.")
        # TODO: Recreate threads for delta syncs
def inbound_sync_listener():
    """Initialize a delta inbound sync with Azure Active Directory.

    Runs forever: determines the timestamp of the last initial/delta sync,
    then drains the Azure EventHub audit-log partition, persisting every
    event with a valid operation name that is newer than that timestamp and
    recording a new "delta" sync time for each one. On ExpectedError it
    sleeps LISTENER_POLLING_DELAY seconds and repolls; any other exception
    is logged and re-raised.
    """
    while True:  # pylint: disable=too-many-nested-blocks
        provider_id = TENANT_ID
        try:
            initial_sync_time = check_last_sync("azure-user", "initial")
            LOGGER.info(initial_sync_time)
            LOGGER.info("This is your initial sync time")
            # Timestamps are trimmed to 26 chars so they parse with the
            # microsecond-precision format below.
            initial_sync_time = initial_sync_time["timestamp"][:26]
            latest_delta_sync_time = get_last_delta_sync(provider_id, "delta")
            if latest_delta_sync_time:
                latest_delta_sync_time = latest_delta_sync_time["timestamp"][:26]
                previous_sync_datetime = datetime.strptime(
                    latest_delta_sync_time, "%Y-%m-%dT%H:%M:%S.%f")
            else:
                previous_sync_datetime = datetime.strptime(
                    initial_sync_time, "%Y-%m-%dT%H:%M:%S.%f")

            # Create an eventhub client.
            LOGGER.info(ADDRESS)
            client = EventHubClient(ADDRESS, debug=False, username=USER, password=KEY)
            try:
                LOGGER.info("Opening connection to EventHub...")
                # Set prefetch to 1, we only want one event at a time.
                receiver = client.add_receiver(
                    CONSUMER_GROUP, PARTITION, prefetch=1, offset=OFFSET)
                # Open the connection to the EventHub.
                client.run()
                # Get one event from EventHub.
                batch = receiver.receive(timeout=5000)
                while batch:
                    for event_data in batch:
                        # Get the event as a json record from the batch of events.
                        event_json = event_data.body_as_json()
                        record = event_json["records"][0]
                        operation_name = record["operationName"]
                        # Bug fix: this local was named `time`, which made the
                        # whole name `time` function-local and broke the
                        # `time.sleep(...)` call in the ExpectedError handler
                        # below (UnboundLocalError / AttributeError on a str).
                        event_time = record["time"][:26]
                        record_timestamp = datetime.strptime(
                            event_time, "%Y-%m-%dT%H:%M:%S.%f")
                        # Only process events logged after the previous
                        # initial/delta sync, and only events concerning
                        # User or Group objects.
                        if (operation_name in VALID_OPERATIONS
                                and record_timestamp > previous_sync_datetime):
                            data = {
                                "initated_by": record["properties"]["initiatedBy"],
                                "target_resources": record["properties"]["targetResources"],
                                "operation_name": operation_name,
                                "resultType": record["resultType"],
                            }
                            LOGGER.info("Operation name: %s", operation_name)
                            LOGGER.info("Record to Change: %s", record)
                            # NOTE(review): isoformat of a naive datetime — the
                            # "utc" in the name assumes EventHub record times
                            # are UTC; confirm against the audit-log source.
                            record_timestamp_utc = record_timestamp.isoformat()
                            insert_change_to_db(data, record_timestamp_utc)
                            sync_source = "azure-" + VALID_OPERATIONS[operation_name]
                            provider_id = TENANT_ID
                            conn = connect_to_db()
                            save_sync_time(
                                provider_id,
                                sync_source,
                                "delta",
                                conn,
                                record_timestamp_utc,
                            )
                            conn.close()
                            previous_sync_datetime = record_timestamp
                    batch = receiver.receive(timeout=50)
                LOGGER.info("Closing connection to EventHub...")
                # Close the connection to the EventHub.
                client.stop()
            except KeyboardInterrupt:
                pass
            finally:
                # Ensure the EventHub connection is released on every path.
                client.stop()
        except ExpectedError as err:
            # Bug fix: the original passed a tuple as the single log argument
            # and logged the unbound `err.__str__` method instead of the
            # error text; use lazy %-style arguments instead.
            LOGGER.debug(
                "%s Repolling after %s seconds...", err, LISTENER_POLLING_DELAY)
            time.sleep(LISTENER_POLLING_DELAY)
        except Exception as err:
            LOGGER.exception(err)
            raise err