def test_role_data_with_empty_lst():
    """Test that a group list stays a list when there is no value in it."""
    group_payload = {"id": "123-456-abs3", "members": []}
    filtered = inbound_group_filter(group_payload, "azure")
    # An empty member list must survive filtering unchanged.
    assert filtered["members"] == []
def insert_updated_entries(data_dict, when_changed, data_type):
    """Insert (Users | Groups) individually to RethinkDB from dict of data
    and begins delta sync timer.

    Args:
        data_dict: Iterable of LDAP entries exposing a ``whenChanged``
            attribute value.
        when_changed: Lower-bound timestamp; only entries modified strictly
            after this value are inserted.
        data_type: "user" or "group" — selects which inbound filter is
            applied (anything other than "user" is treated as a group).
    """
    insertion_counter = 0
    conn = connect_to_db()
    # try/finally guarantees the connection is closed even if an insert
    # or filter raises mid-loop (the original leaked the connection).
    try:
        for entry in data_dict:
            if entry.whenChanged.value > when_changed:
                if data_type == "user":
                    standardized_entry = inbound_user_filter(entry, "ldap")
                else:
                    standardized_entry = inbound_group_filter(entry, "ldap")
                entry_modified_timestamp = entry.whenChanged.value.strftime(
                    "%Y-%m-%dT%H:%M:%S.%f+00:00")
                inbound_entry = {
                    "data": standardized_entry,
                    "data_type": data_type,
                    "sync_type": "delta",
                    "timestamp": entry_modified_timestamp,
                    "provider_id": LDAP_DC,
                }
                LOGGER.debug(
                    "Inserting LDAP %s into inbound queue: %s",
                    data_type,
                    standardized_entry["remote_id"],
                )
                r.table("inbound_queue").insert(inbound_entry).run(conn)
                sync_source = "ldap-" + data_type
                provider_id = LDAP_DC
                save_sync_time(provider_id, sync_source, "delta",
                               entry_modified_timestamp)
                insertion_counter += 1
    finally:
        conn.close()
    LOGGER.info("Inserted %s records into inbound_queue.", insertion_counter)
def test_role_data_when_null():
    """Test that a group list stays null when it is None."""
    payload = {"id": "123-456-abs3", "members": None}
    # None must pass through the filter without being coerced to a list.
    assert inbound_group_filter(payload, "azure")["members"] is None
def test_inbound_group_filter():
    """Test the inbound group filter for azure transforms and returns a group dict."""
    filtered = inbound_group_filter({"id": 1234}, "azure")
    # The filter must produce a dict, rename "id" to "role_id",
    # and default unknown fields like "classification" to None.
    assert isinstance(filtered, dict)
    assert filtered["role_id"] == 1234
    assert "id" not in filtered
    assert filtered["classification"] is None
def insert_to_db(entry, data_type):
    """Insert user or group individually to RethinkDB from dict of data
    and begins delta sync timer.

    Args:
        entry: Raw LDAP entry dict to standardize and enqueue.
        data_type: "user" or "group"; any other value is logged and skipped.
    """
    if data_type == "user":
        standard_entry = inbound_user_filter(entry, "ldap")
    elif data_type == "group":
        standard_entry = inbound_group_filter(entry, "ldap")
    else:
        LOGGER.warning("unsupported data type: %s", data_type)
        return
    inbound_entry = {
        "data": standard_entry,
        "data_type": data_type,
        "sync_type": "initial",
        "timestamp": r.now(),
        "provider_id": LDAP_DC,
    }
    conn = connect_to_db()
    # try/finally ensures the connection is released even when the
    # insert raises (the original leaked the connection on error).
    try:
        LOGGER.debug(
            "Inserting LDAP %s into inbound queue: %s",
            data_type,
            standard_entry["remote_id"],
        )
        r.table("inbound_queue").insert(inbound_entry).run(conn)
    finally:
        conn.close()
def insert_to_db(data_dict, when_changed):
    """Insert (Users | Groups) individually to RethinkDB from dict of data
    and begins delta sync timer."""
    inserted = 0
    for entry in data_dict:
        # Guard clause: skip anything not modified after the cutoff.
        if not entry.whenChanged.value > when_changed:
            continue
        entry_data = json.loads(entry.entry_to_json())["attributes"]
        # objectClass decides which inbound filter standardizes the entry.
        if "person" in entry.objectClass.value:
            data_type = "user"
            standardized_entry = inbound_user_filter(entry_data, "ldap")
        else:
            data_type = "group"
            standardized_entry = inbound_group_filter(entry_data, "ldap")
        entry_modified_timestamp = entry.whenChanged.value.strftime(
            "%Y-%m-%dT%H:%M:%S.%f+00:00")
        inbound_entry = {
            "data": standardized_entry,
            "data_type": data_type,
            "sync_type": "delta",
            "timestamp": entry_modified_timestamp,
            "provider_id": LDAP_DC,
        }
        r.table("inbound_queue").insert(inbound_entry).run()
        sync_source = "ldap-" + data_type
        provider_id = LDAP_DC
        save_sync_time(provider_id, sync_source, "delta",
                       entry_modified_timestamp)
        inserted += 1
    LOGGER.info("Inserted %s records into inbound_queue.", inserted)
def test_ldap_group_changes(
        group_name,
        outbound_group_entry,
        expected_ldap_group,
        expected_write,
        ldap_connection,
):
    """
    Test writing to LDAP from outbound_queue entries in the following scenarios:
        1) Add a new LDAP group member, which should write successfully
        2) Process the same outbound_queue payload again, which should not
           write sucessfully

    Args:
        group_name: (str) Name of the LDAP group
        outbound_group_entry: (dict) A outbound_queue table entry. The
            mandatory keys in the dict are:
                {
                    "data": (dict containing current state of LDAP object)
                    "data_type": (str)
                }
        expected_ldap_group: (dict)
        expected_write: (bool) Whether a write to LDAP occurred
    """
    write_occurred = process_outbound_entry(outbound_group_entry, ldap_connection)
    # Fetch role from LDAP and standardize the role to be compared
    # against expected_group_payload
    ldap_group_entries = get_fake_group(ldap_connection, group_name)
    for ldap_entry in ldap_group_entries:
        normalized = inbound_group_filter(ldap_entry, "ldap")
        # created_date is nondeterministic, so drop it before comparing.
        normalized.pop("created_date")
        assert normalized == expected_ldap_group
    assert write_occurred == expected_write
def insert_to_db(data_dict, data_type):
    """Insert (Users | Groups) individually to RethinkDB from dict of data
    and begins delta sync timer.

    Args:
        data_dict: Iterable of LDAP entries supporting ``entry_to_json()``.
        data_type: "user" or "group"; any other value is logged and skipped.
    """
    # Validate up front: the original fell through to a NameError on
    # standardized_entry for any unrecognized data_type.
    if data_type not in ("user", "group"):
        LOGGER.warning("unsupported data type: %s", data_type)
        return
    for entry in data_dict:
        entry_to_insert = {}
        entry_json = json.loads(entry.entry_to_json())
        entry_attributes = entry_json["attributes"]
        for attribute in entry_attributes:
            # LDAP attribute values arrive as lists; unwrap single-valued
            # attributes to their scalar.
            if len(entry_attributes[attribute]) > 1:
                entry_to_insert[attribute] = entry_attributes[attribute]
            else:
                entry_to_insert[attribute] = entry_attributes[attribute][0]
        if data_type == "user":
            standardized_entry = inbound_user_filter(entry_to_insert, "ldap")
        else:
            standardized_entry = inbound_group_filter(entry_to_insert, "ldap")
        inbound_entry = {
            "data": standardized_entry,
            "data_type": data_type,
            "sync_type": "initial",
            "timestamp": datetime.now().replace(tzinfo=timezone.utc).isoformat(),
            "provider_id": LDAP_DC,
        }
        r.table("inbound_queue").insert(inbound_entry).run()
    LOGGER.info(
        "Inserted %s %s records into inbound_queue.", str(len(data_dict)), data_type
    )
def test_group_data_type_correct():
    """Test that a group list stays a list when a single value is in it."""
    payload = {"id": "123-456-abs3", "members": ["123-456-abs3"]}
    filtered = inbound_group_filter(payload, "azure")
    # A one-element member list must not be unwrapped to a scalar.
    assert filtered["members"] == ["123-456-abs3"]
def insert_group_to_db(groups_dict):
    """Insert groups individually to rethinkdb from dict of groups"""
    for group in groups_dict["value"]:
        owner = fetch_group_owner(group["id"])
        # A usable owner payload is truthy and carries no "error" key.
        has_owner = bool(owner) and "error" not in owner
        if has_owner:
            group["owners"] = get_ids_from_list_of_dicts(owner["value"])
        else:
            group["owners"] = []
        group["members"] = get_ids_from_list_of_dicts(group["members"])
        standardized_group = inbound_group_filter(group, "azure")
        queued_entry = {
            "data": standardized_group,
            "data_type": "group",
            "sync_type": "initial",
            "timestamp": dt.now().isoformat(),
            "provider_id": TENANT_ID,
        }
        r.table("queue_inbound").insert(queued_entry).run()
def insert_to_db(entry, data_type):
    """Insert user or group individually to RethinkDB from dict of data
    and begins delta sync timer."""
    # Map the data_type to its standardizing filter; bail on anything else.
    if data_type == "user":
        standard_entry = inbound_user_filter(entry, "ldap")
    elif data_type == "group":
        standard_entry = inbound_group_filter(entry, "ldap")
    else:
        LOGGER.warning("unsupported data type: %s", data_type)
        return
    inbound_entry = {
        "data": standard_entry,
        "data_type": data_type,
        "sync_type": "initial",
        "timestamp": r.now(),
        "provider_id": LDAP_DC,
    }
    # Record the transaction before enqueueing the entry.
    add_transaction(inbound_entry)
    r.table("inbound_queue").insert(inbound_entry).run()
def insert_to_db(data_dict, when_changed, data_type):
    """Insert (Users | Groups) individually to RethinkDB from dict of data
    and begins delta sync timer.

    Args:
        data_dict: Iterable of LDAP entries supporting ``entry_to_json()``
            and exposing a ``whenChanged`` attribute value.
        when_changed: Lower-bound timestamp; only entries modified strictly
            after this value are inserted.
        data_type: "user" or "group" — selects the inbound filter applied
            (anything other than "user" is treated as a group).
    """
    insertion_counter = 0
    conn = connect_to_db()
    # try/finally guarantees the connection is closed even if parsing,
    # filtering, or an insert raises (the original leaked the connection).
    try:
        for entry in data_dict:
            entry_to_insert = {}
            entry_json = json.loads(entry.entry_to_json())
            entry_attributes = entry_json["attributes"]
            for attribute in entry_attributes:
                # LDAP attribute values arrive as lists; unwrap
                # single-valued attributes to their scalar.
                if len(entry_attributes[attribute]) > 1:
                    entry_to_insert[attribute] = entry_attributes[attribute]
                else:
                    entry_to_insert[attribute] = entry_attributes[attribute][0]
            if entry.whenChanged.value > when_changed:
                if data_type == "user":
                    standardized_entry = inbound_user_filter(
                        entry_to_insert, "ldap")
                else:
                    standardized_entry = inbound_group_filter(
                        entry_to_insert, "ldap")
                entry_modified_timestamp = entry.whenChanged.value.strftime(
                    "%Y-%m-%dT%H:%M:%S.%f+00:00")
                inbound_entry = {
                    "data": standardized_entry,
                    "data_type": data_type,
                    "sync_type": "delta",
                    "timestamp": entry_modified_timestamp,
                    "provider_id": LDAP_DC,
                }
                add_transaction(inbound_entry)
                r.table("inbound_queue").insert(inbound_entry).run(conn)
                sync_source = "ldap-" + data_type
                provider_id = LDAP_DC
                save_sync_time(provider_id, sync_source, "delta",
                               entry_modified_timestamp)
                insertion_counter += 1
    finally:
        conn.close()
    LOGGER.info("Inserted %s records into inbound_queue.", insertion_counter)
def insert_to_db(data_dict, data_type):
    """Insert (Users | Groups) individually to RethinkDB from dict of data
    and begins delta sync timer.

    Args:
        data_dict: Iterable of LDAP entries supporting ``entry_to_json()``.
        data_type: "user" or "group"; any other value is logged and skipped.
    """
    # Validate up front: the original fell through to a NameError on
    # standardized_entry for any unrecognized data_type.
    if data_type not in ("user", "group"):
        LOGGER.warning("unsupported data type: %s", data_type)
        return
    for entry in data_dict:
        entry_data = json.loads(entry.entry_to_json())["attributes"]
        if data_type == "user":
            standardized_entry = inbound_filters.inbound_user_filter(entry_data, "ldap")
        else:
            standardized_entry = inbound_filters.inbound_group_filter(
                entry_data, "ldap"
            )
        inbound_entry = {
            "data": standardized_entry,
            "data_type": data_type,
            "timestamp": datetime.now().replace(tzinfo=timezone.utc).isoformat(),
            "provider_id": LDAP_DC,
        }
        r.table("queue_inbound").insert(inbound_entry).run()
    LOGGER.info(
        "Inserted %s %s records into inbound_queue.", str(len(data_dict)), data_type
    )
    # Schedule the next delta sync pass for this data type.
    Timer(
        DELTA_SYNC_INTERVAL_SECONDS, fetch_ldap_data, args=("delta", data_type)
    ).start()
def test_inbound_group_filter_bad_provider():
    """Test the inbound group filter with bad provider throws error"""
    # Call-form pytest.raises: asserts TypeError is raised by the call.
    pytest.raises(TypeError, inbound_group_filter, {"id": 1234}, "potato")
def test_inbound_group_filter():
    """Test the inbound group filter for azure transforms and returns a group dict."""
    filtered = inbound_group_filter({"id": "123-456-abs3"}, "azure")
    # The filter must yield a dict with "id" renamed to "remote_id".
    assert isinstance(filtered, dict)
    assert filtered["remote_id"] == "123-456-abs3"
    assert "id" not in filtered