Code Example #1
    def it_should_match_the_record(
        test_mssql_db: Tuple[MssqlLmsOperations, Connection]
    ):
        adapter, connection = test_mssql_db

        # arrange - note csv file has only B123456 from BestLMS
        insert_user(connection, "U123456", SOURCE_SYSTEM, 1)
        insert_record(connection, "B123456", SOURCE_SYSTEM)
        insert_record(connection, "F234567", "FirstLMS")

        # act
        run_loader(main_arguments(adapter, CSV_PATH))

        # assert - records are unchanged
        LMSSystemActivity = connection.execute(
            "SELECT SourceSystem, SourceSystemIdentifier, DeletedAt from lms.LMSSystemActivity"
        ).fetchall()
        assert len(LMSSystemActivity) == 2
        assert [SOURCE_SYSTEM, "FirstLMS"] == [
            x["SourceSystem"] for x in LMSSystemActivity
        ]
        assert ["B123456", "F234567"] == [
            x["SourceSystemIdentifier"] for x in LMSSystemActivity
        ]
        assert [None, None] == [x["DeletedAt"] for x in LMSSystemActivity]
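The arrange steps in these examples rely on shared test helpers (`insert_user`, `insert_record`, `insert_section`, and so on) that are defined elsewhere in the test suite. The sketch below only illustrates the general shape such a helper might take for the LMSSystemActivity case; the column list and literal defaults are assumptions, not the project's actual helper.

# A hypothetical arrange helper in the style these tests use. Column names
# beyond SourceSystemIdentifier/SourceSystem and the default values are
# assumptions; the real lms tables have more NOT NULL columns to populate.
from sqlalchemy import text
from sqlalchemy.engine import Connection


def insert_system_activity(
    connection: Connection, identifier: str, source_system: str
) -> None:
    # Insert one minimal row so the loader's soft-delete pass has data to act on.
    connection.execute(
        text(
            """
            INSERT INTO lms.LMSSystemActivity
                (SourceSystemIdentifier, SourceSystem, ActivityType,
                 ActivityDateTime, ActivityStatus)
            VALUES (:identifier, :source_system, 'sign-in',
                    CURRENT_TIMESTAMP, 'active')
            """
        ),
        {"identifier": identifier, "source_system": source_system},
    )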
Code Example #2
    def it_should_match_the_record(test_mssql_db: Tuple[MssqlLmsOperations,
                                                        Connection]):
        adapter, connection = test_mssql_db
        insert_user(connection, "U123456", SOURCE_SYSTEM, 1)
        insert_user(connection, "U123456", "FirstLMS", 2)

        insert_section(connection, "B098765", SOURCE_SYSTEM, 1)
        insert_section(connection, "F098765", "FirstLMS", 2)

        insert_record(connection, "B123456", SOURCE_SYSTEM, 1, 1)
        insert_record(connection, "F234567", "FirstLMS", 2, 2)

        # act
        run_loader(main_arguments(adapter, CSV_PATH))

        # assert - records are unchanged
        LMSUserLMSSectionAssociation = connection.execute(
            "SELECT SourceSystem, SourceSystemIdentifier, DeletedAt from lms.LMSUserLMSSectionAssociation"
        ).fetchall()
        assert len(LMSUserLMSSectionAssociation) == 2
        assert [SOURCE_SYSTEM, "FirstLMS"
                ] == [x["SourceSystem"] for x in LMSUserLMSSectionAssociation]
        assert ["B123456", "F234567"] == [
            x["SourceSystemIdentifier"] for x in LMSUserLMSSectionAssociation
        ]
        assert [None, None
                ] == [x["DeletedAt"] for x in LMSUserLMSSectionAssociation]
Code Example #3
    def it_should_match_the_record(test_pgsql_db: Tuple[SqlLmsOperations,
                                                        Connection,
                                                        ConnectionSettings]):
        adapter, connection, settings = test_pgsql_db
        insert_user(connection, "U123456", SOURCE_SYSTEM, 1)

        insert_section(connection, "B098765", SOURCE_SYSTEM, 1)
        insert_section(connection, "B109876", SOURCE_SYSTEM, 2)

        insert_record(connection, "B123456", SOURCE_SYSTEM, 1, 1)
        insert_record(connection, "B234567", SOURCE_SYSTEM, 2, 1)

        # act
        run_loader(main_arguments(adapter, CSV_PATH, settings))

        # assert - records are unchanged
        LMSSectionActivity = connection.execute(
            "select sourcesystem, sourcesystemidentifier, deletedat from lms.lmssectionactivity order by sourcesystemidentifier"
        ).fetchall()
        assert len(LMSSectionActivity) == 2
        assert [SOURCE_SYSTEM, SOURCE_SYSTEM
                ] == [x["sourcesystem"] for x in LMSSectionActivity]
        assert ["B123456", "B234567"
                ] == [x["sourcesystemidentifier"] for x in LMSSectionActivity]
        assert [None, None] == [x["deletedat"] for x in LMSSectionActivity]
Code Example #4
    def it_should_match_the_record(
        test_pgsql_db: Tuple[SqlLmsOperations, Connection, ConnectionSettings]
    ):
        adapter, connection, settings = test_pgsql_db

        # arrange - note csv file has only B123456 from BestLMS
        insert_user(connection, "U123456", SOURCE_SYSTEM, 1)
        insert_record(connection, "B123456", SOURCE_SYSTEM)
        insert_record(connection, "F234567", "FirstLMS")

        # act
        run_loader(main_arguments(adapter, CSV_PATH, settings))

        # assert - records are unchanged
        LMSSystemActivity = connection.execute(
            "select sourcesystem, sourcesystemidentifier, deletedat from lms.lmssystemactivity order by sourcesystemidentifier"
        ).fetchall()
        assert len(LMSSystemActivity) == 2
        assert [SOURCE_SYSTEM, "FirstLMS"] == [
            x["sourcesystem"] for x in LMSSystemActivity
        ]
        assert ["B123456", "F234567"] == [
            x["sourcesystemidentifier"] for x in LMSSystemActivity
        ]
        assert [None, None] == [x["deletedat"] for x in LMSSystemActivity]
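A side note on the queries above: the PostgreSQL examples read back lowercase column names because unquoted identifiers fold to lower case in PostgreSQL, while the SQL Server examples keep the PascalCase names. If the assertions were ever shared across both engines, a small normalization helper along these lines could be used (hypothetical, not part of the project, and assuming SQLAlchemy 1.x-style rows as the tests above do):

# Hypothetical helper: lower-case the row keys so the same assertions work
# against both SQL Server (PascalCase) and PostgreSQL (lowercase) results.
from typing import Any, Dict, List


def rows_with_lower_keys(rows: List[Any]) -> List[Dict[str, Any]]:
    # Each row supports .keys() and key lookup, as used by the tests above.
    return [{str(key).lower(): row[key] for key in row.keys()} for row in rows]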
Code Example #5
    def it_should_not_soft_delete_record_from_different_source_system(
        test_pgsql_db: Tuple[SqlLmsOperations, Connection,
                             ConnectionSettings]):
        operations_adapter, connection, settings = test_pgsql_db

        # arrange - note csv file has only B123456 from BestLMS. F234567 is from
        # a different source system. Although it is missing from the file, it
        # should not be soft deleted.
        insert_user(connection, "B123456", SOURCE_SYSTEM, 99898)
        insert_user(connection, "F234567", "FirstLMS", 99899)

        # act
        run_loader(main_arguments(operations_adapter, CSV_PATH, settings))

        # assert - records are unchanged
        LMSUser = connection.execute(
            "select sourcesystem, sourcesystemidentifier, deletedat from lms.lmsuser order by sourcesystemidentifier"
        ).fetchall()
        assert len(LMSUser) == 2

        def get_user(source_system: str) -> dict:
            query = [x for x in LMSUser if x["sourcesystem"] == source_system]
            assert len(query) == 1, f"No record returned for {source_system}"

            return query[0]

        first = get_user("FirstLMS")
        assert "F234567" == first[
            "sourcesystemidentifier"], "First sourcesystemidentifier"
        assert first["deletedat"] is None, "First deletedat"

        best = get_user(SOURCE_SYSTEM)
        assert "B123456" == best[
            "sourcesystemidentifier"], "Best sourcesystemidentifier"
        assert best["deletedat"] is None, "Best deletedat"
Code Example #6
    def it_should_match_the_record(test_pgsql_db: Tuple[SqlLmsOperations,
                                                        Connection,
                                                        ConnectionSettings]):
        adapter, connection, settings = test_pgsql_db

        section_identifier_1 = 13
        insert_section(connection, "B098765", SOURCE_SYSTEM,
                       section_identifier_1)
        section_identifier_2 = 14
        insert_section(connection, "B109876", SOURCE_SYSTEM,
                       section_identifier_2)

        insert_assignment(connection, "B123456", SOURCE_SYSTEM, 11,
                          section_identifier_1)
        insert_assignment(connection, "B234567", SOURCE_SYSTEM, 12,
                          section_identifier_2)

        # act
        run_loader(main_arguments(adapter, CSV_PATH, settings))

        # assert - records are unchanged
        Assignment = connection.execute(
            "select sourcesystem, sourcesystemidentifier, deletedat from lms.assignment order by sourcesystemidentifier"
        ).fetchall()
        assert len(Assignment) == 2
        assert [SOURCE_SYSTEM,
                SOURCE_SYSTEM] == [x["sourcesystem"] for x in Assignment]
        assert ["B123456", "B234567"
                ] == [x["sourcesystemidentifier"] for x in Assignment]
        assert [None, None] == [x["deletedat"] for x in Assignment]
Code Example #7
    def it_should_match_the_record(test_mssql_db: Tuple[SqlLmsOperations, Connection]):
        adapter, connection = test_mssql_db
        insert_user(connection, "U123456", SOURCE_SYSTEM, 1)

        insert_section(connection, "S098765", SOURCE_SYSTEM, 1)
        insert_section(connection, "S109876", SOURCE_SYSTEM, 2)

        insert_assignment(connection, "B098765", SOURCE_SYSTEM, 1, 1)
        insert_assignment(connection, "B109876", SOURCE_SYSTEM, 2, 2)

        insert_record(connection, "B123456", SOURCE_SYSTEM, 1, 1)
        insert_record(connection, "B234567", SOURCE_SYSTEM, 2, 1)

        # act
        run_loader(main_arguments(adapter, CSV_PATH))

        # assert - records are unchanged
        AssignmentSubmission = connection.execute(
            "SELECT SourceSystem, SourceSystemIdentifier, DeletedAt from lms.AssignmentSubmission"
        ).fetchall()
        assert len(AssignmentSubmission) == 2
        assert [SOURCE_SYSTEM, SOURCE_SYSTEM] == [
            x["SourceSystem"] for x in AssignmentSubmission
        ]
        assert ["B123456", "B234567"] == [
            x["SourceSystemIdentifier"] for x in AssignmentSubmission
        ]
        assert [None, None] == [x["DeletedAt"] for x in AssignmentSubmission]
Code Example #8
    def it_should_soft_delete_the_record(
        test_pgsql_db: Tuple[SqlLmsOperations, Connection,
                             ConnectionSettings]):
        adapter, connection, settings = test_pgsql_db

        # arrange - note csv file has only B123456
        user_identifier = 13
        insert_user(connection, "U123456", SOURCE_SYSTEM, user_identifier)

        section_identifier = 14
        insert_section(connection, "S098765", SOURCE_SYSTEM,
                       section_identifier)

        assignment_identifier = 15
        insert_assignment(connection, "B098765", SOURCE_SYSTEM,
                          assignment_identifier, section_identifier)

        insert_record(connection, "B123456", SOURCE_SYSTEM,
                      assignment_identifier, user_identifier)
        insert_record(connection, "B234567", SOURCE_SYSTEM,
                      assignment_identifier, user_identifier)

        # act
        run_loader(main_arguments(adapter, CSV_PATH, settings))

        # assert - B234567 has been soft deleted
        AssignmentSubmission = connection.execute(
            "select sourcesystemidentifier from lms.assignmentsubmission where deletedat is not null"
        ).fetchall()
        assert len(AssignmentSubmission) == 1
        assert AssignmentSubmission[0]["sourcesystemidentifier"] == "B234567"
Code Example #9
    def it_should_match_the_record(
        test_pgsql_db: Tuple[SqlLmsOperations, Connection, ConnectionSettings]
    ):
        adapter, connection, settings = test_pgsql_db
        user_identifier = 10
        insert_user(connection, "U123456", SOURCE_SYSTEM, user_identifier)

        section_identifier_1 = 11
        insert_section(connection, "B098765", SOURCE_SYSTEM, section_identifier_1)
        section_identifier_2 = 12
        insert_section(connection, "B109876", SOURCE_SYSTEM, section_identifier_2)

        insert_user_section_association(connection, "B123456", SOURCE_SYSTEM, 11, user_identifier, section_identifier_1)
        insert_user_section_association(connection, "B234567", SOURCE_SYSTEM, 12, user_identifier, section_identifier_2)

        # act
        run_loader(main_arguments(adapter, CSV_PATH, settings))

        # assert - records are unchanged
        LMSUserLMSSectionAssociation = connection.execute(
            "select sourcesystem, sourcesystemidentifier, deletedat from lms.lmsuserlmssectionassociation order by sourcesystemidentifier"
        ).fetchall()
        assert len(LMSUserLMSSectionAssociation) == 2
        assert [SOURCE_SYSTEM, SOURCE_SYSTEM] == [
            x["sourcesystem"] for x in LMSUserLMSSectionAssociation
        ]
        assert ["B123456", "B234567"] == [
            x["sourcesystemidentifier"] for x in LMSUserLMSSectionAssociation
        ]
        assert [None, None] == [x["deletedat"] for x in LMSUserLMSSectionAssociation]
Code Example #10
    def it_should_match_the_record(test_pgsql_db: Tuple[SqlLmsOperations,
                                                        Connection,
                                                        ConnectionSettings]):
        adapter, connection, settings = test_pgsql_db
        insert_user(connection, "U123456", SOURCE_SYSTEM, 1)
        insert_user(connection, "U123456", "FirstLMS", 2)

        insert_section(connection, "B098765", SOURCE_SYSTEM, 1)
        insert_section(connection, "F098765", "FirstLMS", 2)

        insert_user_section_association(connection, "UB123456", SOURCE_SYSTEM,
                                        1, 1, 1)
        insert_user_section_association(connection, "UF123456", SOURCE_SYSTEM,
                                        2, 2, 2)

        insert_record(connection, "B123456", SOURCE_SYSTEM, 1, 1, 1)
        insert_record(connection, "F234567", "FirstLMS", 2, 2, 2)

        # act
        run_loader(main_arguments(adapter, CSV_PATH, settings))

        # assert - records are unchanged
        LMSUserAttendanceEvent = connection.execute(
            "select sourcesystem, sourcesystemidentifier, deletedat from lms.lmsuserattendanceevent order by sourcesystemidentifier"
        ).fetchall()
        assert len(LMSUserAttendanceEvent) == 2
        assert [SOURCE_SYSTEM, "FirstLMS"
                ] == [x["sourcesystem"] for x in LMSUserAttendanceEvent]
        assert ["B123456", "F234567"] == [
            x["sourcesystemidentifier"] for x in LMSUserAttendanceEvent
        ]
        assert [None, None] == [x["deletedat"] for x in LMSUserAttendanceEvent]
Code Example #11
        def it_bubbles_up_the_error(mocker) -> None:
            # Arrange
            args_mock = MagicMock(spec=MainArguments)
            db_engine_mock = Mock()
            args_mock.get_adapter.return_value = db_engine_mock

            args_mock.csv_path = "/some/path"

            db_adapter_mock = Mock()
            db_adapter_mock.get_processed_files = Mock(
                return_value=set(["fileOne"]))
            args_mock.get_db_operations_adapter.return_value = db_adapter_mock

            migrator_mock = MagicMock(spec=migrator.migrate)
            mocker.patch("edfi_lms_ds_loader.migrator.migrate", migrator_mock)

            def __raise(csv_path) -> None:
                raise Exception("bad things")

            mocker.patch("edfi_lms_file_utils.file_reader.read_users_file",
                         side_effect=__raise)

            file_repository_mock = Mock(return_value=["fileOne", "fileThree"])
            mocker.patch(
                "edfi_lms_file_utils.file_repository.get_users_file_paths",
                file_repository_mock,
            )

            # Act
            with pytest.raises(Exception):
                run_loader(args_mock)
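A small optional refinement (standard pytest, not from the project): the `match` argument would additionally pin the assertion to the message raised by `__raise`, so a different exception could not satisfy the bare `pytest.raises(Exception)`.

            # Hypothetical variant of the assertion above, using pytest's
            # `match` to check that the original "bad things" message bubbles up.
            with pytest.raises(Exception, match="bad things"):
                run_loader(args_mock)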
Code Example #12
    def it_should_soft_delete_the_record(test_mssql_db: Tuple[SqlLmsOperations,
                                                              Connection]):
        adapter, connection = test_mssql_db

        # arrange - note csv file has only B123456
        insert_record(connection, "B123456", SOURCE_SYSTEM)
        insert_record(connection, "B234567", SOURCE_SYSTEM)

        # act
        run_loader(main_arguments(adapter, CSV_PATH))

        # assert - B234567 has been soft deleted
        LMSUser = connection.execute(
            "SELECT SourceSystemIdentifier from lms.LMSUser WHERE DeletedAt IS NOT NULL"
        ).fetchall()
        assert len(LMSUser) == 1
        assert LMSUser[0]["SourceSystemIdentifier"] == "B234567"
Code Example #13
    def it_should_soft_delete_the_record(
        test_pgsql_db: Tuple[SqlLmsOperations, Connection,
                             ConnectionSettings]):
        operations_adapter, connection, settings = test_pgsql_db

        # arrange - note csv file has only B123456
        insert_user(connection, "B123456", SOURCE_SYSTEM, 9998)
        insert_user(connection, "B234567", SOURCE_SYSTEM, 9999)

        # act
        run_loader(main_arguments(operations_adapter, CSV_PATH, settings))

        # assert - B234567 has been soft deleted
        LMSUser = connection.execute(
            "select sourcesystemidentifier from lms.lmsuser where deletedat is not null"
        ).fetchall()
        assert len(LMSUser) == 1
        assert LMSUser[0]["sourcesystemidentifier"] == "B234567"
Code Example #14
    def it_should_match_the_record(test_pgsql_db: Tuple[SqlLmsOperations,
                                                        Connection,
                                                        ConnectionSettings]):
        adapter, connection, settings = test_pgsql_db

        user_identifier = 11
        insert_user(connection, "U123456", SOURCE_SYSTEM, user_identifier)
        user_identifier = 12
        insert_user(connection, "U123456", "FirstLMS", user_identifier)

        section_identifier = 13
        insert_section(connection, "S098765", SOURCE_SYSTEM,
                       section_identifier)
        section_identifier = 14
        insert_section(connection, "S098765", "FirstLMS", section_identifier)

        assignment_identifier_1 = 15
        insert_assignment(connection, "B098765", SOURCE_SYSTEM,
                          assignment_identifier_1, section_identifier)
        assignment_identifier_2 = 16
        insert_assignment(connection, "F098765", SOURCE_SYSTEM,
                          assignment_identifier_2, section_identifier)

        insert_record(connection, "B123456", SOURCE_SYSTEM,
                      assignment_identifier_1, user_identifier)
        insert_record(connection, "F234567", "FirstLMS",
                      assignment_identifier_2, user_identifier)

        # act
        run_loader(main_arguments(adapter, CSV_PATH, settings))

        # assert - records are unchanged
        AssignmentSubmission = connection.execute(
            "select sourcesystem, sourcesystemidentifier, deletedat from lms.assignmentsubmission order by sourcesystemidentifier"
        ).fetchall()
        assert len(AssignmentSubmission) == 2
        assert [SOURCE_SYSTEM, "FirstLMS"
                ] == [x["sourcesystem"] for x in AssignmentSubmission]
        assert ["B123456", "F234567"] == [
            x["sourcesystemidentifier"] for x in AssignmentSubmission
        ]
        assert [None, None] == [x["deletedat"] for x in AssignmentSubmission]
Code Example #15
    def it_should_soft_delete_the_record(
            test_mssql_db: Tuple[MssqlLmsOperations, Connection]):
        adapter, connection = test_mssql_db

        # arrange - note csv file has only B123456
        insert_user(connection, "U123456", SOURCE_SYSTEM, 1)
        insert_section(connection, "S098765", SOURCE_SYSTEM, 1)
        insert_assignment(connection, "B098765", SOURCE_SYSTEM, 1, 1)

        insert_record(connection, "B123456", SOURCE_SYSTEM, 1, 1)
        insert_record(connection, "B234567", SOURCE_SYSTEM, 1, 1)

        # act
        run_loader(main_arguments(adapter, CSV_PATH))

        # assert - B234567 has been soft deleted
        AssignmentSubmission = connection.execute(
            "SELECT SourceSystemIdentifier from lms.AssignmentSubmission WHERE DeletedAt IS NOT NULL"
        ).fetchall()
        assert len(AssignmentSubmission) == 1
        assert AssignmentSubmission[0]["SourceSystemIdentifier"] == "B234567"
Code Example #16
    def it_should_soft_delete_the_record(
        test_pgsql_db: Tuple[SqlLmsOperations, Connection,
                             ConnectionSettings]):
        adapter, connection, settings = test_pgsql_db

        # arrange - note csv file has only B123456
        insert_user(connection, "U123456", SOURCE_SYSTEM, 1)
        insert_section(connection, "B098765", SOURCE_SYSTEM, 1)
        insert_user_section_association(connection, "UB123456", SOURCE_SYSTEM,
                                        1, 1, 1)

        insert_record(connection, "B123456", SOURCE_SYSTEM, 1, 1, 1)
        insert_record(connection, "B234567", SOURCE_SYSTEM, 1, 1, 1)

        # act
        run_loader(main_arguments(adapter, CSV_PATH, settings))

        # assert - B234567 has been soft deleted
        LMSUserAttendanceEvent = connection.execute(
            "select sourcesystemidentifier from lms.lmsuserattendanceevent where deletedat is not null"
        ).fetchall()
        assert len(LMSUserAttendanceEvent) == 1
        assert LMSUserAttendanceEvent[0]["sourcesystemidentifier"] == "B234567"
Code Example #17
File: __main__.py    Project: stephenfuqua/Ed-Fi-X-Fizz
def _run_loader(arguments: MainArguments) -> None:
    run_loader(arguments)
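For context, a thin wrapper like `_run_loader` is typically invoked from the module entry point once the command-line arguments have been parsed into a `MainArguments` instance. The sketch below is hypothetical and deliberately elides the argument parsing, which belongs to the project's own `__main__.py`; `_parse_main_arguments` is an invented placeholder.

# Hypothetical entry-point shape only; _parse_main_arguments is a placeholder
# for however the project builds its MainArguments from the command line.
import sys


def _main() -> None:
    try:
        arguments = _parse_main_arguments()  # placeholder, not a real function
        _run_loader(arguments)
    except Exception:
        # Exit non-zero so schedulers and CI notice the failure.
        sys.exit(1)


if __name__ == "__main__":
    _main()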
Code Example #18
        def fixture(
                mocker
        ) -> Tuple[Dict[str, MagicMock], Dict[str, pd.DataFrame]]:
            # Arrange
            args_mock = MagicMock(spec=MainArguments)
            args_mock.engine = DbEngine.MSSQL

            db_engine_mock = MagicMock()
            args_mock.get_adapter.return_value = db_engine_mock

            args_mock.csv_path = "/some/path"

            db_adapter_mock = Mock()
            db_adapter_mock.get_processed_files = Mock(
                return_value=set(["FullPathOne"]))
            db_adapter_mock.engine = DbEngine.MSSQL

            args_mock.get_db_operations_adapter.return_value = db_adapter_mock

            migrator_mock = MagicMock(spec=migrator.migrate)
            migrator_mock.engine = DbEngine.MSSQL
            mocker.patch("edfi_lms_ds_loader.migrator.migrate", migrator_mock)

            fake_df_users = pd.DataFrame({"generic_df": [1, 2, 3]})
            mocker.patch(
                "edfi_lms_file_utils.file_reader.read_users_file",
                return_value=fake_df_users,
            )

            fake_df_sections = pd.DataFrame([{"SourceSystemIdentifier": "a"}])
            mocker.patch(
                "edfi_lms_file_utils.file_reader.read_sections_file",
                return_value=fake_df_sections,
            )

            mocker.patch(
                "edfi_lms_ds_loader.loader_facade._get_sections_df",
                return_value=fake_df_sections,
            )

            fake_df_assignments = pd.DataFrame([
                {
                    "LMSSectionSourceSystemIdentifier": "a"
                },
                {
                    "SourceSystemIdentifier": "b"
                },
            ])
            mocker.patch(
                "edfi_lms_ds_loader.loader_facade._get_assignments_df",
                return_value=fake_df_assignments,
            )

            mocker.patch(
                "edfi_lms_file_utils.file_reader.read_assignments_file",
                return_value=fake_df_assignments,
            )

            fake_df_section_associations = pd.DataFrame([{
                "associations": "b"
            }])
            mocker.patch(
                "edfi_lms_file_utils.file_reader.read_section_associations_file",
                return_value=fake_df_section_associations,
            )

            fake_df_section_activities = pd.DataFrame([{"activities": "b"}])
            mocker.patch(
                "edfi_lms_file_utils.file_reader.read_section_activities_file",
                return_value=fake_df_section_activities,
            )

            fake_df_system_activities = pd.DataFrame([{"activities": "b"}])
            mocker.patch(
                "edfi_lms_file_utils.file_reader.read_system_activities_file",
                return_value=fake_df_system_activities,
            )

            fake_df_assignment_submissions = pd.DataFrame([{
                "submissions": "b"
            }])
            mocker.patch(
                "edfi_lms_file_utils.file_reader.read_submissions_file",
                return_value=fake_df_assignment_submissions,
            )

            fake_df_attendance_events = pd.DataFrame([{"date": "c"}])
            mocker.patch(
                "edfi_lms_file_utils.file_reader.read_attendance_events_file",
                return_value=fake_df_attendance_events,
            )

            mock_upload_file = mocker.patch(
                "edfi_lms_ds_loader.df_to_db.upload_file")
            mock_upload_assignments_file = mocker.patch(
                "edfi_lms_ds_loader.df_to_db.upload_assignments")
            mock_upload_section_associations_file = mocker.patch(
                "edfi_lms_ds_loader.df_to_db.upload_section_associations")
            mock_upload_section_activities_file = mocker.patch(
                "edfi_lms_ds_loader.df_to_db.upload_section_activities")
            mock_upload_system_activities_file = mocker.patch(
                "edfi_lms_ds_loader.df_to_db.upload_system_activities")
            mock_upload_assignment_submissions_file = mocker.patch(
                "edfi_lms_ds_loader.df_to_db.upload_assignment_submissions")

            mock_upload_attendance_events_file = mocker.patch(
                "edfi_lms_ds_loader.df_to_db.upload_attendance_events")

            mocks = {
                "migrate":
                migrator_mock,
                "get_db_operations_adapter":
                db_adapter_mock,
                "get_adapter":
                db_engine_mock,
                "upload_file":
                mock_upload_file,
                "upload_assignments_file":
                mock_upload_assignments_file,
                "upload_section_associations_file":
                mock_upload_section_associations_file,
                "upload_section_activities_file":
                mock_upload_section_activities_file,
                "upload_system_activities_file":
                mock_upload_system_activities_file,
                "upload_assignment_submissions_file":
                mock_upload_assignment_submissions_file,
                "upload_attendance_events_file":
                mock_upload_attendance_events_file,
            }

            dfs = {
                "users": fake_df_users,
                "sections": fake_df_sections,
                "assignments": fake_df_assignments,
                "section_associations": fake_df_section_associations,
                "section_activities": fake_df_section_activities,
                "system_activities": fake_df_system_activities,
                "assignment_submissions": fake_df_assignment_submissions,
                "attendance_events": fake_df_attendance_events,
            }

            file_repository_sections_mock = Mock(
                return_value=["fileOne", "fileTwo"])
            mocker.patch(
                "edfi_lms_file_utils.file_repository.get_sections_file_paths",
                file_repository_sections_mock,
            )

            file_repository_users_mock = Mock(
                return_value=["fileFour", "fileSix"])
            mocker.patch(
                "edfi_lms_file_utils.file_repository.get_users_file_paths",
                file_repository_users_mock,
            )

            file_repository_assignments_mock = Mock(
                return_value=["fileSeven", "fileEighth"])
            mocker.patch(
                "edfi_lms_file_utils.file_repository.get_assignments_file_paths",
                file_repository_assignments_mock,
            )

            file_repository_section_associations_mock = Mock(
                return_value=["fileNine", "fileTen"])
            mocker.patch(
                "edfi_lms_file_utils.file_repository.get_section_associations_file_paths",
                file_repository_section_associations_mock,
            )

            file_repository_assignment_submissions_mock = Mock(
                return_value=["fileEleven", "fileTwelve"])
            mocker.patch(
                "edfi_lms_file_utils.file_repository.get_submissions_file_paths",
                file_repository_assignment_submissions_mock,
            )

            file_repository_section_activities_mock = Mock(
                return_value=["file13", "file14"])
            mocker.patch(
                "edfi_lms_file_utils.file_repository.get_section_activities_file_paths",
                file_repository_section_activities_mock,
            )
            file_repository_system_activities_mock = Mock(
                return_value=["file15", "file16"])
            mocker.patch(
                "edfi_lms_file_utils.file_repository.get_system_activities_file_paths",
                file_repository_system_activities_mock,
            )

            file_repository_attendance_mock = Mock(
                return_value=["file17", "file18"])
            mocker.patch(
                "edfi_lms_file_utils.file_repository.get_attendance_events_paths",
                file_repository_attendance_mock,
            )

            # Act
            run_loader(args_mock)

            # Return the mock objects for examination
            return (mocks, dfs)
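Finally, a sketch (not taken from the project) of how a test might consume the fixture above: unpack the mock dictionary it returns and assert that every patched step was invoked. Only generic `unittest.mock` assertions are used, so no call signatures of the `df_to_db` functions are assumed.

# Hypothetical consumer of the fixture above. It relies only on the mock
# dictionary the fixture returns and on generic unittest.mock assertions.
from typing import Dict, Tuple
from unittest.mock import MagicMock

import pandas as pd


def it_runs_migrations_and_uploads_every_file(
    fixture: Tuple[Dict[str, MagicMock], Dict[str, pd.DataFrame]]
) -> None:
    mocks, _dfs = fixture

    # The migrator and every upload function should have been exercised by
    # run_loader; exact arguments are deliberately not asserted here.
    for name in (
        "migrate",
        "upload_file",
        "upload_assignments_file",
        "upload_section_associations_file",
        "upload_section_activities_file",
        "upload_system_activities_file",
        "upload_assignment_submissions_file",
        "upload_attendance_events_file",
    ):
        mocks[name].assert_called()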