def test_process_fieldserver_file_mass_records(self, uow):
        """Records are processed in batches of 200, with a final short batch.

        401 input lines should produce exactly three calls to
        process_fieldserver_records (200 + 200 + 1) and an aggregated
        good-record count across all batches.
        """
        handler = FieldserverFileHandler()
        handler.logger = Mock()
        handler.date_str = Mock()

        good_file = MagicMock()
        bad_file = MagicMock()

        # Every batch reports the same summary from the mocked batch processor.
        batch_summary = ProcessFieldserverRecordsSummaryReturn()
        batch_summary.num_good_records = 200
        batch_summary.num_bad_records = 0
        batch_summary.num_global_vendor_point_records = 0
        handler.process_fieldserver_records = Mock(return_value=batch_summary)

        # 400 newline-terminated records plus one trailing record without a
        # newline, forcing a final batch of a single record.
        stream = io.StringIO(u'{"name": "record"}\n' * 400 + u'{"name": "record"}')

        full_batch = [{"name": "record"}] * 200

        rv = handler.process_fieldserver_file(stream, good_file, bad_file)

        expected_calls = [
            call(full_batch, good_file, bad_file),
            call(full_batch, good_file, bad_file),
            call([{"name": "record"}], good_file, bad_file),
        ]
        handler.process_fieldserver_records.assert_has_calls(expected_calls)
        assert rv.num_good_records == 600

    def test_process_fieldserver_file(self, uow):
        """A small file is parsed and handed off as a single batch.

        The summary returned by the (mocked) batch processor must be
        propagated unchanged to the caller.
        """
        handler = FieldserverFileHandler()
        handler.logger = Mock()
        handler.date_str = Mock()

        good_file = MagicMock()
        bad_file = MagicMock()

        summary = ProcessFieldserverRecordsSummaryReturn()
        summary.num_good_records = 3
        summary.num_bad_records = 1
        summary.num_global_vendor_point_records = 2
        handler.process_fieldserver_records = Mock(return_value=summary)

        expected_records = [{"name": "record%d" % i} for i in range(6)]
        # Join without a trailing newline so the last record ends the stream.
        stream = io.StringIO(u"\n".join(json.dumps(r) for r in expected_records))

        rv = handler.process_fieldserver_file(stream, good_file, bad_file)

        handler.process_fieldserver_records.assert_called_with(
            expected_records, good_file, bad_file)
        assert rv.num_good_records == 3
        assert rv.num_bad_records == 1
        assert rv.num_global_vendor_point_records == 2

    def test_process_fieldserver_records(self, uow):
        """Good/bad records are written line-by-line and counts summarized.

        Unmapped vendor points must be forwarded to
        handle_unmapped_vendor_points rather than written to either file.
        """
        handler = FieldserverFileHandler()
        handler.logger = Mock()
        handler.date_str = Mock()
        handler.handle_unmapped_vendor_points = Mock()

        processed = ProcessFieldserverRecordsReturn()
        processed.good_records = [{"name": "record0"}, {"name": "record1"}]
        processed.bad_records = [{"name": "record2"}, {"name": "record3"}]
        processed.unmapped_vendor_points = [{"name": "record4"}, {"name": "record5"}]
        processed.global_vendor_point_records = [{'name': 'record6'}, {'name': 'record7'}]
        handler.get_processed_fieldserver_records = Mock(return_value=processed)
        handler.make_global_vendor_point_records_unique = Mock(
            return_value=processed.global_vendor_point_records)

        input_records = [MagicMock(), MagicMock()]
        good_file = io.StringIO()
        bad_file = io.StringIO()

        rv = handler.process_fieldserver_records(input_records, good_file, bad_file)

        handler.get_processed_fieldserver_records.assert_called_with(input_records)

        # Each good record should occupy its own JSON line in the good file.
        good_lines = good_file.getvalue().strip().split("\n")
        assert [json.loads(line) for line in good_lines] == processed.good_records

        # Likewise for rejected records in the bad file.
        bad_lines = bad_file.getvalue().strip().split("\n")
        assert [json.loads(line) for line in bad_lines] == processed.bad_records

        handler.handle_unmapped_vendor_points.assert_called_with(
            processed.unmapped_vendor_points)

        assert rv.num_good_records == 2
        assert rv.num_bad_records == 2
        assert rv.num_unmapped_vendor_points == 2