def test_process_fieldserver_file_path_no_bad_records(self, move, remove, open_mock, path_join, named_temporary_file, uow):
    """When processing reports zero bad records, both the source file and the
    temporary bad-record file are removed, and the temp file is closed."""
    handler = FieldserverFileHandler()
    handler.logger = Mock()
    handler.date_str = Mock()

    summary = ProcessFieldserverRecordsSummaryReturn()
    summary.num_bad_records = 0
    handler.process_fieldserver_file = Mock(return_value=summary)

    file_path = MagicMock()
    good_record_file_path = path_join.return_value
    bad_record_file = named_temporary_file.return_value
    good_record_file = MagicMock()
    read_file = MagicMock()

    def fake_open(path, mode):
        # Route the two expected open() calls to their dedicated mocks;
        # anything else gets None so a wrong call fails loudly downstream.
        if path == good_record_file_path and mode == "a":
            return good_record_file
        if path == file_path and mode == "r":
            return read_file
        return None

    open_mock.side_effect = fake_open

    handler.process_fieldserver_file_path(file_path)

    # The bad-record temp file must be created non-self-deleting.
    named_temporary_file.assert_called_with(delete=False)
    # The handler reads via the context-managed file objects.
    handler.process_fieldserver_file.assert_called_with(
        read_file.__enter__.return_value,
        good_record_file.__enter__.return_value,
        bad_record_file)
    remove.assert_has_calls([call(file_path), call(bad_record_file.name)])
    bad_record_file.close.assert_called_with()
def test_handle_unmapped_vendor_points(self, uow):
    """Unmapped vendor points already present in the DB are filtered out; only
    genuinely new (site_id, offset) pairs are inserted, stamped with date_added."""
    handler = FieldserverFileHandler()
    handler.date_str = "date"

    unmapped_vendor_points = [
        {"source": "fieldserver", "fieldserver_site_id": "123456", "fieldserver_offset": offset}
        for offset in (0, 1, 2)
    ]
    detupled_keys = [["123456", 0], ["123456", 1], ["123456", 2]]

    db_query_call = uow.return_value.data_mapping.get_unknown_vendor_points_for_fieldserver_site_id_offset
    # The DB already knows offsets 0 and 1 — only offset 2 should survive filtering.
    db_query_call.return_value = [
        {"source": "fieldserver", "fieldserver_site_id": "123456", "fieldserver_offset": 0, "date_added": "date"},
        {"source": "fieldserver", "fieldserver_site_id": "123456", "fieldserver_offset": 1, "date_added": "date"},
    ]
    db_insert_call = uow.return_value.data_mapping.insert_unknown_vendor_points

    handler.handle_unmapped_vendor_points(unmapped_vendor_points)

    # Order of the lookup keys is not part of the contract — compare sorted.
    db_query_call_args = db_query_call.call_args[0][0]
    assert sorted(db_query_call_args) == sorted(detupled_keys)

    expected_insert = [
        {"source": "fieldserver", "fieldserver_site_id": "123456", "fieldserver_offset": 2, "date_added": "date"}
    ]
    db_insert_call.assert_called_with(expected_insert)
def test_process_fieldserver_file_mass_records(self, uow):
    """Records are processed in batches of 200; a trailing partial batch is
    flushed on its own and the per-batch summaries are accumulated."""
    handler = FieldserverFileHandler()
    handler.logger = Mock()
    handler.date_str = Mock()
    good_record_file = MagicMock()
    bad_record_file = MagicMock()

    summary = ProcessFieldserverRecordsSummaryReturn()
    summary.num_good_records = 200
    summary.num_bad_records = 0
    summary.num_global_vendor_point_records = 0
    handler.process_fieldserver_records = Mock(return_value=summary)

    # 400 full-batch records plus one extra to force a small final batch.
    record_line = u'{"name": "record"}'
    read_file_stream = io.StringIO(u"\n".join([record_line] * 401))

    full_batch = [{"name": "record"} for _ in range(200)]

    rv = handler.process_fieldserver_file(read_file_stream, good_record_file, bad_record_file)

    handler.process_fieldserver_records.assert_has_calls([
        call(full_batch, good_record_file, bad_record_file),
        call(full_batch, good_record_file, bad_record_file),
        call([{"name": "record"}], good_record_file, bad_record_file),
    ])
    # Three batches, each reporting 200 good records via the mocked summary.
    assert rv.num_good_records == 600
def test_process_fieldserver_file(self, uow):
    """A stream of newline-delimited JSON records is parsed into dicts, passed
    to process_fieldserver_records, and the summary counts are returned."""
    handler = FieldserverFileHandler()
    handler.logger = Mock()
    handler.date_str = Mock()
    good_record_file = MagicMock()
    bad_record_file = MagicMock()

    summary = ProcessFieldserverRecordsSummaryReturn()
    summary.num_good_records = 3
    summary.num_bad_records = 1
    summary.num_global_vendor_point_records = 2
    handler.process_fieldserver_records = Mock(return_value=summary)

    record_lines = [u'{"name": "record%d"}' % i for i in range(6)]
    read_file_stream = io.StringIO(u"\n".join(record_lines))
    parsed_list = [{"name": "record%d" % i} for i in range(6)]

    rv = handler.process_fieldserver_file(read_file_stream, good_record_file, bad_record_file)

    handler.process_fieldserver_records.assert_called_with(parsed_list, good_record_file, bad_record_file)
    assert rv.num_good_records == 3
    assert rv.num_bad_records == 1
    assert rv.num_global_vendor_point_records == 2
def test_process_fieldserver_filename_exception(self, path_join, uow):
    """An exception raised while processing a file path is caught and logged
    rather than propagating out of process_fieldserver_filename."""
    handler = FieldserverFileHandler()
    handler.logger = Mock()
    handler.date_str = Mock()
    handler.process_fieldserver_file_path = Mock(side_effect=Exception())

    containing_directory = Mock()
    file_name = Mock()

    # Must not raise despite the mocked failure.
    handler.process_fieldserver_filename(0, containing_directory, file_name, 1)

    path_join.assert_called_with(containing_directory, file_name)
    handler.process_fieldserver_file_path.assert_called_with(path_join.return_value)
    assert handler.logger.exception.call_count == 1
def test_process_fieldserver_records(self, uow):
    """Good and bad records are written as JSON lines to their respective
    files, unmapped vendor points are handed off, and counts are summarized."""
    handler = FieldserverFileHandler()
    handler.logger = Mock()
    handler.date_str = Mock()
    handler.handle_unmapped_vendor_points = Mock()

    processed = ProcessFieldserverRecordsReturn()
    processed.good_records = [{"name": "record0"}, {"name": "record1"}]
    processed.bad_records = [{"name": "record2"}, {"name": "record3"}]
    processed.unmapped_vendor_points = [{"name": "record4"}, {"name": "record5"}]
    processed.global_vendor_point_records = [{'name': 'record6'}, {'name': 'record7'}]
    handler.get_processed_fieldserver_records = Mock(return_value=processed)
    handler.make_global_vendor_point_records_unique = Mock(
        return_value=processed.global_vendor_point_records)

    records = [MagicMock(), MagicMock()]
    good_record_file = io.StringIO()
    bad_record_file = io.StringIO()

    rv = handler.process_fieldserver_records(records, good_record_file, bad_record_file)

    handler.get_processed_fieldserver_records.assert_called_with(records)

    def written_lines(stream):
        # Collect the newline-delimited JSON lines written to a StringIO.
        stream.seek(0)
        return stream.read().strip().split("\n")

    good_lines = written_lines(good_record_file)
    assert len(good_lines) == 2
    assert json.loads(good_lines[0]) == {"name": "record0"}
    assert json.loads(good_lines[1]) == {"name": "record1"}

    bad_lines = written_lines(bad_record_file)
    assert len(bad_lines) == 2
    assert json.loads(bad_lines[0]) == {"name": "record2"}
    assert json.loads(bad_lines[1]) == {"name": "record3"}

    handler.handle_unmapped_vendor_points.assert_called_with(processed.unmapped_vendor_points)
    assert rv.num_good_records == 2
    assert rv.num_bad_records == 2
    assert rv.num_unmapped_vendor_points == 2