Example #1
def _build_bulk_refusal_file_with_gubbins(context, dodgy_ids=False):
    # Build a bulk refusal file with a row for each stored case created event
    context.bulk_refusals_file = RESOURCE_FILE_PATH.joinpath(
        'bulk_processing_files', 'refusal_bulk_test.csv')
    context.bulk_refusals = {}

    for case_created in context.case_created_events:
        case_id = case_created['payload']['collectionCase']['id']

        if dodgy_ids:
            case_id = str(uuid.uuid4())

        context.bulk_refusals[case_id] = random.choice(
            ('HARD_REFUSAL', 'EXTRAORDINARY_REFUSAL'))
    test_helper.assertGreater(
        len(context.bulk_refusals), 0,
        'Must have at least one refusal for this test to be valid')

    with open(context.bulk_refusals_file, 'w') as bulk_refusal_file_write:
        writer = csv.DictWriter(bulk_refusal_file_write,
                                fieldnames=['case_id', 'refusal_type'])
        writer.writeheader()
        for case_id, refusal_type in context.bulk_refusals.items():
            writer.writerow({'case_id': case_id, 'refusal_type': refusal_type})

    # Upload the file to a real bucket if one is configured
    if Config.BULK_REFUSAL_BUCKET_NAME:
        clear_bucket(Config.BULK_REFUSAL_BUCKET_NAME)
        upload_file_to_bucket(
            context.bulk_refusals_file,
            f'refusals_acceptance_tests_{datetime.utcnow().strftime("%Y%m%d-%H%M%S")}.csv',
            Config.BULK_REFUSAL_BUCKET_NAME)
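
The clear_bucket and upload_file_to_bucket helpers used here (and in the later examples) are defined elsewhere in the test suite. A minimal sketch of what they might look like on top of the google-cloud-storage client library; this is an assumption, the real helpers may differ:

from google.cloud import storage


def clear_bucket(bucket_name):
    # Hypothetical sketch: delete every blob so stale files from earlier
    # test runs cannot interfere with this one
    client = storage.Client()
    for blob in client.list_blobs(bucket_name):
        blob.delete()


def upload_file_to_bucket(file_path, destination_name, bucket_name):
    # Hypothetical sketch: upload the locally built CSV under the
    # timestamped object name the caller supplies
    client = storage.Client()
    client.bucket(bucket_name).blob(destination_name).upload_from_filename(
        str(file_path))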
Example #2
def build_uninvalidated_address_bulk_file(context):
    # Build a bulk un-invalidated addresses file with a row for each stored case created event
    context.bulk_uninvalidated_addresses_file = RESOURCE_FILE_PATH.joinpath(
        'bulk_processing_files', 'uninvalidated_addresses_bulk_test.csv')
    context.bulk_uninvalidated_addresses = []
    for case_created in context.case_created_events:
        context.bulk_uninvalidated_addresses.append({
            'CASE_ID': case_created['payload']['collectionCase']['id'],
        })
    test_helper.assertGreater(
        len(context.bulk_uninvalidated_addresses), 0,
        'Must have at least one un-invalidated address for this test to be valid')
    with open(context.bulk_uninvalidated_addresses_file,
              'w') as bulk_uninvalidated_addresses_file_write:
        writer = csv.DictWriter(
            bulk_uninvalidated_addresses_file_write,
            fieldnames=list(context.bulk_uninvalidated_addresses[0].keys()))
        writer.writeheader()
        for row in context.bulk_uninvalidated_addresses:
            writer.writerow(row)

    # Upload the file to a real bucket if one is configured
    if Config.BULK_UNINVALIDATED_ADDRESS_BUCKET_NAME:
        clear_bucket(Config.BULK_UNINVALIDATED_ADDRESS_BUCKET_NAME)
        upload_file_to_bucket(
            context.bulk_uninvalidated_addresses_file,
            f'uninvalidated_addresses_acceptance_tests_'
            f'{datetime.utcnow().strftime("%Y%m%d-%H%M%S")}.csv',
            Config.BULK_UNINVALIDATED_ADDRESS_BUCKET_NAME)
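
Every example also assumes module-level RESOURCE_FILE_PATH and Config objects. A minimal stand-in, assuming bucket names and service URLs come from environment variables; the real Config almost certainly carries more settings, and the defaults below are illustrative only:

import os
from pathlib import Path

# Hypothetical stand-ins for the globals the examples assume
RESOURCE_FILE_PATH = Path(__file__).parent.joinpath('resources')


class Config:
    # An unset (empty) bucket name disables the upload branch in each builder
    BULK_REFUSAL_BUCKET_NAME = os.getenv('BULK_REFUSAL_BUCKET_NAME', '')
    BULK_UNINVALIDATED_ADDRESS_BUCKET_NAME = os.getenv(
        'BULK_UNINVALIDATED_ADDRESS_BUCKET_NAME', '')
    BULK_INVALID_ADDRESS_BUCKET_NAME = os.getenv(
        'BULK_INVALID_ADDRESS_BUCKET_NAME', '')
    BULK_ADDRESS_UPDATE_BUCKET_NAME = os.getenv(
        'BULK_ADDRESS_UPDATE_BUCKET_NAME', '')
    BULK_NEW_ADDRESS_BUCKET_NAME = os.getenv('BULK_NEW_ADDRESS_BUCKET_NAME',
                                             '')
    # Illustrative defaults; the real endpoints come from the environment
    EXCEPTION_MANAGER_URL = os.getenv('EXCEPTION_MANAGER_URL',
                                      'http://localhost:8666')
    CASE_API_CASE_URL = os.getenv('CASE_API_CASE_URL',
                                  'http://localhost:8161/cases/')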
Example #3
def build_invalid_address_file(context):
    # Build a bulk invalid address file with a row for each stored case created event
    context.bulk_invalid_address_file = RESOURCE_FILE_PATH.joinpath(
        'bulk_processing_files', 'invalid_addresses_bulk_test.csv')
    context.bulk_invalid_addresses = {}
    for case_created in context.case_created_events:
        case_id = case_created['payload']['collectionCase']['id']
        context.bulk_invalid_addresses[case_id] = 'TEST_INVALID_REASON'
    test_helper.assertGreater(
        len(context.bulk_invalid_addresses), 0,
        'Must have at least one invalid address for this test to be valid')
    with open(context.bulk_invalid_address_file,
              'w') as bulk_invalid_file_write:
        writer = csv.DictWriter(bulk_invalid_file_write,
                                fieldnames=['case_id', 'reason'])
        writer.writeheader()
        for case_id, reason in context.bulk_invalid_addresses.items():
            writer.writerow({'case_id': case_id, 'reason': reason})

    # Upload the file to a real bucket if one is configured
    if Config.BULK_INVALID_ADDRESS_BUCKET_NAME:
        clear_bucket(Config.BULK_INVALID_ADDRESS_BUCKET_NAME)
        upload_file_to_bucket(
            context.bulk_invalid_address_file,
            f'invalid_addresses_acceptance_tests_{datetime.utcnow().strftime("%Y%m%d-%H%M%S")}.csv',
            Config.BULK_INVALID_ADDRESS_BUCKET_NAME)
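
The test_helper used throughout for assertGreater, assertIn and friends is presumably a bare unittest.TestCase instance, a common trick for borrowing the assert* methods outside a test class. A minimal sketch of that assumption:

import unittest

# Hypothetical: passing '__init__' as the method name lets TestCase be
# instantiated without a real test method, exposing its assert* helpers
test_helper = unittest.TestCase('__init__')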
Example #4
def check_for_bad_messages(context):
    # Give the bad messages time to be redelivered so their seen counts exceed 1
    time.sleep(30)
    response = requests.get(f'{Config.EXCEPTION_MANAGER_URL}/badmessages/summary')
    bad_messages = response.json()

    for bad_message in bad_messages:
        test_helper.assertGreater(bad_message['seenCount'], 1,
                                  msg=f'Seen count is not greater than 1, failed bad message summary: {bad_message}')
        test_helper.assertIn(bad_message['messageHash'], context.message_hashes,
                             msg=f'Unknown bad message hash, message summary: {bad_message}')
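
context.message_hashes is assumed to be populated when the test publishes its deliberately bad messages. A sketch under the assumption that the exception manager keys its summary on a SHA-256 digest of the raw message body; record_published_message_hash is a hypothetical helper:

import hashlib


def record_published_message_hash(context, message_body):
    # Hypothetical: remember the digest of each bad message as it is
    # published so it can be matched against the bad message summary
    message_hash = hashlib.sha256(message_body.encode()).hexdigest()
    context.message_hashes.append(message_hash)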
Example #5
def find_multiple_cases_by_uprn(context):
    response = requests.get(f'{Config.CASE_API_CASE_URL}uprn/10008677190')
    response.raise_for_status()
    response_data = response.json()
    test_helper.assertGreater(len(response_data), 1,
                              'Multiple cases not found')

    # Check some of the fields aren't blank
    for case in response_data:
        test_helper.assertTrue(case['id'], 'caseId missing')
        test_helper.assertTrue(case['caseRef'], 'caseRef missing')
        test_helper.assertTrue(case['postcode'], 'postcode missing')
Example #6
def check_new_cases_are_emitted(context):
    test_helper.assertGreater(
        len(context.bulk_new_addresses), 0,
        'Must have at least one new address for this test to be valid')

    context.case_created_events = get_case_created_events(
        context, len(context.bulk_new_addresses))
    test_helper.assertEqual(
        len(context.case_created_events), len(context.bulk_new_addresses),
        'Number of created cases should match number supplied in the bulk file'
    )

    for address in context.bulk_new_addresses:
        test_helper.assertTrue(
            any(
                new_address_matches_case_created(address, case_created_event)
                for case_created_event in context.case_created_events),
            f'No case created event found for address: {address}')

    if Config.BULK_NEW_ADDRESS_BUCKET_NAME:
        clear_bucket(Config.BULK_NEW_ADDRESS_BUCKET_NAME)
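
new_address_matches_case_created is defined elsewhere; a hypothetical sketch of the comparison it presumably performs, reusing column names seen in the bulk files here (the real field mapping may differ):


def new_address_matches_case_created(address, case_created_event):
    # Hypothetical sketch: compare a bulk new-address row against the
    # address emitted on the case created event
    emitted = case_created_event['payload']['collectionCase']['address']
    return (address['ADDRESS_LINE1'] == emitted['addressLine1']
            and address['TOWN_NAME'] == emitted['townName']
            and address['POSTCODE'] == emitted['postcode'])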
Example #7
def build_address_updates_file(context):
    # Build a bulk address updates file with a row for each stored case created event
    context.bulk_address_updates_file = RESOURCE_FILE_PATH.joinpath(
        'bulk_processing_files', 'address_updates_bulk_test.csv')
    context.bulk_address_updates = []
    for case_created in context.case_created_events:
        context.bulk_address_updates.append({
            'CASE_ID': case_created['payload']['collectionCase']['id'],
            'UPRN': '123456789',
            'ESTAB_UPRN': '987654321',
            'ESTAB_TYPE': 'ROYAL HOUSEHOLD',
            'ABP_CODE': '4321',
            'ORGANISATION_NAME': 'foo_incorporated',
            'ADDRESS_LINE1': 'foo flat1',
            'ADDRESS_LINE2': 'foo some road',
            'ADDRESS_LINE3': 'foo somewhere',
            'TOWN_NAME': 'foo some town',
            'POSTCODE': 'F00 BAR',
            'LATITUDE': '0.0',
            'LONGITUDE': '127.0',
            'OA': 'foo_1',
            'LSOA': 'foo_2',
            'MSOA': 'foo_3',
            'LAD': 'foo_4',
            'HTC_WILLINGNESS': '5',
            'HTC_DIGITAL': '3',
            'TREATMENT_CODE': 'HH_LP1E',
            'FIELDCOORDINATOR_ID': 'ABC123',
            'FIELDOFFICER_ID': 'XYZ999',
            'CE_EXPECTED_CAPACITY': '10',
            'CE_SECURE': '1',
            'PRINT_BATCH': '99',
        })
    test_helper.assertGreater(
        len(context.bulk_address_updates), 0,
        'Must have at least one update for this test to be valid')
    with open(context.bulk_address_updates_file,
              'w') as bulk_updates_file_write:
        writer = csv.DictWriter(bulk_updates_file_write,
                                fieldnames=list(
                                    context.bulk_address_updates[0].keys()))
        writer.writeheader()
        for row in context.bulk_address_updates:
            writer.writerow(row)

    # Upload the file to a real bucket if one is configured
    if Config.BULK_ADDRESS_UPDATE_BUCKET_NAME:
        clear_bucket(Config.BULK_ADDRESS_UPDATE_BUCKET_NAME)
        upload_file_to_bucket(
            context.bulk_address_updates_file,
            f'address_updates_acceptance_tests_{datetime.utcnow().strftime("%Y%m%d-%H%M%S")}.csv',
            Config.BULK_ADDRESS_UPDATE_BUCKET_NAME)
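
Finally, a minimal usage sketch that drives one of the builders with a fabricated event rather than events captured from the live system. Illustrative only: it assumes the bulk_processing_files resource directory exists and no bucket is configured, so the upload branch is skipped:

import uuid
from types import SimpleNamespace

# Fabricate a single case created event with a random case ID
context = SimpleNamespace(case_created_events=[
    {'payload': {'collectionCase': {'id': str(uuid.uuid4())}}}
])
build_invalid_address_file(context)
print(context.bulk_invalid_address_file.read_text())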