def company_with_contacts_factory():
    """Create a company that has three contacts attached to it."""
    new_company = CompanyFactory()
    for _ in range(3):
        ContactFactory(company=new_company)
    return new_company
def test_merge_interactions_contacts_succeeds(
    self,
    source_num_interactions,
    source_num_contacts,
    source_num_orders,
):
    """
    Tests that perform_merge() moves contacts and interactions to the target company,
    and marks the source company as archived and transferred.
    """
    # Freeze creation so we can later assert that moved objects keep their
    # original modified_on (i.e. the merge must not bump modified_on on them).
    creation_time = datetime(2010, 12, 1, 15, 0, 10, tzinfo=utc)
    with freeze_time(creation_time):
        source_company = _company_factory(
            source_num_interactions,
            source_num_contacts,
            source_num_orders,
        )
    target_company = CompanyFactory()
    user = AdviserFactory()
    # Snapshot the related objects before the merge so we can re-check them afterwards.
    source_interactions = list(source_company.interactions.all())
    source_contacts = list(source_company.contacts.all())
    source_orders = list(source_company.orders.all())

    # Each interaction and order has a contact, so actual number of contacts is
    # source_num_interactions + source_num_contacts + source_num_orders
    assert len(source_contacts) == (source_num_interactions + source_num_contacts + source_num_orders)

    # Merge at a later (also frozen) time so the archived/transferred timestamps
    # are distinguishable from creation_time.
    merge_time = datetime(2011, 2, 1, 14, 0, 10, tzinfo=utc)
    with freeze_time(merge_time):
        result = merge_companies(source_company, target_company, user)

    # merge_companies() returns, per model, how many objects had each company
    # field repointed at the target company.
    assert result == {
        Contact: {
            'company': len(source_contacts)
        },
        Interaction: {
            'company': len(source_interactions)
        },
        InvestmentProject: {field: 0 for field in INVESTMENT_PROJECT_COMPANY_FIELDS},
        Order: {
            'company': len(source_orders)
        },
    }

    # Reload everything from the database before asserting on the moved state.
    for obj in chain(source_interactions, source_contacts, source_orders):
        obj.refresh_from_db()

    # All related objects now point at the target, and their modified_on is
    # untouched by the merge.
    assert all(obj.company == target_company for obj in source_interactions)
    assert all(obj.modified_on == creation_time for obj in source_interactions)
    assert all(obj.company == target_company for obj in source_contacts)
    assert all(obj.modified_on == creation_time for obj in source_contacts)
    assert all(obj.company == target_company for obj in source_orders)
    assert all(obj.modified_on == creation_time for obj in source_orders)

    source_company.refresh_from_db()

    # The source company is archived and fully marked as transferred to the target.
    assert source_company.archived
    assert source_company.archived_by == user
    assert source_company.archived_on == merge_time
    assert source_company.archived_reason == (
        f'This record is no longer in use and its data has been transferred '
        f'to {target_company} for the following reason: Duplicate record.')
    assert source_company.modified_by == user
    assert source_company.modified_on == merge_time
    assert source_company.transfer_reason == Company.TRANSFER_REASONS.duplicate
    assert source_company.transferred_by == user
    assert source_company.transferred_on == merge_time
    assert source_company.transferred_to == target_company
def test_merge_investment_projects_succeeds(self, fields):
    """
    Tests that perform_merge() moves investment projects to the target company and
    marks the source company as archived and transferred.
    """
    # Freeze creation so we can later assert the merge does not bump modified_on
    # on the moved investment project.
    creation_time = datetime(2010, 12, 1, 15, 0, 10, tzinfo=utc)
    with freeze_time(creation_time):
        source_company = CompanyFactory()
        # One project referencing the source company via every field under test.
        investment_project = InvestmentProjectFactory(
            **{field: source_company for field in fields},
        )
    target_company = CompanyFactory()
    user = AdviserFactory()

    merge_time = datetime(2011, 2, 1, 14, 0, 10, tzinfo=utc)
    with freeze_time(merge_time):
        result = merge_companies(source_company, target_company, user)

    other_fields = set(INVESTMENT_PROJECT_COMPANY_FIELDS) - set(fields)
    assert result == {
        # No contacts, interactions or orders were created for the source
        # company, so none should be moved for those models.
        Contact: {
            'company': 0
        },
        Interaction: {
            'company': 0
        },
        # Only the parametrised fields referenced the source company, so only
        # those report a move.
        InvestmentProject: {
            **{field: 1 for field in fields},
            **{field: 0 for field in other_fields},
        },
        Order: {
            'company': 0
        },
    }

    investment_project.refresh_from_db()
    # The parametrised fields now point at the target; the others were left alone.
    assert all(
        getattr(investment_project, field) == target_company for field in fields)
    assert all(
        getattr(investment_project, field) != target_company for field in other_fields)
    assert all(
        getattr(investment_project, field) != source_company for field in other_fields)
    # Moving the project must not update its modified_on.
    assert investment_project.modified_on == creation_time

    source_company.refresh_from_db()

    # The source company is archived and fully marked as transferred to the target.
    assert source_company.archived
    assert source_company.archived_by == user
    assert source_company.archived_on == merge_time
    assert source_company.archived_reason == (
        f'This record is no longer in use and its data has been transferred '
        f'to {target_company} for the following reason: Duplicate record.')
    assert source_company.modified_by == user
    assert source_company.modified_on == merge_time
    assert source_company.transfer_reason == Company.TRANSFER_REASONS.duplicate
    assert source_company.transferred_by == user
    assert source_company.transferred_on == merge_time
    assert source_company.transferred_to == target_company
def company_with_investment_projects_factory():
    """Build a company referenced by an investment project through each company field."""
    created_company = CompanyFactory()
    for company_field in INVESTMENT_PROJECT_COMPANY_FIELDS:
        InvestmentProjectFactory(**{company_field: created_company})
    return created_company
def test_successfully_copies_from_company_model_when_duplicates_involved(
    self,
    monkeypatch,
    num_objects,
    batch_size,
    expected_batches,
):
    """Test that the task copies data for various batch sizes."""
    new_countries = list(Country.objects.order_by('?')[:7])
    # NOTE: these two slices overlap — the first three countries appear in both
    # lists, which is the "duplicates involved" scenario under test.
    new_export_to_countries = new_countries[:3]
    new_future_interest_countries = new_countries[:5]
    # Wrap the real task so we can count the recursive/batched apply_async calls
    # while still executing the real logic.
    task_mock = Mock(
        wraps=copy_export_countries_to_company_export_country_model,
    )
    monkeypatch.setattr(
        'datahub.dbmaintenance.tasks.copy_export_countries_to_company_export_country_model',
        task_mock,
    )
    companies_to_update = CompanyFactory.create_batch(
        num_objects,
        export_to_countries=new_export_to_countries,
        future_interest_countries=new_future_interest_countries,
    )
    # populate destination table with countries that overlap
    # with the countries we're adding through the task
    for company in companies_to_update:
        for country in new_future_interest_countries:
            CompanyExportCountryFactory(
                company=company,
                country=country,
                status='future_interest',
            )
    result_currently_exporting = task_mock.apply_async(
        kwargs={
            'batch_size': batch_size,
            'status': 'currently_exporting',
        },
    )
    assert result_currently_exporting.successful()
    # The task re-schedules itself per batch, so the wrapped mock records one
    # apply_async call per batch.
    assert task_mock.apply_async.call_count == expected_batches
    updated_countries = CompanyExportCountry.objects.filter(company__in=companies_to_update)
    assert set([
        export_country.company for export_country in updated_countries
    ]) == set(companies_to_update)
    # NOTE(review): the two assertions below are vacuous. `all([...])` is given a
    # list of *lists*, so it only checks that each inner list is non-empty — the
    # per-item `item.country in ...` conditions are built but never checked by
    # `all`. The intended form is presumably a flattened generator, e.g.
    # `all(cond for export_country in ... for item in ...)`, but flattening would
    # change the test outcome (the overlapping country sets above can make the
    # per-item condition False) — confirm intent before fixing.
    assert all([
        [
            item.country in set(new_future_interest_countries) - set(new_export_to_countries)
            and item.country not in new_export_to_countries
            for item in CompanyExportCountry.objects.filter(
                country_id=export_country.country.pk,
            )
        ]
        for export_country in updated_countries.filter(status='future_interest')
    ])
    assert all([
        [
            item.country in new_export_to_countries
            for item in CompanyExportCountry.objects.filter(
                country_id=export_country.country.pk,
                status='currently_exporting',
            )
        ]
        for export_country in updated_countries.filter(status='currently_exporting')
    ])
def test_add(self, extra_data):
    """Test add a new service delivery."""
    adviser = AdviserFactory()
    company = CompanyFactory()
    contact = ContactFactory()
    url = reverse('api-v3:interaction:collection')
    # Base service-delivery payload; extra_data (parametrised) layers
    # event/status/grant fields on top via resolve_data().
    request_data = {
        'kind': Interaction.KINDS.service_delivery,
        'subject': 'whatever',
        'date': date.today().isoformat(),
        'dit_adviser': adviser.pk,
        'company': company.pk,
        'contact': contact.pk,
        'service': Service.trade_enquiry.value.id,
        'dit_team': Team.healthcare_uk.value.id,
        **resolve_data(extra_data),
    }
    response = self.api_client.post(url, request_data)

    assert response.status_code == status.HTTP_201_CREATED
    response_data = response.json()
    # NOTE(review): the hard-coded '2017-04-18…' date and timestamps below imply
    # time is frozen at 2017-04-18T13:25:30.986208Z somewhere in the surrounding
    # test class — confirm against the class/fixture setup.
    assert response_data == {
        'id': response_data['id'],
        'kind': Interaction.KINDS.service_delivery,
        'is_event': request_data['is_event'],
        'service_delivery_status': request_data.get('service_delivery_status'),
        'grant_amount_offered': request_data.get('grant_amount_offered'),
        'net_company_receipt': request_data.get('net_company_receipt'),
        'communication_channel': None,
        'policy_areas': [],
        'policy_issue_type': None,
        'subject': 'whatever',
        'date': '2017-04-18',
        'dit_adviser': {
            'id': str(adviser.pk),
            'first_name': adviser.first_name,
            'last_name': adviser.last_name,
            'name': adviser.name,
        },
        'notes': request_data.get('notes', ''),
        'company': {
            'id': str(company.pk),
            'name': company.name,
        },
        'contact': {
            'id': str(contact.pk),
            'name': contact.name,
            'first_name': contact.first_name,
            'last_name': contact.last_name,
            'job_title': contact.job_title,
        },
        'event': request_data.get('event'),
        'service': {
            'id': str(Service.trade_enquiry.value.id),
            'name': Service.trade_enquiry.value.name,
        },
        'dit_team': {
            'id': str(Team.healthcare_uk.value.id),
            'name': Team.healthcare_uk.value.name,
        },
        'investment_project': None,
        'archived_documents_url_path': '',
        'created_by': {
            'id': str(self.user.pk),
            'first_name': self.user.first_name,
            'last_name': self.user.last_name,
            'name': self.user.name,
        },
        'modified_by': {
            'id': str(self.user.pk),
            'first_name': self.user.first_name,
            'last_name': self.user.last_name,
            'name': self.user.name,
        },
        'created_on': '2017-04-18T13:25:30.986208Z',
        'modified_on': '2017-04-18T13:25:30.986208Z',
    }
def test_merge_succeeds(
    self,
    source_num_interactions,
    source_num_contacts,
    source_num_investment_projects,
    source_num_orders,
):
    """
    Test that the merge succeeds and the source company is marked as a duplicate when the
    source company has various amounts of contacts, interactions, investment projects
    and orders.
    """
    # Freeze creation so we can later assert that moved objects keep their
    # original modified_on (the merge must not bump it).
    creation_time = datetime(2010, 12, 1, 15, 0, 10, tzinfo=utc)
    with freeze_time(creation_time):
        source_company = _company_factory(
            source_num_interactions,
            source_num_contacts,
            source_num_investment_projects,
            source_num_orders,
        )
    target_company = CompanyFactory()
    # Snapshot all related objects before the merge.
    source_interactions = list(source_company.interactions.all())
    source_contacts = list(source_company.contacts.all())
    source_orders = list(source_company.orders.all())
    source_investment_projects_by_field = {
        investment_project_field: list(
            InvestmentProject.objects.filter(**{
                investment_project_field: source_company,
            }),
        )
        for investment_project_field in INVESTMENT_PROJECT_COMPANY_FIELDS
    }

    # Each interaction and order has a contact, so actual number of contacts is
    # source_num_interactions + source_num_contacts + source_num_orders
    assert len(source_contacts) == (
        source_num_interactions + source_num_contacts + source_num_orders
    )

    confirm_merge_url = _make_confirm_merge_url(source_company, target_company)

    merge_time = datetime(2011, 2, 1, 14, 0, 10, tzinfo=utc)
    with freeze_time(merge_time):
        response = self.client.post(confirm_merge_url, follow=True)

    assert response.status_code == status.HTTP_200_OK
    assert len(response.redirect_chain) == 1
    assert response.redirect_chain[0][0] == _get_changelist_url()

    messages = list(response.context['messages'])
    assert len(messages) == 1
    assert messages[0].level == django_messages.SUCCESS

    # Build the expected 'n interactions, n contacts, …' summary that the admin
    # success message should contain (entries for empty categories are omitted).
    merge_entries = []
    if len(source_interactions) > 0:
        interaction_noun = 'interaction' if len(source_interactions) == 1 else 'interactions'
        merge_entries.append(
            f'{len(source_interactions)} {interaction_noun}',
        )
    if len(source_contacts) > 0:
        # Renamed from the copy-pasted `interaction_noun` for clarity.
        contact_noun = 'contact' if len(source_contacts) == 1 else 'contacts'
        merge_entries.append(
            f'{len(source_contacts)} {contact_noun}',
        )
    for field, investment_projects in source_investment_projects_by_field.items():
        num_investment_projects = len(investment_projects)
        if num_investment_projects > 0:
            project_noun = 'project' if num_investment_projects == 1 else 'projects'
            description = FIELD_TO_DESCRIPTION_MAPPING.get(field)
            merge_entries.append(
                f'{num_investment_projects} investment {project_noun}{description}',
            )
    if len(source_orders) > 0:
        # Bug fix: the order noun was previously pluralised on
        # len(source_contacts) (copy-paste error), which produced e.g.
        # '2 order' whenever there was exactly one contact but several orders.
        order_noun = 'order' if len(source_orders) == 1 else 'orders'
        merge_entries.append(
            f'{len(source_orders)} {order_noun}',
        )
    merge_entries = ', '.join(merge_entries)

    match = re.match(
        r'^Merge complete – (?P<merge_entries>.*)'
        r' moved from'
        r' <a href="(?P<source_company_url>.*)" target="_blank">(?P<source_company>.*)</a>'
        r' to'
        r' <a href="(?P<target_company_url>.*)" target="_blank">(?P<target_company>.*)</a>'
        r'\.$',
        messages[0].message,
    )
    assert match
    assert match.groupdict() == {
        'merge_entries': merge_entries,
        'source_company_url': escape(source_company.get_absolute_url()),
        'source_company': escape(str(source_company)),
        'target_company_url': escape(target_company.get_absolute_url()),
        'target_company': escape(str(target_company)),
    }

    # Reload every moved object (simplified from an identity generator over
    # .values() — behaviour unchanged).
    for obj in chain(
        source_interactions,
        source_contacts,
        source_orders,
        chain.from_iterable(source_investment_projects_by_field.values()),
    ):
        obj.refresh_from_db()

    # All related objects now point at the target, and the merge did not bump
    # their modified_on.
    assert all(obj.company == target_company for obj in source_interactions)
    assert all(obj.modified_on == creation_time for obj in source_interactions)
    assert all(obj.company == target_company for obj in source_contacts)
    assert all(obj.modified_on == creation_time for obj in source_contacts)
    assert all(obj.company == target_company for obj in source_orders)
    assert all(obj.modified_on == creation_time for obj in source_orders)
    for field, investment_projects in source_investment_projects_by_field.items():
        assert all(getattr(obj, field) == target_company for obj in investment_projects)
        assert all(obj.modified_on == creation_time for obj in investment_projects)

    source_company.refresh_from_db()

    # The source company is archived and fully marked as transferred.
    assert source_company.archived
    assert source_company.archived_by == self.user
    assert source_company.archived_on == merge_time
    assert source_company.archived_reason == (
        f'This record is no longer in use and its data has been transferred '
        f'to {target_company} for the following reason: Duplicate record.'
    )
    assert source_company.modified_by == self.user
    assert source_company.modified_on == merge_time
    assert source_company.transfer_reason == Company.TRANSFER_REASONS.duplicate
    assert source_company.transferred_by == self.user
    assert source_company.transferred_on == merge_time
    assert source_company.transferred_to == target_company
def test_sync_outdated_companies_with_dnb_partial_fields(
    requests_mock,
    dnb_response_uk,
    base_company_dict,
    existing_company_dnb_modified_on,
    caplog,
):
    """
    Test the sync_outdated_companies_with_dnb task when only a subset of fields
    should be synced.
    """
    caplog.set_level('INFO')
    # The parametrised value may be a callable (e.g. a lazily-evaluated "now")
    # so that the timestamp is computed at test run time rather than collection.
    if callable(existing_company_dnb_modified_on):
        existing_company_dnb_modified_on = existing_company_dnb_modified_on()
    requests_mock.post(
        DNB_SEARCH_URL,
        json=dnb_response_uk,
    )
    company = CompanyFactory(
        duns_number='123456789',
        dnb_modified_on=existing_company_dnb_modified_on,
    )
    # A second, independent copy of the record to compare untouched fields against.
    original_company = Company.objects.get(id=company.id)
    task_result = sync_outdated_companies_with_dnb.apply_async(
        kwargs={
            # Only this one field should be taken from the DNB response.
            'fields_to_update': ['global_ultimate_duns_number'],
            'dnb_modified_on_before': now() + timedelta(days=1),
            'simulate': False,
        },
    )
    assert task_result.successful()
    company.refresh_from_db()
    # Every field except global_ultimate_duns_number (and the dnb_modified_on
    # bookkeeping timestamp) must be unchanged.
    # NOTE(review): comparing 'dnb_modified_on' to now() implies time is frozen
    # around this test — confirm against the surrounding fixtures.
    assert model_to_dict(company) == {
        **base_company_dict,
        'address_1': original_company.address_1,
        'address_2': original_company.address_2,
        'address_country': original_company.address_country_id,
        'address_county': original_company.address_county,
        'address_postcode': original_company.address_postcode,
        'address_town': original_company.address_town,
        'archived_documents_url_path': original_company.archived_documents_url_path,
        'business_type': original_company.business_type_id,
        'company_number': original_company.company_number,
        'created_by': original_company.created_by_id,
        'duns_number': original_company.duns_number,
        'employee_range': original_company.employee_range_id,
        'export_experience_category': original_company.export_experience_category_id,
        # The one field that should be synced from the DNB response.
        'global_ultimate_duns_number': '291332174',
        'id': original_company.id,
        'is_number_of_employees_estimated': original_company.is_number_of_employees_estimated,
        'is_turnover_estimated': original_company.is_turnover_estimated,
        'modified_by': original_company.modified_by_id,
        'name': original_company.name,
        'number_of_employees': original_company.number_of_employees,
        'registered_address_1': original_company.registered_address_1,
        'registered_address_2': original_company.registered_address_2,
        'registered_address_country': original_company.registered_address_country_id,
        'registered_address_county': original_company.registered_address_county,
        'registered_address_postcode': original_company.registered_address_postcode,
        'registered_address_town': original_company.registered_address_town,
        'sector': original_company.sector_id,
        'trading_names': original_company.trading_names,
        'turnover': original_company.turnover,
        'turnover_range': original_company.turnover_range_id,
        'uk_region': original_company.uk_region_id,
        'website': original_company.website,
        'dnb_modified_on': now(),
    }
    expected_message = f'Syncing dnb-linked company "{company.id}" Succeeded'
    assert expected_message in caplog.text
def test_link_company_with_dnb_success(
    requests_mock,
    dnb_response_uk,
):
    """
    Test the link_company_with_dnb utility.
    """
    requests_mock.post(
        DNB_SEARCH_URL,
        json=dnb_response_uk,
    )
    company = CompanyFactory()
    # Independent copy of the pre-link record, to compare untouched fields against.
    original_company = Company.objects.get(id=company.id)
    modifying_adviser = AdviserFactory()
    link_company_with_dnb(company.id, '123456789', modifying_adviser)
    company.refresh_from_db()
    uk_country = Country.objects.get(iso_alpha2_code='GB')
    # Literal values below come from the dnb_response_uk fixture payload;
    # original_company.* fields are those linking must leave unchanged.
    # NOTE(review): 'dnb_modified_on': now() implies time is frozen around this
    # test — confirm against the surrounding fixtures.
    assert model_to_dict_company(company) == {
        'address_1': 'Unit 10, Ockham Drive',
        'address_2': '',
        'address_country': uk_country.id,
        'address_county': '',
        'address_postcode': 'UB6 0F2',
        'address_town': 'GREENFORD',
        'archived': False,
        'archived_by': None,
        'archived_documents_url_path': original_company.archived_documents_url_path,
        'archived_on': None,
        'archived_reason': None,
        'business_type': original_company.business_type_id,
        'company_number': '01261539',
        'created_by': original_company.created_by_id,
        'description': None,
        'dnb_investigation_data': None,
        'duns_number': '123456789',
        'employee_range': original_company.employee_range_id,
        'export_experience_category': original_company.export_experience_category_id,
        'export_potential': None,
        'export_to_countries': [],
        'future_interest_countries': [],
        'global_headquarters': None,
        'global_ultimate_duns_number': '291332174',
        'great_profile_status': None,
        'headquarter_type': None,
        'id': original_company.id,
        'is_number_of_employees_estimated': True,
        'is_turnover_estimated': None,
        'modified_by': modifying_adviser.id,
        'name': 'FOO BICYCLE LIMITED',
        'number_of_employees': 260,
        'one_list_account_owner': None,
        'one_list_tier': None,
        'pending_dnb_investigation': False,
        'reference_code': '',
        'sector': original_company.sector_id,
        'trading_names': [],
        'transfer_reason': '',
        'transferred_by': None,
        'transferred_on': None,
        'transferred_to': None,
        'turnover': 50651895,
        'turnover_range': original_company.turnover_range_id,
        'uk_region': original_company.uk_region_id,
        'vat_number': '',
        'dnb_modified_on': now(),
    }
def test_with_manual_address(self):
    """Test add with manual address."""
    company = CompanyFactory()
    url = reverse('api-v3:contact:list')
    # address_same_as_company=False means the explicit address_* fields below
    # must be stored and echoed back, rather than the company's address.
    response = self.api_client.post(
        url,
        data={
            'title': {
                'id': constants.Title.admiral_of_the_fleet.value.id,
            },
            'first_name': 'Oratio',
            'last_name': 'Nelson',
            'job_title': 'Head of Sales',
            'company': {
                'id': str(company.pk),
            },
            'email': '*****@*****.**',
            'email_alternative': '*****@*****.**',
            'primary': True,
            'telephone_countrycode': '+44',
            'telephone_number': '123456789',
            'telephone_alternative': '987654321',
            'address_same_as_company': False,
            'address_1': 'Foo st.',
            'address_2': 'adr 2',
            'address_town': 'London',
            'address_county': 'London',
            'address_country': {
                'id': constants.Country.united_kingdom.value.id,
            },
            'address_postcode': 'SW1A1AA',
            'notes': 'lorem ipsum',
            'accepts_dit_email_marketing': True,
        },
    )

    assert response.status_code == status.HTTP_201_CREATED
    # NOTE(review): the hard-coded created_on/modified_on values imply time is
    # frozen at 2017-04-18T13:25:30.986208Z in the surrounding class — confirm.
    assert response.json() == {
        'id': response.json()['id'],
        'title': {
            'id': constants.Title.admiral_of_the_fleet.value.id,
            'name': constants.Title.admiral_of_the_fleet.value.name,
        },
        'first_name': 'Oratio',
        'last_name': 'Nelson',
        'name': 'Oratio Nelson',
        'job_title': 'Head of Sales',
        'company': {
            'id': str(company.pk),
            'name': company.name,
        },
        'adviser': {
            'id': str(self.user.pk),
            'first_name': self.user.first_name,
            'last_name': self.user.last_name,
            'name': self.user.name,
        },
        'email': '*****@*****.**',
        'email_alternative': '*****@*****.**',
        'primary': True,
        'telephone_countrycode': '+44',
        'telephone_number': '123456789',
        'telephone_alternative': '987654321',
        'address_same_as_company': False,
        'address_1': 'Foo st.',
        'address_2': 'adr 2',
        'address_town': 'London',
        'address_county': 'London',
        'address_country': {
            'id': constants.Country.united_kingdom.value.id,
            'name': constants.Country.united_kingdom.value.name,
        },
        'address_postcode': 'SW1A1AA',
        'notes': 'lorem ipsum',
        'accepts_dit_email_marketing': True,
        'archived': False,
        'archived_by': None,
        'archived_documents_url_path': '',
        'archived_on': None,
        'archived_reason': None,
        'created_on': '2017-04-18T13:25:30.986208Z',
        'modified_on': '2017-04-18T13:25:30.986208Z',
    }
def test_view(self):
    """Test view."""
    company = CompanyFactory()
    # Create a fully-populated contact so every serialised field can be checked.
    contact = ContactFactory(
        title_id=constants.Title.admiral_of_the_fleet.value.id,
        first_name='Oratio',
        last_name='Nelson',
        job_title='Head of Sales',
        company=company,
        email='*****@*****.**',
        email_alternative='*****@*****.**',
        primary=True,
        adviser=self.user,
        telephone_countrycode='+44',
        telephone_number='123456789',
        telephone_alternative='987654321',
        address_same_as_company=False,
        address_1='Foo st.',
        address_2='adr 2',
        address_town='London',
        address_county='London',
        address_country_id=constants.Country.united_kingdom.value.id,
        address_postcode='SW1A1AA',
        notes='lorem ipsum',
        accepts_dit_email_marketing=False,
    )
    url = reverse('api-v3:contact:detail', kwargs={'pk': contact.pk})
    response = self.api_client.get(url)

    assert response.status_code == status.HTTP_200_OK
    # NOTE(review): the hard-coded created_on/modified_on values imply time is
    # frozen at 2017-04-18T13:25:30.986208Z in the surrounding class — confirm.
    assert response.json() == {
        'id': response.json()['id'],
        'title': {
            'id': constants.Title.admiral_of_the_fleet.value.id,
            'name': constants.Title.admiral_of_the_fleet.value.name,
        },
        'first_name': 'Oratio',
        'last_name': 'Nelson',
        'name': 'Oratio Nelson',
        'job_title': 'Head of Sales',
        'company': {
            'id': str(company.pk),
            'name': company.name,
        },
        'adviser': {
            'id': str(self.user.pk),
            'first_name': self.user.first_name,
            'last_name': self.user.last_name,
            'name': self.user.name,
        },
        'email': '*****@*****.**',
        'email_alternative': '*****@*****.**',
        'primary': True,
        'telephone_countrycode': '+44',
        'telephone_number': '123456789',
        'telephone_alternative': '987654321',
        'address_same_as_company': False,
        'address_1': 'Foo st.',
        'address_2': 'adr 2',
        'address_town': 'London',
        'address_county': 'London',
        'address_country': {
            'id': constants.Country.united_kingdom.value.id,
            'name': constants.Country.united_kingdom.value.name,
        },
        'address_postcode': 'SW1A1AA',
        'notes': 'lorem ipsum',
        'accepts_dit_email_marketing': False,
        'archived': False,
        'archived_by': None,
        'archived_documents_url_path': contact.archived_documents_url_path,
        'archived_on': None,
        'archived_reason': None,
        'created_on': '2017-04-18T13:25:30.986208Z',
        'modified_on': '2017-04-18T13:25:30.986208Z',
    }
def company_with_pipeline_items_factory():
    """Build a company with three pipeline items referencing it."""
    pipeline_company = CompanyFactory()
    for _ in range(3):
        PipelineItemFactory(company=pipeline_company)
    return pipeline_company
def company_with_company_list_items_factory():
    """Build a company that three company-list items point at."""
    listed_company = CompanyFactory()
    CompanyListItemFactory.create_batch(3, company=listed_company)
    return listed_company
def setup_data(setup_es):
    """Sets up data for the tests."""
    # NOTE(review): this looks like a pytest yield-fixture (the decorator is not
    # visible in this chunk) — it creates large-capital investor profiles with
    # distinct, searchable attribute combinations and frozen created_on
    # timestamps, then yields them after refreshing the ES index.
    investor_company = CompanyFactory(name='large abcdef')
    argentina_investor_company = CompanyFactory(
        name='argentina plc',
        address_country_id=CountryConstant.argentina.value.id,
    )
    # A profile created well in the past, for created_on-based filtering/sorting.
    with freeze_time('2010-02-01'):
        frozen_created_on_profile = LargeInvestorProfileFactory(
            investor_company=CompanyFactory(name='Frozen limited', ),
            investor_description='frozen in 2010',
            construction_risks=[
                ConstructionRiskConstant.greenfield.value.id,
            ],
            desired_deal_roles=[
                DesiredDealRoleConstant.lead_manager.value.id,
            ],
            minimum_equity_percentage_id=EquityPercentageConstant.zero_percent.value.id,
            investable_capital=0,
            global_assets_under_management=10,
            uk_region_locations=[
                UKRegionConstant.north_west.value.id,
                UKRegionConstant.east_of_england.value.id,
            ],
        )
    # Two profiles created an hour apart on the same day, for ordering tests.
    with freeze_time('2018-01-01 10:00:00'):
        south_project = LargeInvestorProfileFactory(
            investor_company=CompanyFactory(name='South', ),
            investor_description='South Project',
            investment_types=[
                InvestmentTypesConstant.direct_investment_in_project_equity.value.id,
            ],
            global_assets_under_management=60,
        )
    with freeze_time('2018-01-01 11:00:00'):
        north_project = LargeInvestorProfileFactory(
            investable_capital=20,
            investor_company=CompanyFactory(name='North', ),
            investor_description='North Project',
            uk_region_locations=[
                UKRegionConstant.north_west.value.id,
                UKRegionConstant.north_east.value.id,
            ],
            other_countries_being_considered=[
                CountryConstant.ireland.value.id,
                CountryConstant.canada.value.id,
            ],
            global_assets_under_management=70,
        )
    # The remaining profiles, all created at a fixed 2019 timestamp.
    with freeze_time('2019-01-01'):
        investor_profiles = [
            LargeInvestorProfileFactory(
                investor_description='Operational construction',
                investor_company=investor_company,
                investable_capital=950,
                construction_risks=[
                    ConstructionRiskConstant.operational.value.id,
                ],
                minimum_return_rate_id=ReturnRateConstant.up_to_five_percent.value.id,
                time_horizons=[
                    TimeHorizonConstant.up_to_five_years.value.id,
                    TimeHorizonConstant.five_to_nine_years.value.id,
                ],
                global_assets_under_management=20,
            ),
            LargeInvestorProfileFactory(
                investor_description='Argentina project',
                investor_company=argentina_investor_company,
                investable_capital=1490,
                construction_risks=[
                    ConstructionRiskConstant.brownfield.value.id,
                ],
                time_horizons=[
                    TimeHorizonConstant.up_to_five_years.value.id,
                ],
                restrictions=[
                    RestrictionConstant.inflation_adjustment.value.id,
                ],
                global_assets_under_management=30,
            ),
            frozen_created_on_profile,
            LargeInvestorProfileFactory(
                investor_company=CompanyFactory(
                    address_country_id=CountryConstant.argentina.value.id,
                    name='2 constructions ltd',
                ),
                investor_description='2 construction risks',
                construction_risks=[
                    ConstructionRiskConstant.brownfield.value.id,
                    ConstructionRiskConstant.greenfield.value.id,
                ],
                investable_capital=3000,
                asset_classes_of_interest=[
                    AssetClassInterestConstant.biomass.value.id,
                ],
                restrictions=[
                    RestrictionConstant.inflation_adjustment.value.id,
                ],
                global_assets_under_management=40,
            ),
            LargeInvestorProfileFactory(
                investor_company=CompanyFactory(name='Deal up ltd', ),
                investable_capital=10,
                investor_description='Deal up',
                deal_ticket_sizes=[
                    DealTicketSizeConstant.up_to_forty_nine_million.value.id,
                ],
                asset_classes_of_interest=[
                    AssetClassInterestConstant.biofuel.value.id,
                ],
                time_horizons=[
                    TimeHorizonConstant.five_to_nine_years.value.id,
                ],
                restrictions=[
                    RestrictionConstant.liquidity.value.id,
                ],
                minimum_equity_percentage_id=EquityPercentageConstant.zero_percent.value.id,
                global_assets_under_management=50,
            ),
            north_project,
            south_project,
        ]
    # Make the newly indexed documents visible to searches before yielding.
    setup_es.indices.refresh()
    yield investor_profiles
def company_with_orders_factory():
    """Build a company that has three orders attached."""
    ordered_company = CompanyFactory()
    for _ in range(3):
        OrderFactory(company=ordered_company)
    return ordered_company
def test_run(s3_stubber, caplog, simulate, overwrite):
    """
    Test that the command:
    - updates records if simulate=False is passed
    - doesn't update records if simulate=True is passed
    - only overwrites non-None values if overwrite=True is passed
    - ignores rows with errors
    """
    caplog.set_level('ERROR')

    original_datetime = datetime(2017, 1, 1, tzinfo=utc)
    uk_region_a, uk_region_b = UKRegion.objects.order_by('?')[:2]
    # Initial uk_region of each of the five companies (index-aligned with the
    # CSV rows below; company[1] deliberately starts with no region).
    original_uk_region_ids = [
        uk_region_a.pk,
        None,
        uk_region_a.pk,
        uk_region_a.pk,
        uk_region_a.pk,
    ]
    with freeze_time(original_datetime):
        companies = CompanyFactory.create_batch(
            len(original_uk_region_ids),
            uk_region_id=factory.Iterator(original_uk_region_ids),
        )

    bucket = 'test_bucket'
    object_key = 'test_key'
    # Row 1: unknown company UUID (logged error); row 2: malformed region UUID
    # (logged error); rows 3-5: no-op update, real update, blanking update.
    csv_content = f"""id,uk_region_id
00000000-0000-0000-0000-000000000000,ongoing
{companies[0].pk},invalid
{companies[1].pk},{uk_region_a.pk}
{companies[2].pk},{uk_region_a.pk}
{companies[3].pk},{uk_region_b.pk}
{companies[4].pk},
"""
    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(csv_content.encode(encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )

    with freeze_time('2018-11-11 00:00:00'):
        call_command(
            'update_company_uk_region',
            bucket,
            object_key,
            simulate=simulate,
            overwrite=overwrite,
        )

    for company in companies:
        company.refresh_from_db()

    # Exactly the two bad rows should have produced error log records.
    assert 'Company matching query does not exist' in caplog.text
    assert 'Must be a valid UUID.' in caplog.text
    assert len(caplog.records) == 2

    if simulate:
        assert [company.uk_region_id for company in companies] == original_uk_region_ids
    else:
        expected_uk_region_ids = [
            uk_region_a.pk,  # no change as the new value wasn't valid
            uk_region_a.pk,
            uk_region_a.pk,  # unchanged
            uk_region_b.pk if overwrite else uk_region_a.pk,
            None if overwrite else uk_region_a.pk,
        ]
        assert [company.uk_region_id for company in companies] == expected_uk_region_ids

    # The command must not bump modified_on on the companies it touches.
    assert all(company.modified_on == original_datetime for company in companies)
def test_ensure_no_existing_data_fails_when_existing_data():
    """Checks that loading is aborted with an error if the database is not empty."""
    # Any single pre-existing record should trigger the failure.
    CompanyFactory()
    fixtures = [PurePath(__file__).parent / 'loadinitialmetadata_test_data.yaml']
    with pytest.raises(ExistingDataFoundError):
        _ensure_no_existing_data(fixtures)
def test_run(s3_stubber, caplog, simulate):
    """
    Test that the command:
    - updates records only if simulate=False is passed
    - does not update records if simulate=True is passed
    - does not update created_on if it exists already
    - ignores rows with unmatched Company UUIDs
    """
    caplog.set_level('WARNING')

    original_created_on = datetime(2017, 1, 1, tzinfo=utc)
    with freeze_time(original_created_on):
        companies = CompanyFactory.create_batch(2)

    # The second company starts without a created_on so the command can set it.
    company_with_created_on, company_without_created_on = companies
    company_without_created_on.created_on = None
    company_without_created_on.save()

    apr_13_2018_str = '13/04/2018'

    bucket = 'test_bucket'
    object_key = 'test_key'
    # Row 1: unknown UUID (warning); row 2: already has created_on (skipped);
    # row 3: the one the command should update.
    csv_content = f"""UUID,Suggested Created Date
00000000-0000-0000-0000-000000000000,17/02/2018
{company_with_created_on.pk},19/02/2018
{company_without_created_on.pk},{apr_13_2018_str}
"""
    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(csv_content.encode(encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )

    call_command(
        'update_company_created_on',
        bucket,
        object_key,
        simulate=simulate,
    )

    for refreshed_company in companies:
        refreshed_company.refresh_from_db()

    log_records = caplog.get_records(when='call')
    assert log_records[0].exc_info[0] == Company.DoesNotExist
    assert log_records[1].msg == (
        f'Company {company_with_created_on.pk} already has a `created_on`; skipping'
    )

    # An existing created_on must never be overwritten.
    assert company_with_created_on.created_on == original_created_on

    if simulate:
        assert company_without_created_on.created_on is None
    else:
        day, month, year = (int(part) for part in apr_13_2018_str.split('/'))
        new_created_on = company_without_created_on.created_on
        assert (new_created_on.day, new_created_on.month, new_created_on.year) == (day, month, year)
def test_response_body(self, es_with_collector, public_company_api_client):
    """Tests the response body of a search query."""
    # Company with no group structure or One List membership, so the
    # corresponding response fields are expected to be None/empty.
    company = CompanyFactory(
        company_number='123',
        trading_names=['Xyz trading', 'Abc trading'],
        global_headquarters=None,
        one_list_tier=None,
        one_list_account_owner=None,
    )
    # Make the indexed document visible to the search query.
    es_with_collector.flush_and_refresh()

    url = reverse('api-v4:search:public-company')
    # Empty request body: an unfiltered search returning everything indexed.
    response = public_company_api_client.post(url, {})

    assert response.status_code == status.HTTP_200_OK
    # Pin the entire serialised search result for the single company.
    assert response.json() == {
        'count': 1,
        'results': [
            {
                'id': str(company.pk),
                'created_on': company.created_on.isoformat(),
                'modified_on': company.modified_on.isoformat(),
                'name': company.name,
                'reference_code': company.reference_code,
                'company_number': company.company_number,
                'vat_number': company.vat_number,
                'duns_number': company.duns_number,
                'trading_names': company.trading_names,
                'address': {
                    'line_1': company.address_1,
                    # Optional address fields serialise as '' rather than None.
                    'line_2': company.address_2 or '',
                    'town': company.address_town,
                    'county': company.address_county or '',
                    'postcode': company.address_postcode or '',
                    'country': {
                        'id': str(company.address_country.id),
                        'name': company.address_country.name,
                    },
                },
                'registered_address': {
                    'line_1': company.registered_address_1,
                    'line_2': company.registered_address_2 or '',
                    'town': company.registered_address_town,
                    'county': company.registered_address_county or '',
                    'postcode': company.registered_address_postcode or '',
                    'country': {
                        'id': str(company.registered_address_country.id),
                        'name': company.registered_address_country.name,
                    },
                },
                # uk_based is derived from the address country.
                'uk_based': (company.address_country.id == uuid.UUID(
                    constants.Country.united_kingdom.value.id,
                )),
                'uk_region': {
                    'id': str(company.uk_region.id),
                    'name': company.uk_region.name,
                },
                'business_type': {
                    'id': str(company.business_type.id),
                    'name': company.business_type.name,
                },
                'description': company.description,
                'employee_range': {
                    'id': str(company.employee_range.id),
                    'name': company.employee_range.name,
                },
                'export_experience_category': {
                    'id': str(company.export_experience_category.id),
                    'name': company.export_experience_category.name,
                },
                'export_to_countries': [],
                'future_interest_countries': [],
                'headquarter_type': company.headquarter_type,
                'sector': {
                    'id': str(company.sector.id),
                    'name': company.sector.name,
                    # Sector ancestors are serialised as id-only objects.
                    'ancestors': [{
                        'id': str(ancestor.id)
                    } for ancestor in company.sector.get_ancestors()],
                },
                'turnover_range': {
                    'id': str(company.turnover_range.id),
                    'name': company.turnover_range.name,
                },
                'website': company.website,
                'global_headquarters': None,
                'archived': False,
                'archived_on': None,
                'archived_reason': None,
            },
        ],
    }
class TestPublicCompanyViewSet:
    """Tests for the Hawk-authenticated public company view."""

    def test_without_credentials(self, api_client):
        """Test that making a request without credentials returns an error."""
        company = CompanyFactory()
        url = reverse('api-v4:company:public-item', kwargs={'pk': company.pk})
        response = api_client.get(url)
        assert response.status_code == status.HTTP_401_UNAUTHORIZED

    def test_without_scope(self, hawk_api_client):
        """Test that making a request without the correct Hawk scope returns an error."""
        company = CompanyFactory()
        # Credentials that authenticate but lack the required scope.
        hawk_api_client.set_credentials(
            'test-id-without-scope',
            'test-key-without-scope',
        )
        url = reverse('api-v4:company:public-item', kwargs={'pk': company.pk})
        response = hawk_api_client.get(url)
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_without_whitelisted_ip(self, public_company_api_client):
        """Test that making a request without the whitelisted IP returns an error."""
        company = CompanyFactory()
        url = reverse('api-v4:company:public-item', kwargs={'pk': company.pk})
        # Spoof a client IP that is not on the whitelist.
        public_company_api_client.set_http_x_forwarded_for('1.1.1.1')
        response = public_company_api_client.get(url)
        assert response.status_code == status.HTTP_401_UNAUTHORIZED

    @pytest.mark.parametrize('method', ('delete', 'patch', 'post', 'put'))
    def test_other_methods_not_allowed(self, method, public_company_api_client):
        """Test that various HTTP methods are not allowed."""
        company = CompanyFactory()
        url = reverse('api-v4:company:public-item', kwargs={'pk': company.pk})
        response = public_company_api_client.request(method, url)
        assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED

    def test_response_is_signed(self, public_company_api_client):
        """Test that responses are signed."""
        company = CompanyFactory()
        url = reverse('api-v4:company:public-item', kwargs={'pk': company.pk})
        response = public_company_api_client.get(url)
        assert response.status_code == status.HTTP_200_OK
        # Hawk response signing puts the MAC in this header.
        assert 'Server-Authorization' in response

    def test_get(self, public_company_api_client):
        """Test getting a single company."""
        # Global HQ on the One List; the subsidiary below is not, so the
        # group tier should come from the HQ.
        ghq = CompanyFactory(
            global_headquarters=None,
            one_list_tier=OneListTier.objects.first(),
            one_list_account_owner=AdviserFactory(),
        )
        company = CompanyFactory(
            company_number='123',
            trading_names=['Xyz trading', 'Abc trading'],
            global_headquarters=ghq,
            one_list_tier=None,
            one_list_account_owner=None,
        )
        url = reverse('api-v4:company:public-item', kwargs={'pk': company.id})
        response = public_company_api_client.get(url)
        assert response.status_code == status.HTTP_200_OK
        # Pin the entire serialised company representation.
        assert response.json() == {
            'address': {
                'line_1': company.address_1,
                # Optional address fields serialise as '' rather than None.
                'line_2': company.address_2 or '',
                'town': company.address_town,
                'county': company.address_county or '',
                'postcode': company.address_postcode or '',
                'country': {
                    'id': str(company.address_country.id),
                    'name': company.address_country.name,
                },
            },
            'archived': False,
            'archived_on': None,
            'archived_reason': None,
            'business_type': {
                'id': str(company.business_type.id),
                'name': company.business_type.name,
            },
            'company_number': company.company_number,
            'created_on': format_date_or_datetime(company.created_on),
            'description': company.description,
            'duns_number': company.duns_number,
            'employee_range': {
                'id': str(company.employee_range.id),
                'name': company.employee_range.name,
            },
            'export_experience_category': {
                'id': str(company.export_experience_category.id),
                'name': company.export_experience_category.name,
            },
            'export_to_countries': [],
            'future_interest_countries': [],
            'global_headquarters': {
                'id': str(ghq.id),
                'name': ghq.name,
            },
            'headquarter_type': company.headquarter_type,
            'id': str(company.pk),
            'is_number_of_employees_estimated': company.is_number_of_employees_estimated,
            'is_turnover_estimated': company.is_turnover_estimated,
            'modified_on': format_date_or_datetime(company.modified_on),
            'name': company.name,
            'number_of_employees': company.number_of_employees,
            # Group tier is inherited from the global headquarters.
            'one_list_group_tier': {
                'id': str(ghq.one_list_tier.id),
                'name': ghq.one_list_tier.name,
            },
            'reference_code': company.reference_code,
            'registered_address': {
                'line_1': company.registered_address_1,
                'line_2': company.registered_address_2 or '',
                'town': company.registered_address_town,
                'county': company.registered_address_county or '',
                'postcode': company.registered_address_postcode or '',
                'country': {
                    'id': str(company.registered_address_country.id),
                    'name': company.registered_address_country.name,
                },
            },
            'sector': {
                'id': str(company.sector.id),
                'name': company.sector.name,
            },
            'trading_names': company.trading_names,
            'vat_number': company.vat_number,
            # uk_based is derived from the address country.
            'uk_based': (company.address_country.id == uuid.UUID(
                Country.united_kingdom.value.id)),
            'uk_region': {
                'id': str(company.uk_region.id),
                'name': company.uk_region.name,
            },
            'transferred_on': None,
            'transferred_to': None,
            'transfer_reason': '',
            'turnover_range': {
                'id': str(company.turnover_range.id),
                'name': company.turnover_range.name,
            },
            'turnover': company.turnover,
            'website': company.website,
        }

    def test_get_company_without_country(self, public_company_api_client):
        """
        Tests the company item view for a company without a country.

        Checks that the endpoint returns 200 and the uk_based attribute is
        set to None.
        """
        company = CompanyFactory(
            address_country_id=None,
        )
        url = reverse('api-v4:company:public-item', kwargs={'pk': company.id})
        response = public_company_api_client.get(url)
        assert response.status_code == status.HTTP_200_OK
        # With no country, uk_based cannot be determined.
        assert response.json()['uk_based'] is None

    @pytest.mark.parametrize(
        'input_website,expected_website',
        (
            # A scheme-less website gets http:// prepended.
            ('www.google.com', 'http://www.google.com'),
            ('http://www.google.com', 'http://www.google.com'),
            ('https://www.google.com', 'https://www.google.com'),
            # Empty/None values pass through unchanged.
            ('', ''),
            (None, None),
        ),
    )
    def test_get_company_with_website(
        self,
        input_website,
        expected_website,
        public_company_api_client,
    ):
        """
        Test that if the website field on a company doesn't have any scheme
        specified, the endpoint adds it automatically.
        """
        company = CompanyFactory(
            website=input_website,
        )
        url = reverse('api-v4:company:public-item', kwargs={'pk': company.pk})
        response = public_company_api_client.get(url)
        assert response.status_code == status.HTTP_200_OK
        assert response.json()['website'] == expected_website

    @pytest.mark.parametrize(
        'build_company',
        (
            # subsidiary with Global Headquarters on the One List
            lambda one_list_tier: CompanyFactory(
                one_list_tier=None,
                global_headquarters=CompanyFactory(one_list_tier=one_list_tier),
            ),
            # subsidiary with Global Headquarters not on the One List
            lambda one_list_tier: CompanyFactory(
                one_list_tier=None,
                global_headquarters=CompanyFactory(one_list_tier=None),
            ),
            # single company on the One List
            lambda one_list_tier: CompanyFactory(
                one_list_tier=one_list_tier,
                global_headquarters=None,
            ),
            # single company not on the One List
            lambda one_list_tier: CompanyFactory(
                one_list_tier=None,
                global_headquarters=None,
            ),
        ),
        ids=(
            'as_subsidiary_of_one_list_company',
            'as_subsidiary_of_non_one_list_company',
            'as_one_list_company',
            'as_non_one_list_company',
        ),
    )
    def test_one_list_group_tier(self, build_company, public_company_api_client):
        """
        Test that the endpoint includes the One List Tier of the Global
        Headquarters in the group.
        """
        one_list_tier = OneListTier.objects.first()
        company = build_company(one_list_tier)
        url = reverse('api-v4:company:public-item', kwargs={'pk': company.pk})
        response = public_company_api_client.get(url)
        assert response.status_code == status.HTTP_200_OK
        # The tier is determined by the group's global HQ (or the company
        # itself when it has no HQ).
        group_global_headquarters = company.global_headquarters or company
        actual_one_list_group_tier = response.json()['one_list_group_tier']
        if not group_global_headquarters.one_list_tier:
            assert not actual_one_list_group_tier
        else:
            assert actual_one_list_group_tier == {
                'id': str(one_list_tier.id),
                'name': one_list_tier.name,
            }
class TestConfirmMergeViewGet(AdminTestMixin):
    """Tests GET requests for the 'Confirm merge' view."""

    @pytest.mark.parametrize(
        'data',
        (
            {},
            {
                'source_company': '12345',
                'target_company': '64567',
            },
            {
                'source_company': '',
                'target_company': '',
            },
            {
                'source_company': '12345',
            },
            # Callables are resolved at test time so a real company pk can be
            # paired with an invalid one.
            {
                'source_company': lambda: str(CompanyFactory().pk),
                'target_company': '64567',
            },
            {
                'source_company': '13495',
                'target_company': lambda: str(CompanyFactory().pk),
            },
        ),
    )
    def test_returns_400_if_invalid_companies_passed(self, data):
        """
        Test that a 400 is returned when invalid values are passed in the
        query string.

        This could only happen if the query string was manipulated, or one of
        the referenced companies was deleted.
        """
        # Resolve callable values into concrete pks in a new dict rather than
        # mutating `data` in place: parametrize arguments are shared objects,
        # and mutating them would replace the lambdas with stale pks on any
        # re-run of the parametrization.
        resolved_data = {
            key: value() if callable(value) else value
            for key, value in data.items()
        }

        confirm_merge_route_name = admin_urlname(Company._meta, 'merge-confirm')
        confirm_merge_url = reverse(confirm_merge_route_name)

        response = self.client.get(confirm_merge_url, data=resolved_data)
        assert response.status_code == status.HTTP_400_BAD_REQUEST

    def test_returns_200_if_valid_companies_passed(self):
        """Tests that a 200 is returned if valid companies are passed in the query string."""
        source_company = CompanyFactory()
        target_company = CompanyFactory()

        confirm_merge_route_name = admin_urlname(Company._meta, 'merge-confirm')
        confirm_merge_url = reverse(confirm_merge_route_name)

        response = self.client.get(
            confirm_merge_url,
            data={
                'source_company': str(source_company.pk),
                'target_company': str(target_company.pk),
            },
        )
        assert response.status_code == status.HTTP_200_OK
def test_get(self, public_company_api_client):
    """Test getting a single company."""
    # NOTE(review): this method appears to duplicate
    # TestPublicCompanyViewSet.test_get earlier in the file — consider
    # deduplicating; verify which class this copy belongs to.
    # Global HQ on the One List; the subsidiary below is not, so the group
    # tier should come from the HQ.
    ghq = CompanyFactory(
        global_headquarters=None,
        one_list_tier=OneListTier.objects.first(),
        one_list_account_owner=AdviserFactory(),
    )
    company = CompanyFactory(
        company_number='123',
        trading_names=['Xyz trading', 'Abc trading'],
        global_headquarters=ghq,
        one_list_tier=None,
        one_list_account_owner=None,
    )
    url = reverse('api-v4:company:public-item', kwargs={'pk': company.id})
    response = public_company_api_client.get(url)
    assert response.status_code == status.HTTP_200_OK
    # Pin the entire serialised company representation.
    assert response.json() == {
        'address': {
            'line_1': company.address_1,
            # Optional address fields serialise as '' rather than None.
            'line_2': company.address_2 or '',
            'town': company.address_town,
            'county': company.address_county or '',
            'postcode': company.address_postcode or '',
            'country': {
                'id': str(company.address_country.id),
                'name': company.address_country.name,
            },
        },
        'archived': False,
        'archived_on': None,
        'archived_reason': None,
        'business_type': {
            'id': str(company.business_type.id),
            'name': company.business_type.name,
        },
        'company_number': company.company_number,
        'created_on': format_date_or_datetime(company.created_on),
        'description': company.description,
        'duns_number': company.duns_number,
        'employee_range': {
            'id': str(company.employee_range.id),
            'name': company.employee_range.name,
        },
        'export_experience_category': {
            'id': str(company.export_experience_category.id),
            'name': company.export_experience_category.name,
        },
        'export_to_countries': [],
        'future_interest_countries': [],
        'global_headquarters': {
            'id': str(ghq.id),
            'name': ghq.name,
        },
        'headquarter_type': company.headquarter_type,
        'id': str(company.pk),
        'is_number_of_employees_estimated': company.is_number_of_employees_estimated,
        'is_turnover_estimated': company.is_turnover_estimated,
        'modified_on': format_date_or_datetime(company.modified_on),
        'name': company.name,
        'number_of_employees': company.number_of_employees,
        # Group tier is inherited from the global headquarters.
        'one_list_group_tier': {
            'id': str(ghq.one_list_tier.id),
            'name': ghq.one_list_tier.name,
        },
        'reference_code': company.reference_code,
        'registered_address': {
            'line_1': company.registered_address_1,
            'line_2': company.registered_address_2 or '',
            'town': company.registered_address_town,
            'county': company.registered_address_county or '',
            'postcode': company.registered_address_postcode or '',
            'country': {
                'id': str(company.registered_address_country.id),
                'name': company.registered_address_country.name,
            },
        },
        'sector': {
            'id': str(company.sector.id),
            'name': company.sector.name,
        },
        'trading_names': company.trading_names,
        'vat_number': company.vat_number,
        # uk_based is derived from the address country.
        'uk_based': (company.address_country.id == uuid.UUID(
            Country.united_kingdom.value.id)),
        'uk_region': {
            'id': str(company.uk_region.id),
            'name': company.uk_region.name,
        },
        'transferred_on': None,
        'transferred_to': None,
        'transfer_reason': '',
        'turnover_range': {
            'id': str(company.turnover_range.id),
            'name': company.turnover_range.name,
        },
        'turnover': company.turnover,
        'website': company.website,
    }
def test_merge_interactions_contacts_succeeds(
    self,
    factory_relation_kwarg,
    creates_contacts,
    num_related_objects,
):
    """
    Tests that perform_merge() moves contacts and interactions to the target
    company, and marks the source company as archived and transferred.
    """
    # Freeze creation so it can later be distinguished from the merge time.
    creation_time = datetime(2010, 12, 1, 15, 0, 10, tzinfo=utc)
    with freeze_time(creation_time):
        # Build the source company with num_related_objects instances of the
        # parametrized relation (e.g. interactions, orders, referrals).
        source_company = _company_factory(
            **{factory_relation_kwarg: num_related_objects},
        )
    target_company = CompanyFactory()
    user = AdviserFactory()

    # Snapshot all related objects before the merge moves them.
    source_interactions = list(source_company.interactions.all())
    source_contacts = list(source_company.contacts.all())
    source_orders = list(source_company.orders.all())
    source_referrals = list(source_company.referrals.all())
    source_company_list_items = list(source_company.company_list_items.all())
    source_pipeline_list_items = list(source_company.pipeline_list_items.all())

    # Only some relation factories create contacts on the company (e.g. an
    # interaction/order factory creates one contact per object); otherwise
    # the company has no contacts at all.
    assert len(source_contacts) == (num_related_objects if creates_contacts else 0)

    merge_time = datetime(2011, 2, 1, 14, 0, 10, tzinfo=utc)
    with freeze_time(merge_time):
        result = merge_companies(source_company, target_company, user)

    # merge_companies() reports, per model, how many objects had each
    # company field updated.
    assert result == {
        CompanyListItem: {
            'company': len(source_company_list_items)
        },
        CompanyReferral: {
            'company': len(source_referrals)
        },
        Contact: {
            'company': len(source_contacts)
        },
        Interaction: {
            # Interactions have both a single company FK and an M2M.
            'company': len(source_interactions),
            'companies': len(source_interactions),
        },
        InvestmentProject: {field: 0 for field in INVESTMENT_PROJECT_COMPANY_FIELDS},
        Order: {
            'company': len(source_orders)
        },
        PipelineItem: {
            'company': len(source_pipeline_list_items)
        },
    }

    source_related_objects = [
        *source_company_list_items,
        *source_contacts,
        *source_interactions,
        *source_orders,
        *source_referrals,
        *source_pipeline_list_items,
    ]

    for obj in source_related_objects:
        obj.refresh_from_db()

    # All related objects were moved and re-stamped at merge time.
    assert all(obj.company == target_company for obj in source_related_objects)
    assert all(obj.modified_on == merge_time for obj in source_related_objects)

    # The source company itself is archived and marked as transferred.
    source_company.refresh_from_db()

    assert source_company.archived
    assert source_company.archived_by == user
    assert source_company.archived_on == merge_time
    assert source_company.archived_reason == (
        f'This record is no longer in use and its data has been transferred '
        f'to {target_company} for the following reason: Duplicate record.')
    assert source_company.modified_by == user
    assert source_company.modified_on == merge_time
    assert source_company.transfer_reason == Company.TransferReason.DUPLICATE
    assert source_company.transferred_by == user
    assert source_company.transferred_on == merge_time
    assert source_company.transferred_to == target_company
def test_successfully_copies_from_company_model_currently_exporting(
    self,
    monkeypatch,
    num_objects,
    batch_size,
    expected_batches,
):
    """Test that the task copies data for various batch sizes."""
    # Wrap the real task so it still runs but calls can be counted.
    task_mock = Mock(
        wraps=copy_export_countries_to_company_export_country_model,
    )
    monkeypatch.setattr(
        'datahub.dbmaintenance.tasks.copy_export_countries_to_company_export_country_model',
        task_mock,
    )

    # Split a random sample of countries: 5 to copy, the rest as pre-existing
    # noise that must remain untouched.
    countries = list(Country.objects.order_by('?')[:12])
    mock_export_to_countries = countries[:5]
    other_countries_list = countries[5:]

    companies_to_update = CompanyFactory.create_batch(
        num_objects,
        export_to_countries=mock_export_to_countries,
    )
    # NOTE(review): passing factory.SubFactory(...) directly as a
    # create_batch argument is unusual — SubFactory is normally only
    # meaningful as a declaration on a factory class. Verify this actually
    # creates a distinct company per row.
    current_countries_already_in_the_new_table = CompanyExportCountryFactory.create_batch(
        5,
        company=factory.SubFactory(CompanyFactory),
        country=factory.Iterator(other_countries_list),
        status='currently_exporting',
    )

    result_currently_exporting = task_mock.apply_async(
        kwargs={
            'batch_size': batch_size,
            'status': 'currently_exporting',
        },
    )

    assert result_currently_exporting.successful()
    # The task re-schedules itself once per batch.
    assert task_mock.apply_async.call_count == expected_batches

    updated_countries = CompanyExportCountry.objects.filter(company__in=companies_to_update)

    # Every target company received export-country rows...
    assert {
        export_country.company for export_country in updated_countries
    } == set(companies_to_update)
    # ...covering exactly the countries copied from the company model.
    assert {
        item.country for item in updated_countries
    } == set(mock_export_to_countries)

    # Pre-existing rows for other countries must be left untouched: excluding
    # the copied (country, status) pairs leaves exactly the original rows.
    assert set(
        CompanyExportCountry.objects.filter(
            ~Q(
                country_id__in=[
                    export_country.country.pk
                    for export_country in updated_countries
                ],
                status='currently_exporting',
            ),
        ),
    ) == set(current_countries_already_in_the_new_table)