def _process_row(self, row, simulate=False, overwrite=False, **options):
    """Process one single row."""
    company = Company.objects.get(pk=parse_uuid(row['id']))

    if self._should_update(company, overwrite=overwrite):
        global_hq_id = parse_uuid(row['global_hq_id'])
        global_hq = {
            'id': global_hq_id,
        } if global_hq_id is not None else None

        data = {
            'global_headquarters': global_hq,
        }
        serializer = CompanySerializer(
            instance=company,
            data=data,
            partial=True,
        )
        serializer.is_valid(raise_exception=True)

        if simulate:
            return

        with reversion.create_revision():
            serializer.validated_data['modified_by'] = None
            serializer.save()
            reversion.set_comment('Global HQ data correction.')
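# Illustrative sketch only (an assumption, not the original command's helper):
# _should_update, as called above, plausibly skips companies that already have a
# global headquarters unless --overwrite was passed.
def _should_update(self, company, overwrite=False):
    """Return whether this company's global headquarters should be modified."""
    if overwrite:
        return True
    return company.global_headquarters_id is None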
def _process_row(self, row, simulate=False, **options):
    """Process a single row."""
    pk = parse_uuid(row['id'])
    pipeline_item = PipelineItem.objects.get(pk=pk)
    old_sector_id = parse_uuid(row['old_sector_id'])
    new_sector_id = parse_uuid(row['new_sector_id'])

    if any(
        [
            pipeline_item.sector.pk != old_sector_id,
            pipeline_item.sector.pk == new_sector_id,
        ],
    ):
        logger.warning(
            f'Not updating PipelineItem {pipeline_item} as its sector has not changed',
        )
        return

    pipeline_item.sector = Sector.objects.get(pk=new_sector_id)

    if simulate:
        return

    with reversion.create_revision():
        pipeline_item.save(update_fields=('sector',))
        reversion.set_comment('PipelineItem sector correction.')
def _process_row(self, row, simulate=False, **options):
    """Process one single row."""
    pk = parse_uuid(row['id'])
    sector = Sector.objects.get(pk=pk)
    old_parent_pk = parse_uuid(row['old_parent_id'])
    new_parent_pk = parse_uuid(row['new_parent_id'])

    if sector.parent and any([
        sector.parent.pk != old_parent_pk,
        sector.parent.pk == new_parent_pk,
    ]):
        logger.warning(
            f'Not updating sector {sector} as its parent has not changed',
        )
        return

    if new_parent_pk:
        new_parent = Sector.objects.get(pk=new_parent_pk)
    else:
        new_parent = None

    if simulate:
        return

    with reversion.create_revision():
        sector.move_to(new_parent)
        reversion.set_comment('Sector parent correction.')
def _process_row(self, row, simulate=False, **options):
    """Process one single row."""
    company = Company.objects.get(pk=parse_uuid(row['id']))

    # Remove company.pk from the record of companies to reset
    self.companies_to_reset.pop(company.pk, None)

    classification_id = parse_uuid(row['classification_id'])
    one_list_account_owner_id = parse_uuid(row['one_list_account_owner_id'])

    if self._should_update(company, classification_id, one_list_account_owner_id):
        self._update_company(company, classification_id, one_list_account_owner_id, simulate)
def _process_row(self, row, simulate=False, ignore_old_regions=False, **options):
    """Process one single row."""
    pk = parse_uuid(row['id'])
    investment_project = InvestmentProject.objects.get(pk=pk)

    if investment_project.archived_on:
        action_required = row['Action Required']
        if action_required in self.STATUS_MAP:
            investment_project.status = self.STATUS_MAP[action_required]
        else:
            logger.warning(
                f'Not updating project {pk} as its desired status '
                f'could not be derived from [{action_required}].',
            )
            return
    else:
        logger.warning(
            f'Not updating project {pk} as it is already unarchived.',
        )
        return

    if simulate:
        return

    with reversion.create_revision():
        # unarchive performs save
        investment_project.unarchive()
        reversion.set_comment(
            'Investment Project was unarchived and has changed its status.',
        )
def _process_row(self, row, simulate=False, **options):
    """Process one single row."""
    pk = parse_uuid(row['datahub_company_id'])
    company = Company.objects.get(pk=pk)
    has_profile = parse_bool(row['has_find_a_supplier_profile'])
    is_published = parse_bool(row['is_published_find_a_supplier'])

    profile_status = None
    if has_profile and is_published:
        profile_status = Company.GreatProfileStatus.PUBLISHED
    elif has_profile:
        profile_status = Company.GreatProfileStatus.UNPUBLISHED

    if company.great_profile_status == profile_status:
        return

    company.great_profile_status = profile_status

    if simulate:
        return

    with reversion.create_revision():
        company.save(update_fields=('great_profile_status',))
        reversion.set_comment('GREAT profile status updated.')
def _process_row(self, row, simulate=False, ignore_old_regions=False, **options):
    """Process one single row."""
    pk = parse_uuid(row['id'])
    investment_project = InvestmentProject.objects.get(pk=pk)
    allow_blank_possible_uk_regions = parse_bool(row['allow_blank_possible_uk_regions'])
    uk_region_locations = parse_uuid_list(row['uk_region_locations'])

    current_regions = investment_project.uk_region_locations.all()
    current_region_ids = set(region.pk for region in current_regions)

    if (
        investment_project.allow_blank_possible_uk_regions == allow_blank_possible_uk_regions
        and current_region_ids == set(uk_region_locations)
    ):
        return

    if not ignore_old_regions:
        old_uk_region_locations = parse_uuid_list(row['old_uk_region_locations'])
        if current_region_ids != set(old_uk_region_locations):
            return

    investment_project.allow_blank_possible_uk_regions = allow_blank_possible_uk_regions

    if simulate:
        return

    with reversion.create_revision():
        investment_project.save(
            update_fields=('allow_blank_possible_uk_regions',),
        )
        investment_project.uk_region_locations.set(uk_region_locations)
        reversion.set_comment('Possible UK regions data migration correction.')
def _process_row(self, row, simulate=False, **options):
    """Process one single row."""
    company = Company.objects.get(pk=parse_uuid(row['id']))
    batch_number = options['batch_number']
    raw_data = b64decode(row['data'])
    data = json.loads(raw_data)

    if not self._validate_data(data):
        logger.warning(
            'Required fields are missing for given company: %s, %s',
            company.pk,
            data,
        )
        return

    enriched_data = self._resolve_dnb_address_country(data)
    if not enriched_data:
        logger.warning(
            'Could not resolve country for given company: %s, %s',
            company.pk,
            data,
        )
        return

    if not simulate:
        DnBMatchingCSVRecord.objects.update_or_create(
            company_id=company.id,
            defaults={
                'batch_number': batch_number,
                'data': enriched_data,
            },
        )
def test_uk_region_name(actual_uk_region_id, possible_uk_region_id, expected):
    """Tests that the uk_region_name field has the correct default value."""
    investment_project = InvestmentProjectFactory()
    if actual_uk_region_id:
        investment_project.actual_uk_regions.add(parse_uuid(actual_uk_region_id))
    if possible_uk_region_id:
        investment_project.uk_region_locations.add(parse_uuid(possible_uk_region_id))

    etl = ETLInvestmentProjects(destination=MIInvestmentProject)

    updated, created = etl.load()
    assert (0, 1) == (updated, created)

    mi_investment_project = MIInvestmentProject.objects.values(*etl.COLUMNS).first()
    assert mi_investment_project['uk_region_name'] == expected
def _process_row(self, row, update_descriptor, simulate=False, **options):
    """Process one single row."""
    pk = parse_uuid(row['id'])
    company = Company.objects.get(pk=pk)

    if simulate:
        return

    rollback_dnb_company_update(company, update_descriptor)
def _process_row(self, row, simulate=False, **options):
    """Process one single row."""
    sector_id = parse_uuid(row['sector_id'])
    fdi_sic_grouping_id = parse_uuid(row['fdi_sic_grouping_id'])

    matches = InvestmentSector.objects.filter(
        sector_id=sector_id,
        fdi_sic_grouping_id=fdi_sic_grouping_id,
    )

    if len(matches) == 0:
        error_msg = (
            'InvestmentSector does not exist\n'
            f'sector_id: {sector_id}\n'
            f'fdi_sic_grouping_id: {fdi_sic_grouping_id}'
        )
        raise Exception(error_msg)

    if simulate:
        return

    with reversion.create_revision():
        matches.delete()
        reversion.set_comment('InvestmentSector deletion.')
def _process_row(self, row, simulate=False, **options):
    """Process a single row."""
    pk = parse_uuid(row['id'])
    company = Company.objects.get(pk=pk)
    old_sector_id = parse_uuid(row['old_sector_id'])
    new_sector_id = parse_uuid(row['new_sector_id'])

    if any([
        company.sector.pk != old_sector_id,
        company.sector.pk == new_sector_id,
    ]):
        logger.warning(
            f'Not updating company {company} as its sector has not changed',
        )
        return

    company.sector = Sector.objects.get(pk=new_sector_id)

    if simulate:
        return

    with reversion.create_revision():
        company.save(update_fields=('sector',))
        reversion.set_comment('Company sector correction.')
def _process_row(self, row, simulate=False, **options):
    """Process one single row."""
    pk = parse_uuid(row['id'])
    segment = parse_limited_string(row['segment'])
    sector_cluster_pk = parse_uuid(row['sector_cluster_id'])
    parent_pk = parse_uuid(row['parent_id'])

    sector = Sector(pk=pk, segment=segment)
    if sector_cluster_pk:
        sector.sector_cluster = SectorCluster.objects.get(pk=sector_cluster_pk)
    if parent_pk:
        sector.parent = Sector.objects.get(pk=parent_pk)

    if simulate:
        return

    with reversion.create_revision():
        # Save all fields: passing update_fields here would force an UPDATE and
        # skip the insert (and the cluster/parent) for this newly created sector.
        sector.save()
        reversion.set_comment('Sector creation.')
def _process_row(self, row, simulate=False, overwrite=False, **options):
    """Process a single row."""
    pk = parse_uuid(row['id'])
    company = Company.objects.get(pk=pk)
    sector_id = parse_uuid(row['sector_id'])

    if company.sector_id and not overwrite:
        logger.warning(
            f'Skipping update of company {company.pk} as it already has a sector.',
        )
        return

    if company.sector_id == sector_id:
        return

    company.sector_id = sector_id

    if simulate:
        return

    with reversion.create_revision():
        company.save(update_fields=('sector_id',))
        reversion.set_comment('Sector updated.')
def _process_row(self, row, simulate=False, **options):
    """Process one single row."""
    company = Company.objects.get(pk=parse_uuid(row['id']))
    raw_data = b64decode(row['data'])
    data = json.loads(raw_data)

    if data and not simulate:
        DnBMatchingResult.objects.update_or_create(
            company_id=company.id,
            defaults={
                'data': data,
            },
        )
def _process_row(self, row, simulate=False, overwrite=False, **options):
    """Process one single row."""
    pk = parse_uuid(row['id'])
    status_id = parse_uuid(row['status_id'])
    grant_amount_offered = parse_decimal(row['grant_offered'])
    net_company_receipt = parse_decimal(row['net_company_receipt'])

    interaction = Interaction.objects.get(pk=pk)

    something_updated = _update_fields(
        interaction,
        status_id,
        grant_amount_offered,
        net_company_receipt,
        overwrite,
    )

    if simulate or not something_updated:
        return

    interaction.save(
        update_fields=(
            'service_delivery_status',
            'grant_amount_offered',
            'net_company_receipt',
        ),
    )
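# Illustrative sketch only (an assumption, not the original module-level helper):
# _update_fields, as called above, plausibly copies each parsed value onto the
# interaction, leaving already-populated fields alone unless overwrite is set,
# and reports whether anything changed so the caller knows whether to save.
# The attribute names below are inferred from the update_fields tuple above.
def _update_fields(interaction, status_id, grant_amount_offered, net_company_receipt, overwrite):
    """Update the interaction in memory and return True if any field changed."""
    new_values = {
        'service_delivery_status_id': status_id,
        'grant_amount_offered': grant_amount_offered,
        'net_company_receipt': net_company_receipt,
    }
    something_updated = False
    for field_name, new_value in new_values.items():
        current_value = getattr(interaction, field_name)
        if current_value is not None and not overwrite:
            continue
        if current_value != new_value:
            setattr(interaction, field_name, new_value)
            something_updated = True
    return something_updated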
def _process_row(self, row, simulate=False, **options):
    """Process one single row."""
    sector_id = parse_uuid(row['sector_id'])
    fdi_sic_grouping_id = parse_uuid(row['fdi_sic_grouping_id'])

    matches = InvestmentSector.objects.filter(sector_id=sector_id)
    if len(matches) > 0:
        raise Exception(
            f'InvestmentSector for sector_id: {sector_id} already exists',
        )

    sector = Sector.objects.get(pk=sector_id)
    fdi_sic_grouping = FDISICGrouping.objects.get(pk=fdi_sic_grouping_id)

    investment_sector = InvestmentSector(
        sector=sector,
        fdi_sic_grouping=fdi_sic_grouping,
    )

    if simulate:
        return

    with reversion.create_revision():
        investment_sector.save()
        reversion.set_comment('InvestmentSector creation.')
def _process_row(self, row, simulate=False, **options):
    """Process a single row."""
    pk = parse_uuid(row['id'])
    investment_project = InvestmentProject.objects.get(pk=pk)
    old_sector_id = parse_uuid(row['old_sector_id'])
    new_sector_id = parse_uuid(row['new_sector_id'])

    if any([
        investment_project.sector.pk != old_sector_id,
        investment_project.sector.pk == new_sector_id,
    ]):
        logger.warning(
            f'Not updating InvestmentProject {investment_project} as its '
            'sector has not changed',
        )
        return

    investment_project.sector = Sector.objects.get(pk=new_sector_id)

    if simulate:
        return

    with reversion.create_revision():
        investment_project.save(update_fields=('sector',))
        reversion.set_comment('InvestmentProject sector correction.')
def _process_row(self, row, simulate=False, **options):
    """Process one single row."""
    contact = Contact.objects.get(pk=parse_uuid(row['id']))
    new_accepts_dit_email_marketing = parse_bool(row['accepts_dit_email_marketing'])

    if contact.accepts_dit_email_marketing == new_accepts_dit_email_marketing:
        return

    contact.accepts_dit_email_marketing = new_accepts_dit_email_marketing

    if not simulate:
        with reversion.create_revision():
            contact.save(update_fields=('accepts_dit_email_marketing',))
            reversion.set_comment('Accepts DIT email marketing correction.')
def _process_row(self, row, simulate=False, **options):
    """Process one single row."""
    pk = parse_uuid(row['id'])
    company = Company.objects.get(pk=pk)
    company_number = parse_limited_string(row['company_number'])

    if company.company_number == company_number:
        return

    company.company_number = company_number

    if simulate:
        return

    with reversion.create_revision():
        company.save(update_fields=('company_number',))
        reversion.set_comment('Company number updated.')
def _process_row(self, row, simulate=False, **options):
    """Process one single row."""
    pk = parse_uuid(row['id'])
    investment_project = InvestmentProject.objects.get(pk=pk)
    status = parse_choice(row['status'], InvestmentProject.STATUSES)

    if investment_project.status == status:
        return

    investment_project.status = status

    if simulate:
        return

    with reversion.create_revision():
        investment_project.save(update_fields=('status',))
        reversion.set_comment('Bulk status update.')
def _process_row(self, row, simulate=False, **options):
    """Process one single row."""
    pk = parse_uuid(row['id'])
    contact_email = parse_email(row['contact_email'])
    adviser = Advisor.objects.get(pk=pk)

    if adviser.contact_email == contact_email:
        return

    adviser.contact_email = contact_email

    if simulate:
        return

    with reversion.create_revision():
        adviser.save(update_fields=('contact_email',))
        reversion.set_comment('Loaded contact email from spreadsheet.')
def _process_row(self, row, simulate=False, **options):
    """Process one single row."""
    pk = parse_uuid(row['id'])
    investment_project = InvestmentProject.objects.get(pk=pk)
    new_delivery_partners = parse_uuid_list(row['delivery_partners'])

    if investment_project.delivery_partners.all():
        logger.warning(
            'Not updating project with existing delivery partners: %s, %s',
            investment_project.project_code,
            investment_project,
        )
        return

    if not simulate:
        with reversion.create_revision():
            investment_project.delivery_partners.set(new_delivery_partners)
            reversion.set_comment('Investment delivery partners migration.')
def _process_row(self, row, simulate=False, **options):
    """Process one single row."""
    pk = parse_uuid(row['id'])
    company = Company.objects.get(pk=pk)
    old_company_name = parse_limited_string(row['old_company_name'])
    new_company_name = parse_limited_string(row['new_company_name'])

    if company.name != old_company_name:
        return

    company.name = new_company_name

    if simulate:
        return

    with reversion.create_revision():
        company.save(update_fields=('name',))
        reversion.set_comment('Company name correction.')
def _process_row(self, row, simulate=False, **options):
    """Process one single row."""
    pk = parse_uuid(row['id'])
    sector = Sector.objects.get(pk=pk)
    old_sector_segment = parse_limited_string(row['old_sector_segment'])
    new_sector_segment = parse_limited_string(row['new_sector_segment'])

    if any([
        sector.segment != old_sector_segment,
        sector.segment == new_sector_segment,
    ]):
        logger.warning(f'Not updating sector {sector} as its segment has not changed')
        return

    sector.segment = new_sector_segment

    if simulate:
        return

    with reversion.create_revision():
        sector.save(update_fields=('segment',))
        reversion.set_comment('Sector segment correction.')
def _process_row(self, row, simulate=False, **options):
    """Process one single row."""
    pk = parse_uuid(row['id'])
    sector = Sector.objects.get(pk=pk)

    # Check that the sector is not referenced by any objects
    if sector not in self.unreferenced_sectors:
        logger.warning(
            f'Not deleting sector {sector} as it is referenced by another object',
        )
        return

    if simulate:
        return

    # This will attempt to delete all descendants as well, but as the
    # parent foreign key relationship is protected it will fail.
    # Therefore, children must be deleted before attempting to delete
    # their parent.
    sector.delete()
def _process_row(self, row, simulate=False, fields=None, **options):
    """Process one single row."""
    self.processed_count += 1
    pk = parse_uuid(row['id'])
    company = Company.objects.get(pk=pk)

    if not company.duns_number:
        raise CompanyNotDunsLinkedError()

    if simulate:
        return

    self._limit_call_rate()

    sync_company_with_dnb.apply(
        args=(pk, fields, self.update_descriptor),
        throw=True,
    )
    self.processed_ids.append(pk)
    self.success_count += 1
def _process_row(self, row, simulate=False, **options):
    """Process one single row."""
    score_dict = {
        value.lower(): key
        for key, value in Company.EXPORT_POTENTIAL_SCORES
    }
    pk = parse_uuid(row['datahub_company_id'])
    company = Company.objects.get(pk=pk)
    raw_potential = parse_limited_string(row['export_propensity'])
    export_potential = score_dict[raw_potential.lower()]

    if company.export_potential == export_potential:
        return

    company.export_potential = export_potential

    if simulate:
        return

    with reversion.create_revision():
        company.save(update_fields=('export_potential',))
        reversion.set_comment('Export potential updated.')
def _process_row(self, row, simulate=False, **options):
    """Process a single row."""
    # .parse() creates a datetime object even in the absence of hours and minutes
    supplied_datetime = parse(
        row['Suggested Created Date'],
        parserinfo=self._uk_date_format_parserinfo,
    )
    pk = parse_uuid(row['UUID'])
    company = Company.objects.get(pk=pk)

    if company.created_on is not None:
        logger.warning(
            f'Company {pk} already has a `created_on`; skipping',
        )
        return

    if simulate:
        return

    company.created_on = supplied_datetime
    with reversion.create_revision():
        company.save(update_fields=('created_on',))
        reversion.set_comment('Created datetime updated.')
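# Illustrative sketch (an assumption, not the original attribute definition):
# _uk_date_format_parserinfo is presumably a dateutil parserinfo configured for
# day-first, UK-style dates, so '02/03/2019' is read as 2 March 2019 rather
# than 3 February 2019.
from dateutil.parser import parse, parserinfo

_uk_date_format_parserinfo = parserinfo(dayfirst=True)

assert parse('02/03/2019', parserinfo=_uk_date_format_parserinfo).month == 3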
def _process_row(self, row, simulate=False, ignore_old_regions=False, **options):
    """Processes a CSV file row."""
    pk = parse_uuid(row['id'])
    investment_project = InvestmentProject.objects.get(pk=pk)
    old_business_activity_ids = parse_uuid_list(row['old_business_activities'])
    new_business_activity_ids = parse_uuid_list(row['new_business_activities'])

    current_business_activities = investment_project.business_activities.all()
    current_business_activity_ids = {
        activity.pk for activity in current_business_activities
    }

    if current_business_activity_ids == set(new_business_activity_ids):
        return

    if current_business_activity_ids != set(old_business_activity_ids):
        logger.warning(
            'Not updating project %s as its business activities have changed',
            pk,
        )
        return

    if simulate:
        return

    with reversion.create_revision():
        investment_project.business_activities.set(new_business_activity_ids)
        reversion.set_comment('Business activities data migration correction.')
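# Illustrative sketch only: the _process_row hooks above are assumed to be driven
# by a shared CSV-based Django management command roughly along these lines.
# The class name, option names, and error handling here are assumptions, not the
# project's actual base class.
import csv
from logging import getLogger

from django.core.management.base import BaseCommand

logger = getLogger(__name__)


class CSVRowCommand(BaseCommand):
    """Reads a CSV file and delegates each row to _process_row()."""

    def add_arguments(self, parser):
        parser.add_argument('csv_path', help='Path to the CSV file to process.')
        parser.add_argument(
            '--simulate',
            action='store_true',
            help='Validate and process rows without saving any changes.',
        )

    def handle(self, *args, **options):
        with open(options['csv_path'], encoding='utf-8') as csv_file:
            for row in csv.DictReader(csv_file):
                try:
                    self._process_row(row, **options)
                except Exception:
                    # Log and continue so one bad row does not abort the whole file
                    logger.exception('Failed to process row %r', row)

    def _process_row(self, row, simulate=False, **options):
        raise NotImplementedError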