Example #1
def _company_factory(
    num_interactions=0,
    num_contacts=0,
    num_investment_projects=0,
    num_orders=0,
    num_referrals=0,
    num_company_list_items=0,
    num_pipeline_items=0,
):
    """
    Factory for a company that has contacts, interactions, referrals, investment
    projects, OMIS orders, company list items and pipeline items.
    """
    company = CompanyFactory()
    ContactFactory.create_batch(num_contacts, company=company)
    CompanyInteractionFactory.create_batch(num_interactions, company=company)
    CompanyReferralFactory.create_batch(num_referrals,
                                        company=company,
                                        contact=None)
    OrderFactory.create_batch(num_orders, company=company)
    CompanyListItemFactory.create_batch(num_company_list_items,
                                        company=company)
    PipelineItemFactory.create_batch(num_pipeline_items, company=company)

    fields_iter = cycle(INVESTMENT_PROJECT_COMPANY_FIELDS)
    fields = islice(fields_iter, 0, num_investment_projects)
    InvestmentProjectFactory.create_batch(
        num_investment_projects,
        **{field: company
           for field in fields},
    )
    return company
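
A quick illustration of how such a helper is typically consumed in a test. This is a hypothetical usage sketch; only _company_factory itself comes from the example above, and the PipelineItem query mirrors the ones used elsewhere on this page.

def test_company_factory_creates_related_records():
    """Hypothetical usage: build a company with two contacts and one pipeline item."""
    company = _company_factory(num_contacts=2, num_pipeline_items=1)
    # The helper returns the company; related records are queried separately.
    assert PipelineItem.objects.filter(company=company).count() == 1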
Example #2
    def test_merge_when_both_companies_are_on_pipeline_for_same_adviser(
        self,
        source_status,
        target_status,
    ):
        """
        Test that when both the source and target companies are on the pipeline for the
        same adviser, the merge succeeds and only the target company's pipeline item remains.
        """
        adviser = AdviserFactory()
        source_company = CompanyFactory()
        target_company = CompanyFactory()

        PipelineItemFactory(
            adviser=adviser,
            company=source_company,
            status=source_status,
        )
        PipelineItemFactory(
            adviser=adviser,
            company=target_company,
            status=target_status,
        )

        user = AdviserFactory()
        merge_companies(source_company, target_company, user)

        assert not PipelineItem.objects.filter(
            adviser=adviser,
            company=source_company,
        ).exists()
        assert PipelineItem.objects.filter(
            adviser=adviser,
            company=target_company,
        ).exists()
Example #3
def test_no_change(s3_stubber, caplog):
    """Test that the command ignores records that haven't changed
    or records with incorrect current values.
    """
    caplog.set_level('WARNING')

    old_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(
            ['sector_1_old', 'sector_2_old', 'sector_3_old']),
    )

    pipeline_items = PipelineItemFactory.create_batch(
        3,
        sector_id=factory.Iterator([sector.pk for sector in old_sectors]),
    )

    new_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(
            ['sector_1_new', 'sector_2_new', 'sector_3_new']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,old_sector_id,new_sector_id
{pipeline_items[0].pk},{old_sectors[0].pk},{new_sectors[0].pk}
{pipeline_items[1].pk},{old_sectors[1].pk},{old_sectors[1].pk}
{pipeline_items[2].pk},00000000-0000-0000-0000-000000000000,{new_sectors[2].pk}
"""

    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(csv_content.encode(encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )

    call_command('update_pipeline_item_sector', bucket, object_key)

    for pipeline_item in pipeline_items:
        pipeline_item.refresh_from_db()

    assert (
        f'Not updating PipelineItem {pipeline_items[1]} as its sector has not changed'
    ) in caplog.text
    assert (
        f'Not updating PipelineItem {pipeline_items[2]} as its sector has not changed'
    ) in caplog.text
    assert len(caplog.records) == 2

    assert [pipeline_item.sector for pipeline_item in pipeline_items] == [
        new_sectors[0],
        old_sectors[1],
        old_sectors[2],
    ]
Example #4
    def test_with_multiple_records(self, data_flow_api_client):
        """Test that the endpoint returns the correct number of records."""
        with freeze_time('2019-01-01 12:30:00'):
            item1 = PipelineItemFactory()
        with freeze_time('2019-01-03 12:00:00'):
            item2 = PipelineItemFactory()
        with freeze_time('2019-01-01 12:00:00'):
            item3 = PipelineItemFactory()
            item4 = ArchivedPipelineItemFactory()
        response = data_flow_api_client.get(self.view_url)
        assert response.status_code == status.HTTP_200_OK
        response_results = response.json()['results']
        assert len(response_results) == 4
        expected_list = sorted([item3, item4], key=lambda x: x.pk) + [item1, item2]
        for index, item in enumerate(expected_list):
            assert str(item.id) == response_results[index]['id']
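
The expected_list built above implies the endpoint returns items ordered by creation time and then by primary key. A hypothetical queryset with that ordering, assuming the model records its creation time in a created_on field (an assumption, not shown in these examples):

# Ordering consistent with expected_list: oldest first, ties broken by pk.
queryset = PipelineItem.objects.order_by('created_on', 'pk')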
Example #5
def _company_factory(
    num_interactions=0,
    num_contacts=0,
    num_orders=0,
    num_referrals=0,
    num_company_list_items=0,
    num_pipeline_items=0,
):
    """Factory for a company that has companies, interactions and OMIS orders."""
    company = CompanyFactory()
    ContactFactory.create_batch(num_contacts, company=company)
    CompanyInteractionFactory.create_batch(num_interactions, company=company)
    CompanyReferralFactory.create_batch(num_referrals,
                                        company=company,
                                        contact=None)
    OrderFactory.create_batch(num_orders, company=company)
    CompanyListItemFactory.create_batch(num_company_list_items,
                                        company=company)
    PipelineItemFactory.create_batch(num_pipeline_items, company=company)
    return company
Example #6
def test_non_existent_pipeline_item(s3_stubber, caplog):
    """Test that the command logs an error when the pipeline item PK does not exist."""
    caplog.set_level('ERROR')

    old_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(
            ['sector_1_old', 'sector_2_old', 'sector_3_old']),
    )

    pipeline_items = PipelineItemFactory.create_batch(
        3,
        sector_id=factory.Iterator([sector.pk for sector in old_sectors]),
    )

    new_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(
            ['sector_1_new', 'sector_2_new', 'sector_3_new']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,old_sector_id,new_sector_id
{pipeline_items[0].pk},{old_sectors[0].pk},{new_sectors[0].pk}
{pipeline_items[1].pk},{old_sectors[1].pk},{new_sectors[1].pk}
00000000-0000-0000-0000-000000000000,{old_sectors[2].pk},{new_sectors[2].pk}
"""

    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(csv_content.encode(encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )

    call_command('update_pipeline_item_sector', bucket, object_key)

    for pipeline_item in pipeline_items:
        pipeline_item.refresh_from_db()

    assert 'PipelineItem matching query does not exist' in caplog.text
    assert len(caplog.records) == 1

    assert [pipeline_item.sector for pipeline_item in pipeline_items] == [
        new_sectors[0],
        new_sectors[1],
        old_sectors[2],
    ]
Example #7
def test_audit_log(s3_stubber):
    """Test that reversion revisions are created."""
    sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1', 'sector_2', 'sector_3']),
    )

    pipeline_item_without_change = PipelineItemFactory(sector_id=sectors[0].pk)

    pipeline_item_with_change = PipelineItemFactory(sector_id=sectors[1].pk)

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,old_sector_id,new_sector_id
{pipeline_item_without_change.pk},{sectors[0].pk},{sectors[0].pk}
{pipeline_item_with_change.pk},{sectors[1].pk},{sectors[2].pk}
"""

    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(csv_content.encode(encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )

    call_command('update_pipeline_item_sector', bucket, object_key)

    versions = Version.objects.get_for_object(pipeline_item_without_change)
    assert versions.count() == 0

    versions = Version.objects.get_for_object(pipeline_item_with_change)
    assert versions.count() == 1
    assert versions[0].revision.get_comment() == 'PipelineItem sector correction.'
Example #8
    def test_merge_when_both_companies_are_on_pipeline_diff_adviser(self):
        """
        Test that when the source and target companies are on the pipeline with different
        advisers, the merge succeeds and both pipeline items end up on the target company.
        """
        adviser_1 = AdviserFactory()
        adviser_2 = AdviserFactory()
        source_company = CompanyFactory()
        target_company = CompanyFactory()

        PipelineItemFactory(
            adviser=adviser_1,
            company=source_company,
            status=PipelineItem.Status.LEADS,
        )
        PipelineItemFactory(
            adviser=adviser_2,
            company=target_company,
            status=PipelineItem.Status.IN_PROGRESS,
        )

        user = AdviserFactory()
        merge_companies(source_company, target_company, user)

        assert not PipelineItem.objects.filter(
            adviser=adviser_1,
            company=source_company,
        ).exists()
        assert PipelineItem.objects.filter(
            adviser=adviser_1,
            company=target_company,
        ).exists()
        assert PipelineItem.objects.filter(
            adviser=adviser_2,
            company=target_company,
        ).exists()
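
Taken together, this test and the one in example #2 pin down a de-duplication rule for pipeline items during a merge: a source item is dropped when the same adviser already has an item for the target company, otherwise it is moved across. A hypothetical sketch of that rule, not the project's actual merge code:

def _move_pipeline_items_sketch(source_company, target_company):
    """Hypothetical: migrate pipeline items, skipping (adviser, company) duplicates."""
    for item in PipelineItem.objects.filter(company=source_company):
        duplicate = PipelineItem.objects.filter(
            adviser=item.adviser,
            company=target_company,
        ).exists()
        if duplicate:
            # The adviser already tracks the target company, so drop the source item.
            item.delete()
        else:
            item.company = target_company
            item.save(update_fields=('company',))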
Example #9
def test_simulate(s3_stubber):
    """Test that the command simulates updates if --simulate is passed in."""
    old_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(
            ['sector_1_old', 'sector_2_old', 'sector_3_old']),
    )

    pipeline_items = PipelineItemFactory.create_batch(
        3,
        sector_id=factory.Iterator([sector.pk for sector in old_sectors]),
    )

    new_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(
            ['sector_1_new', 'sector_2_new', 'sector_3_new']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,old_sector_id,new_sector_id
{pipeline_items[0].pk},{old_sectors[0].pk},{new_sectors[0].pk}
{pipeline_items[1].pk},{old_sectors[1].pk},{new_sectors[1].pk}
{pipeline_items[2].pk},{old_sectors[2].pk},{new_sectors[2].pk}
"""

    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(csv_content.encode(encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )

    call_command('update_pipeline_item_sector',
                 bucket,
                 object_key,
                 simulate=True)

    for pipeline_item in pipeline_items:
        pipeline_item.refresh_from_db()

    assert [pipeline_item.sector
            for pipeline_item in pipeline_items] == old_sectors
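
Taken together, the command tests in examples #3, #6, #7 and #9 describe the expected behaviour of update_pipeline_item_sector: read an id/old_sector_id/new_sector_id CSV from S3, skip rows whose sector has not changed or whose current value does not match, log missing items, wrap each update in a reversion revision, and change nothing when --simulate is passed. The following is a minimal hypothetical sketch of a command with that behaviour, not the project's actual implementation; the PipelineItem import is deliberately omitted because its module path is not shown in these examples.

import csv
import logging
from io import StringIO

import boto3
import reversion
from django.core.management.base import BaseCommand

logger = logging.getLogger(__name__)


class Command(BaseCommand):
    """Hypothetical sketch of the behaviour the tests above exercise."""

    def add_arguments(self, parser):
        parser.add_argument('bucket')
        parser.add_argument('object_key')
        parser.add_argument('--simulate', action='store_true')

    def handle(self, *args, **options):
        s3_body = boto3.client('s3').get_object(
            Bucket=options['bucket'],
            Key=options['object_key'],
        )['Body']
        rows = csv.DictReader(StringIO(s3_body.read().decode('utf-8')))

        for row in rows:
            try:
                item = PipelineItem.objects.get(pk=row['id'])
            except PipelineItem.DoesNotExist:
                # Surfaces the 'PipelineItem matching query does not exist' log line.
                logger.exception('Failed to update PipelineItem')
                continue

            unchanged = row['old_sector_id'] == row['new_sector_id']
            mismatch = str(item.sector_id) != row['old_sector_id']
            if unchanged or mismatch:
                logger.warning(
                    f'Not updating PipelineItem {item} as its sector has not changed',
                )
                continue

            if options['simulate']:
                continue

            item.sector_id = row['new_sector_id']
            with reversion.create_revision():
                item.save(update_fields=('sector',))
                reversion.set_comment('PipelineItem sector correction.')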
Example #10
    def test_str(self):
        """Test the human friendly string representation of the object."""
        pipeline_item = PipelineItemFactory()
        status = f'{pipeline_item.company} - {pipeline_item.name} - {pipeline_item.status}'
        assert str(pipeline_item) == status
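
The format asserted here implies a __str__ roughly like the following. This is a sketch inferred from the test, not the model's actual code:

    def __str__(self):
        """Human-friendly representation: '<company> - <name> - <status>'."""
        return f'{self.company} - {self.name} - {self.status}'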
Example #11
def company_with_pipeline_items_factory():
    """Factory for a company that is on users' personal pipeline."""
    company = CompanyFactory()
    PipelineItemFactory.create_batch(3, company=company)
    return company
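
A hypothetical way a fixture helper like this tends to be used, e.g. parametrising a test over different company setups. The test body below is an illustration and an assumption, not taken from the project:

import pytest


@pytest.mark.parametrize(
    'company_factory',
    (
        CompanyFactory,
        company_with_pipeline_items_factory,
    ),
)
def test_company_str_is_not_empty(company_factory):
    # Hypothetical test: a bare company and one with pipeline items behave the same here.
    company = company_factory()
    assert str(company)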