def get_plan_list_for_country(country_iso3):
    source_url = HPC_V1_ROOT_URL + 'plan/country/{}'.format(country_iso3)
    try:
        return get_json_from_url(source_url)['data']
    except Exception:
        logger.exception('Error trying to list plans for country')
        return []


def get_project_list_for_plan(plan_id):
    source_url = HPC_V1_ROOT_URL + 'project/plan/{}'.format(plan_id)
    try:
        return get_json_from_url(source_url)['data']
    except Exception:
        logger.exception('Error trying to list projects for response plan')
        return []
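
# Both helpers above delegate HTTP and JSON handling to get_json_from_url,
# which is not part of this listing. A minimal sketch of what such a helper
# could look like, assuming the `requests` library; the timeout value and the
# lack of retries/auth headers are assumptions, not the project's actual behaviour.
import logging

import requests

logger = logging.getLogger(__name__)


def get_json_from_url(url, timeout=30):
    # Hypothetical sketch only: fetch the URL and return the decoded JSON body,
    # letting the callers' try/except blocks deal with failures.
    response = requests.get(url, timeout=timeout)
    response.raise_for_status()
    return response.json()
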
Example no. 3
def sync_partners(area):
    source_url = HPC_V1_ROOT_URL + 'organization'
    org_data = get_json_from_url(source_url)['data']

    # Prepare OCHA data dict
    orgs_dict = dict()
    for org in org_data:
        orgs_dict[str.upper(org['name'])] = org['id']
        if org['abbreviation']:
            orgs_dict[str.upper(org['abbreviation'])] = org['id']

    # Iterate over stored Partners and try to assign the proper organization id
    partners = Partner.objects.filter(
        country_code=area) if area else Partner.objects.all()
    for partner in partners:
        if str.upper(partner.title) in orgs_dict:
            # We have a match. Check whether this is a 1:1 match
            logger.debug('Match found for {}'.format(partner.title))
            if partners.filter(title=partner.title).count() == 1:
                logger.debug('Assigned OCHA external ID: {}'.format(
                    orgs_dict[str.upper(partner.title)]))
                partner.ocha_external_id = orgs_dict[str.upper(partner.title)]
                partner.save()
            else:
                logger.debug(
                    'SKIPPING. Found more than one entity of {}'.format(
                        partner.title))
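
# The matching in sync_partners is purely name based: OCHA names and
# abbreviations are upper-cased into a lookup dict and compared against the
# upper-cased Partner.title. A small standalone illustration of that lookup,
# using made-up organization data instead of the live OCHA endpoint:
ocha_orgs = [
    {'id': 101, 'name': 'World Food Programme', 'abbreviation': 'WFP'},
    {'id': 202, 'name': 'Save the Children', 'abbreviation': None},
]

orgs_dict = {}
for org in ocha_orgs:
    orgs_dict[org['name'].upper()] = org['id']
    if org['abbreviation']:
        orgs_dict[org['abbreviation'].upper()] = org['id']

# A partner stored locally under the abbreviation resolves to the same id.
assert orgs_dict['WFP'] == orgs_dict['WORLD FOOD PROGRAMME'] == 101
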
    def get(self, request, *args, **kwargs):
        source_url = HPC_V1_ROOT_URL + 'rpm/plan/id/{}?format=json&content=entities'.format(
            self.kwargs['id'])
        try:
            plan_data = get_json_from_url(source_url)['data']
        except Exception:
            raise serializers.ValidationError('OCHA service unavailable.')

        out_data = {
            k: v
            for k, v in plan_data.items() if type(v) not in {list, dict}
        }

        if 'governingEntities' in plan_data:
            cluster_names = [
                ge['governingEntityVersion']['name']
                for ge in plan_data['governingEntities']
                if ge['entityPrototype']['refCode'] == RefCode.CLUSTER
            ]
        else:
            cluster_names = []
        out_data['clusterNames'] = cluster_names
        if plan_data['categories'] and plan_data['categories'][0]['id'] == 5:
            out_data['planType'] = RESPONSE_PLAN_TYPE.fa
        else:
            out_data['planType'] = RESPONSE_PLAN_TYPE.hrp

        out_data['startDate'] = parse(
            plan_data['planVersion']['startDate']).strftime(
                settings.DATE_FORMAT)
        out_data['endDate'] = parse(
            plan_data['planVersion']['endDate']).strftime(settings.DATE_FORMAT)

        return Response(out_data)
Example no. 5
def finish_response_plan_import(external_plan_id):
    source_url = HPC_V1_ROOT_URL + 'rpm/plan/id/{}?format=json&content=entities'.format(
        external_plan_id)
    plan_data = get_json_from_url(source_url)['data']
    save_location_list(plan_data.get('locations', []), "response_plan")

    strategic_objectives_url = HPC_V1_ROOT_URL + 'rpm/plan/id/{}?format=json&content=measurements'.format(
        external_plan_id)
    strategic_objectives_data = get_json_from_url(
        strategic_objectives_url)['data']

    logger.debug(
        'Importing Cluster Objectives and Activities for Response Plan #{}'.
        format(external_plan_id))
    save_activities_and_objectives_for_response_plan(
        entities_response=plan_data,
        measurements_response=strategic_objectives_data)


def import_project(external_project_id,
                   partner_id,
                   response_plan=None,
                   asynch=True):
    source_url = HPC_V2_ROOT_URL + 'project/{}'.format(external_project_id)
    project_data = get_json_from_url(source_url)['data']
    # Grab project details from the projectVersions array of dicts
    current_project_data = None

    for project in project_data['projectVersions']:
        if project_data['currentPublishedVersionId'] == project['id']:
            current_project_data = project
            break

    current_project_data['partner'] = partner_id
    if 'code' in project_data:
        current_project_data['code'] = project_data['code']

    additional_information = list()
    if 'contacts' in current_project_data:
        for contact in current_project_data['contacts']:
            if "website" in contact and contact['website']:
                additional_information.append(contact['website'])
    current_project_data['additional_information'] = ", ".join(
        additional_information)

    current_project_data['cluster_ids'] = list()
    if 'governingEntities' in current_project_data:
        for cluster in current_project_data['governingEntities']:
            current_project_data['cluster_ids'].append(cluster['id'])

    serializer = V2PartnerProjectImportSerializer(data=current_project_data)
    serializer.is_valid(raise_exception=True)
    project = serializer.save()

    from etools_prp.apps.ocha.tasks import finish_partner_project_import
    (finish_partner_project_import.delay if asynch else
     finish_partner_project_import)(project.pk,
                                    external_project_id,
                                    response_plan_id=getattr(
                                        response_plan, 'id', None))

    return project
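
# Hypothetical usage of import_project, with illustrative IDs and ORM lookups;
# passing asynch=False runs finish_partner_project_import inline instead of
# dispatching it to Celery.
def example_import_project_sync():
    partner = Partner.objects.get(ocha_external_id=101)   # illustrative lookup
    response_plan = ResponsePlan.objects.first()          # illustrative lookup
    return import_project(
        external_project_id=12345,                         # illustrative OCHA id
        partner_id=partner.pk,
        response_plan=response_plan,
        asynch=False,
    )
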
def import_response_plan(external_plan_id, workspace=None, asynch=True):
    logger.debug('Importing Response Plan #{}'.format(external_plan_id))
    source_url = HPC_V1_ROOT_URL + 'rpm/plan/id/{}?format=json&content=entities'.format(external_plan_id)
    plan_data = get_json_from_url(source_url)['data']
    if workspace:
        plan_data['workspace_id'] = workspace.id

    plan_data['name'] = plan_data['planVersion']['name']
    plan_data['startDate'] = plan_data['planVersion']['startDate']
    plan_data['endDate'] = plan_data['planVersion']['endDate']

    plan_serializer = V1ResponsePlanImportSerializer(data=plan_data)
    plan_serializer.is_valid(raise_exception=True)
    response_plan = plan_serializer.save()

    # Do most of the work in the background, otherwise the request frequently times out
    from etools_prp.apps.ocha.tasks import finish_response_plan_import
    (finish_response_plan_import.delay if asynch else finish_response_plan_import)(external_plan_id)

    return response_plan
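
# Both import functions above use the same dispatch idiom:
# (task.delay if asynch else task)(...) picks either the Celery .delay proxy
# or the plain function, with identical arguments either way. A minimal
# standalone illustration of the idiom; the task below is invented for the example.
from celery import shared_task


@shared_task
def example_finish_import(plan_id):
    logger.debug('finishing import of plan {}'.format(plan_id))


def example_run_import(plan_id, asynch=True):
    # Same call either way; only the execution path (queue vs. inline) differs.
    (example_finish_import.delay if asynch else example_finish_import)(plan_id)
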
def import_project_details(project, external_project_id):
    source_url = HPC_V2_ROOT_URL + 'project/{}/attachments'.format(
        external_project_id)
    attachments = get_json_from_url(source_url)['data']

    if not attachments:
        logger.warning(
            'No project attachment V2 data found for project_id: {}. Skipping reportables and location data'
            .format(external_project_id))
        return

    reportables = []

    for attachment in attachments:
        if attachment['attachment']['type'] == 'indicator':
            cluster_activity = ClusterActivity.objects.filter(
                external_source=EXTERNAL_DATA_SOURCES.HPC,
                external_id=attachment['attachment']['objectId'],
            ).first()

            blueprint, _ = IndicatorBlueprint.objects.update_or_create(
                external_source=EXTERNAL_DATA_SOURCES.HPC,
                external_id=attachment['attachment']['id'],
                defaults={
                    'title':
                    attachment['attachment']['attachmentVersion']['value']
                    ['description'],
                })

            totals = attachment['attachment']['attachmentVersion']['value'][
                'metrics']['values']['totals']

            target = get_dict_from_list_by_key(totals, 'Target',
                                               key='name.en')['value']
            in_need = get_dict_from_list_by_key(totals,
                                                'In Need',
                                                key='name.en')['value']
            baseline = get_dict_from_list_by_key(totals,
                                                 'Baseline',
                                                 key='name.en')['value']

            defaults = {
                'blueprint': blueprint,
                'target': convert_to_json_ratio_value(target),
                'baseline': convert_to_json_ratio_value(baseline),
                'in_need': convert_to_json_ratio_value(in_need),
                'content_object': project,
            }

            reportable, _ = Reportable.objects.update_or_create(
                external_source=EXTERNAL_DATA_SOURCES.HPC,
                external_id=attachment['attachment']['id'],
                defaults={k: v
                          for k, v in defaults.items() if v})

            try:
                disaggregated = attachment['attachment']['attachmentVersion'][
                    'value']['metrics']['values']['disaggregated']
                for disaggregation in save_disaggregations(
                        disaggregated.get('categories', []),
                        response_plan=project.response_plan):
                    reportable.disaggregations.through.objects.get_or_create(
                        reportable_id=reportable.id,
                        disaggregation_id=disaggregation.id)

                locations = save_location_list(disaggregated['locations'],
                                               "indicator")
                for location in locations:
                    ReportableLocationGoal.objects.get_or_create(
                        reportable=reportable, location=location)
            except (KeyError, TypeError, AttributeError):
                locations = []

            if cluster_activity:
                from etools_prp.apps.indicator.models import create_papc_reportables_from_ca
                partner_activity, _ = PartnerActivity.objects.update_or_create(
                    cluster_activity=cluster_activity,
                    defaults={
                        'title': cluster_activity.title,
                        'partner': project.partner,
                    })
                partner_activity.locations.add(*locations)

                project_context, created = PartnerActivityProjectContext.objects.update_or_create(
                    start_date=project.start_date,
                    end_date=project.end_date,
                    defaults={
                        'activity': partner_activity,
                        'project': project,
                    },
                )

                if created:
                    create_papc_reportables_from_ca(project_context,
                                                    cluster_activity)

                project.reportables.add(reportable)
                project.locations.add(*locations)

            reportables.append(reportable)

    logger.debug('Saved {} reportables for {}'.format(len(reportables),
                                                      project))
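
# import_project_details relies on the helper get_dict_from_list_by_key, which
# is not included in this listing. Given how it is called above with
# key='name.en', it presumably walks a dotted path into each dict and returns
# the first match; the sketch below is an assumption, including the empty-dict
# fallback when nothing matches.
def get_dict_from_list_by_key(items, value, key='name'):
    for item in items:
        current = item
        for part in key.split('.'):
            if not isinstance(current, dict):
                current = None
                break
            current = current.get(part)
        if current == value:
            return item
    return {}
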
Example no. 9
def finish_partner_project_import(project_id,
                                  external_id,
                                  response_plan_id=None):
    project = PartnerProject.objects.get(pk=project_id)

    source_url = HPC_V2_ROOT_URL + 'project/{}'.format(external_id)
    project_data = get_json_from_url(source_url)['data']

    # Grab project details from the projectVersions array of dicts
    current_project_data = None

    for projectVersion in project_data['projectVersions']:
        if project_data['currentPublishedVersionId'] == projectVersion['id']:
            current_project_data = projectVersion
            break

    if not current_project_data:
        logger.warning(
            'No published project V2 data found for project_id: {}. '
            'Skipping detailed import.'.format(external_id))
        return

    funding_url = HPC_V1_ROOT_URL + 'fts/flow?projectId={}'.format(external_id)
    funding_data = get_json_from_url(funding_url)

    try:
        funding_serializer = V1FundingSourceImportSerializer(
            data=funding_data['data'])
        funding_serializer.is_valid(raise_exception=True)
        funding_serializer.save()
    except Exception:
        logger.warning(
            'No funding data found for project_id: {}'.format(external_id))

    clusters = []
    if not response_plan_id:
        for plan in current_project_data['plans']:
            if not ResponsePlan.objects.filter(
                    external_source=EXTERNAL_DATA_SOURCES.HPC,
                    external_id=plan['id']).exists():
                import_response_plan(plan['id'])
    else:
        response_plan = ResponsePlan.objects.get(pk=response_plan_id)
        for global_cluster_data in current_project_data['globalClusters']:
            # Don't save external_id for global clusters - it would violate the unique constraint
            cluster, _ = Cluster.objects.get_or_create(
                external_source=EXTERNAL_DATA_SOURCES.HPC,
                type=CLUSTER_TYPES.imported,
                imported_type=global_cluster_data['name'],
                response_plan=response_plan,
            )
            clusters.append(cluster)

    project_cluster_ids = [
        c['id'] for c in current_project_data['governingEntities']
        if c['entityPrototypeId'] == 9
    ]

    # At this point all clusters should be in DB
    clusters.extend(
        list(
            Cluster.objects.filter(
                external_source=EXTERNAL_DATA_SOURCES.HPC,
                external_id__in=project_cluster_ids,
            )))
    logger.debug('Adding {} clusters to project {} and its partner.'.format(
        len(clusters), project))

    project.clusters.add(*clusters)
    for partner_cluster in project.partner.clusters.all():
        non_partner_cluster_roles = partner_cluster.prp_roles.exclude(
            role=PRP_ROLE_TYPES.cluster_member)

        for prp_role in non_partner_cluster_roles:
            for ocha_cluster in clusters:
                PRPRole.objects.update_or_create(user=prp_role.user,
                                                 workspace=prp_role.workspace,
                                                 cluster=ocha_cluster,
                                                 defaults={
                                                     'role': prp_role.role,
                                                     'is_active': True,
                                                 })

    project.partner.clusters.add(*clusters)
    import_project_details(project, external_id)
Example no. 10
    def get(self, request, *args, **kwargs):
        details_url = HPC_V2_ROOT_URL + 'project/{}'.format(self.kwargs['id'])

        details = fetch_json_urls([
            details_url,
        ])

        # We should use the project code whenever possible. ID filtering might not work for new OPS data
        if details:
            project_code = details[0]['data']['projectVersion']['code']
            budget_url = HPC_V1_ROOT_URL + 'fts/flow?projectCode={}'.format(
                project_code)
        else:
            budget_url = HPC_V1_ROOT_URL + 'fts/flow?projectId={}'.format(
                self.kwargs['id'])

        details, budget_info = fetch_json_urls([
            details_url,
            budget_url,
        ])

        out_data = {
            k: v
            for k, v in details['data'].items() if type(v) not in {list, dict}
        }

        # Grab project details from the projectVersions array of dicts
        current_project_data = None

        for project in details['data']['projectVersions']:
            if details['data']['currentPublishedVersionId'] == project['id']:
                current_project_data = project
                break

        # Fetch attachment data
        attachment_url = HPC_V2_ROOT_URL \
            + 'project/{}/attachments'.format(details['data']['id'])
        attachments = get_json_from_url(attachment_url)

        if 'data' in attachments:
            out_data['attachments'] = [
                item['attachment']['attachmentVersion']['value']['description']
                for item in attachments['data']
                if item['attachment']['type'] == 'indicator'
            ]

        out_data['startDate'] = current_project_data['startDate']
        out_data['endDate'] = current_project_data['endDate']
        out_data['name'] = current_project_data['name']

        # out_data['totalBudgetUSD'] = sum([
        #     f['amountUSD'] for f in budget_info['data']['flows']
        # ]) if budget_info['data']['flows'] else None

        out_data['totalBudgetUSD'] = current_project_data[
            'currentRequestedFunds']

        funding_sources = []

        if 'data' in budget_info:
            for flow in budget_info['data']['flows']:
                funding_sources.extend([
                    fs['name'] for fs in flow.get('sourceObjects', [])
                    if fs['type'] == 'Organization'
                ])

        out_data['fundingSources'] = funding_sources
        out_data['objective'] = current_project_data['objective']
        additional_information = list()
        if 'contacts' in current_project_data:
            for contact in current_project_data['contacts']:
                if "website" in contact and contact['website']:
                    additional_information.append(contact['website'])
        out_data['additional_information'] = ", ".join(additional_information)

        start_datetime = parse(out_data['startDate'])
        end_datetime = parse(out_data['endDate'])

        out_data['startDate'] = start_datetime.strftime(settings.DATE_FORMAT)
        out_data['endDate'] = end_datetime.strftime(settings.DATE_FORMAT)

        clusters = []

        try:
            clusters += [
                global_cluster_data['name'] for global_cluster_data in
                current_project_data['globalClusters']
            ]
        except Exception:
            pass

        try:
            clusters += [
                c['name'] for c in current_project_data['governingEntities']
                if c['entityPrototypeId'] == 9
            ]
        except Exception:
            pass

        out_data['clusters'] = clusters

        today = timezone.now()
        if start_datetime > today:
            out_data['status'] = 'Planned'
        elif end_datetime < today:
            out_data['status'] = 'Completed'
        else:
            out_data['status'] = 'Ongoing'

        return Response(out_data)