Example #1
def indiv_contribution_section(entity_id, standardized_name, cycle, amount, external_ids):
    section = {
        'name': 'Campaign Finance',
        'template': 'contributions.html',
    }
    
    section['contributions_data'] = True
    recipient_candidates = api.indiv.pol_recipients(entity_id, cycle)
    recipient_orgs = api.indiv.org_recipients(entity_id, cycle)

    candidates_barchart_data = []
    for record in recipient_candidates:
        candidates_barchart_data.append({
                'key': generate_label(str(PoliticianNameCleaver(record['recipient_name']).parse().plus_metadata(record['party'], record['state']))),
                'value' : record['amount'],
                'href' : barchart_href(record, cycle, entity_type="politician"),
                })
    candidates_barchart_data = bar_validate(candidates_barchart_data)
    section['candidates_barchart_data'] = json.dumps(candidates_barchart_data)

    orgs_barchart_data = []
    for record in recipient_orgs:
        orgs_barchart_data.append({
                'key': generate_label(str(OrganizationNameCleaver(record['recipient_name']).parse())),
                'value' : record['amount'],
                'href' : barchart_href(record, cycle, entity_type="organization"),
                })
    orgs_barchart_data = bar_validate(orgs_barchart_data)
    section['orgs_barchart_data'] = json.dumps(orgs_barchart_data)

    party_breakdown = api.indiv.party_breakdown(entity_id, cycle)
    for key, values in party_breakdown.iteritems():
        party_breakdown[key] = float(values[1])
    party_breakdown = pie_validate(party_breakdown)
    section['party_breakdown'] = json.dumps(party_breakdown)

    # if none of the charts have data, or if the aggregate total
    # received was negative, then suppress that whole content
    # section except the overview bar
    if amount < 0:
        section['suppress_contrib_graphs'] = True
        section['reason'] = "negative"

    # test the validated data, not the JSON strings (an empty list dumps to the truthy '[]')
    elif not any((candidates_barchart_data, orgs_barchart_data, party_breakdown)):
        section['suppress_contrib_graphs'] = True
        section['reason'] = 'empty'

    section['external_links'] = external_sites.get_contribution_links('individual', standardized_name, external_ids, cycle)

    bundling = api.entities.bundles(entity_id, cycle)
    section['bundling_data'] = [ [x[key] for key in 'recipient_entity recipient_name recipient_type firm_entity firm_name amount'.split()] for x in bundling ]

    return section
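
Note on the suppression check above: the chart validators are not shown in these examples, so here is a minimal sketch of the contract the code appears to assume (hypothetical, not the real bar_validate / pie_validate). The point is that they return possibly-empty Python values, which is why the "empty" branch tests the validated data rather than the json.dumps output (an empty list serializes to the truthy string '[]').

# Hypothetical sketch of the validator contract assumed above; the real
# implementations are not included in these examples.
def bar_validate(data):
    # keep only bars with a positive value; may return an empty list
    return [d for d in data if d.get('value', 0) > 0]

def pie_validate(data):
    # keep only slices with a positive value; may return an empty dict
    return dict((k, v) for k, v in data.items() if v > 0)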
Example #2
    def build_section_data(self):
        entity_id, cycle, standardized_name, external_ids = self.entity.entity_id, self.entity.cycle, self.entity.standardized_name, self.entity.external_ids
        self.contributions_data = True

        candidates_barchart_data = []
        for record in self.data['recipient_candidates']:
            candidates_barchart_data.append({
                'key': generate_label(str(PoliticianNameCleaver(record['recipient_name']).parse().plus_metadata(record['party'], record['state']))),
                'value' : record['amount'],
                'href' : barchart_href(record, cycle, entity_type="politician"),
            })
        candidates_barchart_data = bar_validate(candidates_barchart_data)
        self.candidates_barchart_data = json.dumps(candidates_barchart_data)

        orgs_barchart_data = []
        for record in self.data['recipient_orgs']:
            orgs_barchart_data.append({
                'key': generate_label(str(OrganizationNameCleaver(record['recipient_name']).parse())),
                'value' : record['amount'],
                'href' : barchart_href(record, cycle, entity_type="organization"),
            })
        orgs_barchart_data = bar_validate(orgs_barchart_data)
        self.orgs_barchart_data = json.dumps(orgs_barchart_data)

        for key, values in self.data['party_breakdown'].iteritems():
            self.data['party_breakdown'][key] = float(values[1])
        party_breakdown = pie_validate(self.data['party_breakdown'])
        self.party_breakdown = json.dumps(party_breakdown)

        # if none of the charts have data, or if the aggregate total
        # received was negative, then suppress that whole content
        # section except the overview bar
        amount = int(float(self.entity.metadata['entity_info']['totals']['contributor_amount']))
        if amount < 0:
            self.suppress_contrib_graphs = True
            self.reason = "negative"

        elif not any((candidates_barchart_data, orgs_barchart_data, party_breakdown)):
            self.suppress_contrib_graphs = True
            self.reason = 'empty'

        self.external_links = external_sites.get_contribution_links('individual', standardized_name, external_ids, cycle)

        self.bundling_data = [[x[key] for key in 'recipient_entity recipient_name recipient_type firm_entity firm_name amount'.split()] for x in self.data['bundling']]
Example #3
def pol_contribution_section(entity_id, standardized_name, cycle, amount, external_ids):
    section = {
        'name': 'Campaign Finance',
        'template': 'contributions.html',
    }
    
    section['contributions_data'] = True

    top_contributors = api.pol.contributors(entity_id, cycle)
    top_industries = api.pol.industries(entity_id, cycle=cycle)

    section['pct_known'] = pct_contribs_from_known_industries(entity_id, cycle, amount)

    contributors_barchart_data = []
    for record in top_contributors:
        contributors_barchart_data.append({
            'key': generate_label(str(OrganizationNameCleaver(record['name']).parse())),
            'value' : record['total_amount'],
            'value_employee' : record['employee_amount'],
            'value_pac' : record['direct_amount'],
            'href' : barchart_href(record, cycle, 'organization')
        })
    contributors_barchart_data = bar_validate(contributors_barchart_data)
    section['contributors_barchart_data'] = json.dumps(contributors_barchart_data)

    industries_barchart_data = []
    for record in top_industries:
        industries_barchart_data.append({
            'key': generate_label(str(OrganizationNameCleaver(record['name']).parse())),
            'href': barchart_href(record, cycle, 'industry'),
            'value' : record['amount'],
        })
    industries_barchart_data = bar_validate(industries_barchart_data)
    section['industries_barchart_data'] = json.dumps(industries_barchart_data)

    local_breakdown = api.pol.local_breakdown(entity_id, cycle)
    for key, values in local_breakdown.iteritems():
        # values is a list of [count, amount]
        local_breakdown[key] = float(values[1])
    local_breakdown = pie_validate(local_breakdown)
    section['local_breakdown'] = json.dumps(local_breakdown)

    entity_breakdown = api.pol.contributor_type_breakdown(entity_id, cycle)
    for key, values in entity_breakdown.iteritems():
        # values is a list of [count, amount]
        entity_breakdown[key] = float(values[1])
    entity_breakdown = pie_validate(entity_breakdown)
    section['entity_breakdown'] = json.dumps(entity_breakdown)

    # if none of the charts have data, or if the aggregate total
    # received was negative, then suppress that whole content
    # section except the overview bar
    if amount < 0:
        section['suppress_contrib_graphs'] = True
        section['reason'] = "negative"

    elif not any((industries_barchart_data, contributors_barchart_data, local_breakdown, entity_breakdown)):
        section['suppress_contrib_graphs'] = True
        section['reason'] = 'empty'

    partytime_link, section['partytime_data'] = external_sites.get_partytime_data(external_ids)
    
    section['external_links'] = external_sites.get_contribution_links('politician', standardized_name.name_str(), external_ids, cycle)
    if partytime_link:
        section['external_links'].append({'url': partytime_link, 'text': 'Party Time'})
    
    bundling = api.entities.bundles(entity_id, cycle)
    section['bundling_data'] = [ [x[key] for key in 'lobbyist_entity lobbyist_name firm_entity firm_name amount'.split()] for x in bundling ]

    if int(cycle) == LATEST_CYCLE:
        section['fec_summary'] = api.pol.fec_summary(entity_id, cycle)
        if section['fec_summary']:
            section['include_fec'] = True

            if section['fec_summary'] and 'date' in section['fec_summary']:
                section['fec_summary']['clean_date'] = datetime.datetime.strptime(section['fec_summary']['date'], "%Y-%m-%d")
        
            timelines = []
            for pol in api.pol.fec_timeline(entity_id, cycle):
                tl = {
                    'name': pol['candidate_name'],
                    'party': pol['party'],
                    'is_this': pol['entity_id'] == entity_id,
                    'timeline': map(lambda item: item if item >= 0 else 0, pol['timeline']),
                    'href': '/politician/%s/%s?cycle=%s' % (slugify(PoliticianNameCleaver(pol['candidate_name']).parse().name_str()), pol['entity_id'], cycle)
                }
                tl['sum'] = sum(tl['timeline'])
                timelines.append(tl)
            timelines.sort(key=lambda t: (int(t['is_this']), t['sum']), reverse=True)
            # restrict to top 5, and only those receiving more than 10% of this pol's total
            if timelines:
                this_sum = timelines[0]['sum']
                timelines = [timeline for timeline in timelines if timeline['sum'] > 0.1 * this_sum]
                timelines = timelines[:5]
        
            section['fec_timelines'] = json.dumps(timelines)
        
            section['fec_indexp'] = api.pol.fec_indexp(entity_id, cycle)[:10]

    return section
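
The FEC timeline block above ranks this politician first, then competitors by total raised, and keeps at most five entries whose totals exceed 10% of this politician's own total (the first entry after sorting). A self-contained illustration of that rule with made-up figures:

# Hypothetical sample data; only the sort/filter rule is taken from the code above.
timelines = [
    {'name': 'A', 'is_this': True,  'sum': 1000},
    {'name': 'B', 'is_this': False, 'sum': 900},
    {'name': 'C', 'is_this': False, 'sum': 50},   # under 10% of the first entry, dropped
]
timelines.sort(key=lambda t: (int(t['is_this']), t['sum']), reverse=True)
this_sum = timelines[0]['sum']
timelines = [t for t in timelines if t['sum'] > 0.1 * this_sum][:5]
# -> A (this politician) first, then B; C is filtered out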
Example #4
def org_contribution_section(entity_id, standardized_name, cycle, amount, type, external_ids):
    section = {
        'name': 'Campaign Finance',
        'template': 'contributions.html',
    }
    
    if type == 'industry':
        section['top_orgs'] = json.dumps([
            {
                'key': generate_label(str(OrganizationNameCleaver(org['name']).parse())),
                'value': org['total_amount'],
                'value_employee': org['employee_amount'],
                'value_pac': org['direct_amount'],
                'href' : barchart_href(org, cycle, 'organization')
            } for org in api.org.industry_orgs(entity_id, cycle, limit=10)
        ])

    section['contributions_data'] = True
    recipients = api.org.recipients(entity_id, cycle=cycle)
    recipient_pacs = api.org.pac_recipients(entity_id, cycle)

    pol_recipients_barchart_data = []
    for record in recipients:
        pol_recipients_barchart_data.append({
                'key': generate_label(str(PoliticianNameCleaver(record['name']).parse().plus_metadata(record['party'], record['state']))),
                'value' : record['total_amount'],
                'value_employee' : record['employee_amount'],
                'value_pac' : record['direct_amount'],
                'href' : barchart_href(record, cycle, entity_type='politician')
                })
    pol_recipients_barchart_data = bar_validate(pol_recipients_barchart_data)
    section['pol_recipients_barchart_data'] = json.dumps(pol_recipients_barchart_data)

    pacs_barchart_data = []
    for record in recipient_pacs:
        pacs_barchart_data.append({
                'key': generate_label(str(OrganizationNameCleaver(record['name']).parse())),
                'value' : record['total_amount'],
                'value_employee' : record['employee_amount'],
                'value_pac' : record['direct_amount'],
                'href' : barchart_href(record, cycle, entity_type="organization"),
                })
    pacs_barchart_data = bar_validate(pacs_barchart_data)
    section['pacs_barchart_data'] = json.dumps(pacs_barchart_data)

    party_breakdown = api.org.party_breakdown(entity_id, cycle)
    for key, values in party_breakdown.iteritems():
        party_breakdown[key] = float(values[1])
    party_breakdown = pie_validate(party_breakdown)
    section['party_breakdown'] = json.dumps(party_breakdown)

    level_breakdown = api.org.level_breakdown(entity_id, cycle)
    for key, values in level_breakdown.iteritems():
        level_breakdown[key] = float(values[1])
    level_breakdown = pie_validate(level_breakdown)
    section['level_breakdown'] = json.dumps(level_breakdown)

    # if none of the charts have data, or if the aggregate total
    # received was negative, then suppress that whole content
    # section except the overview bar
    if amount <= 0:
        section['suppress_contrib_graphs'] = True
        if amount < 0:
            section['reason'] = "negative"

    elif not any((pol_recipients_barchart_data, party_breakdown, level_breakdown, pacs_barchart_data)):
        section['suppress_contrib_graphs'] = True
        section['reason'] = 'empty'

    section['external_links'] = external_sites.get_contribution_links(type, standardized_name, external_ids, cycle)

    bundling = api.entities.bundles(entity_id, cycle)
    section['bundling_data'] = [ [x[key] for key in 'recipient_entity recipient_name recipient_type lobbyist_entity lobbyist_name firm_name amount'.split()] for x in bundling ]

    if int(cycle) == LATEST_CYCLE:
        section['fec_summary'] = api.org.fec_summary(entity_id)
        if section['fec_summary']:
            section['fec_summary']['clean_date'] = datetime.datetime.strptime(section['fec_summary']['date'], "%Y-%m-%d")
            section['fec_top_contribs_data'] = json.dumps([dict(key=generate_label(row['contributor_name'], 27), value=row['amount'], href='') 
                                                    for row in api.org.fec_top_contribs(entity_id)])
            
        section['fec_indexp'] = api.org.fec_indexp(entity_id)[:10]
        
        if section['fec_summary'] or section['fec_indexp']:
            section['include_fec'] = True

    return section
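
The bundling_data lists in these sections are just API rows flattened into fixed-order columns for the template. A small illustration with a single hypothetical row, using the key order from the example above:

# Hypothetical bundling row; the field names come from the key string used above.
row = {'recipient_entity': 'e1', 'recipient_name': 'Jane Doe', 'recipient_type': 'politician',
       'lobbyist_entity': 'e2', 'lobbyist_name': 'John Roe', 'firm_name': 'Acme LLC', 'amount': 5000}
keys = 'recipient_entity recipient_name recipient_type lobbyist_entity lobbyist_name firm_name amount'.split()
print([row[k] for k in keys])
# ['e1', 'Jane Doe', 'politician', 'e2', 'John Roe', 'Acme LLC', 5000]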
Example #5
    def build_section_data(self):
        entity_id, cycle, type, standardized_name, external_ids = self.entity.entity_id, self.entity.cycle, self.entity.type, self.entity.standardized_name, self.entity.external_ids
        amount = int(float(self.entity.metadata['entity_info']['totals']['contributor_amount']))

        if type == 'industry':
            self.top_orgs = json.dumps([
                {
                    'key': generate_label(str(OrganizationNameCleaver(org['name']).parse())),
                    'value': org['total_amount'],
                    'value_employee': org['employee_amount'],
                    'value_pac': org['direct_amount'],
                    'href' : barchart_href(org, cycle, 'organization')
                } for org in self.data['industry_orgs']
            ])

        self.contributions_data = True

        pol_recipients_barchart_data = []
        for record in self.data['recipients']:
            pol_recipients_barchart_data.append({
                'key': generate_label(str(PoliticianNameCleaver(record['name']).parse().plus_metadata(record['party'], record['state']))),
                'value' : record['total_amount'],
                'value_employee' : record['employee_amount'],
                'value_pac' : record['direct_amount'],
                'href' : barchart_href(record, cycle, entity_type='politician')
            })
        pol_recipients_barchart_data = bar_validate(pol_recipients_barchart_data)
        self.pol_recipients_barchart_data = json.dumps(pol_recipients_barchart_data)

        pacs_barchart_data = []
        for record in self.data['recipient_pacs']:
            pacs_barchart_data.append({
                'key': generate_label(str(OrganizationNameCleaver(record['name']).parse())),
                'value' : record['total_amount'],
                'value_employee' : record['employee_amount'],
                'value_pac' : record['direct_amount'],
                'href' : barchart_href(record, cycle, entity_type="organization"),
            })
        pacs_barchart_data = bar_validate(pacs_barchart_data)
        self.pacs_barchart_data = json.dumps(pacs_barchart_data)

        for key, values in self.data['party_breakdown'].iteritems():
            self.data['party_breakdown'][key] = float(values[1])
        party_breakdown = pie_validate(self.data['party_breakdown'])
        self.party_breakdown = json.dumps(party_breakdown)

        for key, values in self.data['level_breakdown'].iteritems():
            self.data['level_breakdown'][key] = float(values[1])
        level_breakdown = pie_validate(self.data['level_breakdown'])
        self.level_breakdown = json.dumps(level_breakdown)

        # if none of the charts have data, or if the aggregate total
        # received was negative, then suppress that whole content
        # section except the overview bar
        if amount <= 0:
            self.suppress_contrib_graphs = True
            if amount < 0:
                self.reason = "negative"

        elif not any((pol_recipients_barchart_data, party_breakdown, level_breakdown, pacs_barchart_data)):
            self.suppress_contrib_graphs = True
            self.reason = 'empty'

        self.external_links = external_sites.get_contribution_links(type, standardized_name, external_ids, cycle)

        self.bundling_data = [[x[key] for key in 'recipient_entity recipient_name recipient_type lobbyist_entity lobbyist_name firm_name amount'.split()] for x in self.data['bundling']]

        if int(cycle) != -1:
            self.fec_indexp = self.data['fec_indexp']

            if self.data['fec_summary'] and self.data['fec_summary']['num_committee_filings'] > 0 and self.data['fec_summary'].get('first_filing_date'):
                self.fec_summary = self.data['fec_summary']
                self.fec_summary['clean_date'] = datetime.datetime.strptime(self.fec_summary['first_filing_date'], "%Y-%m-%d")
                top_contribs_data = [dict(key=generate_label(row['contributor_name'] if row['contributor_name'] else '<Name Missing>', 27),
                                            value=row['amount'], href='')
                                    for row in self.data['fec_top_contribs']
                                    if float(row['amount']) >= 100000]
                if top_contribs_data:
                    self.fec_top_contribs_data = json.dumps(top_contribs_data)

            if getattr(self, 'fec_indexp', False) or getattr(self, 'fec_summary', False):
                self.include_fec = True
Example #6
    def build_section_data(self):
        entity_id, standardized_name, cycle, external_ids = self.entity.entity_id, self.entity.standardized_name, self.entity.cycle, self.entity.external_ids

        self.contributions_data = True

        contributors_barchart_data = []
        for record in self.data['top_contributors']:
            contributors_barchart_data.append({
                'key': generate_label(str(OrganizationNameCleaver(record['name']).parse())),
                'value' : record['total_amount'],
                'value_employee' : record['employee_amount'],
                'value_pac' : record['direct_amount'],
                'href' : barchart_href(record, cycle, 'organization')
            })
        contributors_barchart_data = bar_validate(contributors_barchart_data)
        self.contributors_barchart_data = json.dumps(contributors_barchart_data)

        industries_barchart_data = []
        for record in self.data['top_industries']:
            industries_barchart_data.append({
                'key': generate_label(str(OrganizationNameCleaver(record['name']).parse())),
                'href': barchart_href(record, cycle, 'industry'),
                'value' : record['amount'],
            })
        industries_barchart_data = bar_validate(industries_barchart_data)
        self.industries_barchart_data = json.dumps(industries_barchart_data)

        for key, values in self.data['local_breakdown'].iteritems():
            # values is a list of [count, amount]
            self.data['local_breakdown'][key] = float(values[1])
        self.data['local_breakdown'] = pie_validate(self.data['local_breakdown'])
        self.local_breakdown = json.dumps(self.data['local_breakdown'])

        for key, values in self.data['entity_breakdown'].iteritems():
            # values is a list of [count, amount]
            self.data['entity_breakdown'][key] = float(values[1])
        self.data['entity_breakdown'] = pie_validate(self.data['entity_breakdown'])
        self.entity_breakdown = json.dumps(self.data['entity_breakdown'])

        # if none of the charts have data, or if the aggregate total
        # received was negative, then suppress that whole content
        # section except the overview bar
        amount = int(float(self.entity.metadata['entity_info']['totals']['recipient_amount']))
        if amount < 0:
            self.suppress_contrib_graphs = True
            self.reason = "negative"
        elif not any((industries_barchart_data, contributors_barchart_data, self.data['local_breakdown'], self.data['entity_breakdown'])):
            self.suppress_contrib_graphs = True
            self.reason = 'empty'

        pct_unknown = 0
        if amount:
            pct_unknown = float(self.data['industries_unknown_amount'].get('amount', 0)) * 100 / amount
        self.pct_known = int(round(100 - pct_unknown))
        
        self.external_links = external_sites.get_contribution_links('politician', standardized_name.name_str(), external_ids, cycle)
        if self.partytime_link:
            self.external_links.append({'url': self.partytime_link, 'text': 'Party Time'})
        
        self.bundling_data = [[x[key] for key in 'lobbyist_entity lobbyist_name firm_entity firm_name amount'.split()] for x in self.data['bundling']]

        if self.fec_summary:
            self.include_fec = True

            if self.fec_summary and 'date' in self.fec_summary:
                self.fec_summary['clean_date'] = datetime.datetime.strptime(self.fec_summary['date'], "%Y-%m-%d")
        
            timelines = []
            for pol in self.data['fec_timeline']:
                tl = {
                    'name': pol['candidate_name'],
                    'party': pol['party'],
                    'is_this': pol['entity_id'] == entity_id,
                    'timeline': map(lambda item: item if item >= 0 else 0, pol['timeline']),
                    'href': '/politician/%s/%s?cycle=%s' % (slugify(PoliticianNameCleaver(pol['candidate_name']).parse().name_str()), pol['entity_id'], cycle)
                }
                tl['sum'] = sum(tl['timeline'])
                timelines.append(tl)
            timelines.sort(key=lambda t: (int(t['is_this']), t['sum']), reverse=True)
            # restrict to top 5, and only those receiving more than 10% of this pol's total
            if timelines:
                this_sum = timelines[0]['sum']
                timelines = [timeline for timeline in timelines if timeline['sum'] > 0.1 * this_sum]
                timelines = timelines[:5]
        
            self.fec_timelines = json.dumps(timelines)
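
The pct_known figure above is derived from the share of contributions whose industry is unknown. A worked instance with hypothetical totals:

# Hypothetical figures: $2,000,000 raised, $300,000 of it from unknown industries.
amount = 2000000
pct_unknown = float(300000) * 100 / amount    # 15.0
pct_known = int(round(100 - pct_unknown))     # 85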
Example #7
    def build_section_data(self):
        entity_id, cycle, standardized_name, external_ids = self.entity.entity_id, self.entity.cycle, self.entity.standardized_name, self.entity.external_ids
        self.contributions_data = True

        candidates_barchart_data = []
        for record in self.data['recipient_candidates']:
            candidates_barchart_data.append({
                'key': generate_label(str(PoliticianNameCleaver(record['recipient_name']).parse().plus_metadata(record['party'], record['state']))),
                'value' : record['amount'],
                'href' : barchart_href(record, cycle, entity_type="politician"),
            })
        candidates_barchart_data = bar_validate(candidates_barchart_data)
        self.candidates_barchart_data = json.dumps(candidates_barchart_data)

        orgs_barchart_data = []
        for record in self.data['recipient_orgs']:
            orgs_barchart_data.append({
                'key': generate_label(str(OrganizationNameCleaver(record['recipient_name']).parse())),
                'value' : record['amount'],
                'href' : barchart_href(record, cycle, entity_type="organization"),
            })
        orgs_barchart_data = bar_validate(orgs_barchart_data)
        self.orgs_barchart_data = json.dumps(orgs_barchart_data)

        for key, values in self.data['party_breakdown'].iteritems():
            self.data['party_breakdown'][key] = float(values[1])
        party_breakdown = pie_validate(self.data['party_breakdown'])
        self.party_breakdown = json.dumps(party_breakdown)

        # if none of the charts have data, or if the aggregate total
        # received was negative, then suppress that whole content
        # section except the overview bar
        amount = int(float(self.entity.metadata['entity_info']['totals']['contributor_amount']))
        if amount < 0:
            self.suppress_contrib_graphs = True
            self.reason = "negative"

        elif not any((candidates_barchart_data, orgs_barchart_data, party_breakdown)):
            self.suppress_contrib_graphs = True
            self.reason = 'empty'

        self.external_links = external_sites.get_contribution_links('individual', standardized_name, external_ids, cycle)

        self.bundling_data = [[x[key] for key in 'recipient_entity recipient_name recipient_type firm_entity firm_name amount'.split()] for x in self.data['bundling']]
Example #8
    def build_section_data(self):
        entity_id, cycle, type, standardized_name, external_ids = self.entity.entity_id, self.entity.cycle, self.entity.type, self.entity.standardized_name, self.entity.external_ids
        amount = int(float(self.entity.metadata['entity_info']['totals']['contributor_amount']))

        if type == 'industry':
            self.top_orgs = json.dumps([
                {
                    'key': generate_label(str(OrganizationNameCleaver(org['name']).parse())),
                    'value': org['total_amount'],
                    'value_employee': org['employee_amount'],
                    'value_pac': org['direct_amount'],
                    'href' : barchart_href(org, cycle, 'organization')
                } for org in self.data['industry_orgs']
            ])

        self.contributions_data = True

        pol_recipients_barchart_data = []
        for record in self.data['recipients']:
            pol_recipients_barchart_data.append({
                'key': generate_label(str(PoliticianNameCleaver(record['name']).parse().plus_metadata(record['party'], record['state']))),
                'value' : record['total_amount'],
                'value_employee' : record['employee_amount'],
                'value_pac' : record['direct_amount'],
                'href' : barchart_href(record, cycle, entity_type='politician')
            })
        pol_recipients_barchart_data = bar_validate(pol_recipients_barchart_data)
        self.pol_recipients_barchart_data = json.dumps(pol_recipients_barchart_data)

        pacs_barchart_data = []
        for record in self.data['recipient_pacs']:
            pacs_barchart_data.append({
                'key': generate_label(str(OrganizationNameCleaver(record['name']).parse())),
                'value' : record['total_amount'],
                'value_employee' : record['employee_amount'],
                'value_pac' : record['direct_amount'],
                'href' : barchart_href(record, cycle, entity_type="organization"),
            })
        pacs_barchart_data = bar_validate(pacs_barchart_data)
        self.pacs_barchart_data = json.dumps(pacs_barchart_data)

        for key, values in self.data['party_breakdown'].iteritems():
            self.data['party_breakdown'][key] = float(values[1])
        party_breakdown = pie_validate(self.data['party_breakdown'])
        self.party_breakdown = json.dumps(party_breakdown)

        for key, values in self.data['level_breakdown'].iteritems():
            self.data['level_breakdown'][key] = float(values[1])
        level_breakdown = pie_validate(self.data['level_breakdown'])
        self.level_breakdown = json.dumps(level_breakdown)

        # if none of the charts have data, or if the aggregate total
        # received was negative, then suppress that whole content
        # section except the overview bar
        if amount <= 0:
            self.suppress_contrib_graphs = True
            if amount < 0:
                self.reason = "negative"

        elif not any((pol_recipients_barchart_data, party_breakdown, level_breakdown, pacs_barchart_data)):
            self.suppress_contrib_graphs = True
            self.reason = 'empty'

        self.external_links = external_sites.get_contribution_links(type, standardized_name, external_ids, cycle)

        self.bundling_data = [[x[key] for key in 'recipient_entity recipient_name recipient_type lobbyist_entity lobbyist_name firm_name amount'.split()] for x in self.data['bundling']]

        if int(cycle) != -1:
            self.fec_indexp = self.data['fec_indexp']

            if self.data['fec_summary'] and self.data['fec_summary']['num_committee_filings'] > 0 and self.data['fec_summary'].get('first_filing_date'):
                self.fec_summary = self.data['fec_summary']
                self.fec_summary['clean_date'] = datetime.datetime.strptime(self.fec_summary['first_filing_date'], "%Y-%m-%d")
                top_contribs_data = [dict(key=generate_label(row['contributor_name'] if row['contributor_name'] else '<Name Missing>', 27),
                                            value=row['amount'], href='')
                                    for row in self.data['fec_top_contribs']
                                    if float(row['amount']) >= 100000]
                if top_contribs_data:
                    self.fec_top_contribs_data = json.dumps(top_contribs_data)

            if getattr(self, 'fec_indexp', False) or getattr(self, 'fec_summary', False):
                self.include_fec = True
Example #9
    def build_section_data(self):
        entity_id, standardized_name, cycle, external_ids = self.entity.entity_id, self.entity.standardized_name, self.entity.cycle, self.entity.external_ids

        self.contributions_data = True

        contributors_barchart_data = []
        for record in self.data['top_contributors']:
            contributors_barchart_data.append({
                'key': generate_label(str(OrganizationNameCleaver(record['name']).parse())),
                'value' : record['total_amount'],
                'value_employee' : record['employee_amount'],
                'value_pac' : record['direct_amount'],
                'href' : barchart_href(record, cycle, 'organization')
            })
        contributors_barchart_data = bar_validate(contributors_barchart_data)
        self.contributors_barchart_data = json.dumps(contributors_barchart_data)

        industries_barchart_data = []
        for record in self.data['top_industries']:
            industries_barchart_data.append({
                'key': generate_label(str(OrganizationNameCleaver(record['name']).parse())),
                'href': barchart_href(record, cycle, 'industry'),
                'value' : record['amount'],
            })
        industries_barchart_data = bar_validate(industries_barchart_data)
        self.industries_barchart_data = json.dumps(industries_barchart_data)

        for key, values in self.data['local_breakdown'].iteritems():
            # values is a list of [count, amount]
            self.data['local_breakdown'][key] = float(values[1])
        self.data['local_breakdown'] = pie_validate(self.data['local_breakdown'])
        self.local_breakdown = json.dumps(self.data['local_breakdown'])

        for key, values in self.data['entity_breakdown'].iteritems():
            # values is a list of [count, amount]
            self.data['entity_breakdown'][key] = float(values[1])
        self.data['entity_breakdown'] = pie_validate(self.data['entity_breakdown'])
        self.entity_breakdown = json.dumps(self.data['entity_breakdown'])

        # if none of the charts have data, or if the aggregate total
        # received was negative, then suppress that whole content
        # section except the overview bar
        amount = int(float(self.entity.metadata['entity_info']['totals']['recipient_amount']))
        if amount < 0:
            self.suppress_contrib_graphs = True
            self.reason = "negative"
        elif not any(
            (industries_barchart_data, contributors_barchart_data,
             self.data['local_breakdown'], self.data['entity_breakdown'])):
            self.suppress_contrib_graphs = True
            self.reason = 'empty'

        pct_unknown = 0
        if amount:
            pct_unknown = float(self.data['industries_unknown_amount'].get('amount', 0)) * 100 / amount
        self.pct_known = int(round(100 - pct_unknown))

        self.external_links = external_sites.get_contribution_links('politician', standardized_name.name_str(), external_ids, cycle)
        if self.partytime_link:
            self.external_links.append({'url': self.partytime_link, 'text': 'Party Time'})

        self.bundling_data = [[x[key] for key in 'lobbyist_entity lobbyist_name firm_entity firm_name amount'.split()] for x in self.data['bundling']]

        if self.fec_summary:
            self.include_fec = True

            if self.fec_summary and 'date' in self.fec_summary:
                self.fec_summary['clean_date'] = datetime.datetime.strptime(self.fec_summary['date'], "%Y-%m-%d")

            timelines = []
            for pol in self.data['fec_timeline']:
                tl = {
                    'name': pol['candidate_name'],
                    'party': pol['party'],
                    'is_this': pol['entity_id'] == entity_id,
                    'timeline': map(lambda item: item if item >= 0 else 0, pol['timeline']),
                    'href': '/politician/%s/%s?cycle=%s' % (slugify(PoliticianNameCleaver(pol['candidate_name']).parse().name_str()), pol['entity_id'], cycle)
                }
                tl['sum'] = sum(tl['timeline'])
                timelines.append(tl)
            timelines.sort(key=lambda t: (int(t['is_this']), t['sum']), reverse=True)
            # restrict to top 5, and only those receiving more than 10% of this pol's total
            if timelines:
                this_sum = timelines[0]['sum']
                timelines = [timeline for timeline in timelines if timeline['sum'] > 0.1 * this_sum]
                timelines = timelines[:5]

            self.fec_timelines = json.dumps(timelines)