Example #1
def generate_xlsx():
    def guess_types(cell_value):
        # coerce a cell value to int, then float, then an ISO (YYYY-MM-DD) date; otherwise return it unchanged
        try:
            if float(cell_value) == int(float(cell_value)):
                return int(float(cell_value))
            return float(cell_value)
        except ValueError:
            pass
        try:
            return datetime.datetime.strptime(cell_value, "%Y-%m-%d").date()
        except ValueError:
            pass
        return cell_value

    class xlsxDictWriter(object):
        def writerow(self, row_data):
            hm = self.header_mapping
            for column_header, cell in row_data.items():
                column_letter = get_column_letter(hm[column_header])
                coordinate = '%s%s' % (column_letter, self.row_index)
                self.ws.cell(coordinate).value = guess_types(cell)
            self.row_index += 1

        def writeheader(self):
            # write a row in which each column contains its own header name
            self.writerow(
                dict(map(lambda x: (x, x), self.header_mapping.keys())))

        def save(self):
            out = BytesIO()
            self.wb.save(out)
            return out

        def __init__(self, headers):
            self.wb = Workbook()
            self.ws = self.wb.worksheets[0]
            self.row_index = 1
            self.header_mapping = dict(
                map(lambda x: (x[1], x[0]), enumerate(headers, start=1)))

    writer = xlsxDictWriter(headers)
    writer.writeheader()
    fr_headers_row = dict(
        map(lambda x: (x[1], fr_headers[x[0]]), enumerate(headers)))
    writer.writerow(fr_headers_row)
    cl_lookups = get_codelists_lookups()
    activities = qactivity.list_activities()
    for activity in activities:
        writer.writerow(activity_to_json(activity, cl_lookups))

    return writer.save()
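
Note: generate_xlsx() hands back the BytesIO buffer produced by writer.save() rather than a file on disk. A minimal sketch of serving it as a download from the surrounding Flask app (the view function, filename and send_file() call here are assumptions, not part of the original code):

from flask import send_file

def export_activities_xlsx():  # hypothetical view wrapping generate_xlsx()
    out = generate_xlsx()
    out.seek(0)  # rewind the in-memory buffer before streaming it
    return send_file(
        out,
        as_attachment=True,
        attachment_filename="activities.xlsx",  # hypothetical filename
        mimetype="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")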
Example #2
def generate_csv():
    csv_file = StringIO.StringIO()
    cl_lookups = get_codelists_lookups()
    disb_fys = generate_disb_fys()
    _headers = headers + disb_fys
    csv = unicodecsv.DictWriter(csv_file, _headers)
    csv.writeheader()
    activities = qactivity.list_activities()
    for activity in activities:
        activity_data = activity_to_json(activity, cl_lookups)
        remove_keys = set(activity_data)-set(_headers)
        for remove_key in remove_keys: del activity_data[remove_key]
        csv.writerow(activity_data)
    return csv_file
Example #3
def generate_csv():
    csv_file = StringIO.StringIO()
    cl_lookups = get_codelists_lookups()

    csv = unicodecsv.DictWriter(csv_file, headers)
    csv.writeheader()
    fr_headers_row = dict(
        map(lambda x: (x[1], fr_headers[x[0]]), enumerate(headers)))
    csv.writerow(fr_headers_row)

    activities = qactivity.list_activities()
    for activity in activities:
        csv.writerow(activity_to_json(activity, cl_lookups))
    return csv_file
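
Note: the CSV variants return the StringIO buffer itself. A comparable hedged sketch of wrapping that buffer in an HTTP response (the view function, filename and Content-Disposition header are illustrative, not part of the original code):

from flask import Response

def export_activities_csv():  # hypothetical view wrapping generate_csv()
    csv_file = generate_csv()
    return Response(
        csv_file.getvalue(),
        mimetype="text/csv",
        headers={"Content-Disposition": "attachment; filename=activities.csv"})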
Example #4
def generate_xlsx_filtered(arguments):
    disbFYs = generate_disb_fys()
    _headers = headers + disbFYs
    writer = xlsxDictWriter(_headers)
    writer.writesheet(u"Data")
    writer.writeheader()
    cl_lookups = get_codelists_lookups()
    if (arguments):
        activities = qactivity.list_activities_by_filters(arguments)
    else:
        activities = qactivity.list_activities()
    for activity in activities:
        writer.writerow(activity_to_json(activity, cl_lookups))
    writer.delete_first_sheet()
    return writer.save()
Example #5
def generate_xlsx_transactions(filter_key=None, filter_value=None):
    disbFYs = generate_disb_fys()
    writer = xlsxDictWriter(headers_transactions)
    writer.writesheet(u"Data")
    writer.writeheader()
    cl_lookups = get_codelists_lookups()
    if (filter_key and filter_value):
        activities = qactivity.list_activities_by_filters(
            {filter_key: filter_value})
    else:
        activities = qactivity.list_activities()
    for activity in activities:
        for tr in activity_to_transactions_list(activity, cl_lookups):
            writer.writerow(tr)
    writer.delete_first_sheet()
    return writer.save()
Example #6
def generate_xlsx(filter_key=None, filter_value=None):
    disbFYs = generate_disb_fys()
    _headers = headers + disbFYs
    writer = xlsxDictWriter(_headers)
    writer.writesheet("Data")
    writer.writeheader()
    cl_lookups = get_codelists_lookups()
    if (filter_key and filter_value):
        activities = qactivity.list_activities_by_filters(
            {filter_key: filter_value})
    else:
        activities = qactivity.list_activities()
    for activity in activities:
        writer.writerow(activity_to_json(activity, cl_lookups))
    writer.delete_first_sheet()
    return writer.save()
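
Note: Examples #4-#6 call writer.writesheet(...) and writer.delete_first_sheet(), neither of which appears in the xlsxDictWriter shown in Example #1. A plausible sketch of those helpers on top of openpyxl, assuming the class keeps self.wb, self.ws and self.row_index as in Example #1 (these bodies are guesses, not the project's actual methods):

    # hypothetical additions to the xlsxDictWriter class from Example #1
    def writesheet(self, sheet_name):
        # point subsequent writerow() calls at a fresh worksheet
        self.ws = self.wb.create_sheet(title=sheet_name)
        self.row_index = 1

    def delete_first_sheet(self):
        # drop the blank worksheet openpyxl creates with every new Workbook
        # (older openpyxl releases spell this wb.remove_sheet())
        self.wb.remove(self.wb.worksheets[0])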
Example #7
def activity(activity_id):
    activity = qactivity.get_activity(activity_id)
    if not activity:
        return(abort(404))
    locations = qlocation.get_locations_country(
                                    activity.recipient_country_code)
    return render_template(
        "activity.html",
        activity=activity,
        loggedinuser=current_user,
        codelists=codelists.get_codelists(),
        codelist_lookups=codelists.get_codelists_lookups(),
        locations=locations,
        api_locations_url=url_for("api.api_locations", country_code=activity.recipient_country_code),
        api_activity_locations_url=url_for("api.api_activity_locations", activity_id=activity_id),
        api_activity_finances_url=url_for("api.api_activity_finances", activity_id=activity_id),
        api_update_activity_finances_url=url_for("api.finances_edit_attr", activity_id=activity_id),
        api_iati_search_url=url_for("api.api_iati_search"),
        api_activity_forwardspends_url=url_for("api.api_activity_forwardspends", activity_id=activity_id),
        users=quser.user()
    )
Example #8
def build_activity_103(doc, activity):
    db_activity = activity

    cl_lookups = get_codelists_lookups()

    ia = et.Element("iati-activity")
    doc.append(ia)

    ia.set("last-updated-datetime", activity.updated_date.isoformat())
    ia.set("default-currency",
           current_app.config["ORGANISATION"]["default_currency"])
    ia.set("{http://www.w3.org/XML/1998/namespace}lang",
           current_app.config["ORGANISATION"]["default_language"])

    o_name = current_app.config["ORGANISATION"]["organisation_name"]
    o_ref = current_app.config["ORGANISATION"]["organisation_ref"]
    o_type = current_app.config["ORGANISATION"]["organisation_type"]

    # IATI Identifier
    ia.append(el_iati_identifier(activity, o_ref))

    # Reporting org
    ia.append(el_org_103("reporting", o_name, o_ref, o_type))

    # Title, Description
    ia.append(el_with_text("title", activity.title))
    ia.append(el_with_text("description", activity.description))

    # Participating orgs
    for organisation in activity.funding_organisations:
        ia.append(
            el_org_103("Funding", organisation.name, organisation.code, "10"))
    for organisation in activity.implementing_organisations:
        ia.append(
            el_org_103("Implementing", organisation.name, organisation.code,
                       "10"))

    ia.append(
        el_with_code_103(
            "activity-status", activity.activity_status,
            cl_lookups["ActivityStatus"][activity.activity_status]))

    # Activity dates
    if activity.start_date:
        ia.append(el_date_103("start", activity.start_date.isoformat()))
    if activity.end_date:
        ia.append(el_date_103("end", activity.end_date.isoformat()))

    # Contact info
    #ia.append(el_contact_info(current_app.config["ORGANISATION"]))

    # Geography
    ia.append(
        el_with_code_103(
            "recipient-country",
            activity.recipient_country_code,
            cl_lookups["Country"][activity.recipient_country_code],
        ))

    for location in activity.locations:
        ia.append(el_location_103(location))

    # Classifications
    ia.append(
        el_with_code_103("sector", activity.dac_sector or "",
                         cl_lookups["Sector"].get(activity.dac_sector,
                                                  ""), "DAC"))
    ia.append(
        el_with_code_103(
            "collaboration-type", activity.collaboration_type,
            cl_lookups["CollaborationType"].get(activity.collaboration_type)))
    ia.append(
        el_with_code_103("default-finance-type", activity.finance_type,
                         cl_lookups["FinanceType"].get(activity.finance_type)))
    ia.append(
        el_with_code_103("default-flow-type", activity.flow_type,
                         cl_lookups["FlowType"].get(activity.flow_type)))
    ia.append(
        el_with_code_103("default-aid-type", activity.aid_type,
                         cl_lookups["AidType"].get(activity.aid_type)))
    ia.append(
        el_with_code_103("default-tied-status", activity.tied_status,
                         cl_lookups["TiedStatus"].get(activity.tied_status)))

    # Transactions
    activity_commitments = filter(valid_transaction, activity.commitments)
    activity_disbursements = filter(valid_transaction, activity.disbursements)

    # Output commitments
    for transaction in activity_commitments:
        ia.append(build_transaction_103(transaction.as_dict()))

    if ((len(activity_commitments) == 0) and activity.start_date
            and activity.total_commitments):
        transaction = {
            "id": "%s-C" % activity.id,
            "transaction_date": activity.start_date,
            "transaction_value": activity.total_commitments,
            "transaction_description": "Total commitments",
            "transaction_type": "C"
        }
        ia.append(build_transaction_103(transaction))

    # Output disbursements
    for transaction in activity_disbursements:
        ia.append(build_transaction_103(transaction.as_dict()))

    if ((len(activity_disbursements) == 0) and activity.total_disbursements):
        transaction = {
            "id": "%s-D" % activity.id,
            "transaction_date": datetime.datetime.utcnow().date(),
            "transaction_value": activity.total_disbursements,
            "transaction_description": "Total disbursements",
            "transaction_type": "D"
        }
        ia.append(build_transaction_103(transaction))

    return doc
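
Note: build_activity_103() only appends an <iati-activity> element to the caller-supplied doc. A hedged sketch of how a complete document might be assembled and serialized around it (the wrapper function, the root element's attributes and the et.tostring() call are assumptions, not from the original code):

def generate_103_doc(activities):  # hypothetical wrapper
    # build an <iati-activities> root and append one element per activity
    doc = et.Element("iati-activities")
    doc.set("version", "1.03")
    for activity in activities:
        build_activity_103(doc, activity)
    return et.tostring(doc)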
Example #9
def build_activity_103(doc, activity):
    db_activity = activity

    cl_lookups = get_codelists_lookups()

    ia = et.Element("iati-activity")
    doc.append(ia)

    ia.set("last-updated-datetime", current_datetime())
    # FIXME: put default currency in organisation settings
    ia.set("default-currency", app.config["ORGANISATION"]["default_currency"])

    o_name = app.config["ORGANISATION"]["organisation_name"]
    o_ref = app.config["ORGANISATION"]["organisation_ref"]
    o_type = app.config["ORGANISATION"]["organisation_type"]

    # IATI Identifier
    ia.append(el_iati_identifier(activity, o_ref))

    # Reporting org
    ia.append(el_org_103("reporting", o_name, o_ref, o_type))

    # Title, Description
    ia.append(el_with_text("title", activity.title))
    ia.append(el_with_text("description", activity.description))

    # Participating orgs
    ia.append(
        el_org_103(
            "Funding",
            app.config["ORGANISATION"]["organisation_name"],
            app.config["ORGANISATION"]["organisation_ref"],
            app.config["ORGANISATION"]["organisation_type"],
        )
    )
    ia.append(el_org_103("Implementing", activity.implementing_org))
    ia.append(el_org_103("Extending", activity.executing_org_name.name, activity.executing_org_name.code, "10"))

    ia.append(
        el_with_code_103(
            "activity-status", activity.activity_status, cl_lookups["ActivityStatus"][activity.activity_status]
        )
    )

    # Activity dates
    if activity.start_date:
        ia.append(el_date_103("start", activity.start_date.isoformat()))
    if activity.end_date:
        ia.append(el_date_103("end", activity.end_date.isoformat()))

    # Contact info
    # ia.append(el_contact_info(app.config["ORGANISATION"]))

    # Geography
    ia.append(
        el_with_code_103(
            "recipient-country", activity.recipient_country_code, cl_lookups["Country"][activity.recipient_country_code]
        )
    )

    for location in activity.locations:
        ia.append(el_location_103(location))

    # Classifications
    ia.append(el_with_code_103("sector", activity.dac_sector, cl_lookups["Sector"][activity.dac_sector], "DAC"))
    ia.append(
        el_with_code_103(
            "collaboration-type",
            activity.collaboration_type,
            cl_lookups["CollaborationType"][activity.collaboration_type],
        )
    )
    ia.append(
        el_with_code_103(
            "default-finance-type", activity.finance_type, cl_lookups["FinanceType"][activity.finance_type]
        )
    )
    ia.append(el_with_code_103("default-flow-type", activity.flow_type, cl_lookups["FlowType"][activity.flow_type]))
    ia.append(el_with_code_103("default-aid-type", activity.aid_type, cl_lookups["AidType"][activity.aid_type]))
    ia.append(
        el_with_code_103("default-tied-status", activity.tied_status, cl_lookups["TiedStatus"][activity.tied_status])
    )

    # Transactions
    activity_commitments = filter(valid_transaction, activity.commitments)
    activity_disbursements = filter(valid_transaction, activity.disbursements)

    # Output commitments
    for transaction in activity_commitments:
        ia.append(build_transaction_103(transaction.as_dict()))

    if (len(activity_commitments) == 0) and activity.start_date and activity.total_commitments:
        transaction = {
            "id": "%s-C" % activity.id,
            "transaction_date": activity.start_date,
            "transaction_value": activity.total_commitments,
            "transaction_description": "Total commitments",
            "transaction_type": "C",
        }
        ia.append(build_transaction_103(transaction))

    # Output disbursements
    for transaction in activity_disbursements:
        ia.append(build_transaction_103(transaction.as_dict()))

    if (len(activity_disbursements) == 0) and activity.total_disbursements:
        transaction = {
            "id": "%s-D" % activity.id,
            "transaction_date": datetime.datetime.utcnow().date(),
            "transaction_value": activity.total_disbursements,
            "transaction_description": "Total disbursements",
            "transaction_type": "D",
        }
        ia.append(build_transaction_103(transaction))

    return doc
Example #10
def import_file():
    data = read_file()
    print("There are {} projects found".format(len(data)))

    # Get codelists for lookup
    CODELISTS_BY_NAME = codelists.get_codelists_lookups_by_name()
    CODELISTS_IDS_BY_NAME = codelists.get_codelists_ids_by_name()
    CODELISTS_IDS = codelists.get_codelists_lookups()
    CODELIST_IDS_BY_CODE = codelists.get_codelists_lookups_by_code()

    for activity in data:
        start_date = util.fq_fy_to_date(1, int(activity["earliest_year"][0:4]),
                                        "start")
        end_date = util.fq_fy_to_date(4, int(activity["latest_year"][0:4]),
                                      "end")

        d = {
            "user_id": 1,  # FIXME
            "domestic_external": u"domestic",
            "code": unicode(activity["code"]),
            "title": unicode(activity["name"].decode("utf-8")),
            "description": u"",
            # "description": nonempty_from_list([
            #     unicode(activity["Project Description"].decode("utf-8")),
            #     unicode(activity["Objective"].decode("utf-8")),
            # ]),
            "start_date": start_date.isoformat(),
            "end_date": end_date.isoformat(),
            "reporting_org_id": qorganisations.get_or_create_organisation(
                u"Government of Liberia"),
            "organisations": [
                make_organisation(4, org[1], org[0])
                for org in activity["organisations"]
            ] + [make_organisation(1, u"Government of Liberia")],
            "recipient_country_code": "LR",
            "classifications": process_classifications(activity, CODELIST_IDS_BY_CODE),
            # "collaboration_type": CODES["collaboration_type"][
            #     activity["Donor Type"].strip()],
            "finance_type": CODES["finance_type"][u"Grant"],
            "aid_type": CODES["aid_type"][u""],
            "activity_status": activity["activity_status"],
            # "tied_status": "5",  # assume everything is untied
            # "flow_type": "10",  # assume everything is ODA
            "finances": process_transactions(activity, CODELIST_IDS_BY_CODE),
        }
        qactivity.create_activity(d)
Example #11
def api_activities_by_id_complete(activity_id):
    cl_lookups = get_codelists_lookups()
    activity = qactivity.get_activity(activity_id).as_jsonable_dict()

    return jsonify(activity)
Example #12
def api_activities_by_id(activity_id):
    cl_lookups = get_codelists_lookups()
    activity = qactivity.get_activity(activity_id)
    data = qgenerate_csv.activity_to_json(activity, cl_lookups)

    return jsonify(data)
Example #13
def generate_xlsx_export_template(data, mtef=False):
    if mtef:
        current_year = datetime.datetime.utcnow().date().year
        mtef_cols = [
            u"FY{}/{} (MTEF)".format(str(year)[2:4],
                                     str(year + 1)[2:4])
            for year in range(current_year, current_year + 3)
        ]
        _headers = [u"ID", u"Project code", u"Activity Title"]
        _headers += mtef_cols
        _headers += [
            u'Activity Status', u'Activity Dates (Start Date)',
            u'Activity Dates (End Date)', u"County"
        ]
    else:
        mtef_cols = []
        _headers = [
            u"ID",
            u"Project code",
            u"Activity Title",
            util.previous_fy_fq(),
            u'Activity Status',
            u'Activity Dates (Start Date)',
            u'Activity Dates (End Date)',
            u"County",
        ]
    writer = xlsxDictWriter(_headers)
    cl_lookups = get_codelists_lookups()

    myFill = PatternFill(start_color='FFFF00',
                         end_color='FFFF00',
                         fill_type='solid')

    statuses = get_codelists_lookups_by_name()["ActivityStatus"].keys()

    # Activity Status validation
    v_status = DataValidation(type="list",
                              formula1='"{}"'.format(u",".join(statuses)),
                              allow_blank=False)
    v_status.error = 'Your entry is not in the list'
    v_status.errorTitle = 'Activity Status'
    v_status.prompt = 'Please select from the list'
    v_status.promptTitle = 'Activity Status'

    v_id = DataValidation(type="whole")
    v_id.errorTitle = "Invalid ID"
    v_id.error = "Please enter a valid ID"
    v_id.promptTitle = 'Liberia Project Dashboard ID'
    v_id.prompt = 'Please do not edit this ID. It is used by the Liberia Project Dashboard to uniquely identify activities.'

    v_date = DataValidation(type="date")
    v_date.errorTitle = "Invalid date"
    v_date.error = "Please enter a valid date"

    v_number = DataValidation(type="decimal")
    v_number.errorTitle = "Invalid number"
    v_number.error = "Please enter a valid number"

    for org_code, activities in sorted(data.items()):
        writer.writesheet(org_code)
        writer.ws.add_data_validation(v_status)
        writer.ws.add_data_validation(v_date)
        writer.ws.add_data_validation(v_number)
        writer.ws.add_data_validation(v_id)
        #writer.ws.protection.sheet = True
        for activity in activities:
            existing_activity = activity_to_json(activity, cl_lookups)
            for mtef_year in mtef_cols:
                fy_start, fy_end = re.match(r"FY(\d*)/(\d*) \(MTEF\)",
                                            mtef_year).groups()
                existing_activity[mtef_year] = sum([
                    float(
                        existing_activity["20{} Q1 (MTEF)".format(fy_start)]),
                    float(
                        existing_activity["20{} Q2 (MTEF)".format(fy_start)]),
                    float(
                        existing_activity["20{} Q3 (MTEF)".format(fy_start)]),
                    float(existing_activity["20{} Q4 (MTEF)".format(fy_start)])
                ])
            writer.writerow(existing_activity)
        if mtef == True:
            # rows 2..len(activities)+1 hold the data (row 1 is the header)
            for rownum in range(2, len(activities) + 2):
                writer.ws.cell(row=rownum, column=4).fill = myFill
                writer.ws.cell(row=rownum, column=5).fill = myFill
                writer.ws.cell(row=rownum, column=6).fill = myFill
                writer.ws.cell(row=rownum,
                               column=4).number_format = u'"USD "#,##0.00'
                writer.ws.cell(row=rownum,
                               column=5).number_format = u'"USD "#,##0.00'
                writer.ws.cell(row=rownum,
                               column=6).number_format = u'"USD "#,##0.00'
            writer.ws.column_dimensions[u"C"].width = 70
            writer.ws.column_dimensions[u"D"].width = 15
            writer.ws.column_dimensions[u"E"].width = 15
            writer.ws.column_dimensions[u"F"].width = 15
            writer.ws.column_dimensions[u"G"].width = 15
            writer.ws.column_dimensions[u"H"].width = 20
            writer.ws.column_dimensions[u"I"].width = 20
            v_id.add('A2:A{}'.format(len(activities) + 2))
            v_number.add('D2:F{}'.format(len(activities) + 2))
            v_status.add('G2:G{}'.format(len(activities) + 2))
            v_date.add('H2:I{}'.format(len(activities) + 2))
        elif mtef == False:
            # rows 2..len(activities)+1 hold the data (row 1 is the header)
            for rownum in range(2, len(activities) + 2):
                writer.ws.cell(row=rownum, column=4).fill = myFill
                writer.ws.cell(row=rownum,
                               column=4).number_format = u'"USD "#,##0.00'
            writer.ws.column_dimensions[u"C"].width = 70
            writer.ws.column_dimensions[u"D"].width = 15
            writer.ws.column_dimensions[u"E"].width = 15
            writer.ws.column_dimensions[u"F"].width = 20
            writer.ws.column_dimensions[u"G"].width = 15
            v_id.add('A2:A{}'.format(len(activities) + 2))
            v_number.add('D2:D{}'.format(len(activities) + 2))
            v_status.add('E2:E{}'.format(len(activities) + 2))
            v_date.add('F2:G{}'.format(len(activities) + 2))
    writer.delete_first_sheet()
    return writer.save()
Example #14
def import_xls(input_file, column_name=u"2018 Q1 (D)"):
    xl_workbook = xlrd.open_workbook(filename=input_file.filename,
                                     file_contents=input_file.read())
    num_sheets = len(xl_workbook.sheet_names())
    num_updated_activities = 0
    activity_id = None
    cl_lookups = get_codelists_lookups()
    cl_lookups_by_name = get_codelists_lookups_by_name()
    try:
        for sheet_id in range(0, num_sheets):
            input_file.seek(0)
            data = xlsx_to_csv.getDataFromFile(input_file.filename,
                                               input_file.read(), sheet_id,
                                               True)
            for row in data:  # each row is one ID
                if column_name not in row:
                    flash(
                        u"The column {} containing financial data was not \
                    found in the uploaded spreadsheet!".format(column_name),
                        "danger")
                    raise Exception
                # skip blank cells, "-" placeholders and zero values
                if (row[column_name] in ("", "-")
                        or float(row[column_name]) == 0):
                    continue
                activity_id = row[u"ID"]
                activity = qactivity.get_activity(activity_id)
                if not activity:
                    flash(
                        u"Warning, activity ID \"{}\" with title \"{}\" was not found in the system \
                        and was not imported! Please create this activity in the \
                        system before trying to import.".format(
                            row[u'ID'], row[u'Activity Title']), "warning")
                    continue
                existing_activity = activity_to_json(activity, cl_lookups)
                row_value, row_currency = tidy_amount(row[column_name])
                updated_activity_data = update_activity_data(
                    activity, existing_activity, row, cl_lookups_by_name)
                #FIXME need to handle multiple currencies later... also handle this in process_transaction()
                difference = row_value - float(
                    existing_activity.get(column_name, 0))
                if (difference == 0) and (updated_activity_data == False):
                    continue
                if difference != 0:
                    activity.finances.append(
                        process_transaction(activity, difference, row_currency,
                                            column_name))
                db.session.add(activity)
                num_updated_activities += 1
                qactivity.activity_updated(activity.id)

                if difference == 0:
                    # Financial values not updated, only other activity data
                    flash(
                        u"Updated {} (Project ID: {})".format(
                            activity.title, activity.id), "success")
                elif existing_activity.get(column_name, 0) != 0:
                    # Non-zero financial values were previously provided and should be adjusted upwards/downwards
                    flash(
                        u"Updated {} for {} (Project ID: {}); previous value was {}; \
                        new value is {}. New entry for {} added.".format(
                            util.column_data_to_string(column_name),
                            activity.title, activity.id,
                            existing_activity.get(column_name),
                            row.get(column_name), difference), "success")
                else:
                    # Financial values were not previously provided, and are now entered
                    flash(
                        u"Updated {} for {} (Project ID: {})".format(
                            util.column_data_to_string(column_name),
                            activity.title, activity.id), "success")
    except Exception, e:
        if activity_id:
            flash(
                u"""There was an unexpected error when importing your
            projects, there appears to be an error around activity ID {}.
            The error was: {}""".format(activity_id, e), "danger")
        else:
            flash(
                u"""There was an unexpected error when importing your projects,
        the error was: {}""".format(e), "danger")
Example #15
def import_xls_mtef(input_file):
    xl_workbook = xlrd.open_workbook(filename=input_file.filename,
                                     file_contents=input_file.read())
    num_sheets = len(xl_workbook.sheet_names())
    num_updated_activities = 0
    activity_id = None
    cl_lookups = get_codelists_lookups()
    cl_lookups_by_name = get_codelists_lookups_by_name()

    def filter_mtef(column):
        # keep only column headers like "FY18/19 (MTEF)"
        pattern = r"(.*) \(MTEF\)$"
        return re.match(pattern, column)

    try:
        for sheet_id in range(0, num_sheets):
            input_file.seek(0)
            data = xlsx_to_csv.getDataFromFile(input_file.filename,
                                               input_file.read(), sheet_id,
                                               True)
            mtef_cols = filter(filter_mtef, data[0].keys())
            if len(mtef_cols) == 0:
                flash(
                    "No columns containing MTEF projections data \
                were found in the uploaded spreadsheet!", "danger")
                raise Exception
            for row in data:  # each row is one ID
                activity_id = row[u"ID"]
                activity = qactivity.get_activity(activity_id)
                if not activity:
                    flash(
                        "Warning, activity ID \"{}\" with title \"{}\" was not found in the system \
                        and was not imported! Please create this activity in the \
                        system before trying to import.".format(
                            row[u'ID'], row[u'Activity Title']), "warning")
                    continue
                existing_activity = activity_to_json(activity, cl_lookups)
                updated_activity_data = update_activity_data(
                    activity, existing_activity, row, cl_lookups_by_name)
                updated_years = []
                for mtef_year in mtef_cols:
                    new_fy_value, row_currency = tidy_amount(row[mtef_year])
                    fy_start, fy_end = re.match(r"FY(\d*)/(\d*) \(MTEF\)",
                                                mtef_year).groups()
                    existing_fy_value = sum([
                        float(existing_activity["20{} Q1 (MTEF)".format(
                            fy_start)]),
                        float(existing_activity["20{} Q2 (MTEF)".format(
                            fy_start)]),
                        float(existing_activity["20{} Q3 (MTEF)".format(
                            fy_start)]),
                        float(existing_activity["20{} Q4 (MTEF)".format(
                            fy_start)])
                    ])
                    #FIXME need to handle multiple currencies later... also handle this in process_transaction()
                    difference = new_fy_value - existing_fy_value
                    # We ignore differences < 1 USD, because this can be due to rounding errors
                    # when we divided input date by 4.
                    if round(difference) == 0:
                        continue
                    # Create 1/4 of new_fy_value for each quarter in this FY
                    value = round(new_fy_value / 4.0, 2)
                    for _fq in [1, 2, 3, 4]:
                        year, quarter = util.lr_quarter_to_cal_quarter(
                            int("20{}".format(fy_start)), _fq)
                        inserted = qfinances.create_or_update_forwardspend(
                            activity_id, quarter, year, value, u"USD")
                    updated_years.append(u"FY{}/{}".format(fy_start, fy_end))
                if updated_years or updated_activity_data:
                    num_updated_activities += 1
                    qactivity.activity_updated(activity.id)
                if updated_years:
                    #FIXME there is an issue with a maximum number of flash messages
                    # so this sometimes breaks -- this is the reason we only display
                    # for MTEF updates as for these it is more important.
                    flash(
                        u"Updated MTEF projections for {} for {} (Project ID: {})"
                        .format(", ".join(updated_years), activity.title,
                                activity.id), "success")
    except Exception, e:
        if activity_id:
            flash(
                """There was an unexpected error when importing your
            projects, there appears to be an error around activity ID {}.
            The error was: {}""".format(activity_id, e), "danger")
        else:
            flash(
                """There was an unexpected error when importing your projects,
        the error was: {}""".format(e), "danger")