def export_full_library_addresses(request):
    """Export address rows for every library organisation and its child
    organisations as a CSV attachment.

    For each parent organisation of type 'library' (sorted by name_fi),
    emits its address row, its alternate-address row, and a blank
    separator row, then the same triplet for each child organisation.

    :param request: Django HttpRequest (unused beyond being a view arg).
    :returns: HttpResponse with text/csv content and an attachment
        Content-Disposition header.
    """
    q = pyes.query.TermQuery('organisation_type', 'library')
    # NOTE(review): module-level side effect on the shared connection;
    # kept as-is since other code may rely on it — confirm before removing.
    conn.default_indices = ['documents']
    parents = conn.search(q, indices=settings.INDEX_NAME, doc_types=['organisation'], sort='name_fi')
    rows = []

    for parent in parents:
        cq = pyes.query.TermQuery('parent_organisation', parent.get_id())
        kids = conn.search(cq, indices=settings.INDEX_NAME, doc_types=['organisation'], sort='name_fi')
        rows.append(export.extract_address_row(parent))
        rows.append(export.extract_alternate_address_row(parent))
        rows.append({'br': ''})

        for kid in kids:
            rows.append(export.extract_address_row(kid))
            rows.append(export.extract_alternate_address_row(kid))
            rows.append({'br': ''})

    # Filter erroneous (falsy) rows the extractors could not produce.
    # A list comprehension instead of filter(None, ...) guarantees a
    # concrete, reusable list on both Python 2 and Python 3.
    rows = [row for row in rows if row]

    response = HttpResponse(export.dump_csv(rows))
    response['content-type'] = 'text/csv'
    response['Content-Disposition'] = 'attachment; filename=kirjastojen_osoitteet_full.csv'

    return response
def export_staff(request):
    """Export all staff members grouped by their organisation as a CSV
    attachment.

    Staff are bucketed by their 'organisation' id, then emitted in
    organisation order (sorted by name_fi): a heading row with the
    organisation's Finnish name, one row per staff member, then a blank
    separator row. Organisations with no staff are skipped.

    :param request: Django HttpRequest (unused beyond being a view arg).
    :returns: HttpResponse with text/csv content and an attachment
        Content-Disposition header.
    """
    q = pyes.query.MatchAllQuery()
    staff = conn.search(q, indices=settings.INDEX_NAME, doc_types=['person'], sort='last_name')
    organisations = conn.search(q, indices=settings.INDEX_NAME, doc_types=['organisation'], sort='name_fi')

    # Column header labels (Finnish) for the CSV; order defines columns.
    labels = OrderedDict()
    labels[""] = ""
    labels["name"] = "Nimi"
    labels["email"] = "Email"
    labels["title"] = "Nimike"
    labels["responsibility"] = "Vastuualue"
    labels["head"] = "Johtaja"
    rows = [labels]
    rows.append({'br': ''})
    used = set()  # organisation ids already emitted; set for O(1) membership
    cache = OrderedDict()  # organisation id -> list of staff rows

    # Bucket staff by organisation id, preserving the last_name sort order
    # within each bucket.
    for row in staff:
        cache.setdefault(row['organisation'], []).append(row)

    for organisation in organisations:
        oid = organisation._meta.id
        if oid not in cache:
            # Organisation has no staff — skip it entirely.
            continue

        if oid not in used:
            used.add(oid)
            rows.append({'name': organisation['name_fi']})

        for row in cache[oid]:
            rows.append(export.extract_staff_row(row))

        rows.append({'br': ''})

    response = HttpResponse(export.dump_csv(rows))
    response['content-type'] = 'text/csv'
    response['Content-Disposition'] = 'attachment; filename=kirjastojen_henkilosto.csv'

    return response