Example #1
0
    def handle(self, *args, **options):
        """Re-index all published persons and companies into Elasticsearch.

        With ``--drop-indices`` the target index is deleted and recreated
        (with an enlarged ``max_result_window``) before writing, and the
        cached "get all" result sets are invalidated and re-warmed after.
        """
        activate(settings.LANGUAGE_CODE)
        conn = connections.get_connection('default')

        persons_loaded = self._reindex(
            conn,
            Person.objects.filter(publish=True),
            ElasticPerson,
            ElasticPerson.get_all_persons,
            options["drop_indices"],
        )
        self.stdout.write(
            'Loaded {} persons to persistence storage'.format(persons_loaded))

        companies_loaded = self._reindex(
            conn,
            Company.objects.filter(publish=True),
            ElasticCompany,
            ElasticCompany.get_all_companies,
            options["drop_indices"],
        )
        self.stdout.write(
            'Loaded {} companies to persistence storage'.format(
                companies_loaded))

    def _reindex(self, conn, qs, doc_cls, cached_getter, drop_indices):
        """Write every object of ``qs`` into the index behind ``doc_cls``.

        ``cached_getter`` is the cached class-level accessor (e.g.
        ``ElasticPerson.get_all_persons``) to invalidate and re-warm after a
        full rebuild.  Returns the number of documents written.
        """
        docs = [
            doc_cls(**obj.to_dict())
            for obj in tqdm(qs.nocache().iterator(), total=qs.count())
        ]

        if drop_indices:
            # Recreate the index from scratch; 404 on delete means it
            # simply did not exist yet.
            Index(doc_cls._doc_type.index).delete(ignore=404)
            doc_cls.init()

            conn.indices.put_settings(
                index=doc_cls._doc_type.index,
                body={
                    'index.max_result_window': 100000
                }
            )

        self.bulk_write(conn, docs)

        if drop_indices:
            # invalidate old values and immediately cache again
            cached_getter.invalidate(doc_cls)
            cached_getter()

        return len(docs)
Example #2
0
def export_persons(request, fmt):
    """Export all published persons as a downloadable JSON or XML dataset.

    Requires the ``core.export_persons`` permission; internal bookkeeping
    fields (``id``, ``last_change``) are stripped unless the user also has
    ``core.export_id_and_last_modified``.  Any ``fmt`` other than "xml"
    is served as JSON.
    """
    if not request.user.has_perm("core.export_persons"):
        return HttpResponseForbidden()

    if request.user.has_perm("core.export_id_and_last_modified"):
        fields_to_blacklist = []
    else:
        fields_to_blacklist = ["id", "last_change"]

    # Materialize into a list: a lazy ``map`` object cannot be JSON-serialized
    # on Python 3 and would be exhausted after a single pass anyway.
    data = [
        blacklist(
            add_encrypted_url(p, request.user, "encrypted_person_redirect"),
            fields_to_blacklist,
        )
        for p in ElasticPerson.get_all_persons()
    ]

    ActionLog(user=request.user, action="download_dataset", details=fmt).save()

    if fmt == "xml":
        response = render(request,
                          "xml.jinja", {"data": data},
                          content_type="application/xhtml+xml")
    else:
        # Default branch also covers fmt == "json".  Previously an unknown
        # ``fmt`` left ``response`` unbound and raised UnboundLocalError.
        response = JsonResponse(data, safe=False)

    response[
        "Content-Disposition"] = "attachment; filename=peps_{:%Y%m%d_%H%M}.{}".format(
            datetime.now(), fmt)

    response["Content-Length"] = len(response.content)

    return response
Example #3
0
    def handle(self, *args, **options):
        """Rebuild the person and company Elasticsearch indices from the DB.

        When ``--drop-indices`` is given, each index is deleted and recreated
        (with ``max_result_window`` raised to ``settings.ES_MAX_RESULT_WINDOW``)
        before bulk-writing, and afterwards the cached result sets are
        refreshed and the indexed counts are checked against the DB counts,
        reporting any mismatch on stderr.
        """
        activate(settings.LANGUAGE_CODE)
        es = connections.get_connection("default")
        rebuild = options["drop_indices"]

        published_persons = Person.objects.filter(publish=True)
        person_docs = [
            ElasticPerson(**rec.to_dict())
            for rec in tqdm(
                published_persons.nocache().iterator(),
                total=published_persons.count(),
            )
        ]
        persons_total = len(person_docs)

        if rebuild:
            person_idx.delete(ignore=404)
            person_idx.create()

            ElasticPerson.init()

            es.indices.put_settings(
                index=ElasticPerson._doc_type.index,
                body={"index.max_result_window": settings.ES_MAX_RESULT_WINDOW},
            )

        self.bulk_write(es, person_docs)
        self.stdout.write(
            "Loaded {} persons to persistence storage".format(persons_total)
        )

        published_companies = Company.objects.filter(publish=True)
        company_docs = [
            ElasticCompany(**rec.to_dict())
            for rec in tqdm(
                published_companies.nocache().iterator(),
                total=published_companies.count(),
            )
        ]
        companies_total = len(company_docs)

        if rebuild:
            company_idx.delete(ignore=404)
            company_idx.create()

            ElasticCompany.init()
            es.indices.put_settings(
                index=ElasticCompany._doc_type.index,
                body={"index.max_result_window": settings.ES_MAX_RESULT_WINDOW},
            )

        self.bulk_write(es, company_docs)
        self.stdout.write(
            "Loaded {} companies to persistence storage".format(companies_total)
        )

        if not rebuild:
            return

        # Let ES settle before sanity-checking the indexed counts.
        sleep(60)

        # Drop the stale cached result sets and warm them again right away.
        ElasticPerson.get_all_persons.invalidate(ElasticPerson)
        indexed_persons = len(ElasticPerson.get_all_persons())

        ElasticCompany.get_all_companies.invalidate(ElasticCompany)
        indexed_companies = len(ElasticCompany.get_all_companies())

        if persons_total != indexed_persons:
            self.stderr.write(
                "Mismatch between persons in DB ({}) and indexed persons ({})".format(
                    persons_total, indexed_persons
                )
            )

        if companies_total != indexed_companies:
            self.stderr.write(
                "Mismatch between companies in DB ({}) and indexed companies ({})".format(
                    companies_total, indexed_companies
                )
            )