def handle(self, *args, **options):
    """Reload all published Persons and Companies into Elasticsearch.

    For each model: serialize every published row into its Elastic
    document, optionally drop and re-create the index first (when
    ``--drop_indices`` was given), bulk-write the documents, and — only
    after a rebuild — invalidate and immediately re-prime the cached
    "get all" helpers so readers never see the stale index contents.
    """
    activate(settings.LANGUAGE_CODE)
    conn = connections.get_connection('default')

    self._load_docs_into_index(
        conn,
        queryset=Person.objects.filter(publish=True),
        doc_cls=ElasticPerson,
        cached_getter=ElasticPerson.get_all_persons,
        noun="persons",
        drop_indices=options["drop_indices"],
    )
    self._load_docs_into_index(
        conn,
        queryset=Company.objects.filter(publish=True),
        doc_cls=ElasticCompany,
        cached_getter=ElasticCompany.get_all_companies,
        noun="companies",
        drop_indices=options["drop_indices"],
    )

def _load_docs_into_index(self, conn, queryset, doc_cls, cached_getter,
                          noun, drop_indices):
    """Index every row of *queryset* as *doc_cls* documents.

    Args:
        conn: low-level Elasticsearch connection (for index settings).
        queryset: Django queryset of published records; each record must
            provide ``to_dict()`` compatible with ``doc_cls`` kwargs.
        doc_cls: DocType class the records are serialized into.
        cached_getter: the cached classmethod-like attribute exposing
            ``invalidate(cls)`` (e.g. ``ElasticPerson.get_all_persons``).
        noun: plural label used in the progress/status message.
        drop_indices: when True, drop and re-create the index before
            writing, and refresh the cache afterwards.
    """
    # Materialize documents up front so the tqdm progress bar has an
    # accurate total; .nocache().iterator() keeps memory usage flat.
    docs_to_index = [
        doc_cls(**record.to_dict())
        for record in tqdm(queryset.nocache().iterator(),
                           total=queryset.count())
    ]

    if drop_indices:
        # Full rebuild: remove the old index (ignore 404 if it never
        # existed), re-create the mapping, and raise the result window so
        # "fetch everything" queries can page past the 10k default.
        Index(doc_cls._doc_type.index).delete(ignore=404)
        doc_cls.init()
        conn.indices.put_settings(
            index=doc_cls._doc_type.index,
            body={
                'index.max_result_window': 100000
            }
        )

    self.bulk_write(conn, docs_to_index)

    if drop_indices:
        # invalidate old values and immediately cache again
        cached_getter.invalidate(doc_cls)
        cached_getter()

    self.stdout.write(
        'Loaded {} {} to persistence storage'.format(
            len(docs_to_index), noun))
def handle(self, *args, **options):
    """Reload all published Persons and Companies into Elasticsearch.

    Each model's published rows are serialized and bulk-written; with
    ``--drop_indices`` the indices are dropped and re-created first.
    After a rebuild the cached "get all" helpers are invalidated and
    re-primed, and the indexed document counts are compared against the
    DB counts, warning on stderr about any mismatch.
    """
    activate(settings.LANGUAGE_CODE)
    conn = connections.get_connection("default")
    drop_indices = options["drop_indices"]

    persons_total = self._reindex_model(
        conn,
        queryset=Person.objects.filter(publish=True),
        doc_cls=ElasticPerson,
        index=person_idx,
        noun="persons",
        drop_indices=drop_indices,
    )
    companies_total = self._reindex_model(
        conn,
        queryset=Company.objects.filter(publish=True),
        doc_cls=ElasticCompany,
        index=company_idx,
        noun="companies",
        drop_indices=drop_indices,
    )

    if drop_indices:
        # Give ES time to refresh the freshly re-created indices before
        # re-priming the caches and validating counts, otherwise the
        # searches below may see a partially-visible index.
        sleep(60)

        # invalidate old values and immediately cache again
        ElasticPerson.get_all_persons.invalidate(ElasticPerson)
        indexed_persons_total = len(ElasticPerson.get_all_persons())

        # invalidate old values and immediately cache again
        ElasticCompany.get_all_companies.invalidate(ElasticCompany)
        indexed_companies_total = len(ElasticCompany.get_all_companies())

        self._warn_on_mismatch(
            "persons", persons_total, indexed_persons_total)
        self._warn_on_mismatch(
            "companies", companies_total, indexed_companies_total)

def _reindex_model(self, conn, queryset, doc_cls, index, noun, drop_indices):
    """Bulk-index *queryset* rows as *doc_cls* documents; return the count.

    Args:
        conn: low-level Elasticsearch connection (for index settings).
        queryset: Django queryset of published records exposing ``to_dict()``.
        doc_cls: DocType class the records are serialized into.
        index: the elasticsearch-dsl ``Index`` object to drop/create.
        noun: plural label for the status message.
        drop_indices: when True, drop and re-create the index before writing.

    Returns:
        int: number of documents written.
    """
    # Materialize up front so tqdm has an accurate total; the uncached
    # iterator keeps memory flat for large tables.
    docs_to_index = [
        doc_cls(**record.to_dict())
        for record in tqdm(queryset.nocache().iterator(),
                           total=queryset.count())
    ]

    if drop_indices:
        index.delete(ignore=404)
        index.create()
        doc_cls.init()
        # Raise the result window so "fetch everything" queries can page
        # past Elasticsearch's 10k default.
        conn.indices.put_settings(
            index=doc_cls._doc_type.index,
            body={"index.max_result_window": settings.ES_MAX_RESULT_WINDOW},
        )

    self.bulk_write(conn, docs_to_index)

    self.stdout.write(
        "Loaded {} {} to persistence storage".format(
            len(docs_to_index), noun)
    )
    return len(docs_to_index)

def _warn_on_mismatch(self, noun, db_total, indexed_total):
    """Write a stderr warning when DB and index document counts differ."""
    if db_total != indexed_total:
        self.stderr.write(
            "Mismatch between {} in DB ({}) and indexed {} ({})".format(
                noun, db_total, noun, indexed_total
            )
        )