def create_es(failure_lines):
    """Store the given failure lines in Elasticsearch.

    Lines for which ``TestFailureLine.from_model`` returns a falsy value
    (i.e. lines that could not be converted to an ES document) are skipped.
    """
    # Convert each model row to an ES document, dropping failed conversions.
    es_lines = [
        es_line
        for es_line in (TestFailureLine.from_model(line) for line in failure_lines)
        if es_line
    ]
    bulk_insert(es_lines)
def create_es(failure_lines):
    """Store the given failure lines in Elasticsearch.

    Rows that cannot be represented as ES documents (``from_model``
    returned a falsy value) are silently dropped before insertion.
    """
    documents = filter(
        None, (TestFailureLine.from_model(line) for line in failure_lines)
    )
    bulk_insert(list(documents))
def handle(self, *args, **options):
    """Bulk-import ``FailureLine`` rows into the Elasticsearch index.

    With ``--recreate`` the index is dropped (ignoring a 404 if it does
    not yet exist) and re-initialised; otherwise the command refuses to
    run against an existing index. Rows are streamed in chunks of
    ``options['chunk_size']``, sleeping ``options['sleep']`` seconds
    between chunks to avoid overloading the cluster.
    """
    if options["recreate"]:
        # ignore=404: deleting a non-existent index is not an error here.
        connection.indices.delete(TestFailureLine._doc_type.index, ignore=404)
        TestFailureLine.init()
    elif connection.indices.exists(TestFailureLine._doc_type.index):
        self.stderr.write("Index already exists; can't perform import")
        return

    # Fetch only the columns the ES document actually needs.
    fields = [
        'id',
        'action',
        'job_guid',
        'test',
        'subtest',
        'status',
        'expected',
        'message',
        'best_classification_id',
        'best_is_verified',
    ]

    failure_lines = FailureLine.objects.filter(action='test_result')
    for rows in chunked_qs(failure_lines, options['chunk_size'], fields=fields):
        if not rows:
            break
        # Skip rows whose conversion yields a falsy document — from_model
        # can return a falsy value, and inserting None entries would break
        # (or pollute) the bulk insert.
        es_lines = [
            doc
            for doc in (TestFailureLine.from_model(line) for line in rows)
            if doc
        ]
        self.stdout.write("Inserting %i rows" % len(es_lines))
        bulk_insert(es_lines)
        # Throttle between chunks so the import doesn't overwhelm ES.
        time.sleep(options['sleep'])

    count = Search(doc_type=TestFailureLine).count()
    self.stdout.write("Index contains %i documents" % count)