# Django management command handler: bulk-import FailureLine rows into
# Elasticsearch. Assumes `import time`, `from elasticsearch_dsl import Search`,
# and project helpers (FailureLine, TestFailureLine, bulk_insert, connection,
# failure_line_from_value) are available in the enclosing module.
def handle(self, *args, **options):
        # Start the keyset cursor just below the smallest stored id.
        # (values_list(...)[0] raises IndexError on an empty table.)
        min_id = FailureLine.objects.order_by('id').values_list('id', flat=True)[0] - 1
        chunk_size = options['chunk_size']

        if options["recreate"]:
            # Drop any existing index (a 404 for a missing index is fine)
            # and recreate it with the TestFailureLine mapping.
            connection.indices.delete(TestFailureLine._doc_type.index, ignore=404)
            TestFailureLine.init()
        elif connection.indices.exists(TestFailureLine._doc_type.index):
            self.stderr.write("Index already exists; can't perform import")
            return

        while True:
            rows = list(FailureLine.objects
                        .filter(id__gt=min_id)
                        .order_by('id')
                        .values('id', 'job_guid', 'action', 'test', 'subtest',
                                'status', 'expected', 'message',
                                'best_classification_id',
                                'best_is_verified')[:chunk_size])
            if not rows:
                break
            es_lines = []
            for item in rows:
                # failure_line_from_value may return None for rows that
                # can't be converted; skip those.
                es_line = failure_line_from_value(item)
                if es_line:
                    es_lines.append(es_line)
            self.stdout.write("Inserting %i rows" % len(es_lines))
            bulk_insert(es_lines)
            # Advance the cursor past the last row of this chunk, then
            # throttle before the next query.
            min_id = rows[-1]['id']
            time.sleep(options['sleep'])
        # Report the final document count via a count-only search.
        s = Search(doc_type=TestFailureLine).params(search_type="count")
        self.stdout.write("Index contains %i documents" % s.execute().hits.total)
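
A caveat on the cursor setup at the top of handle(): as noted in the comment, values_list('id', flat=True)[0] raises IndexError when the FailureLine table is empty. A defensive variant (a sketch, not the project's code) could use Django's aggregate, which returns None instead:

from django.db.models import Min

# Hypothetical replacement for the first line of handle(): Min() yields
# None on an empty table, so the command can bail out cleanly.
start = FailureLine.objects.aggregate(min_id=Min('id'))['min_id']
if start is None:
    self.stdout.write("No failure lines to import")
    return
min_id = start - 1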
def create_es(failure_lines):
    # Store the failure lines in Elasticsearch.
    es_lines = []
    for failure_line in failure_lines:
        # from_model may return None for lines that aren't indexable.
        es_line = TestFailureLine.from_model(failure_line)
        if es_line:
            es_lines.append(es_line)
    bulk_insert(es_lines)
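
A usage sketch for create_es (the call site is hypothetical; any iterable of FailureLine model instances works):

# Hypothetical call site: index all stored failure lines for one job.
lines = FailureLine.objects.filter(job_guid='some-job-guid')
create_es(lines)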
    # A variant of the handler above: the chunked_qs project helper
    # replaces the hand-rolled keyset-pagination loop.
    def handle(self, *args, **options):
        if options["recreate"]:
            connection.indices.delete(TestFailureLine._doc_type.index,
                                      ignore=404)
            TestFailureLine.init()
        elif connection.indices.exists(TestFailureLine._doc_type.index):
            self.stderr.write("Index already exists; can't perform import")
            return

        fields = [
            'id',
            'action',
            'job_guid',
            'test',
            'subtest',
            'status',
            'expected',
            'message',
            'best_classification_id',
            'best_is_verified',
        ]

        # Only test_result lines are indexed.
        failure_lines = FailureLine.objects.filter(action='test_result')
        for rows in chunked_qs(failure_lines,
                               options['chunk_size'],
                               fields=fields):
            if not rows:
                break

            # Rows are pre-filtered to test results, so from_model is
            # expected to succeed for every line in the chunk.
            es_lines = [TestFailureLine.from_model(line) for line in rows]
            self.stdout.write("Inserting %i rows" % len(es_lines))
            bulk_insert(es_lines)

            time.sleep(options['sleep'])

        count = Search(doc_type=TestFailureLine).count()
        self.stdout.write("Index contains %i documents" % count)
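
chunked_qs is a project helper not shown in this snippet. A minimal sketch of the keyset-paginated chunking it appears to perform (name, signature, and behaviour are assumptions inferred from the call site above):

def chunked_qs(qs, chunk_size=10000, fields=None):
    # Hypothetical sketch: walk the queryset in id order, chunk_size
    # rows at a time, using an id cursor rather than OFFSET so every
    # chunk is an efficient indexed range query.
    min_id = 0
    while True:
        chunk = qs.filter(id__gt=min_id).order_by('id')
        if fields is not None:
            # Load only the requested columns; Django always keeps the pk.
            chunk = chunk.only(*fields)
        rows = list(chunk[:chunk_size])
        if not rows:
            return
        yield rows
        min_id = rows[-1].id

Keyset pagination keeps each chunk query cheap no matter how far the import has progressed, unlike OFFSET-based slicing, which rescans all skipped rows on every query.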