Example #1
def create_failure_lines(repository, job_guid, failure_line_list):
    failure_lines = []
    for i, (base_data, updates) in enumerate(failure_line_list):
        data = {"job_guid": job_guid,
                "repository": repository,
                "line": i}
        data.update(base_data)
        data.update(updates)
        failure_line = FailureLine(**data)
        failure_line.save()
        failure_lines.append(failure_line)

    return failure_lines
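
A minimal usage sketch for the helper above (the field names "action", "test", "status" and "expected" are assumed FailureLine model fields, and the guid value is illustrative only):

test_failure = {"action": "test_result",
                "test": "test_example.py",
                "status": "FAIL",
                "expected": "PASS"}

failure_lines = create_failure_lines(
    repository,                 # a Repository instance
    "1234567890abcdef",         # job guid, illustrative value
    [(test_failure, {}),                          # base data used as-is
     (test_failure, {"test": "test_other.py"})])  # base data with an override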
Example #3
def create_failure_lines(job, failure_line_list, start_line=0):
    failure_lines = []
    for i, (base_data, updates) in enumerate(failure_line_list[start_line:]):
        data = {
            "job_guid": job.guid,
            "repository": job.repository,
            "line": i + start_line
        }
        data.update(base_data)
        data.update(updates)
        failure_line = FailureLine(**data)
        failure_line.save()
        failure_lines.append(failure_line)

    return failure_lines
Example #4
    def handle(self, *args, **options):

        if not len(args) == 3:
            raise CommandError('3 arguments required, %s given' % len(args))
        log_response = requests.get(args[0], timeout=30)
        log_response.raise_for_status()

        if log_response.text:
            log_content = StringIO(log_response.text)

            try:
                repository = Repository.objects.get(name=args[2], active_status='active')
            except Repository.DoesNotExist:
                raise CommandError('Unknown repository %s' % args[2])

            log_iter = reader.read(log_content)

            failure_lines_cutoff = settings.FAILURE_LINES_CUTOFF
            log_iter = list(islice(log_iter, failure_lines_cutoff + 1))

            if len(log_iter) > failure_lines_cutoff:
                # Alter the N+1th log line to indicate the list was truncated.
                log_iter[-1].update(action='truncated')

            with JobsModel(args[2]) as jobs_model:
                job_id = jobs_model.get_job_ids_by_guid([args[1]])

                if not job_id:
                    raise CommandError('No job found with guid %s in the %s repository' % (args[1], args[2]))

            FailureLine.objects.bulk_create(
                [FailureLine(repository=repository, job_guid=args[1], **failure_line)
                 for failure_line in log_iter]
            )
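
This handle() belongs to a Django management command (the command class and its name are not shown here). Given the positional arguments it expects, a hypothetical invocation from code would look like this, with the command name assumed:

from django.core.management import call_command

call_command("write_failure_lines",                         # hypothetical command name
             "https://example.com/logs/errorsummary.log",   # args[0]: structured log URL
             "abcdef0123456789",                            # args[1]: job guid
             "mozilla-central")                             # args[2]: repository name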
Example #5
def create_failure_lines(job, failure_line_list,
                         start_line=0):
    failure_lines = []
    for i, (base_data, updates) in enumerate(failure_line_list[start_line:]):
        data = {"job_guid": job.guid,
                "repository": job.repository,
                "line": i + start_line}
        data.update(base_data)
        data.update(updates)
        failure_line = FailureLine(**data)
        failure_line.save()
        failure_line.elastic_search_insert()
        failure_lines.append(failure_line)

    refresh_all()

    return failure_lines
Example #6
def create(repository, job_guid, job_log, log_iter):
    FailureLine.objects.bulk_create([
        FailureLine(repository=repository,
                    job_guid=job_guid,
                    job_log=job_log,
                    **failure_line) for failure_line in log_iter
    ])
    job_log.status = JobLog.PARSED
    job_log.save()
Example #7
def create_failure_lines(job, failure_line_list, start_line=0):
    failure_lines = []
    for i, (base_data, updates) in enumerate(failure_line_list[start_line:]):
        data = {"job_guid": job.guid, "repository": job.repository, "line": i + start_line}
        data.update(base_data)
        data.update(updates)
        failure_line = FailureLine(**data)
        job_log = JobLog.objects.create(
            job=job,
            name='{}{}'.format(base_data.get('test'), job.id),
            url='bar{}'.format(i),
            status=1,
        )
        print('create jobLog for job id: {}'.format(job.id))
        failure_line.job_log = job_log
        failure_line.save()
        failure_lines.append(failure_line)

    return failure_lines
Example #8
def test_has_line(find_it):
    line = FailureLine(line=123)
    line_list = [
        {'line_number': 111},
        {'line_number': 222},
    ]

    if find_it:
        line_list.append({'line_number': 123})
        assert has_line(line, line_list)
    else:
        assert not has_line(line, line_list)
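
The has_line helper exercised by this test is not shown in these examples. Consistent with the assertions above, it only needs to report whether the FailureLine's line number occurs among the parsed log lines; a minimal sketch (an assumption, not necessarily the project's implementation):

def has_line(failure_line, line_list):
    # True if any parsed log line carries the same line number.
    return any(line["line_number"] == failure_line.line for line in line_list)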
Example #9
    def handle(self, *args, **options):
        try:
            log_url, job_guid, repository_name = args
        except ValueError:
            raise CommandError('3 arguments required, %s given' % len(args))

        log_text = fetch_text(log_url)

        if not log_text:
            return

        log_content = StringIO(log_text)

        try:
            repository = Repository.objects.get(name=repository_name,
                                                active_status='active')
        except Repository.DoesNotExist:
            raise CommandError('Unknown repository %s' % repository_name)

        log_iter = reader.read(log_content)

        failure_lines_cutoff = settings.FAILURE_LINES_CUTOFF
        log_iter = list(islice(log_iter, failure_lines_cutoff + 1))

        if len(log_iter) > failure_lines_cutoff:
            # Alter the N+1th log line to indicate the list was truncated.
            log_iter[-1].update(action='truncated')

        with JobsModel(repository_name) as jobs_model:
            job_id = jobs_model.get_job_ids_by_guid([job_guid])

            if not job_id:
                raise CommandError(
                    'No job found with guid %s in the %s repository' %
                    (job_guid, repository_name))

        with transaction.atomic():
            FailureLine.objects.bulk_create([
                FailureLine(repository=repository,
                            job_guid=job_guid,
                            **failure_line) for failure_line in log_iter
            ])
Example #10
def create_failure_lines(job, failure_line_list, start_line=0):
    failure_lines = []
    for i, (base_data, updates) in enumerate(failure_line_list[start_line:]):
        data = {
            "job_guid": job.guid,
            "repository": job.repository,
            "line": i + start_line
        }
        data.update(base_data)
        data.update(updates)
        failure_line = FailureLine(**data)
        failure_line.save()
        failure_line.elastic_search_insert()
        failure_lines.append(failure_line)

    if settings.ELASTICSEARCH_URL:
        refresh_index()

    return failure_lines
Example #11
def create_failure_lines(repository,
                         job_guid,
                         failure_line_list,
                         start_line=0):
    failure_lines = []
    for i, (base_data, updates) in enumerate(failure_line_list[start_line:]):
        data = {
            "job_guid": job_guid,
            "repository": repository,
            "line": i + start_line
        }
        data.update(base_data)
        data.update(updates)
        failure_line = FailureLine(**data)
        failure_line.save()
        failure_line.elastic_search_insert()
        failure_lines.append(failure_line)

    refresh_all()

    return failure_lines