Example #1
0
def buildlogs(build_uuid):
    """Serve the raw log entries for a build as a downloadable JSON attachment.

    Aborts with 403 when the build is unknown or the caller lacks permission.
    Archived logs are redirected to their storage URL instead of being served
    inline.
    """
    found_build = model.build.get_repository_build(build_uuid)
    if not found_build:
        abort(403)

    repo = found_build.repository
    namespace = repo.namespace_user.username

    # Writers may always view logs; readers may too, when the feature flag
    # permits it and the repo is readable or public.
    has_permission = ModifyRepositoryPermission(namespace, repo.name).can()
    if not has_permission and features.READER_BUILD_LOGS:
        readable = (ReadRepositoryPermission(namespace, repo.name).can() or
                    model.repository.repository_is_public(namespace, repo.name))
        if readable:
            has_permission = True

    if not has_permission:
        abort(403)

    # If the logs have been archived, just return a URL of the completed archive
    if found_build.logs_archived:
        archive_url = log_archive.get_file_url(found_build.uuid, get_request_ip())
        return redirect(archive_url)

    _, logs = build_logs.get_log_entries(found_build.uuid, 0)
    response = jsonify({"logs": list(logs)})
    response.headers[
        "Content-Disposition"] = "attachment;filename=" + found_build.uuid + ".json"
    return response
Example #2
0
    def _archive_redis_buildlogs(self):
        """
        Archive a single build, choosing a candidate at random.

        This process must be idempotent to avoid needing two-phase commit.
        """
        # Pick an arbitrary archivable build; nothing to do if none remain.
        candidate = model.get_archivable_build()
        if candidate is None:
            logger.debug("No more builds to archive")
            return

        logger.debug("Archiving: %s", candidate.uuid)

        length, entries = build_logs.get_log_entries(candidate.uuid, 0)
        payload = {
            "start": 0,
            "total": length,
            "logs": entries,
        }

        if length > 0:
            # Stream the gzipped JSON through a spooled temp file so large
            # logs never need to be held entirely in memory at once.
            with CloseForLongOperation(app.config):
                with SpooledTemporaryFile(MEMORY_TEMPFILE_SIZE) as tempfile:
                    with GzipFile("testarchive", fileobj=tempfile) as zipstream:
                        for chunk in StreamingJSONEncoder().iterencode(payload):
                            zipstream.write(chunk)

                    tempfile.seek(0)
                    log_archive.store_file(tempfile,
                                           JSON_MIMETYPE,
                                           content_encoding="gzip",
                                           file_id=candidate.uuid)

        # Only the worker that wins the archived-flag update cleans up the
        # Redis entries; a losing worker just logs that it was pre-empted,
        # keeping the whole operation idempotent.
        if model.mark_build_archived(candidate.uuid):
            build_logs.expire_status(candidate.uuid)
            build_logs.delete_log_entries(candidate.uuid)
        else:
            logger.debug("Another worker pre-empted us when archiving: %s",
                         candidate.uuid)
Example #3
0
File: build.py  Project: sabre1041/quay-1
def get_logs_or_log_url(build):
    """Return either a URL pointing at archived logs or the log entries inline.

    For an archived build, the result is ``{"logs_url": ...}``; otherwise it is
    a dict with the requested ``start`` offset, the ``total`` entry count, and
    the list of ``logs`` beginning at that offset.
    """
    # If the logs have been archived, just return a URL of the completed archive
    if build.logs_archived:
        archive_url = log_archive.get_file_url(build.uuid,
                                               get_request_ip(),
                                               requires_cors=True)
        return {"logs_url": archive_url}

    start = int(request.args.get("start", 0))

    try:
        count, logs = build_logs.get_log_entries(build.uuid, start)
    except BuildStatusRetrievalError:
        # A retrieval failure is reported as "no logs" rather than an error.
        count, logs = 0, []

    return {
        "start": start,
        "total": count,
        "logs": list(logs),
    }