Example 1
async def read_logs_file(logs_path) -> List[V1Log]:
    if not os.path.exists(logs_path):
        return []

    async with aiofiles.open(logs_path, mode="r") as f:
        contents = await f.read()
        if contents:
            # Version handling
            if ".plx" in logs_path:
                return V1Logs.read_csv(contents).logs
            # Legacy logs
            logs = V1Logs.read(contents)
            return logs.logs

    return []
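A minimal sketch of how this coroutine might be driven from synchronous code, assuming the snippet's own dependencies (os, aiofiles, and the V1Log/V1Logs models) are already importable; the file path and the asyncio entry point are illustrative only.

import asyncio

async def print_log_count(logs_path: str):
    # read_logs_file returns [] for missing or empty files,
    # so the caller does not need to special-case those.
    logs = await read_logs_file(logs_path)
    print("{}: {} log lines".format(logs_path, len(logs)))

# Hypothetical path, used only for illustration.
asyncio.run(print_log_count("/tmp/plxlogs/run-uuid/logs.plx"))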
Example 2
async def sync_logs(
    run_uuid: str,
    k8s_manager: AsyncK8SManager,
    pod: V1Pod,
    last_time: Optional[AwareDT],
    stream: bool = False,
    is_running: bool = True,
):
    # Resolve the run's artifacts context and its temporary logs directory
    path_from = CONTEXT_MOUNT_ARTIFACTS_FORMAT.format(run_uuid)
    path_from = "{}/.tmpplxlogs".format(path_from)

    if not is_running:
        # The operation finished: drop the temporary logs directory and stop
        delete_path(path_from)
        return

    logs, _ = await query_k8s_pod_logs(
        k8s_manager=k8s_manager,
        pod=pod,
        last_time=last_time,
        stream=stream,
    )
    if not logs:
        return

    path_from = "{}/{}".format(path_from, pod.metadata.name)
    check_or_create_path(path_from, is_dir=False)
    async with aiofiles.open(path_from, "w") as filepath:
        await filepath.write(V1Logs(logs=logs).to_dict(dump=True))
Example 3
async def upload_logs(run_uuid: str, logs: List[V1Log]):
    if not settings.AGENT_CONFIG.artifacts_store:
        raise HTTPException(
            detail="Run's logs were not collected; resource was not found.",
            status_code=status.HTTP_400_BAD_REQUEST,
        )
    # Upload in chunks; each chunk's subpath is keyed by its last log timestamp
    for c_logs in V1Logs.chunk_logs(logs):
        last_file = datetime.timestamp(c_logs.logs[-1].timestamp)
        subpath = "{}/plxlogs/{}".format(run_uuid, last_file)
        await upload_data(subpath=subpath, data=c_logs.to_dict(dump=True))
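For context, a hedged sketch that chains Example 1 and Example 3: read logs back from a local file, then re-upload them in chunks. The run UUID and path are placeholders, and settings.AGENT_CONFIG.artifacts_store is assumed to be configured so upload_logs does not raise.

import asyncio

async def reupload_run_logs(run_uuid: str, logs_path: str):
    # Reuse read_logs_file (Example 1) to load the logs, then hand
    # them to upload_logs, which chunks and stores them.
    logs = await read_logs_file(logs_path)
    if logs:
        await upload_logs(run_uuid=run_uuid, logs=logs)

# Placeholder identifiers for illustration only.
asyncio.run(reupload_run_logs("run-uuid", "/tmp/plxlogs/run-uuid/logs.plx"))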
Example 4
async def read_logs_file(logs_path) -> List[V1Log]:
    if not os.path.exists(logs_path):
        return []

    async with aiofiles.open(logs_path, mode="r") as f:
        contents = await f.read()
        if contents:
            logs = V1Logs.read(contents)
            return logs.logs

    return []
Example 5
async def get_logs(request):
    owner = request.path_params["owner"]
    project = request.path_params["project"]
    run_uuid = request.path_params["run_uuid"]
    force = to_bool(request.query_params.get("force"), handle_none=True)
    resource_name = get_resource_name(run_uuid=run_uuid)
    operation = get_run_instance(owner=owner,
                                 project=project,
                                 run_uuid=run_uuid)
    last_time = QueryParams(request.url.query).get("last_time")
    if last_time:
        last_time = dt_parser.parse(last_time).astimezone()
    last_file = QueryParams(request.url.query).get("last_file")

    k8s_manager = None
    k8s_operation = None
    if not last_file:
        k8s_manager = AsyncK8SManager(
            namespace=settings.CLIENT_CONFIG.namespace,
            in_cluster=settings.CLIENT_CONFIG.in_cluster,
        )
        await k8s_manager.setup()
        k8s_operation = await get_k8s_operation(k8s_manager=k8s_manager,
                                                resource_name=resource_name)

    if not last_file and k8s_operation:
        # The operation still exists in the cluster: stream its live logs
        last_file = None
        operation_logs, last_time = await get_k8s_operation_logs(
            operation=operation,
            last_time=last_time,
            k8s_manager=k8s_manager,
            stream=True,
        )
        if k8s_operation["status"].get("completionTime"):
            # The operation completed, so signal clients to stop streaming
            last_time = None
    elif last_time:  # Streaming should stop
        last_file = None
        last_time = None
        operation_logs = []
    else:
        # No live operation: serve archived logs from the artifacts store
        last_time = None
        operation_logs, last_file = await get_archived_operation_logs(
            run_uuid=run_uuid, last_file=last_file, check_cache=not force)
    if k8s_manager:
        await k8s_manager.close()
    response = V1Logs(last_time=last_time,
                      last_file=last_file,
                      logs=operation_logs)
    return UJSONResponse(response.to_dict())
Example 6
async def get_logs(request: Request) -> UJSONResponse:
    run_uuid = request.path_params["run_uuid"]
    force = to_bool(request.query_params.get("force"), handle_none=True)
    last_time = QueryParams(request.url.query).get("last_time")
    if last_time:
        last_time = parse_datetime(last_time).astimezone()
    last_file = QueryParams(request.url.query).get("last_file")
    files = []

    if last_time:
        # A last_time cursor means the client is following live logs
        resource_name = get_resource_name(run_uuid=run_uuid)

        k8s_manager = AsyncK8SManager(
            namespace=settings.CLIENT_CONFIG.namespace,
            in_cluster=settings.CLIENT_CONFIG.in_cluster,
        )
        await k8s_manager.setup()
        k8s_operation = await get_k8s_operation(
            k8s_manager=k8s_manager, resource_name=resource_name
        )
        if k8s_operation:
            operation_logs, last_time = await get_operation_logs(
                k8s_manager=k8s_manager,
                k8s_operation=k8s_operation,
                instance=run_uuid,
                last_time=last_time,
            )
        else:
            operation_logs, last_time = await get_tmp_operation_logs(
                run_uuid=run_uuid, last_time=last_time
            )
        if k8s_manager:
            await k8s_manager.close()

    else:
        # No streaming cursor: read from the archived logs
        operation_logs, last_file, files = await get_archived_operation_logs(
            run_uuid=run_uuid, last_file=last_file, check_cache=not force
        )
    response = V1Logs(
        last_time=last_time, last_file=last_file, logs=operation_logs, files=files
    )
    return UJSONResponse(response.to_dict())
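The Request, QueryParams, and UJSONResponse types suggest a Starlette application; assuming that, a route registration for this handler might look like the sketch below. The URL pattern is illustrative, not necessarily the project's actual routing table.

from starlette.applications import Starlette
from starlette.routing import Route

# Hypothetical mount point; the real path may differ.
app = Starlette(
    routes=[
        Route("/streams/v1/{run_uuid}/logs", endpoint=get_logs, methods=["GET"]),
    ]
)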
Example 7
async def get_archived_operation_logs(
        run_uuid: str,
        last_file: Optional[str]) -> Tuple[List[V1Log], Optional[str]]:

    logs = []
    # Resolve the next archived file after the client's last_file cursor
    last_file = await get_next_file(run_uuid=run_uuid, last_file=last_file)
    if not last_file:
        return logs, last_file

    logs_path = await download_logs_file(run_uuid=run_uuid,
                                         last_file=last_file)

    if not os.path.exists(logs_path):
        return logs, last_file

    async with aiofiles.open(logs_path, mode="r") as f:
        contents = await f.read()
        if contents:
            logs = V1Logs.read(contents)
            logs = logs.logs
    return logs, last_file
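A small sketch of how a caller could page through the archive by feeding each returned last_file back in. It assumes get_next_file (not shown here) eventually returns None or repeats the final file once the archive is exhausted.

async def iterate_archived_logs(run_uuid: str):
    # Walk the archived log files one at a time, yielding individual
    # V1Log entries, until no further file is reported.
    last_file = None
    while True:
        logs, next_file = await get_archived_operation_logs(
            run_uuid=run_uuid, last_file=last_file)
        if not next_file or next_file == last_file:
            break
        for log in logs:
            yield log
        last_file = next_file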