Example no. 1
0
async def get_path(request, file_path=""):
    """Serve a file or a directory listing rooted at ``config.Args.logdir``.

    Query parameters (all arrive as strings from the URL):
        records / log: serialize the pickled log as a JSON records array.
        json: return the raw pickle contents as a JSON list.
        download: "1" forces a ``Content-Disposition: attachment`` header.
        recursive: enables recursive globbing when listing a directory.
        query: glob pattern for the directory listing (default ``"*"``).
        start / stop: slice bounds — listing entries, or raw-text line range.
        reservoir: reservoir-sampling size for dataframe loading (default 200).

    Returns a sanic-style response object: JSON, text, a file, or a 404.
    """
    print(file_path)

    as_records = request.args.get('records')
    as_json = request.args.get('json')
    as_log = request.args.get('log')
    as_attachment = int(request.args.get('download', '0'))
    is_recursive = request.args.get('recursive')
    query = request.args.get('query', "*").strip()

    _start = request.args.get('start', None)
    _stop = request.args.get('stop', None)
    start = None if _start is None else int(_start)
    stop = None if _stop is None else int(_stop)

    reservoir_k = int(request.args.get('reservoir', '200'))

    path = os.path.join(config.Args.logdir, file_path)
    print("=============>", [query], [path], os.path.isdir(path))

    if os.path.isdir(path):
        from itertools import islice
        with cwd(path):
            print(os.getcwd(), query, is_recursive)
            # NOTE(review): is_recursive is the raw query-string value, so any
            # non-empty value (even "false") enables recursion — confirm intended.
            file_paths = list(islice(iglob(query, recursive=is_recursive), start or 0, stop or 200))
            # fix: materialize the stats — a lazy `map` object is not
            # JSON-serializable, so response.json() would raise.
            files = [file_stat(p) for p in file_paths]
            res = response.json(files, status=200)
    elif os.path.isfile(path):
        if as_records or as_log:
            # `records` and `log` behave identically: dataframe -> JSON records.
            from ml_logger.helpers import load_pickle_as_dataframe
            df = load_pickle_as_dataframe(path, reservoir_k)
            res = response.text(df.to_json(orient="records"), status=200, content_type='application/json')
        elif as_json:
            from ml_logger.helpers import load_from_pickle
            data = list(load_from_pickle(path))
            res = response.json(data, status=200, content_type='application/json')
        elif start is not None or stop is not None:
            # plain text file, sliced by line numbers
            from itertools import islice
            with open(path, 'r') as f:
                text = ''.join(islice(f, start, stop))
            res = response.text(text, status=200)
        else:
            # todo: check the file handling here. Does this use correct mimeType for text files?
            res = await response.file(path)
            if as_attachment:
                res.headers['Content-Disposition'] = 'attachment'
    else:
        res = response.text('Not found', status=404)
    return res
Example no. 2
0
def get_path(file_path=''):
    """Flask view: serve a file or a directory listing under ``config.Args.logdir``.

    Query parameters:
        records / json: serialize the pickled log as a JSON records array.
        download: any non-empty value forces an attachment download.
        recursive / hidden: forwarded to the directory-listing helper.
        start / stop: slice of the directory listing (defaults 0..200).

    Returns a Flask response: JSON listing, JSON records, the file itself,
    or a 404 text response.
    """
    from flask import request, make_response, jsonify, send_file
    print(file_path)

    as_records = request.args.get('records')
    as_json = request.args.get('json')
    as_attachment = request.args.get('download')
    is_recursive = request.args.get('recursive')
    show_hidden = request.args.get('hidden')
    start = int(request.args.get('start', '0'))
    stop = int(request.args.get('stop', '200'))

    path = os.path.join(config.Args.logdir, file_path)

    if os.path.isdir(path):
        from itertools import islice
        files = islice(list_directory(path, ".", is_recursive, show_hidden),
                       start, stop)
        res = make_response(jsonify([*files]), 200)
    elif os.path.isfile(path):
        if as_records or as_json:
            # both flags serialize the pickled dataframe to JSON records
            from ml_logger.helpers import load_pickle_as_dataframe
            df = load_pickle_as_dataframe(path)
            res = make_response(df.to_json(orient="records"), 200)
            # fix: df.to_json produces JSON, but make_response on a plain
            # string defaults to text/html.
            res.headers['Content-Type'] = 'application/json'
        else:
            res = send_file(path)
            if as_attachment:
                res.headers.add('Content-Disposition', 'attachment')
    else:
        res = make_response('Not found', 404)
    return res
Example no. 3
0
def read_log(path, k=200):
    """Load the pickled log at *path*, reservoir-sampled down to *k* rows,
    and return it serialized as a JSON records string."""
    from ml_logger.helpers import load_pickle_as_dataframe

    frame = load_pickle_as_dataframe(path, k)
    return frame.to_json(orient="records")
Example no. 4
0
def read_dataframe(path, k=200):
    """Return the dataframe pickled at *path* (sampled down to *k* rows).

    Returns None instead of raising when the file does not exist.
    """
    from ml_logger.helpers import load_pickle_as_dataframe
    try:
        frame = load_pickle_as_dataframe(path, k)
    except FileNotFoundError:
        return None
    return frame