Example #1
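    # Assumes the usual module-level imports for this Django class-based view:
    #   from glob import glob as file_glob
    #   from os import path as os_path
    #   from django.conf import settings
    #   from django.http import JsonResponse
    #   from django.utils.translation import gettext as _
    # plus a module `logger` and the VAST_MODELS_PATH / VAML_MODELS_PATH
    # constants. (Inferred from usage; not shown in the source.)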
    def get(self, request, filter_type, ressource):
        glob = None
        data = []
        try:
            # Resolve the (filter_type, ressource) pair to a glob pattern.
            if filter_type == "vast":
                if ressource == "model":
                    glob = VAST_MODELS_PATH + "*.dat"
            elif filter_type == "vaml":
                if ressource == "model":
                    glob = VAML_MODELS_PATH + "*.dat"

            if glob is not None:
                data = [
                    os_path.splitext(os_path.basename(file))[0]
                    for file in file_glob(glob)
                ]
            else:
                return JsonResponse({
                    'error': 'wrong resource or filter type'
                }, status=404)
        except Exception as e:
            logger.critical(e, exc_info=True)
            error = _("An error has occurred")

            if settings.DEV_MODE:
                error = str(e)

            return JsonResponse({
                'error': error
            }, status=500)

        return JsonResponse({
            'data': data
        }, status=200)
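
A minimal sketch of how a URLconf might feed filter_type and ressource into this view; the route, the app module, and the view class name (ModelListView) are assumptions, not taken from the source.

# Hypothetical wiring for the view above (all names illustrative).
from django.urls import path

from myapp.views import ModelListView  # assumed class-based view holding get()

urlpatterns = [
    path("models/<str:filter_type>/<str:ressource>/", ModelListView.as_view()),
]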
Example #2
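# Assumed imports, inferred from usage below; config_load, config_save,
# datetime_diff, file_glob, PlexSuperLogParser and BasketOfHandles are
# project-local helpers not shown here.
import json
import logging
import os
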
def main():
    logging.info('{0:#^40}'.format('[ Plex Log Saver ]'))

    if not os.path.isdir('logs'):
        os.mkdir('logs')
    config_file = os.path.join('logs', 'config.cfg')
    config = config_load(config_file)

    if config['plex_log_dir'] == '':
        logging.info('Config missing "plex_log_dir", exiting!')
        print('Config missing "plex_log_dir", exiting!')
        return

    # log_file_name in the config is a str.format template; its fields are
    # filled from each parsed line in the write loop below.
    log_file_template = os.path.join(
        'logs', config['log_file_name'])

    if config['log_save_mode'] == 'gzip':
        import gzip
        log_open = gzip.open
    else:
        log_open = open

    # plex_last_datetime is stored as 'Y-M-D-h-m-s'; parse it into an int tuple.
    last_datetime = tuple(map(int, config['plex_last_datetime'].split('-')))

    log_parser = PlexSuperLogParser(last_datetime)

    all_lines = []

    # We're only interested in 'Plex Media Server.log' log files;
    # so far they've contained all of the info I need.
    log_file_glob = os.path.join(
        config['plex_log_dir'], 'Plex Media Server.log*')

    for log_file in file_glob(log_file_glob):
        all_lines.extend(log_parser.parse_file(log_file))

    if not all_lines:
        logging.info('No new lines, finishing.')
        return

    # Sort the logs based on datetime
    all_lines.sort(key=lambda line_body: line_body['datetime'])

    time_diff = datetime_diff(all_lines[0]['datetime'], last_datetime)

    logging.info((
        '    Last entry last run:'
        ' {0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}').format(
            *last_datetime))
    logging.info((
        'Earliest entry this run:'
        ' {0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}').format(
            *all_lines[0]['datetime']))

    if time_diff > 60:
        logging.warning(
            'Possibly missing {0} seconds of log files'.format(time_diff))

    logging.info('{0} new log lines added'.format(len(all_lines)))

    ## TODO: replace this! No longer needed...
    # BasketOfHandles handles our open files for us,
    # keeping only 5 open at a time.
    with BasketOfHandles(log_open, 5) as basket:
        for line_body in all_lines:
            log_file_name = log_file_template.format(**line_body)

            file_handle = basket.open(log_file_name, 'at')

            json.dump(line_body, file_handle, sort_keys=True)
            file_handle.write('\n')
            if line_body['datetime'] > last_datetime:
                last_datetime = line_body['datetime']

    config['plex_last_datetime'] = '-'.join(map(str, last_datetime))

    config_save(config_file, config)

    logging.info('Finished.')
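
BasketOfHandles is referenced above but not shown. Here is a minimal sketch inferred purely from its usage (a constructor taking an opener and a cap on simultaneously open files, an open() method, and a context manager that closes everything); it is not the original class.

from collections import OrderedDict


class BasketOfHandles:
    """Keep at most `limit` file handles open, evicting the least recently used."""

    def __init__(self, opener, limit):
        self.opener = opener          # e.g. open or gzip.open
        self.limit = limit
        self.handles = OrderedDict()  # name -> handle, least recently used first

    def open(self, name, mode):
        if name in self.handles:
            self.handles.move_to_end(name)  # mark as most recently used
            return self.handles[name]
        if len(self.handles) >= self.limit:
            _, oldest = self.handles.popitem(last=False)  # evict the LRU handle
            oldest.close()
        handle = self.opener(name, mode)
        self.handles[name] = handle
        return handle

    def __enter__(self):
        return self

    def __exit__(self, *exc):
        for handle in self.handles.values():
            handle.close()
        self.handles.clear()

Append mode ('at') in the main loop is what makes eviction safe: a file that gets closed and later reopened continues where it left off.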
Example #3
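# Assumed imports, inferred from usage below; config_load, config_save,
# file_glob and read_from_write_to are project-local helpers not shown here.
import gzip
import os
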
def main():
    if not os.path.isdir("logs"):
        os.mkdir("logs")

    config_file = os.path.join("logs", "config.cfg")

    config = config_load(config_file)

    log_match = os.path.join("logs", config["log_file_match"])

    total_before = 0
    total_after = 0

    if config["log_save_mode"] == "text":
        print("Enabling compression...")
        config["log_save_mode"] = "gzip"
        if not config["log_file_name"].endswith(".gz"):
            config["log_file_name"] = config["log_file_name"] + ".gz"

        for log_name in file_glob(log_match):
            # Shouldn't happen, but it might?
            if log_name.endswith(".gz"):
                continue

            new_log_name = log_name + ".gz"

            print("  Compressing {0} ...".format(log_name))
            with gzip.open(new_log_name, "wt") as out_fh, open(log_name, "rt") as in_fh:
                read_from_write_to(in_fh, out_fh)

            new_log_size = os.stat(new_log_name).st_size
            log_size = os.stat(log_name).st_size

            total_before += log_size
            total_after += new_log_size

            print("  Original size {0} bytes".format(log_size))
            print(
                "  New size {0} bytes ({1:0.02f}% of original file)".format(
                    new_log_size, (new_log_size / float(log_size) * 100)
                )
            )

            os.unlink(log_name)

    elif config["log_save_mode"] == "gzip":
        print("Disabling compression...")
        config["log_save_mode"] = "text"
        if config["log_file_name"].endswith(".gz"):
            config["log_file_name"] = config["log_file_name"][:-3]

        for log_name in file_glob(log_match):
            # Shouldn't happen, but it might?
            if not log_name.endswith(".gz"):
                continue

            new_log_name = log_name[:-3]

            print("  Decompressing {0} ...".format(log_name))
            with open(new_log_name, "wt") as out_fh, gzip.open(log_name, "rt") as in_fh:
                read_from_write_to(in_fh, out_fh)

            new_log_size = os.stat(new_log_name).st_size
            log_size = os.stat(log_name).st_size

            total_before += log_size
            total_after += new_log_size

            print("  Original size {0} bytes".format(log_size))
            print(
                "  New size {0} bytes ({1:0.02f}% of original file)".format(
                    new_log_size, (new_log_size / float(log_size) * 100)
                )
            )

            os.unlink(log_name)

    config_save(config_file, config)

    print("Logs size:")
    print(" Before: {0}".format(total_before))
    print("  After: {0}".format(total_after))