def main():
    """List every item in each Plex library section, grouped by content rating.

    Connects to the Plex server named in ``logs/config.cfg`` and walks
    ``library/sections``; each section's items are bucketed by the rating
    index returned by ``get_content_rating`` and printed under the
    human-readable name from ``get_content_rating_name``.
    """
    if not os.path.isdir('logs'):
        os.mkdir('logs')

    config_file = os.path.join('logs', 'config.cfg')
    config = config_load(config_file, no_save=True)

    conn = PlexServerConnection(
        config['plex_server_host'], config['plex_server_port'])

    sections_page = conn.fetch('library/sections')
    # Name the parser explicitly: bare BeautifulSoup(markup) guesses the
    # "best" installed parser, emits GuessedAtParserWarning, and can behave
    # differently across machines. html.parser lowercases tag names, which
    # the find_all('directory') / find_all('video') lookups below rely on.
    sections_soup = BeautifulSoup(sections_page, 'html.parser')

    for section_tag in sections_soup.find_all('directory'):
        key = section_tag['key']

        # Section banner, e.g. '#############[ Movies ]##############'
        print('{0:#^40}'.format("[ " + section_tag['title'] + " ]"))
        items_page = conn.fetch('library/sections/{0}/all'.format(key))
        items_soup = BeautifulSoup(items_page, 'html.parser')

        # One title bucket per rating index, including the RATING_UNKNOWN slot.
        ratings = [[] for _ in range(RATING_UNKNOWN + 1)]

        # Series show up as <directory> tags, movies/episodes as <video>.
        for item in itertools.chain(
                items_soup.find_all('directory'),
                items_soup.find_all('video')):
            string_rating = item.get('contentrating', '')

            content_rating = get_content_rating(string_rating)
            ratings[content_rating].append(item.get('title'))

        for rating, shows in enumerate(ratings):
            if not shows:
                continue
            print(u"  {0}".format(get_content_rating_name(rating)))
            for show in shows:
                print(u"    {0}".format(show))
            print('')
# Example #2
def main():
    """Harvest new lines from the Plex Media Server logs into ``logs/``.

    Reads ``logs/config.cfg``, parses every ``Plex Media Server.log*`` file
    under ``plex_log_dir`` for entries newer than ``plex_last_datetime``,
    appends each entry as one JSON object per line to files named by
    ``log_file_name`` (gzip-compressed when ``log_save_mode`` is ``gzip``),
    and persists the new high-water mark back to the config so repeated
    runs only pick up fresh lines.
    """
    logging.info('{0:#^40}'.format('[ Plex Log Saver ]'))

    if not os.path.isdir('logs'):
        os.mkdir('logs')
    config_file = os.path.join('logs', 'config.cfg')
    config = config_load(config_file)

    # Without the source log directory there is nothing to do.
    if config['plex_log_dir'] == '':
        logging.info('Config missing "plex_log_dir", Exiting!')
        print('Config missing "plex_log_dir", Exiting!')
        return

    # Output path template; filled per entry with the parsed line's fields.
    log_file_template = os.path.join(
        'logs', config['log_file_name'])

    # Pick the opener once so the write loop below is mode-agnostic.
    if config['log_save_mode'] == 'gzip':
        import gzip
        log_open = gzip.open
    else:
        log_open = open

    # Stored as a '-'-joined string, e.g. '2016-1-2-3-4-5'; compared as an
    # int tuple against each parsed entry's 'datetime'.
    last_datetime = tuple(map(int, config['plex_last_datetime'].split('-')))

    log_parser = PlexSuperLogParser(last_datetime)

    all_lines = []

    # We're only interested in 'Plex Media Server.log' log files
    # I've been able to so far get all of the info i need from those logs
    log_file_glob = os.path.join(
        config['plex_log_dir'], 'Plex Media Server.log*')

    for log_file in file_glob(log_file_glob):
        all_lines.extend(log_parser.parse_file(log_file))

    if len(all_lines) == 0:
        logging.info('No new lines, finishing.')
        return

    # Sort the logs based on datetime
    all_lines.sort(key=lambda line_body: line_body['datetime'])

    time_diff = datetime_diff(all_lines[0]['datetime'], last_datetime)

    logging.info((
        '    Last entry last run:'
        ' {0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}').format(
            *last_datetime))
    logging.info((
        'Earliest entry this run:'
        ' {0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}').format(
            *all_lines[0]['datetime']))

    # A gap between runs means Plex may have rotated logs away before we
    # read them. logging.warning replaces the deprecated logging.warn alias.
    if time_diff > 60:
        logging.warning((
            'Possibly missing {0} seconds of log files').format(time_diff))

    logging.info('{0} new log lines added'.format(len(all_lines)))

    ## TODO: replace this! No longer needed...
    # BasketOfHandles handles our open files for us,
    # keeping only 5 open at a time.
    with BasketOfHandles(log_open, 5) as basket:
        for line_body in all_lines:
            log_file_name = log_file_template.format(**line_body)

            file_handle = basket.open(log_file_name, 'at')

            # One JSON object per line; sorted keys keep output stable.
            json.dump(line_body, file_handle, sort_keys=True)
            file_handle.write('\n')
            # Track the newest entry written as the next run's watermark.
            if line_body['datetime'] > last_datetime:
                last_datetime = line_body['datetime']

    config['plex_last_datetime'] = '-'.join(map(str, last_datetime))

    config_save(config_file, config)

    logging.info('Finished.')
# Example #3
def _recode_log(log_name, new_log_name, open_out, open_in, verb):
    """Rewrite one log file through the given openers and remove the original.

    ``open_out``/``open_in`` are ``gzip.open``/``open`` (compressing) or
    ``open``/``gzip.open`` (decompressing); ``verb`` is the progress label
    ('Compressing' or 'Decompressing'). Returns ``(old_size, new_size)``
    in bytes.
    """
    print("  {0} {1} ...".format(verb, log_name))
    with open_out(new_log_name, "wt") as out_fh, open_in(log_name, "rt") as in_fh:
        read_from_write_to(in_fh, out_fh)

    new_log_size = os.stat(new_log_name).st_size
    log_size = os.stat(log_name).st_size

    print("  Original size {0} bytes".format(log_size))
    print(
        "  New size {0} bytes ({1:0.02f}% of original file)".format(
            new_log_size, (new_log_size / float(log_size) * 100)
        )
    )

    os.unlink(log_name)
    return log_size, new_log_size


def main():
    """Toggle gzip compression of the saved Plex logs under ``logs/``.

    Flips ``log_save_mode`` in ``logs/config.cfg`` between 'text' and
    'gzip', recodes every file matching ``log_file_match`` accordingly
    (adding/stripping the '.gz' suffix), updates ``log_file_name`` to
    match, and prints a before/after size summary.
    """
    if not os.path.isdir("logs"):
        os.mkdir("logs")

    config_file = os.path.join("logs", "config.cfg")

    config = config_load(config_file)

    log_match = os.path.join("logs", config["log_file_match"])

    total_before = 0
    total_after = 0

    if config["log_save_mode"] == "text":
        print("Enabling compression...")
        config["log_save_mode"] = "gzip"
        if not config["log_file_name"].endswith(".gz"):
            config["log_file_name"] = config["log_file_name"] + ".gz"

        for log_name in file_glob(log_match):
            # Shouldn't happen, but it might?
            if log_name.endswith(".gz"):
                continue

            before, after = _recode_log(
                log_name, log_name + ".gz", gzip.open, open, "Compressing")
            total_before += before
            total_after += after

    elif config["log_save_mode"] == "gzip":
        print("Disabling compression...")
        config["log_save_mode"] = "text"
        if config["log_file_name"].endswith(".gz"):
            config["log_file_name"] = config["log_file_name"][:-3]

        for log_name in file_glob(log_match):
            # Shouldn't happen, but it might?
            if not log_name.endswith(".gz"):
                continue

            before, after = _recode_log(
                log_name, log_name[:-3], open, gzip.open, "Decompressing")
            total_before += before
            total_after += after

    config_save(config_file, config)

    print("Logs size:")
    print(" Before: {0}".format(total_before))
    print("  After: {0}".format(total_after))