Example #1
def get_summary_data_freshair_remote(token, fullURL='http://localhost:32400'):
    libraries_dict = core.get_libraries(token, fullURL=fullURL)
    keynum = max([
        key for key in libraries_dict if libraries_dict[key] == 'NPR Fresh Air'
    ])
    sinceDate = core.get_current_date_newsletter()
    key, num_songs, _, _, totdur, totsizebytes = core._get_library_stats_artist(
        keynum, token, fullURL=fullURL)
    mainstring = 'There are %d episodes of NPR Fresh Air.' % num_songs
    sizestring = 'The total size of NPR Fresh Air media is %s.' % get_formatted_size(
        totsizebytes)
    durstring = 'The total duration of NPR Fresh Air media is %s.' % get_formatted_duration(
        totdur)
    if sinceDate is not None:
        key, num_songs_since, _, _, \
            totdur_since, totsizebytes_since = core._get_library_stats_artist(
                keynum, token, fullURL=fullURL, sinceDate=sinceDate)
        if num_songs_since > 0:
            mainstring_since = ' '.join([
                'Since %s, I have added %d new Fresh Air episodes.' %
                (sinceDate.strftime('%B %d, %Y'), num_songs_since),
                'The total size of Fresh Air media I have added is %s.' %
                get_formatted_size(totsizebytes_since),
                'The total duration of Fresh Air media I have added is %s.' %
                get_formatted_duration(totdur_since)
            ])
            return ' '.join(
                [mainstring, sizestring, durstring, mainstring_since])
    return ' '.join([mainstring, sizestring, durstring])
Example #2
def get_tv_library_local( library_name = 'TV Shows' ):
    fullURL, token = core.checkServerCredentials( doLocal=True )
    library_names = list( core.get_libraries( token = token ).values( ) )
    assert( library_name in library_names )
    #
    tvdata = core.get_library_data(
        library_name, token = token, num_threads = cpu_count( ) )
    return tvdata
Example #3
def get_default_tvlibrary(fullURL, token):
    try:
        all_libraries = core.get_libraries(token=token,
                                           fullURL=fullURL,
                                           do_full=True)
    except:
        return return_error_raw("Error, bad token or URL may have been given.")
    key_found = list(
        filter(lambda key: all_libraries[key][1] == 'show', all_libraries))
    if len(key_found) == 0:
        return return_error_raw("Error, could not find any TV libraries.")
    key_found = min(key_found)
    return all_libraries[key_found][0], 'SUCCESS'
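The filter above relies on core.get_libraries(..., do_full=True) returning a dictionary that maps numeric library keys to (name, type) pairs. A minimal illustration of that assumed shape (the keys and library names below are made up):

all_libraries = {
    1: ('Movies', 'movie'),
    2: ('TV Shows', 'show'),
    5: ('Music', 'artist'),
}
key_found = [key for key in all_libraries if all_libraries[key][1] == 'show']
print(key_found)                          # [2]
print(all_libraries[min(key_found)][0])   # 'TV Shows' -- the name returned on success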
Example #4
File: email.py Project: tanimislam/howdy
def get_summary_data_music_remote(
    token, fullURL = 'http://localhost:32400',
    sinceDate = datetime.datetime.strptime('January 1, 2020', '%B %d, %Y' ).date( ) ):
    """
    This returns summary information on songs from all music libraries on the Plex_ server, for use as part of the Plex_ newsletter sent out to one's Plex_ server friends. The email first summarizes ALL the music data, and then summarizes the music data uploaded and processed since a previous date. For example,
    
       As of December 29, 2020, there are 17,853 songs made by 889 artists in 1,764 albums. The total size of music media is 306.979 GB. The total duration of music media is 7 months, 18 days, 15 hours, 8 minutes, and 15.785 seconds.

       Since January 01, 2020, I have added 7,117 songs made by 700 artists in 1,180 albums. The total size of music media that I have added is 48.167 GB. The total duration of music media that I have added is 28 days, 15 hours, 25 minutes, and 37.580 seconds.
    
    :param str token: the Plex_ access token.
    :param str fullURL: the Plex_ server URL.
    :param date sinceDate: the :py:class:`datetime <datetime.date>` from which we have added songs. Default is :py:class:`date <datetime.date>` corresponding to ``January 1, 2020``.
    
    :returns: a :py:class:`string <str>` description of music media in all music libraries on the Plex_ server. If there is no Plex_ server or music library, returns ``None``.
    :rtype: str

    .. seealso:: :py:meth:`get_summary_body <howdy.email.email.get_summary_body>`.
    """
    libraries_dict = core.get_libraries( token, fullURL = fullURL, do_full = True )
    if libraries_dict is None: return None
    keynums = set(filter(lambda keynum: libraries_dict[ keynum ][ 1 ] == 'artist', libraries_dict ) )
    if len( keynums ) == 0: return None
    # sinceDate = core.get_current_date_newsletter( )
    datas = list(map(lambda keynum: core.get_library_stats( keynum, token, fullURL = fullURL ), keynums))
    music_summ = {
        'current_date_string' : datetime.datetime.now( ).date( ).strftime( '%B %d, %Y' ),
        'num_songs' : f'{sum(list(map(lambda data: data[ "num_songs" ], datas))):,}',
        'num_artists' : f'{sum(list(map(lambda data: data[ "num_artists" ], datas))):,}',
        'num_albums' : f'{sum(list(map(lambda data: data[ "num_albums" ], datas))):,}',
        'formatted_size' : get_formatted_size(sum(list(map(lambda data: data[ 'totsize' ], datas)))),
        'formatted_duration' : get_formatted_duration(sum(list(map(lambda data: data[ 'totdur' ], datas)))) }
    #
    ## now since sinceDate
    datas_since = list(filter(
        lambda data_since: data_since[ 'num_songs' ] > 0,
        map(lambda keynum: core.get_library_stats(
            keynum, token, fullURL = fullURL, sinceDate = sinceDate ), keynums ) ) )
    music_summ[ 'len_datas_since' ] = len( datas_since )
    if len( datas_since ) > 0:
        music_summ[ 'since_date_string' ] = sinceDate.strftime( '%B %d, %Y' )
        music_summ[ 'num_songs_since' ] = f'{sum(list(map(lambda data_since: data_since[ "num_songs" ], datas_since))):,}'
        music_summ[ 'num_artists_since' ] = f'{sum(list(map(lambda data_since: data_since[ "num_artists" ], datas_since))):,}'
        music_summ[ 'num_albums_since' ] = f'{sum(list(map(lambda data_since: data_since[ "num_albums" ], datas_since))):,}'
        music_summ[ 'formatted_size_since' ] = get_formatted_size( sum(list(map(lambda data_since: data_since[ 'totsize'], datas_since))))
        music_summ[ 'formatted_duration_since' ] = get_formatted_duration( sum(list(map(lambda data_since: data_since[ 'totdur' ], datas_since))) )
    #
    env = Environment( loader = FileSystemLoader( resourceDir ) )
    template = env.get_template( 'summary_data_music_template.rst' )
    musicstring = template.render( music_summ = music_summ )
    return musicstring
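A minimal usage sketch for the function above; the token value is hypothetical, and the call assumes it runs in the same module so that get_summary_data_music_remote is in scope.

import datetime

token = 'XXXXXXXXXXXXXXXXXXXX'   # hypothetical Plex access token
summary = get_summary_data_music_remote(
    token, fullURL='http://localhost:32400',
    sinceDate=datetime.datetime.strptime('January 1, 2021', '%B %d, %Y').date())
if summary is None:
    print('No music libraries found on this Plex server.')
else:
    print(summary)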
Example #5
File: email.py Project: tanimislam/howdy
def get_summary_data_television_remote(
    token, fullURL = 'http://localhost:32400',
    sinceDate = datetime.datetime.strptime('January 1, 2020', '%B %d, %Y' ).date( ) ):
    """
    This returns summary information on TV media from all television libraries on the Plex_ server, for use as part of the Plex_ newsletter sent out to one's Plex_ server friends. The email first summarizes ALL the TV data, and then summarizes the TV data uploaded and processed since a previous date. For example,

       As of December 29, 2020, there are 25,195 TV episodes in 298 TV shows. The total size of TV media is 6.690 TB. The total duration of TV media is 1 year, 5 months, 19 days, 9 hours, 29 minutes, and 13.919 seconds.

       Since January 01, 2020, I have added 5,005 TV episodes in 298 TV shows. The total size of TV media that I have added is 1.571 TB. The total duration of TV media that I have added is 3 months, 16 days, 4 hours, 52 minutes, and 15.406 seconds.

    :param str token: the Plex_ access token.
    :param str fullURL: the Plex_ server URL.
    :param date sinceDate: the :py:class:`datetime <datetime.date>` from which we have added TV episodes. Default is :py:class:`date <datetime.date>` corresponding to ``January 1, 2020``.
    
    :returns: a :py:class:`string <str>` description of TV media in all TV libraries on the Plex_ server. If there is no Plex_ server or TV library, returns ``None``.
    :rtype: str

    .. seealso:: :py:meth:`get_summary_body <howdy.email.email.get_summary_body>`.
    """
    libraries_dict = core.get_libraries( token, fullURL = fullURL, do_full = True )
    if libraries_dict is None: return None
    keynums = set(filter(lambda keynum: libraries_dict[ keynum ][ 1 ] == 'show', libraries_dict ) )
    if len( keynums ) == 0: return None
    #
    # sinceDate = core.get_current_date_newsletter( )
    datas = list(map(lambda keynum: core.get_library_stats( keynum, token, fullURL = fullURL ), keynums))
    tv_summ = {
        'current_date_string' : datetime.datetime.now( ).date( ).strftime( '%B %d, %Y' ),
        'num_episodes' : f'{sum(list(map(lambda data: data[ "num_tveps" ], datas))):,}',
        'num_shows' : f'{sum(list(map(lambda data: data[ "num_tvshows" ], datas))):,}',
        'formatted_size' : get_formatted_size(sum(list(map(lambda data: data[ 'totsize' ], datas)))),
        'formatted_duration' : get_formatted_duration(sum(list(map(lambda data: data[ 'totdur' ], datas)))) }
    datas_since = list(filter(
        lambda data_since: data_since[ 'num_tveps' ] > 0,
        map(lambda keynum: core.get_library_stats(
            keynum, token, fullURL = fullURL, sinceDate = sinceDate ), keynums) ) )
    tv_summ[ 'len_datas_since' ] = len( datas_since )
    if len( datas_since ) > 0:
        tv_summ[ 'since_date_string' ] = sinceDate.strftime( '%B %d, %Y' )
        tv_summ[ 'num_episodes_since' ] = f'{sum(list(map(lambda data_since: data_since[ "num_tveps" ], datas_since))):,}'
        tv_summ[ 'num_shows_since' ] = f'{sum(list(map(lambda data_since: data_since[ "num_tvshows" ], datas_since))):,}'
        tv_summ[ 'formatted_size_since' ] = get_formatted_size( sum(list(map(lambda data_since: data_since[ 'totsize'], datas_since))))
        tv_summ[ 'formatted_duration_since' ] = get_formatted_duration( sum(list(map(lambda data_since: data_since[ 'totdur' ], datas_since))) )
    env = Environment( loader = FileSystemLoader( resourceDir ) )
    template = env.get_template( 'summary_data_tv_template.rst' )
    tvstring = template.render( tv_summ = tv_summ )
    return tvstring
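As with the music summary, a short sketch of calling the function above and stitching the two summaries together; the actual newsletter assembly lives in get_summary_body, which is not shown here, and token is the same hypothetical access token as before.

tv_summary = get_summary_data_television_remote(token, fullURL='http://localhost:32400')
music_summary = get_summary_data_music_remote(token, fullURL='http://localhost:32400')
# Either call returns None when the server has no matching libraries, so drop those.
body = '\n\n'.join(filter(None, [music_summary, tv_summary]))
print(body)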
Example #6
def get_tvdata(tvlibraryname, fullURL, token, doCheck=True):
    if doCheck:
        try:
            all_libraries = core.get_libraries(token=token,
                                               fullURL=fullURL,
                                               do_full=True)
        except:
            return return_error_raw(
                "Error, bad token or URL may have been given.")
        key_found = list(
            filter(lambda key: all_libraries[key][0] == tvlibraryname,
                   all_libraries))
        if len(key_found) == 0:
            return return_error_raw("Error, %s library does not exist." %
                                    tvlibraryname)
        key_found = min(key_found)
        if all_libraries[key_found][1] != 'show':
            return return_error_raw("Error, %s library is not a TV library." %
                                    tvlibraryname)
    #
    ## now get data
    tvdata = core.get_library_data(tvlibraryname, token=token, fullURL=fullURL)
    return tvdata, 'SUCCESS'
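A hedged usage sketch for get_tvdata: on success it returns a (tvdata, 'SUCCESS') pair, and the sketch assumes the error path from return_error_raw also yields a two-element pair whose second element is the error message (that helper is not shown in this example). The library name below is hypothetical.

tvdata, status = get_tvdata('TV Shows', fullURL, token)
if status != 'SUCCESS':
    print('could not load TV data: %s' % status)
else:
    print('found %d TV shows' % len(tvdata))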
Example #7
def main():
    time0 = time.time()
    parser = ArgumentParser()
    parser.add_argument('--noverify',
                        dest='do_verify',
                        action='store_false',
                        default=True,
                        help='If chosen, do not verify the SSL connection.')
    parser.add_argument('--local',
                        dest='do_local',
                        action='store_true',
                        default=False,
                        help='Check for locally running plex server.')
    parser.add_argument('--info',
                        dest='do_info',
                        action='store_true',
                        default=False,
                        help='If chosen, run with INFO logging mode.')
    args = parser.parse_args()
    logger = logging.getLogger()
    if args.do_info: logger.setLevel(logging.INFO)

    #
    ## function to do the processing

    step = 0
    print('%d, started on %s' %
          (step, datetime.datetime.now().strftime('%B %d, %Y @ %I:%M:%S %p')))

    #
    ## get plex server token
    dat = core.checkServerCredentials(doLocal=args.do_local,
                                      verify=args.do_verify)
    if dat is None:
        step += 1
        print('\n'.join([
            '%d, error, could not access local Plex server in %0.3f seconds. Exiting...'
            % (step, time.time() - time0),
            '%d, finished on %s.' %
            (step + 1,
             datetime.datetime.now().strftime('%B %d, %Y @ %I:%M:%S %p'))
        ]))
        return
    fullURL, token = dat
    #
    ## first find out which libraries are the TV show ones
    library_dict = core.get_libraries(token, fullURL=fullURL, do_full=True)
    if library_dict is None:
        step += 1
        print('\n'.join([
            '%d, error, could not access libraries in plex server in %0.3f seconds. Exiting...'
            % (step, time.time() - time0),
            '%d, finished on %s.' %
            (step + 1,
             datetime.datetime.now().strftime('%B %d, %Y @ %I:%M:%S %p'))
        ]))
        return
    #
    valid_keys = list(
        filter(lambda key: library_dict[key][-1] == 'show', library_dict))
    if len(valid_keys) == 0:
        step += 1
        print('\n'.join([
            '%d, error, could not find a TV show library in %0.3f seconds. Exiting...'
            % (step, time.time() - time0),
            '%d, finished on %s.' %
            (step + 1,
             datetime.datetime.now().strftime('%B %d, %Y @ %I:%M:%S %p'))
        ]))
        return
    tvlib_title = library_dict[max(valid_keys)][0]
    step += 1
    nowdate = datetime.datetime.now().date()
    print('%d, found TV library: %s.' % (step, tvlib_title))
    #
    ## now get the future TV shows
    tvdata = core.get_library_data(tvlib_title, token=token, fullURL=fullURL)
    showsToExclude = tv.get_shows_to_exclude(tvdata)
    if len(showsToExclude) != 0:
        step += 1
        print('%d, excluding these TV shows: %s.' %
              (step, '; '.join(showsToExclude)))

    future_shows_dict = tv.get_future_info_shows(tvdata,
                                                 verify=args.do_verify,
                                                 showsToExclude=showsToExclude,
                                                 fromDate=nowdate)
    for show in future_shows_dict:
        tdelta = future_shows_dict[show]['start_date'] - nowdate
        future_shows_dict[show]['days_to_new_season'] = tdelta.days

    if len(future_shows_dict) == 0:
        step += 1
        print('%d, found no TV shows with new seasons.' % step)
        print('%d, finished on %s.' %
              (step + 1,
               datetime.datetime.now().strftime('%B %d, %Y @ %I:%M:%S %p')))
        return
    step += 1
    print(
        '%d, Found %d TV shows with new seasons after %s, in %0.3f seconds.' %
        (step, len(future_shows_dict), nowdate.strftime('%B %d, %Y'),
         time.time() - time0))
    print('\n')
    all_new_show_data = list(
        map(
            lambda show:
            (show, future_shows_dict[show]['max_last_season'],
             future_shows_dict[show]['min_next_season'], future_shows_dict[
                 show]['start_date'].strftime('%B %d, %Y'), future_shows_dict[
                     show]['days_to_new_season']),
            sorted(future_shows_dict,
                   key=lambda shw:
                   (future_shows_dict[shw]['start_date'], shw))))
    print('%s\n' % tabulate.tabulate(all_new_show_data,
                                     headers=[
                                         'SHOW', 'LAST SEASON', 'NEXT SEASON',
                                         'AIR DATE', 'DAYS TO NEW SEASON'
                                     ]))

    step += 1
    print('\n'.join([
        '%d, processed everything in %0.3f seconds.' %
        (step, time.time() - time0),
        '%d, finished everything on %s.' %
        (step + 1, datetime.datetime.now().strftime('%B %d, %Y @ %I:%M:%S %p'))
    ]))
Example #8
def main():
    time0 = time.time()
    default_time = 1000
    default_iters = 2
    default_num_threads = 2 * multiprocessing.cpu_count()
    #
    parser = ArgumentParser()
    parser.add_argument(
        '--maxtime',
        dest='maxtime_in_secs',
        type=int,
        action='store',
        default=default_time,
        help=' '.join([
            'The maximum amount of time to spend (in seconds),',
            'per candidate magnet link,', 'trying to download a TV show.',
            'Default is %d seconds.' % default_time
        ]))
    parser.add_argument(
        '--num',
        dest='num_iters',
        type=int,
        action='store',
        default=default_iters,
        help=' '.join([
            'The maximum number of different magnet links to try',
            'before giving up. Default is %d.' % default_iters
        ]))
    parser.add_argument(
        '--token',
        dest='token',
        type=str,
        action='store',
        help='Optional argument. If chosen, user provided Plex access token.')
    parser.add_argument(
        '--debuglevel',
        dest='debug_level',
        action='store',
        type=str,
        default='None',
        choices=['None', 'info', 'debug'],
        help=
        'Choose the debug level for the system logger. Default is None (no logging). Can be one of None (no logging), info, or debug.'
    )
    parser.add_argument(
        '--numthreads',
        dest='numthreads',
        type=int,
        action='store',
        default=default_num_threads,
        help=
        'Number of threads over which to search for TV shows in my library. Default is %d.'
        % default_num_threads)
    parser.add_argument(
        '--nomax',
        dest='do_restrict_maxsize',
        action='store_false',
        default=True,
        help='If chosen, do not restrict maximum size of downloaded file.')
    parser.add_argument(
        '--nomin',
        dest='do_restrict_minsize',
        action='store_false',
        default=True,
        help='If chosen, do not restrict minimum size of downloaded file.')
    parser.add_argument(
        '--raw',
        dest='do_raw',
        action='store_true',
        default=False,
        help='If chosen, then use the raw string to specify TV show torrents.')
    args = parser.parse_args()
    #
    logger = logging.getLogger()
    if args.debug_level == 'info': logger.setLevel(logging.INFO)
    if args.debug_level == 'debug': logger.setLevel(logging.DEBUG)
    assert (args.maxtime_in_secs >=
            60), 'error, max time must be >= 60 seconds.'
    assert (args.num_iters >=
            1), 'error, must have a positive number of maximum iterations.'
    step = 0
    print('%d, started on %s' %
          (step, datetime.datetime.now().strftime('%B %d, %Y @ %I:%M:%S %p')))
    step += 1
    #
    ## get plex server token
    dat = core.checkServerCredentials(doLocal=True)
    if dat is None:
        print('\n'.join([
            '%d, error, could not access local Plex server in %0.3f seconds. Exiting...'
            % (step, time.time() - time0),
            finish_statement(step)
        ]))
        return
    fullURL, token = dat
    if args.token is not None: token = args.token
    #
    ## first find out which libraries are the TV show ones
    library_dict = core.get_libraries(token, fullURL=fullURL, do_full=True)
    if library_dict is None:
        print('\n'.join([
            '%d, error, could not access libraries in plex server in %0.3f seconds. Exiting...'
            % (step, time.time() - time0),
            finish_statement(step)
        ]))
        return
    #
    valid_keys = list(
        filter(lambda key: library_dict[key][-1] == 'show', library_dict))
    if len(valid_keys) == 0:
        print('\n'.join([
            '%d, error, could not find a TV show library in %0.3f seconds. Exiting...'
            % (step, time.time() - time0),
            finish_statement(step)
        ]))
        return
    tvlib_title = library_dict[max(valid_keys)][0]
    print('%d, found TV library: %s.' % (step, tvlib_title))
    step += 1
    #
    ## now get the TV shows
    time0 = time.time()
    tvdata = core.get_library_data(tvlib_title,
                                   token=token,
                                   num_threads=args.numthreads)
    print('%d, found %d shows in the TV library, in %0.3f seconds.' %
          (step, len(tvdata), time.time() - time0))
    step += 1
    showsToExclude = tv.get_shows_to_exclude(tvdata)
    if len(showsToExclude) != 0:
        print('%d, excluding these TV shows: %s.' %
              (step, '; '.join(showsToExclude)))
        step += 1
    tvdb_token = get_token()
    if tvdb_token is None:
        print('\n'.join([
            '%d, error, could not access the TVDB API server in %0.3f seconds. Exiting...'
            % (step, time.time() - time0)
        ]))
        return
    toGet = tv.get_remaining_episodes(tvdata,
                                      showSpecials=False,
                                      showsToExclude=showsToExclude,
                                      num_threads=args.numthreads)
    if len(toGet) == 0:
        print('\n'.join([
            '%d, no episodes to download in %0.3f seconds. Exiting...' %
            (step, time.time() - time0),
            finish_statement(step)
        ]))
        return
    print('%d, took %0.3f seconds to get list of %d episodes to download.' %
          (step, time.time() - time0,
           sum(map(lambda tvshow: len(toGet[tvshow]['episodes']), toGet))))
    step += 1
    #
    ## now download these episodes
    tvTorUnits, newdirs = tv.create_tvTorUnits(
        toGet,
        restrictMaxSize=args.do_restrict_maxsize,
        restrictMinSize=args.do_restrict_minsize,
        do_raw=args.do_raw)
    print('%d, here are the %d episodes to get: %s.' %
          (step, len(tvTorUnits), ', '.join(
              map(lambda tvTorUnit: tvTorUnit['torFname'], tvTorUnits))))
    step += 1
    tv.download_batched_tvtorrent_shows(tvTorUnits,
                                        newdirs=newdirs,
                                        maxtime_in_secs=args.maxtime_in_secs,
                                        num_iters=args.num_iters)
    print('\n'.join([
        '%d, everything done in %0.3f seconds.' % (step, time.time() - time0),
        finish_statement(step)
    ]))
Example #9
File: email.py Project: tanimislam/howdy
def get_summary_data_movies_remote(
    token, fullURL = 'http://localhost:32400',
    sinceDate = datetime.datetime.strptime('January 1, 2020', '%B %d, %Y' ).date( ) ):
    """
    This returns summary information on movie media from all movie libraries on the Plex_ server, for use as part of the Plex_ newsletter sent out to one's Plex_ server friends. The email first summarizes ALL the movie data, and then summarizes the movie data uploaded and processed since the last newsletter's date. Unlike :py:meth:`get_summary_data_music_remote <howdy.email.email.get_summary_data_music_remote>` and :py:meth:`get_summary_data_television_remote <howdy.email.email.get_summary_data_television_remote>`, this returns a :py:class:`list` of strings rather than a string.

    :param str token: the Plex_ access token.
    :param str fullURL: the Plex_ server URL.
    :param date sinceDate: the :py:class:`datetime <datetime.date>` from which we have added movies. Default is :py:class:`date <datetime.date>` corresponding to ``January 1, 2020``.
    
    :returns: a :py:class:`string <str>` description of movie media in all movie libraries on the Plex_ server. If there is no Plex_ server or movie library, returns ``None``.
    :rtype: list

    .. seealso:: :py:meth:`get_summary_body <howdy.email.email.get_summary_body>`.
    """
    libraries_dict = core.get_libraries( token, fullURL = fullURL, do_full = True )
    if libraries_dict is None:
        return None
    keynums = set(filter(lambda keynum: libraries_dict[ keynum ][ 1 ] == 'movie', libraries_dict ) )
    if len( keynums ) == 0:
        return None
    #
    # sinceDate = core.get_current_date_newsletter( )
    #
    ## hard coding (for now) how to join by genres
    join_genres = { 'action' : [ 'thriller', 'western' ], 'comedy' : [ 'family', ], 'drama' : [ 'drame', ] }
    def _join_by_genre( sort_by_genre, join_genres ):
        alljoins = list(chain.from_iterable(map(lambda genre: join_genres[ genre ], join_genres ) ) )
        assert( len( alljoins ) == len( set( alljoins ) ) )
        assert( len( set( alljoins ) & set( join_genres ) ) == 0 )
        for genre in join_genres:
            g2s = set( join_genres[ genre ] ) & set( sort_by_genre )
            if len( g2s ) == 0: continue
            if genre not in sort_by_genre:
                sort_by_genre[ genre ] = { 'totnum' : 0, 'totdur' : 0.0, 'totsize' : 0.0 }
            for g2 in g2s:
                sort_by_genre[ genre ][ 'totnum' ] += sort_by_genre[ g2 ][ 'totnum' ]
                sort_by_genre[ genre ][ 'totdur' ] += sort_by_genre[ g2 ][ 'totdur' ]
                sort_by_genre[ genre ][ 'totsize' ] += sort_by_genre[ g2 ][ 'totsize' ]
            for g2 in g2s:
                sort_by_genre.pop( g2 )
    #
    current_date_string = datetime.datetime.now( ).date( ).strftime( '%B %d, %Y' )
    datas = list(map(lambda keynum: core.get_library_stats( keynum, token, fullURL = fullURL ), keynums ) )
    num_movies_since = -1
    sorted_by_genres = { }
    sorted_by_genres_since = { }
    for data in datas:
        data_sorted_by_genre = data[ 'genres' ]
        for genre in data_sorted_by_genre:
            if genre not in sorted_by_genres:
                sorted_by_genres[ genre ] = data_sorted_by_genre[ genre ].copy( )
                continue
            sorted_by_genres[ genre ][ 'totnum' ] += data_sorted_by_genre[ genre ][ 'totnum'  ]
            sorted_by_genres[ genre ][ 'totdur' ] += data_sorted_by_genre[ genre ][ 'totdur'  ]
            sorted_by_genres[ genre ][ 'totsize'] += data_sorted_by_genre[ genre ][ 'totsize' ]
    _join_by_genre( sorted_by_genres, join_genres )
    categories = set( sorted_by_genres )
    num_movies = f'{sum(list(map(lambda data: data[ "num_movies" ], datas ) ) ):,}'
    totdur = get_formatted_duration( sum(list(map(lambda data: data[ 'totdur' ], datas ) ) ) )
    totsize = get_formatted_size( sum(list(map(lambda data: data[ 'totsize' ], datas ) ) ) )
    movie_summ = {
        'current_date_string' : current_date_string,
        'num_movies' : num_movies,
        'num_categories' : len( categories ),
        'formatted_size' : totsize,
        'formatted_duration' : totdur }
    #
    datas_since = list(filter(
        lambda data_since: data_since[ 'num_movies' ] > 0,
        map(lambda keynum: core.get_library_stats(
            keynum, token, fullURL = fullURL, sinceDate = sinceDate ), keynums ) ) )
    movie_summ[ 'len_datas_since' ] = len( datas_since )
    if len( datas_since ) != 0:
        for data_since in datas_since:
            data_since_sorted_by_genre = data_since[ 'genres' ]
            for genre in data_since_sorted_by_genre:
                if genre not in sorted_by_genres_since:
                    sorted_by_genres_since[ genre ] = data_since_sorted_by_genre[ genre ].copy( )
                    continue
                sorted_by_genres_since[ genre ][ 'totnum' ] += data_since_sorted_by_genre[ genre ][ 'totnum'  ]
                sorted_by_genres_since[ genre ][ 'totdur' ] += data_since_sorted_by_genre[ genre ][ 'totdur'  ]
                sorted_by_genres_since[ genre ][ 'totsize'] += data_since_sorted_by_genre[ genre ][ 'totsize' ]
        _join_by_genre( sorted_by_genres_since, join_genres )
        num_movies_since = f'{sum(list(map(lambda data_since: data_since[ "num_movies" ], datas_since ) ) ):,}'
        categories_since = set( sorted_by_genres_since )
        totsize_since = get_formatted_size( sum(list(map(lambda data_since: data_since[ 'totsize' ], datas_since ) ) ) )
        totdur_since = get_formatted_duration( sum(list(map(lambda data_since: data_since[ 'totdur' ], datas_since ) ) ) )
        movie_summ[ 'since_date_string' ] = sinceDate.strftime( '%B %d, %Y' )
        movie_summ[ 'num_movies_since' ] = num_movies_since
        movie_summ[ 'num_categories_since' ] = len( categories_since )
        movie_summ[ 'formatted_size_since' ] = totsize_since
        movie_summ[ 'formatted_duration_since' ] =  totdur_since
        
    #
    ## get last 7 movies that I have added, to pass to JINJA template
    lastN_movies = core.get_lastN_movies( 7, token, fullURL = fullURL, useLastNewsletterDate = False )
    last_N_movies = [ ]
    def _get_nth_movie( lastN_entry ):
        title, year, date, url = lastN_entry
        if url is None:
            return {
                'hasURL' : False,
                'name' : title,
                'year' : year,
                'added_date_string' : date.strftime( '%B %d, %Y' ),
                'url' : '' }
        return {
            'hasURL' : True,
            'name' : title,
            'year' : year,
            'added_date_string' : date.strftime( '%B %d, %Y' ),
            'url' : url }
    last_N_movies = list(map(_get_nth_movie, lastN_movies ) )
    #
    ## catmovstrings list to pass to JINJA template
    template_mainstring = Template(' '.join([
        'As of ``{{ current_date_string }}``, there are {{ num_movies }} movies in this category.',
        'The total size of movie media here is {{ totsize }}.',
        'The total duration of movie media here is {{ totdur }}.' ]) )
    template_sincestring = Template(' '.join([
        'Since ``{{ since_date_string }}``, I have added {{ num_movies_since }} movies in this category.',
        'The total size of movie media I added here is {{ totsize_since }}.',
        'The total duration of movie media I added here is {{ totdur_since }}.' ] ) )
    def _get_category_entry( cat ):
        mainstring = template_mainstring.render(
            current_date_string = current_date_string,
            num_movies = f'{sorted_by_genres[ cat ][ "totnum" ]:,}',
            totsize = get_formatted_size( sorted_by_genres[ cat ][ 'totsize' ] ),
            totdur = get_formatted_duration( sorted_by_genres[ cat ][ 'totdur' ] ) )
        if cat in sorted_by_genres_since and sorted_by_genres_since[ cat ][ 'totnum' ] > 0:
            num_movies_since = f'{sorted_by_genres_since[ cat ][ "totnum" ]:,}'
            totsize_since    = get_formatted_size( sorted_by_genres_since[ cat ][ 'totsize' ] )
            totdur_since     = get_formatted_duration( sorted_by_genres_since[ cat ][ 'totdur'  ] )
            mainstring_since = template_sincestring.render(
                since_date_string = sinceDate.strftime( '%B %d, %Y' ),
                num_movies_since = num_movies_since,
                totsize_since = totsize_since,
                totdur_since = totdur_since )
            description = ' '.join([ mainstring, mainstring_since ])
            return { 'category' : cat, 'description' : description }
        return { 'category' : cat, 'description' : mainstring }
    catmovs = list(map(_get_category_entry, sorted( sorted_by_genres ) ) )
    env = Environment( loader = FileSystemLoader( resourceDir ) )
    template = env.get_template( 'summary_data_movie_template.rst' )
    movstring = template.render( movie_summ = movie_summ, last_N_movies = last_N_movies, catmovs = catmovs )
    return movstring
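A worked illustration of the genre-joining step inside the function above: _join_by_genre folds each child genre's counters into its parent and then drops the children. The numbers below are made up, and the helper is shown here as if it were lifted to module scope.

sort_by_genre = {
    'action'   : { 'totnum' : 10, 'totdur' : 36000.0, 'totsize' : 2.0e10 },
    'thriller' : { 'totnum' :  4, 'totdur' : 14400.0, 'totsize' : 8.0e9 },
    'western'  : { 'totnum' :  1, 'totdur' :  5400.0, 'totsize' : 3.0e9 },
}
join_genres = { 'action' : [ 'thriller', 'western' ] }
_join_by_genre( sort_by_genre, join_genres )
# sort_by_genre is now { 'action' : { 'totnum' : 15, 'totdur' : 55800.0, 'totsize' : 3.1e10 } }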
Example #10
def main():
    time0 = time.time()
    parser = ArgumentParser()
    parser.add_argument(
        '--years',
        dest='s_years',
        action='store',
        type=str,
        help='Give a list of years as a string, such as "1980,1981". Optional.'
    )
    parser.add_argument('--local',
                        dest='do_local',
                        action='store_true',
                        default=False,
                        help='Check for locally running plex server.')
    parser.add_argument(
        '--dirname',
        dest='dirname',
        action='store',
        type=str,
        default=os.getcwd(),
        help='Directory into which to store those plots. Default is %s.' %
        os.getcwd())
    parser.add_argument('--noverify',
                        dest='do_verify',
                        action='store_false',
                        default=True,
                        help='If chosen, do not verify SSL connections.')
    args = parser.parse_args()
    #
    ## function to do the processing
    step = 0
    print('%d, started on %s' %
          (step, datetime.datetime.now().strftime('%B %d, %Y @ %I:%M:%S %p')))
    if args.s_years is not None:
        try:
            years = sorted(
                set(map(lambda tok: int(tok), args.s_years.split(','))))
        except:
            step += 1
            print('%d, did not give a valid set of years.' % step)
            years = []
    else:
        years = []

    #
    ## get plex server token
    dat = core.checkServerCredentials(doLocal=args.do_local,
                                      verify=args.do_verify)
    if dat is None:
        step += 1
        print('\n'.join([
            '%d, error, could not access local Plex server in %0.3f seconds. Exiting...'
            % (step, time.time() - time0),
            '%d, finished on %s.' %
            (step + 1,
             datetime.datetime.now().strftime('%B %d, %Y @ %I:%M:%S %p'))
        ]))
        return
    fullURL, token = dat
    #
    ## first find out which libraries are the TV show ones
    library_dict = core.get_libraries(token, fullURL=fullURL, do_full=True)
    if library_dict is None:
        step += 1
        print('\n'.join([
            '%d, error, could not access libraries in plex server in %0.3f seconds. Exiting...'
            % (step, time.time() - time0),
            '%d, finished on %s.' %
            (step + 1,
             datetime.datetime.now().strftime('%B %d, %Y @ %I:%M:%S %p'))
        ]))
        return
    #
    valid_keys = list(
        filter(lambda key: library_dict[key][-1] == 'show', library_dict))
    if len(valid_keys) == 0:
        step += 1
        print('\n'.join([
            '%d, error, could not find a TV show library in %0.3f seconds. Exiting...'
            % (step, time.time() - time0),
            '%d, finished on %s.' %
            (step + 1,
             datetime.datetime.now().strftime('%B %d, %Y @ %I:%M:%S %p'))
        ]))
        return
    tvlib_title = library_dict[max(valid_keys)][0]
    step += 1
    print('%d, found TV library: %s.' % (step, tvlib_title))
    #
    ## now get the TV shows
    tvdata = core.get_library_data(tvlib_title,
                                   token=token,
                                   fullURL=fullURL,
                                   num_threads=16)
    showsToExclude = tv.get_shows_to_exclude(tvdata)
    if len(showsToExclude) != 0:
        step += 1
        print('%d, excluding these TV shows: %s.' %
              (step, '; '.join(showsToExclude)))

    #
    ## now actual meat of the computation
    tvdata_date_dict = tv.get_tvdata_ordered_by_date(tvdata)
    min_year = min(tvdata_date_dict.keys()).year
    max_year = max(tvdata_date_dict.keys()).year
    possible_years_set = set(map(lambda date: date.year, tvdata_date_dict))
    step += 1
    if len(years) == 0:
        years = sorted(possible_years_set)
        print('%d, no years specified. We will use %s total: %s.' %
              (step, _print_years(len(years)), ', '.join(
                  map(lambda year: '%d' % year, years))))
    else:
        cand_years = sorted(set(years) & possible_years_set)
        if len(cand_years) == 0:
            print('\n'.join([
                '%d, no intersection between the %s chosen (%s) and the %d years in the library.'
                % (step, _print_years(len(years)), ', '.join(
                    map(lambda yr: '%d' % yr, years)), len(possible_years_set)),
                'Instead, we will use %s total: %s.' %
                (_print_years(len(possible_years_set)), ', '.join(
                    map(lambda year: '%d' % year, sorted(possible_years_set))))
            ]))
            years = sorted(possible_years_set)
        else:
            print('%d, we found %s to use: %s.' %
                  (step, _print_years(len(cand_years)), ', '.join(
                      map(lambda year: '%d' % year, cand_years))))
            years = cand_years

    step += 1
    print('%d, started processing %s of TV shows after %0.3f seconds.' %
          (step, _print_years(len(years)), time.time() - time0))
    manager = Manager()
    shared_step = manager.Value('step', step)
    num_procced = manager.Value('nump', 0)
    lock = manager.RLock()
    pool = Pool(processes=cpu_count())

    def _process_year(year):
        tv.create_plot_year_tvdata(tvdata_date_dict,
                                   year,
                                   shouldPlot=True,
                                   dirname=args.dirname)
        lock.acquire()
        shared_step.value += 1
        num_procced.value += 1
        print(
            '%d, finished processing year = %d (%02d / %02d) in %0.3f seconds.'
            % (shared_step.value, year, num_procced.value, len(years),
               time.time() - time0))
        lock.release()

    _ = list(pool.map(_process_year, years))
    step = shared_step.value + 1
    print('\n'.join([
        '%d, processed all %s in %0.3f seconds.' %
        (step, _print_years(len(years)), time.time() - time0),
        '%d, finished everything on %s.' %
        (step + 1, datetime.datetime.now().strftime('%B %d, %Y @ %I:%M:%S %p'))
    ]))
Example #11
def get_summary_data_thisamericanlife_remote(token,
                                             fullURL='http://localhost:32400'):
    libraries_dict = core.get_libraries(token, fullURL=fullURL)
    keynum = max([
        key for key in libraries_dict
        if libraries_dict[key] == 'This American Life'
    ])
    sinceDate = core.get_current_date_newsletter()
    key, song_data = core._get_library_data_artist(keynum,
                                                   token,
                                                   fullURL=fullURL)
    num_episodes = 0
    totdur = 0.0
    totsizebytes = 0.0
    for key in song_data:
        for key2 in song_data[key]:
            num_episodes += len(song_data[key][key2])
            for track in song_data[key][key2]:
                name, dt, dur, sizebytes = track
                totdur += dur
                totsizebytes += sizebytes
    mainstring = 'There are %d episodes in %d series in This American Life.' % (
        num_episodes, len(song_data))
    sizestring = 'The total size of This American Life media is %s.' % \
        get_formatted_size( totsizebytes )
    durstring = 'The total duration of This American Life media is %s.' % \
        get_formatted_duration( totdur )
    if sinceDate is None:
        pristrings = [
            ' '.join([mainstring, sizestring, durstring]),
        ]
    else:
        key, song_data_since = core._get_library_data_artist(
            keynum, token, fullURL=fullURL, sinceDate=sinceDate)
        num_episodes_since = 0
        totdur_since = 0.0
        totsizebytes_since = 0.0
        for key in song_data_since:
            for key2 in song_data_since[key]:
                num_episodes_since += len(song_data_since[key][key2])
                for track in song_data_since[key][key2]:
                    name, dt, dur, sizebytes = track
                    totdur_since += dur
                    totsizebytes_since += sizebytes
        if num_episodes_since > 0:
            mainstring_since = ' '.join([
                'Since %s, I have added %d new This American Life episodes.' %
                (sinceDate.strftime('%B %d, %Y'), num_episodes_since),
                'The total size of This American Life media I added is %s.' %
                get_formatted_size(totsizebytes_since),
                'The total duration of This American Life media I added is %s.'
                % get_formatted_duration(totdur_since)
            ])
            pristrings = [
                ' '.join([mainstring, sizestring, durstring,
                          mainstring_since]),
            ]
        else:
            pristrings = [
                ' '.join([mainstring, sizestring, durstring]),
            ]
    #
    catpristrings = {}
    for album in song_data:
        if album == 'Ira Glass': actalbum = 'This American Life'
        else: actalbum = album
        totdur = 0.0
        totsizebytes = 0.0
        num_episodes = 0
        for key2 in song_data[album]:
            num_episodes += len(song_data[album][key2])
            for track in song_data[album][key2]:
                name, dt, dur, sizebytes = track
                totdur += dur
                totsizebytes += sizebytes
        mainstring = 'There are %d episodes in this category.' % num_episodes
        sizestring = 'The total size of media here is %s.' % get_formatted_size(
            totsizebytes)
        durstring = 'The total duration of media here is %s.' % get_formatted_duration(
            totdur)
        if sinceDate is None:
            mystring = ' '.join([mainstring, sizestring, durstring])
        else:
            if album not in song_data_since:
                mystring = ' '.join([mainstring, sizestring, durstring])
            else:
                totdur_since = 0.0
                totsizebytes_since = 0.0
                num_episodes_since = 0
                for key2 in song_data_since[album]:
                    num_episodes_since += len(song_data_since[album][key2])
                    for track in song_data_since[album][key2]:
                        name, dt, dur, sizebytes = track
                        totdur_since += dur
                        totsizebytes_since += sizebytes
                if num_episodes_since > 0:
                    mainstring_since = ' '.join([
                        'Since %s, I have added %d new episodes in this category.'
                        %
                        (sinceDate.strftime('%B %d, %Y'), num_episodes_since),
                        'The total size of media I added here is %s.' %
                        get_formatted_size(totsizebytes_since),
                        'The total duration of media I added here is %s.' %
                        get_formatted_duration(totdur_since)
                    ])
                    mystring = ' '.join(
                        [mainstring, sizestring, durstring, mainstring_since])
                else:
                    mystring = ' '.join([mainstring, sizestring, durstring])
        catpristrings[actalbum] = mystring
    pristrings.append(catpristrings)
    return pristrings
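The nested loops in the function above imply the shape of song_data that core._get_library_data_artist returns: a dictionary of artists, each mapping album names to lists of (name, date, duration, size_in_bytes) track tuples. A made-up illustration of that assumed layout:

import datetime

song_data = {
    'Ira Glass': {
        'This American Life': [
            ('#1: New Beginnings',  datetime.date(1995, 11, 17), 3540.0, 57.0e6),
            ('#2: Small Scale Sin', datetime.date(1995, 11, 24), 3480.0, 56.0e6),
        ],
    },
}
# num_episodes as computed in the function above:
print(sum(len(tracks) for albums in song_data.values() for tracks in albums.values()))  # 2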
Example #12
def main():
    parser = ArgumentParser()
    parser.add_argument(
        '--libraries',
        dest='do_libraries',
        action='store_true',
        default=False,
        help=
        'If chosen, just give the sorted names of all libraries in the Plex server.'
    )
    parser.add_argument(
        '--refresh',
        dest='do_refresh',
        action='store_true',
        default=False,
        help=
        'If chosen, refresh a chosen library in the Plex server. Must give a valid name for the library.'
    )
    parser.add_argument(
        '--summary',
        dest='do_summary',
        action='store_true',
        default=False,
        help=
        'If chosen, perform a summary of the chosen library in the Plex server. Must give a valid name for the library.'
    )
    parser.add_argument('--library',
                        dest='library',
                        type=str,
                        action='store',
                        help='Name of a (valid) library in the Plex server.')
    parser.add_argument('--servername',
                        dest='servername',
                        action='store',
                        type=str,
                        help='Optional name of the server to check for.')
    parser.add_argument(
        '--servernames',
        dest='do_servernames',
        action='store_true',
        default=False,
        help='If chosen, print out all the servers owned by the user.')
    parser.add_argument('--noverify',
                        dest='do_verify',
                        action='store_false',
                        default=True,
                        help='Do not verify SSL transactions if chosen.')
    args = parser.parse_args()
    #
    ##
    _, token = core.checkServerCredentials(doLocal=False,
                                           verify=args.do_verify)
    #
    ## only one of possible actions
    assert( len( list( filter( lambda tok: tok is True, (
        args.do_libraries, args.do_refresh, args.do_summary, args.do_servernames ) ) ) ) == 1 ), \
        "error, must choose one of --libraries, --refresh, --summary, --servernames"

    #
    ## if list of servernames, --servernames
    if args.do_servernames:
        server_dicts = core.get_all_servers(token, verify=args.do_verify)
        if server_dicts is None:
            print('COULD FIND NO SERVERS ACCESSIBLE TO USER.')
            return
        server_formatted_data = list(
            map(
                lambda name:
                (name, server_dicts[name]['owned'], server_dicts[name]['url']),
                server_dicts))
        print('\n%s\n' % tabulate(server_formatted_data,
                                  headers=['Name', 'Is Owned', 'URL']))
        return

    #
    ## check that server name we choose is owned by us.
    server_dicts = core.get_all_servers(token, verify=args.do_verify)
    server_names_owned = sorted(
        set(filter(lambda name: server_dicts[name]['owned'], server_dicts)))
    assert (len(server_names_owned) >
            0), "error, none of these Plex servers is owned by us."
    if args.servername is None:
        args.servername = max(server_names_owned)

    assert (args.servername in server_names_owned
            ), "error, server %s not in list of owned servers: %s." % (
                args.servername, server_names_owned)

    #
    ## get URL and token from server_dicts
    fullURL = server_dicts[args.servername]['url']
    token = server_dicts[args.servername]['access token']

    #
    ## if get list of libraries, --libraries
    if args.do_libraries:
        library_dict = core.get_libraries(token, fullURL=fullURL, do_full=True)
        print('\nHere are the %d libraries in this Plex server: %s.' %
              (len(library_dict), args.servername))
        libraries_library_dict = dict(
            map(
                lambda keynum:
                (library_dict[keynum][0], library_dict[keynum][1]),
                library_dict.keys()))
        library_names = sorted(libraries_library_dict)
        libraries_formatted_data = list(
            map(lambda name: (name, libraries_library_dict[name]),
                library_names))
        print('\n%s\n' % tabulate(libraries_formatted_data,
                                  headers=['Name', 'Library Type']))
        return

    #
    ## now gone through here, must define a --library
    assert (args.library is not None), "error, library must be defined."
    library_dict = core.get_libraries(token, fullURL=fullURL, do_full=True)
    library_names = sorted(
        map(lambda keynum: library_dict[keynum][0], library_dict.keys()))
    assert (args.library in library_names
            ), "error, library = %s not in %s." % (args.library, library_names)
    library_key = max(
        filter(lambda keynum: library_dict[keynum][0] == args.library,
               library_dict))

    #
    ## if summary is chosen, --summary
    if args.do_summary:
        _print_summary(library_key, library_dict, token, fullURL)
        return

    #
    ## otherwise refresh is chosen, --refresh
    if args.do_refresh:
        core.refresh_library(library_key,
                             library_dict,
                             fullURL=fullURL,
                             token=token)
        print('refreshed library %s.' % args.library)
        return
Example #13
File: tv_gui.py Project: tanimislam/howdy
    def run(self):
        self.progress_dialog.show()
        time0 = self.progress_dialog.t0
        final_data_out = {}
        mytxt = '0, started loading in data on %s.' % (
            datetime.datetime.now().strftime('%B %d, %Y @ %I:%M:%S %p'))
        logging.info(mytxt)
        self.emitString.emit(mytxt)
        #
        libraries_dict = core.get_libraries(self.token,
                                            fullURL=self.fullURL,
                                            do_full=True)
        if not any(
                map(lambda value: 'show' in value[-1],
                    libraries_dict.values())):
            raise ValueError('Error, could not find TV shows.')
        library_name = max(
            map(
                lambda key: libraries_dict[key][0],
                filter(lambda key: libraries_dict[key][1] == 'show',
                       libraries_dict)))
        final_data_out['library_name'] = library_name
        mytxt = '1, found TV library in %0.3f seconds.' % (time.time() - time0)
        logging.info(mytxt)
        self.emitString.emit(mytxt)
        #
        if self.tvdata_on_plex is None:
            self.tvdata_on_plex = core.get_library_data(
                library_name,
                fullURL=self.fullURL,
                token=self.token,
                num_threads=self.num_threads)
        if self.tvdata_on_plex is None:
            raise ValueError('Error, could not find TV shows on the server.')
        mytxt = '2, loaded TV data from Plex server in %0.3f seconds.' % (
            time.time() - time0)
        logging.info(mytxt)
        self.emitString.emit(mytxt)
        #
        ## using a stupid-ass pattern to shave some seconds off...
        manager = Manager()
        shared_list = manager.list()
        myLock = manager.RLock()
        myStage = manager.Value('stage', 2)

        #
        def _process_didend():
            if self.didend is not None:
                shared_list.append(('didend', self.didend))
                return
            didEnd = tv.get_all_series_didend(self.tvdata_on_plex,
                                              verify=self.verify,
                                              tvdb_token=self.tvdb_token)
            myLock.acquire()
            myStage.value += 1
            mytxt = '%d, added information on whether shows ended in %0.3f seconds.' % (
                myStage.value, time.time() - time0)
            logging.info(mytxt)
            self.emitString.emit(mytxt)
            myLock.release()
            shared_list.append(('didend', didEnd))

        def _process_missing():
            if self.toGet is not None:
                shared_list.append(('toGet', self.toGet))
                return
            toGet = tv.get_remaining_episodes(
                self.tvdata_on_plex,
                showSpecials=False,
                showsToExclude=self.showsToExclude,
                verify=self.verify,
                token=self.tvdb_token)
            myLock.acquire()
            myStage.value += 1
            mytxt = '%d, found missing episodes in %0.3f seconds.' % (
                myStage.value, time.time() - time0)
            logging.info(mytxt)
            self.emitString.emit(mytxt)
            myLock.release()
            shared_list.append(('toGet', toGet))

        def _process_plot_tvshowstats():
            tvdata_date_dict = tv.get_tvdata_ordered_by_date(
                self.tvdata_on_plex)
            years_have = set(map(lambda date: date.year, tvdata_date_dict))
            with multiprocessing.Pool(
                    processes=multiprocessing.cpu_count()) as pool:
                figdictdata = dict(
                    pool.map(
                        lambda year:
                        (year,
                         tv.create_plot_year_tvdata(
                             tvdata_date_dict, year, shouldPlot=False)),
                        years_have))
            myLock.acquire()
            myStage.value += 1
            mytxt = '%d, made plots of tv shows added in %d years in %0.3f seconds.' % (
                myStage.value, len(years_have), time.time() - time0)
            logging.info(mytxt)
            self.emitString.emit(mytxt)
            myLock.release()
            shared_list.append(('plotYears', figdictdata))

        jobs = [
            Process(target=_process_didend),
            Process(target=_process_missing)
        ]
        #         Process( target = _process_plot_tvshowstats ) ]
        for process in jobs:
            process.start()
        for process in jobs:
            process.join()
        #
        final_data = dict(shared_list)
        assert (set(final_data) == set(['didend', 'toGet']))
        didend = final_data['didend']
        toGet = final_data['toGet']
        for seriesName in self.tvdata_on_plex:
            self.tvdata_on_plex[seriesName]['didEnd'] = didend[seriesName]
        final_data_out['tvdata_on_plex'] = self.tvdata_on_plex
        mytxt = '%d, finished loading in all data on %s.' % (
            myStage.value + 1,
            datetime.datetime.now().strftime('%B %d, %Y @ %I:%M:%S %p'))
        logging.info(mytxt)
        self.emitString.emit(mytxt)
        missing_eps = dict(
            map(
                lambda seriesName: (seriesName, toGet[seriesName]['episodes']),
                set(self.tvdata_on_plex)
                & set(toGet) - set(self.showsToExclude)))
        final_data_out['missing_eps'] = missing_eps
        self.finalData.emit(final_data_out)
        self.stopDialog.emit()  # now stop everything
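A self-contained sketch of the result-collection pattern used above: worker processes append (key, value) pairs to a Manager-backed list, and the parent rebuilds a plain dict after join().

from multiprocessing import Manager, Process

def _worker(shared_list, key, value):
    shared_list.append((key, value))   # the manager proxy is safe to mutate across processes

if __name__ == '__main__':
    manager = Manager()
    shared_list = manager.list()
    jobs = [Process(target=_worker, args=(shared_list, k, k * k)) for k in range(3)]
    for job in jobs: job.start()
    for job in jobs: job.join()
    print(dict(shared_list))           # {0: 0, 1: 1, 2: 4}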