def get_plex_downloads(self):
    """Return details of an available Plex Media Server update.

    Queries plex.tv for the download listing that matches the configured
    server platform and update channel, then compares the listed version
    against the currently-running server version.

    Returns:
        dict: update/download details, or an empty dict on any failure.
    """
    logger.debug("Tautulli PlexTV :: Retrieving current server version.")
    pms_connect = pmsconnect.PmsConnect()
    pms_connect.set_server_version()

    update_channel = pms_connect.get_server_update_channel()

    logger.debug("Tautulli PlexTV :: Plex update channel is %s." % update_channel)
    plex_downloads = self.get_plextv_downloads(plexpass=(update_channel == 'beta'))

    try:
        available_downloads = json.loads(plex_downloads)
    except Exception:
        logger.warn("Tautulli PlexTV :: Unable to load JSON for get_plex_updates.")
        return {}

    # Get the updates for the platform
    pms_platform = common.PMS_PLATFORM_NAME_OVERRIDES.get(plexpy.CONFIG.PMS_PLATFORM,
                                                          plexpy.CONFIG.PMS_PLATFORM)
    # Bug fix: guard against a missing 'computer'/'nas' section in the
    # plex.tv response -- calling .get() on None raised AttributeError.
    platform_downloads = available_downloads.get('computer', {}).get(pms_platform) or \
        available_downloads.get('nas', {}).get(pms_platform)

    if not platform_downloads:
        logger.error("Tautulli PlexTV :: Unable to retrieve Plex updates: Could not match server platform: %s."
                     % pms_platform)
        return {}

    # Normalize dotted versions into comparable integers by zero-padding the
    # first four dotted fields (e.g. '1.13.5.5291-...' -> 0001001300055291).
    v_old = helpers.cast_to_int("".join(v.zfill(4) for v in plexpy.CONFIG.PMS_VERSION.split('-')[0].split('.')[:4]))
    v_new = helpers.cast_to_int("".join(v.zfill(4) for v in platform_downloads.get('version', '').split('-')[0].split('.')[:4]))

    if not v_old:
        logger.error("Tautulli PlexTV :: Unable to retrieve Plex updates: Invalid current server version: %s."
                     % plexpy.CONFIG.PMS_VERSION)
        return {}
    if not v_new:
        logger.error("Tautulli PlexTV :: Unable to retrieve Plex updates: Invalid new server version: %s."
                     % platform_downloads.get('version'))
        return {}

    # Get proper download: prefer the release matching the configured
    # distro/build, otherwise fall back to the first listed release.
    releases = platform_downloads.get('releases', [{}])
    release = next((r for r in releases if r['distro'] == plexpy.CONFIG.PMS_UPDATE_DISTRO and
                    r['build'] == plexpy.CONFIG.PMS_UPDATE_DISTRO_BUILD), releases[0])

    download_info = {'update_available': v_new > v_old,
                     'platform': platform_downloads.get('name'),
                     'release_date': platform_downloads.get('release_date'),
                     'version': platform_downloads.get('version'),
                     'requirements': platform_downloads.get('requirements'),
                     'extra_info': platform_downloads.get('extra_info'),
                     'changelog_added': platform_downloads.get('items_added'),
                     'changelog_fixed': platform_downloads.get('items_fixed'),
                     'label': release.get('label'),
                     'distro': release.get('distro'),
                     'distro_build': release.get('build'),
                     'download_url': release.get('url'),
                     }

    return download_info
def get_plex_downloads(self):
    """Return details of an available Plex Media Server update (PlexPy variant).

    Uses the configured PMS_UPDATE_CHANNEL to pick the plex.tv download
    listing and compares its version against the running server version.

    Returns:
        dict: update/download details, or an empty dict on any failure.
    """
    logger.debug(u"PlexPy PlexTV :: Retrieving current server version.")
    pmsconnect.PmsConnect().set_server_version()

    logger.debug(u"PlexPy PlexTV :: Plex update channel is %s." % plexpy.CONFIG.PMS_UPDATE_CHANNEL)
    plex_downloads = self.get_plextv_downloads(plexpass=(plexpy.CONFIG.PMS_UPDATE_CHANNEL == 'plexpass'))

    try:
        available_downloads = json.loads(plex_downloads)
    except Exception:
        logger.warn(u"PlexPy PlexTV :: Unable to load JSON for get_plex_updates.")
        return {}

    # Get the updates for the platform
    pms_platform = common.PMS_PLATFORM_NAME_OVERRIDES.get(plexpy.CONFIG.PMS_PLATFORM,
                                                          plexpy.CONFIG.PMS_PLATFORM)
    # Bug fix: guard against a missing 'computer'/'nas' section in the
    # plex.tv response -- calling .get() on None raised AttributeError.
    platform_downloads = available_downloads.get('computer', {}).get(pms_platform) or \
        available_downloads.get('nas', {}).get(pms_platform)

    if not platform_downloads:
        logger.error(u"PlexPy PlexTV :: Unable to retrieve Plex updates: Could not match server platform: %s."
                     % pms_platform)
        return {}

    # Normalize dotted versions into comparable integers by zero-padding the
    # first four dotted fields.
    v_old = helpers.cast_to_int("".join(v.zfill(4) for v in plexpy.CONFIG.PMS_VERSION.split('-')[0].split('.')[:4]))
    v_new = helpers.cast_to_int("".join(v.zfill(4) for v in platform_downloads.get('version', '').split('-')[0].split('.')[:4]))

    if not v_old:
        logger.error(u"PlexPy PlexTV :: Unable to retrieve Plex updates: Invalid current server version: %s."
                     % plexpy.CONFIG.PMS_VERSION)
        return {}
    if not v_new:
        logger.error(u"PlexPy PlexTV :: Unable to retrieve Plex updates: Invalid new server version: %s."
                     % platform_downloads.get('version'))
        return {}

    # Get proper download: prefer the release matching the configured
    # distro/build, otherwise fall back to the first listed release.
    releases = platform_downloads.get('releases', [{}])
    release = next((r for r in releases if r['distro'] == plexpy.CONFIG.PMS_UPDATE_DISTRO and
                    r['build'] == plexpy.CONFIG.PMS_UPDATE_DISTRO_BUILD), releases[0])

    download_info = {'update_available': v_new > v_old,
                     'platform': platform_downloads.get('name'),
                     'release_date': platform_downloads.get('release_date'),
                     'version': platform_downloads.get('version'),
                     'requirements': platform_downloads.get('requirements'),
                     'extra_info': platform_downloads.get('extra_info'),
                     'changelog_added': platform_downloads.get('items_added'),
                     'changelog_fixed': platform_downloads.get('items_fixed'),
                     'label': release.get('label'),
                     'distro': release.get('distro'),
                     'distro_build': release.get('build'),
                     'download_url': release.get('url'),
                     }

    return download_info
def refresh_users():
    """Refresh the local users table from the plex.tv shared-users list.

    Upserts every returned user, preserves admin-set custom avatars, and
    marks users no longer present as inactive.

    Returns:
        True on success, False when the list could not be fetched,
        None when no PMS identifier is configured.
    """
    logger.info("Tautulli Users :: Requesting users list refresh...")
    result = plextv.PlexTV().get_full_users_list()

    server_id = plexpy.CONFIG.PMS_IDENTIFIER
    if not server_id:
        logger.error("Tautulli Users :: No PMS identifier, cannot refresh users. Verify server in settings.")
        return

    if result:
        monitor_db = database.MonitorDatabase()

        # Keep track of user_id to update is_active status
        user_ids = [0]  # Local user always considered active

        for item in result:
            user_ids.append(helpers.cast_to_int(item['user_id']))

            # Shared library ids are stored as a ';'-joined string.
            # NOTE(review): a user with a server_token gets every section id
            # instead -- presumably the server owner; confirm against plextv.
            if item.get('shared_libraries'):
                item['shared_libraries'] = ';'.join(item['shared_libraries'])
            elif item.get('server_token'):
                libs = libraries.Libraries().get_sections()
                item['shared_libraries'] = ';'.join([str(l['section_id']) for l in libs])

            # user_id is popped out of the value dict so it only appears in
            # the upsert key.
            keys_dict = {"user_id": item.pop('user_id')}

            # Check if we've set a custom avatar if so don't overwrite it.
            if keys_dict['user_id']:
                avatar_urls = monitor_db.select('SELECT thumb, custom_avatar_url '
                                                'FROM users WHERE user_id = ?',
                                                [keys_dict['user_id']])
                if avatar_urls:
                    # Only sync the avatar when no custom one is set, or the
                    # "custom" one is just the previously synced thumb.
                    if not avatar_urls[0]['custom_avatar_url'] or \
                            avatar_urls[0]['custom_avatar_url'] == avatar_urls[0]['thumb']:
                        item['custom_avatar_url'] = item['thumb']
                else:
                    item['custom_avatar_url'] = item['thumb']

            # Check if title is the same as the username
            if item['title'] == item['username']:
                item['title'] = None

            monitor_db.upsert('users', key_dict=keys_dict, value_dict=item)

        # Deactivate any user not present in the refreshed list.
        query = 'UPDATE users SET is_active = 0 WHERE user_id NOT IN ({})'.format(', '.join(['?'] * len(user_ids)))
        monitor_db.action(query=query, args=user_ids)

        logger.info("Tautulli Users :: Users list refreshed.")
        return True
    else:
        logger.warn("Tautulli Users :: Unable to refresh users list.")
        return False
def settings(self, getConfigVal=None, paramName=None, paramVal=None):
    """Read or write a single configuration value.

    With paramName+paramVal: parse paramVal according to the default type in
    _CONFIG_DEFINITIONS and store it; returns "OK." on success, "Nope." on
    failure. With getConfigVal: return the whole config as a JSON string,
    grouped by category. With no arguments: returns "Nope.".
    """
    if paramName is not None and paramVal is not None:
        try:
            # Bug fix: the original compared with 'is' (object identity),
            # which is never reliable for freshly-built strings -- the
            # "has it changed?" check was effectively always true. Use
            # equality; an unchanged value now skips the redundant write
            # but still reports success.
            if str(pacvert.CONFIG.__getattr__(paramName)) != paramVal:
                default_type = type(_CONFIG_DEFINITIONS[paramName][2])
                if default_type is dict:
                    # Parse "k1:v1,k2:v2" into a dict of strings.
                    result = {}
                    if len(paramVal) > 0:
                        for elem in str(paramVal).split(","):
                            secondSplit = elem.split(":")
                            result[secondSplit[0]] = str(secondSplit[1])
                    pacvert.CONFIG.__setattr__(paramName, result)
                elif default_type is list:
                    pacvert.CONFIG.__setattr__(paramName, paramVal.split(","))
                elif default_type is float:
                    pacvert.CONFIG.__setattr__(paramName, cast_to_float(paramVal))
                elif default_type is int:
                    pacvert.CONFIG.__setattr__(paramName, cast_to_int(paramVal))
                else:
                    pacvert.CONFIG.__setattr__(paramName, paramVal)
                pacvert.CONFIG.FIRST_RUN_COMPLETE = True
            return "OK."
        except Exception:
            # Bug fix: was a bare 'except:', which also swallowed
            # KeyboardInterrupt/SystemExit.
            return "Nope."

    if getConfigVal is not None:
        tempConfig = {'General': {}, 'CodecSettings': {}, 'Advanced': {}}
        # Map the exact default type to its reported name; type() comparisons
        # are exact, so bool does not collapse into int here.
        type_names = {str: "str", int: "int", float: "float",
                      dict: "dict", bool: "bool", list: "list"}
        for element in _CONFIG_DEFINITIONS:
            thistype = type_names.get(type(_CONFIG_DEFINITIONS[element][2]), "unknown")
            tempConfig[_CONFIG_DEFINITIONS[element][1]][element] = {'value': pacvert.CONFIG.__getattr__(element),
                                                                    'type': thistype}
        return json.dumps(tempConfig)

    return "Nope."  # TODO: serve_template(templatename="settings.html", title="Settings")
def _validate_config(self, config=None, default=None): if config is None: return default new_config = {} for k, v in default.items(): if isinstance(v, int): new_config[k] = helpers.cast_to_int(config.get(k, v)) elif isinstance(v, list): c = config.get(k, v) if not isinstance(c, list): new_config[k] = [c] else: new_config[k] = c else: new_config[k] = config.get(k, v) return new_config
def createThumbs(self):
    """Create 10 evenly-spaced thumbnails for crop-rectangle analysis.

    Reads the frame count/rate from the file's mediainfo, splits the frame
    range into 10 chunks, and writes one JPEG per chunk next to the source
    file. Errors are logged, not raised.
    """
    c = Converter()
    try:
        frame_count = helpers.getFrameCountFromMediainfo(self.mediainfo)
        if frame_count == -1:
            logger.error("We got a negative frame count from mediainfo.")
            raise ValueError("We got a negative frame count from mediainfo.")
        frame_rate = helpers.getFrameRateFromMediaInfo(self.mediainfo)
        chunks = helpers.genChunks(frame_count, 10)
        filedirectory = helpers.fullpathToPath(self.fullpath)
        for i in range(10):
            logger.debug("Creating thumb #" + str(i) + " for " + self.fullpath)
            # Seek position: the chunk's frame offset converted to seconds.
            c.thumbnail(self.fullpath, helpers.cast_to_int(chunks[i] / frame_rate),
                        filedirectory + '/' + str(i) + '.jpg', None, 5)
    except Exception as e:
        # Bug fix: not every exception carries .message/.cmd (the ValueError
        # raised above has neither, and .message is gone in Python 3), so the
        # old handler crashed with AttributeError. Format defensively.
        logger.error("ffmpeg: " + str(e) + " with command: " + str(getattr(e, 'cmd', '')))
def process(self):
    """Handle one Plex timeline event for the recently-added queue.

    Decodes the timeline payload, and on a 'created' library event records
    the item (plus its parent/grandparent grouping for episodes/tracks and
    seasons/albums) in the module-level RECENTLY_ADDED_QUEUE, scheduling a
    delayed callback to flush the queue. Delete events remove queue entries
    and cancel the callback.
    """
    if self.is_item():
        global RECENTLY_ADDED_QUEUE

        rating_key = self.get_rating_key()

        # Plex timeline 'type' codes -> media type names.
        media_types = {1: 'movie',
                       2: 'show',
                       3: 'season',
                       4: 'episode',
                       8: 'artist',
                       9: 'album',
                       10: 'track'}

        identifier = self.timeline.get('identifier')
        state_type = self.timeline.get('state')
        media_type = media_types.get(self.timeline.get('type'))
        section_id = helpers.cast_to_int(self.timeline.get('sectionID', 0))
        title = self.timeline.get('title', 'Unknown')
        metadata_state = self.timeline.get('metadataState')
        media_state = self.timeline.get('mediaState')
        queue_size = self.timeline.get('queueSize')

        # Return if it is not a library event (i.e. DVR EPG event)
        if identifier != 'com.plexapp.plugins.library':
            return

        # Add a new media item to the recently added queue
        if media_type and section_id > 0 and \
                ((state_type == 0 and metadata_state == 'created')):  # or \
                #(plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_UPGRADE and state_type in (1, 5) and \
                #media_state == 'analyzing' and queue_size is None)):

            if media_type in ('episode', 'track'):
                metadata = self.get_metadata()
                if metadata:
                    grandparent_title = metadata['grandparent_title']
                    grandparent_rating_key = int(metadata['grandparent_rating_key'])
                    parent_rating_key = int(metadata['parent_rating_key'])

                    # Link show -> seasons, season -> episodes, and episode ->
                    # show in the queue so the flush can group notifications.
                    grandparent_set = RECENTLY_ADDED_QUEUE.get(grandparent_rating_key, set())
                    grandparent_set.add(parent_rating_key)
                    RECENTLY_ADDED_QUEUE[grandparent_rating_key] = grandparent_set

                    parent_set = RECENTLY_ADDED_QUEUE.get(parent_rating_key, set())
                    parent_set.add(rating_key)
                    RECENTLY_ADDED_QUEUE[parent_rating_key] = parent_set

                    RECENTLY_ADDED_QUEUE[rating_key] = set([grandparent_rating_key])

                    logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s, grandparent %s) added to recently added queue."
                                 % (title, str(rating_key), str(grandparent_rating_key)))

                    # Schedule a callback to clear the recently added queue
                    schedule_callback('rating_key-{}'.format(grandparent_rating_key),
                                      func=clear_recently_added_queue,
                                      args=[grandparent_rating_key, grandparent_title],
                                      seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)

            elif media_type in ('season', 'album'):
                metadata = self.get_metadata()
                if metadata:
                    parent_title = metadata['parent_title']
                    parent_rating_key = int(metadata['parent_rating_key'])

                    parent_set = RECENTLY_ADDED_QUEUE.get(parent_rating_key, set())
                    parent_set.add(rating_key)
                    RECENTLY_ADDED_QUEUE[parent_rating_key] = parent_set

                    logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s , parent %s) added to recently added queue."
                                 % (title, str(rating_key), str(parent_rating_key)))

                    # Schedule a callback to clear the recently added queue
                    schedule_callback('rating_key-{}'.format(parent_rating_key),
                                      func=clear_recently_added_queue,
                                      args=[parent_rating_key, parent_title],
                                      seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)

            else:
                # Top-level item (movie/show/artist): queue it on its own key.
                queue_set = RECENTLY_ADDED_QUEUE.get(rating_key, set())
                RECENTLY_ADDED_QUEUE[rating_key] = queue_set

                logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s) added to recently added queue."
                             % (title, str(rating_key)))

                # Schedule a callback to clear the recently added queue
                schedule_callback('rating_key-{}'.format(rating_key),
                                  func=clear_recently_added_queue,
                                  args=[rating_key, title],
                                  seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)

        # A movie, show, or artist is done processing
        elif media_type in ('movie', 'show', 'artist') and section_id > 0 and \
                state_type == 5 and metadata_state is None and queue_size is None and \
                rating_key in RECENTLY_ADDED_QUEUE:
            logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s) done processing metadata."
                         % (title, str(rating_key)))

        # An item was deleted, make sure it is removed from the queue
        elif state_type == 9 and metadata_state == 'deleted':
            # Only drop entries with no pending children still queued.
            if rating_key in RECENTLY_ADDED_QUEUE and not RECENTLY_ADDED_QUEUE[rating_key]:
                logger.debug("Tautulli TimelineHandler :: Library item %s removed from recently added queue."
                             % str(rating_key))
                del_keys(rating_key)

                # Remove the callback if the item is removed
                schedule_callback('rating_key-{}'.format(rating_key), remove_job=True)
def get_media_info_file_sizes(self, section_id=None, rating_key=None):
    """Fill in missing total file sizes in the cached media-info JSON.

    Loads the cached rows for the given section (or single rating key),
    sums child-item sizes for every row without a file_size, and writes
    the cache back out.

    Returns:
        True when processing completed, False on invalid/unknown input.
    """
    if not session.allow_session_library(section_id):
        return False

    if section_id and not str(section_id).isdigit():
        logger.warn(
            u"PlexPy Libraries :: Datatable media info file size called by invalid section_id provided."
        )
        return False
    elif rating_key and not str(rating_key).isdigit():
        logger.warn(
            u"PlexPy Libraries :: Datatable media info file size called by invalid rating_key provided."
        )
        return False

    # Get the library details
    library_details = self.get_details(section_id=section_id)
    if library_details['section_id'] == None:
        logger.debug(
            u"PlexPy Libraries :: Library section_id %s not found." % section_id)
        return False
    # Photo libraries are skipped entirely.
    if library_details['section_type'] == 'photo':
        return False

    rows = []
    # Import media info cache from json file
    if rating_key:
        #logger.debug(u"PlexPy Libraries :: Getting file sizes for rating_key %s." % rating_key)
        try:
            inFilePath = os.path.join(
                plexpy.CONFIG.CACHE_DIR,
                'media_info_%s-%s.json' % (section_id, rating_key))
            with open(inFilePath, 'r') as inFile:
                rows = json.load(inFile)
        except IOError as e:
            # Missing cache is fine; the table refresh path rebuilds it.
            #logger.debug(u"PlexPy Libraries :: No JSON file for rating_key %s." % rating_key)
            #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
            pass
    elif section_id:
        logger.debug(
            u"PlexPy Libraries :: Getting file sizes for section_id %s." %
            section_id)
        try:
            inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,
                                      'media_info_%s.json' % section_id)
            with open(inFilePath, 'r') as inFile:
                rows = json.load(inFile)
        except IOError as e:
            #logger.debug(u"PlexPy Libraries :: No JSON file for library section_id %s." % section_id)
            #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for section_id %s."
            #             % section_id)
            pass

    # Get the total file size for each item
    pms_connect = pmsconnect.PmsConnect()

    for item in rows:
        # Only rows that have a rating key but no size yet are summed.
        if item['rating_key'] and not item['file_size']:
            file_size = 0

            child_metadata = pms_connect.get_metadata_children_details(
                rating_key=item['rating_key'],
                get_children=True,
                get_media_info=True)
            metadata_list = child_metadata['metadata']

            # NOTE(review): the loop variable shadows the outer
            # child_metadata dict -- harmless here, but confusing.
            for child_metadata in metadata_list:
                file_size += helpers.cast_to_int(
                    child_metadata.get('file_size', 0))

            item['file_size'] = file_size

    # Cache the media info to a json file
    if rating_key:
        try:
            outFilePath = os.path.join(
                plexpy.CONFIG.CACHE_DIR,
                'media_info_%s-%s.json' % (section_id, rating_key))
            with open(outFilePath, 'w') as outFile:
                json.dump(rows, outFile)
        except IOError as e:
            logger.debug(
                u"PlexPy Libraries :: Unable to create cache file with file sizes for rating_key %s."
                % rating_key)
    elif section_id:
        try:
            outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,
                                       'media_info_%s.json' % section_id)
            with open(outFilePath, 'w') as outFile:
                json.dump(rows, outFile)
        except IOError as e:
            logger.debug(
                u"PlexPy Libraries :: Unable to create cache file with file sizes for section_id %s."
                % section_id)

    if rating_key:
        #logger.debug(u"PlexPy Libraries :: File sizes updated for rating_key %s." % rating_key)
        pass
    elif section_id:
        logger.debug(
            u"PlexPy Libraries :: File sizes updated for section_id %s." %
            section_id)

    return True
def get_datatables_media_info(self, section_id=None, section_type=None, rating_key=None, refresh=False, kwargs=None):
    """Build the DataTables payload for the library media-info table.

    Combines play counts from the history database with library item
    details (cached to a JSON file per section/rating key), then applies
    the DataTables search/sort/pagination parameters from kwargs.

    Returns:
        dict: DataTables response (records, data, draw, file size totals),
        or default_return on any failure.
    """
    default_return = {
        'recordsFiltered': 0,
        'recordsTotal': 0,
        'draw': 0,
        'data': 'null',
        'error': 'Unable to execute database query.'
    }

    if not session.allow_session_library(section_id):
        return default_return

    if section_id and not str(section_id).isdigit():
        logger.warn(
            u"PlexPy Libraries :: Datatable media info called but invalid section_id provided."
        )
        return default_return
    elif rating_key and not str(rating_key).isdigit():
        logger.warn(
            u"PlexPy Libraries :: Datatable media info called but invalid rating_key provided."
        )
        return default_return
    elif not section_id and not rating_key:
        logger.warn(
            u"PlexPy Libraries :: Datatable media info called but no input provided."
        )
        return default_return

    # Get the library details
    library_details = self.get_details(section_id=section_id)
    if library_details['section_id'] == None:
        logger.debug(
            u"PlexPy Libraries :: Library section_id %s not found." % section_id)
        return default_return

    if not section_type:
        section_type = library_details['section_type']

    # Get play counts from the database
    monitor_db = database.MonitorDatabase()

    # Grouped history counts each watch "session group" once.
    if plexpy.CONFIG.GROUP_HISTORY_TABLES:
        count_by = 'reference_id'
    else:
        count_by = 'id'

    # Aggregate plays at the level matching the section type (show/artist ->
    # grandparent, season/album -> parent, otherwise the item itself).
    if section_type == 'show' or section_type == 'artist':
        group_by = 'grandparent_rating_key'
    elif section_type == 'season' or section_type == 'album':
        group_by = 'parent_rating_key'
    else:
        group_by = 'rating_key'

    try:
        query = 'SELECT MAX(session_history.started) AS last_played, COUNT(DISTINCT session_history.%s) AS play_count, ' \
                'session_history.rating_key, session_history.parent_rating_key, session_history.grandparent_rating_key ' \
                'FROM session_history ' \
                'JOIN session_history_metadata ON session_history.id = session_history_metadata.id ' \
                'WHERE session_history_metadata.section_id = ? ' \
                'GROUP BY session_history.%s ' % (count_by, group_by)
        result = monitor_db.select(query, args=[section_id])
    except Exception as e:
        logger.warn(
            u"PlexPy Libraries :: Unable to execute database query for get_datatables_media_info2: %s."
            % e)
        return default_return

    # Map rating key -> last played / play count for the merge step below.
    watched_list = {}
    for item in result:
        watched_list[str(item[group_by])] = {
            'last_played': item['last_played'],
            'play_count': item['play_count']
        }

    rows = []
    # Import media info cache from json file
    # NOTE(review): library_count is only bound when a cache file loads or
    # the refresh branch runs -- other paths would raise NameError at the
    # final dict build; confirm callers always hit one of those paths.
    if rating_key:
        try:
            inFilePath = os.path.join(
                plexpy.CONFIG.CACHE_DIR,
                'media_info_%s-%s.json' % (section_id, rating_key))
            with open(inFilePath, 'r') as inFile:
                rows = json.load(inFile)
                library_count = len(rows)
        except IOError as e:
            #logger.debug(u"PlexPy Libraries :: No JSON file for rating_key %s." % rating_key)
            #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
            pass
    elif section_id:
        try:
            inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,
                                      'media_info_%s.json' % section_id)
            with open(inFilePath, 'r') as inFile:
                rows = json.load(inFile)
                library_count = len(rows)
        except IOError as e:
            #logger.debug(u"PlexPy Libraries :: No JSON file for library section_id %s." % section_id)
            #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for section_id %s."
            #             % section_id)
            pass

    # If no cache was imported, get all library children items
    cached_items = {d['rating_key']: d['file_size']
                    for d in rows} if not refresh else {}

    if refresh or not rows:
        pms_connect = pmsconnect.PmsConnect()

        if rating_key:
            library_children = pms_connect.get_library_children_details(
                rating_key=rating_key, get_media_info=True)
        elif section_id:
            library_children = pms_connect.get_library_children_details(
                section_id=section_id,
                section_type=section_type,
                get_media_info=True)

        if library_children:
            library_count = library_children['library_count']
            # NOTE(review): 'childern_list' spelling matches the producing
            # API's key -- do not "fix" one side without the other.
            children_list = library_children['childern_list']
        else:
            logger.warn(
                u"PlexPy Libraries :: Unable to get a list of library items."
            )
            return default_return

        new_rows = []
        for item in children_list:
            # Reuse a cached file size when available to avoid recomputing.
            cached_file_size = cached_items.get(item['rating_key'], None)
            file_size = cached_file_size if cached_file_size else item.get(
                'file_size', '')

            row = {
                'section_id': library_details['section_id'],
                'section_type': library_details['section_type'],
                'added_at': item['added_at'],
                'media_type': item['media_type'],
                'rating_key': item['rating_key'],
                'parent_rating_key': item['parent_rating_key'],
                'grandparent_rating_key': item['grandparent_rating_key'],
                'title': item['title'],
                'year': item['year'],
                'media_index': item['media_index'],
                'parent_media_index': item['parent_media_index'],
                'thumb': item['thumb'],
                'container': item.get('container', ''),
                'bitrate': item.get('bitrate', ''),
                'video_codec': item.get('video_codec', ''),
                'video_resolution': item.get('video_resolution', ''),
                'video_framerate': item.get('video_framerate', ''),
                'audio_codec': item.get('audio_codec', ''),
                'audio_channels': item.get('audio_channels', ''),
                'file_size': file_size
            }
            new_rows.append(row)

        rows = new_rows

        if not rows:
            return default_return

    # Cache the media info to a json file
    if rating_key:
        try:
            outFilePath = os.path.join(
                plexpy.CONFIG.CACHE_DIR,
                'media_info_%s-%s.json' % (section_id, rating_key))
            with open(outFilePath, 'w') as outFile:
                json.dump(rows, outFile)
        except IOError as e:
            logger.debug(
                u"PlexPy Libraries :: Unable to create cache file for rating_key %s."
                % rating_key)
    elif section_id:
        try:
            outFilePath = os.path.join(
                plexpy.CONFIG.CACHE_DIR,
                'media_info_%s.json' % section_id)
            with open(outFilePath, 'w') as outFile:
                json.dump(rows, outFile)
        except IOError as e:
            logger.debug(
                u"PlexPy Libraries :: Unable to create cache file for section_id %s."
                % section_id)

    # Update the last_played and play_count
    for item in rows:
        watched_item = watched_list.get(item['rating_key'], None)
        if watched_item:
            item['last_played'] = watched_item['last_played']
            item['play_count'] = watched_item['play_count']
        else:
            item['last_played'] = None
            item['play_count'] = None

    results = []

    # Get datatables JSON data
    # NOTE(review): json_data is only bound inside this if -- a request
    # without 'json_data' would raise NameError below; confirm callers
    # always supply it.
    if kwargs.get('json_data'):
        json_data = helpers.process_json_kwargs(
            json_kwargs=kwargs.get('json_data'))
        #print json_data

    # Search results
    search_value = json_data['search']['value'].lower()
    if search_value:
        searchable_columns = [
            d['data'] for d in json_data['columns'] if d['searchable']
        ]
        for row in rows:
            # NOTE(review): iteritems() is Python 2 only (items() in py3);
            # also assumes every searchable value supports .lower().
            for k, v in row.iteritems():
                if k in searchable_columns and search_value in v.lower():
                    results.append(row)
                    break
    else:
        results = rows

    filtered_count = len(results)

    # Sort results: base title sort, then apply each requested column order
    # in reverse so the first column wins (stable sorts).
    results = sorted(results, key=lambda k: k['title'])
    sort_order = json_data['order']
    for order in reversed(sort_order):
        sort_key = json_data['columns'][int(order['column'])]['data']
        reverse = True if order['dir'] == 'desc' else False
        if rating_key and sort_key == 'title':
            # Children of a single item sort by their index, not title.
            results = sorted(
                results,
                key=lambda k: helpers.cast_to_int(k['media_index']),
                reverse=reverse)
        elif sort_key == 'file_size' or sort_key == 'bitrate':
            results = sorted(
                results,
                key=lambda k: helpers.cast_to_int(k[sort_key]),
                reverse=reverse)
        elif sort_key == 'video_resolution':
            # Normalize '4k' to '2160p' then strip the 'p' for numeric sort.
            results = sorted(
                results,
                key=lambda k: helpers.cast_to_int(k[sort_key].replace(
                    '4k', '2160p').rstrip('p')),
                reverse=reverse)
        else:
            results = sorted(results, key=lambda k: k[sort_key],
                             reverse=reverse)

    total_file_size = sum(
        [helpers.cast_to_int(d['file_size']) for d in results])

    # Paginate results
    results = results[json_data['start']:(json_data['start'] +
                                          json_data['length'])]

    filtered_file_size = sum(
        [helpers.cast_to_int(d['file_size']) for d in results])

    # NOTE(review): 'dict' shadows the builtin within this scope.
    dict = {
        'recordsFiltered': filtered_count,
        'recordsTotal': library_count,
        'data': results,
        'draw': int(json_data['draw']),
        'filtered_file_size': filtered_file_size,
        'total_file_size': total_file_size
    }

    return dict
def refresh_libraries():
    """Refresh the library_sections table from the PMS library list.

    Upserts every section reported by the server, re-adds the Live TV
    pseudo-library, deactivates sections that disappeared (or belong to a
    different server), and keeps the home-page library cards in sync.

    Returns:
        True on success, False when the list could not be fetched,
        None when no PMS identifier is configured.
    """
    logger.info("Tautulli Libraries :: Requesting libraries list refresh...")

    server_id = plexpy.CONFIG.PMS_IDENTIFIER
    if not server_id:
        logger.error("Tautulli Libraries :: No PMS identifier, cannot refresh libraries. Verify server in settings.")
        return

    library_sections = pmsconnect.PmsConnect().get_library_details()

    if library_sections:
        monitor_db = database.MonitorDatabase()

        library_keys = []
        new_keys = []

        # Keep track of section_id to update is_active status
        section_ids = [common.LIVE_TV_SECTION_ID]  # Live TV library always considered active

        for section in library_sections:
            section_ids.append(helpers.cast_to_int(section['section_id']))

            section_keys = {'server_id': server_id,
                            'section_id': section['section_id']}
            section_values = {'server_id': server_id,
                              'section_id': section['section_id'],
                              'section_name': section['section_name'],
                              'section_type': section['section_type'],
                              'agent': section['agent'],
                              'thumb': section['thumb'],
                              'art': section['art'],
                              'count': section['count'],
                              'parent_count': section.get('parent_count', None),
                              'child_count': section.get('child_count', None),
                              'is_active': section['is_active']
                              }

            result = monitor_db.upsert('library_sections', key_dict=section_keys, value_dict=section_values)

            library_keys.append(section['section_id'])

            # 'insert' means the section was not in the table before.
            if result == 'insert':
                new_keys.append(section['section_id'])

        add_live_tv_library(refresh=True)

        # Deactivate sections belonging to another server or no longer
        # present in the refreshed list.
        query = 'UPDATE library_sections SET is_active = 0 WHERE server_id != ? OR ' \
                'section_id NOT IN ({})'.format(', '.join(['?'] * len(section_ids)))
        monitor_db.action(query=query, args=[plexpy.CONFIG.PMS_IDENTIFIER] + section_ids)

        if plexpy.CONFIG.HOME_LIBRARY_CARDS == ['first_run_wizard']:
            # First run: show every library on the home page.
            plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', library_keys)
            plexpy.CONFIG.write()
        else:
            # Otherwise append only the newly discovered sections.
            new_keys = plexpy.CONFIG.HOME_LIBRARY_CARDS + new_keys
            plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', new_keys)
            plexpy.CONFIG.write()

        logger.info("Tautulli Libraries :: Libraries list refreshed.")
        return True
    else:
        logger.warn("Tautulli Libraries :: Unable to refresh libraries list.")
        return False
def _get_recently_added(self, media_type=None):
    """Collect recently added items of one media type for the newsletter.

    Pages through the PMS recently-added feed (10 at a time) until a page
    yields items older than self.start_time, then reshapes the results:
    movies are filtered by time window; shows are grouped into seasons and
    episode ranges; artists are grouped into albums.

    Returns:
        list: filtered/grouped metadata dicts for the requested type.
    """
    from plexpy.notification_handler import format_group_index

    pms_connect = pmsconnect.PmsConnect()

    recently_added = []
    done = False
    start = 0

    while not done:
        recent_items = pms_connect.get_recently_added_details(
            start=str(start), count='10', media_type=media_type)
        filtered_items = [
            i for i in recent_items['recently_added']
            if self.start_time < helpers.cast_to_int(i['added_at'])
        ]
        # A short page means we've paged past the start of the window.
        if len(filtered_items) < 10:
            done = True
        else:
            start += 10
        recently_added.extend(filtered_items)

    if media_type in ('movie', 'other_video'):
        movie_list = []
        for item in recently_added:
            # Filter included libraries
            if item['section_id'] not in self.config['incl_libraries']:
                continue

            if self.start_time < helpers.cast_to_int(
                    item['added_at']) < self.end_time:
                movie_list.append(item)

        recently_added = movie_list

    if media_type == 'show':
        shows_list = []
        show_rating_keys = []
        for item in recently_added:
            # Filter included libraries
            if item['section_id'] not in self.config['incl_libraries']:
                continue

            # Resolve whichever level was added (show/season/episode) to
            # its top-level show rating key.
            # NOTE(review): an unexpected media_type here would leave
            # show_rating_key bound to the previous iteration's value.
            if item['media_type'] == 'show':
                show_rating_key = item['rating_key']
            elif item['media_type'] == 'season':
                show_rating_key = item['parent_rating_key']
            elif item['media_type'] == 'episode':
                show_rating_key = item['grandparent_rating_key']

            if show_rating_key in show_rating_keys:
                continue

            show_metadata = pms_connect.get_metadata_details(
                show_rating_key, media_info=False)
            children = pms_connect.get_item_children(
                show_rating_key, get_grandchildren=True)
            filtered_children = [
                i for i in children['children_list']
                if self.start_time < helpers.cast_to_int(i['added_at']) < self.end_time
            ]
            # Sort by season index so groupby sees contiguous seasons.
            filtered_children.sort(
                key=lambda x: helpers.cast_to_int(x['parent_media_index']))

            if not filtered_children:
                continue

            seasons = []
            for (index, title), children in groupby(
                    filtered_children,
                    key=lambda x: (x['parent_media_index'], x['parent_title'])):
                episodes = list(children)
                num, num00 = format_group_index([
                    helpers.cast_to_int(d['media_index']) for d in episodes
                ])

                seasons.append({
                    'media_index': index,
                    'title': title,
                    'episode_range': num00,
                    'episode_count': len(episodes),
                    'episode': episodes
                })

            num, num00 = format_group_index(
                [helpers.cast_to_int(d['media_index']) for d in seasons])

            show_metadata['season_range'] = num00
            show_metadata['season_count'] = len(seasons)
            show_metadata['season'] = seasons

            shows_list.append(show_metadata)
            show_rating_keys.append(show_rating_key)

        recently_added = shows_list

    if media_type == 'artist':
        artists_list = []
        artist_rating_keys = []
        for item in recently_added:
            # Filter included libraries
            if item['section_id'] not in self.config['incl_libraries']:
                continue

            # Resolve artist/album/track to the top-level artist key.
            if item['media_type'] == 'artist':
                artist_rating_key = item['rating_key']
            elif item['media_type'] == 'album':
                artist_rating_key = item['parent_rating_key']
            elif item['media_type'] == 'track':
                artist_rating_key = item['grandparent_rating_key']

            if artist_rating_key in artist_rating_keys:
                continue

            artist_metadata = pms_connect.get_metadata_details(
                artist_rating_key, media_info=False)
            children = pms_connect.get_item_children(artist_rating_key)
            filtered_children = [
                i for i in children['children_list']
                if self.start_time < helpers.cast_to_int(i['added_at']) < self.end_time
            ]
            filtered_children.sort(key=lambda x: x['added_at'])

            if not filtered_children:
                continue

            albums = []
            for a in filtered_children:
                album_metadata = pms_connect.get_metadata_details(
                    a['rating_key'], media_info=False)
                album_metadata['track_count'] = helpers.cast_to_int(
                    album_metadata['children_count'])
                albums.append(album_metadata)

            artist_metadata['album_count'] = len(albums)
            artist_metadata['album'] = albums

            artists_list.append(artist_metadata)
            artist_rating_keys.append(artist_rating_key)

        recently_added = artists_list

    return recently_added
def get_media_info_file_sizes(self, section_id=None, rating_key=None):
    """Populate missing 'file_size' totals in the cached media-info JSON.

    Loads the cached rows for the given section (or single rating key),
    sums child-item sizes for every row that has none, and writes the
    cache back to disk.

    Returns:
        True when the cache was processed, False on invalid/unknown input.
    """
    if not session.allow_session_library(section_id):
        return False

    # Reject non-numeric identifiers up front.
    if section_id and not str(section_id).isdigit():
        logger.warn(u"PlexPy Libraries :: Datatable media info file size called by invalid section_id provided.")
        return False
    elif rating_key and not str(rating_key).isdigit():
        logger.warn(u"PlexPy Libraries :: Datatable media info file size called by invalid rating_key provided.")
        return False

    # Look up the library; bail out when unknown or a photo section.
    library_details = self.get_details(section_id=section_id)
    if library_details['section_id'] == None:
        logger.debug(u"PlexPy Libraries :: Library section_id %s not found." % section_id)
        return False
    if library_details['section_type'] == 'photo':
        return False

    rows = []
    # Read the cached media-info rows from disk (a missing cache is fine).
    if rating_key:
        #logger.debug(u"PlexPy Libraries :: Getting file sizes for rating_key %s." % rating_key)
        cache_path = os.path.join(plexpy.CONFIG.CACHE_DIR, 'media_info_%s-%s.json' % (section_id, rating_key))
        try:
            with open(cache_path, 'r') as cache_file:
                rows = json.load(cache_file)
        except IOError:
            #logger.debug(u"PlexPy Libraries :: No JSON file for rating_key %s." % rating_key)
            #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
            pass
    elif section_id:
        logger.debug(u"PlexPy Libraries :: Getting file sizes for section_id %s." % section_id)
        cache_path = os.path.join(plexpy.CONFIG.CACHE_DIR, 'media_info_%s.json' % section_id)
        try:
            with open(cache_path, 'r') as cache_file:
                rows = json.load(cache_file)
        except IOError:
            #logger.debug(u"PlexPy Libraries :: No JSON file for library section_id %s." % section_id)
            #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
            pass

    # Sum up child file sizes for every row still missing a total.
    pms_connect = pmsconnect.PmsConnect()
    for entry in rows:
        if entry['rating_key'] and not entry['file_size']:
            children = pms_connect.get_metadata_children_details(rating_key=entry['rating_key'],
                                                                 get_children=True,
                                                                 get_media_info=True)
            total = 0
            for child in children['metadata']:
                total += helpers.cast_to_int(child.get('file_size', 0))
            entry['file_size'] = total

    # Write the updated rows back to the cache file.
    if rating_key:
        out_path = os.path.join(plexpy.CONFIG.CACHE_DIR, 'media_info_%s-%s.json' % (section_id, rating_key))
        try:
            with open(out_path, 'w') as cache_file:
                json.dump(rows, cache_file)
        except IOError:
            logger.debug(u"PlexPy Libraries :: Unable to create cache file with file sizes for rating_key %s." % rating_key)
    elif section_id:
        out_path = os.path.join(plexpy.CONFIG.CACHE_DIR, 'media_info_%s.json' % section_id)
        try:
            with open(out_path, 'w') as cache_file:
                json.dump(rows, cache_file)
        except IOError:
            logger.debug(u"PlexPy Libraries :: Unable to create cache file with file sizes for section_id %s." % section_id)

    if rating_key:
        #logger.debug(u"PlexPy Libraries :: File sizes updated for rating_key %s." % rating_key)
        pass
    elif section_id:
        logger.debug(u"PlexPy Libraries :: File sizes updated for section_id %s." % section_id)

    return True
def get_datatables_media_info(self, section_id=None, section_type=None, rating_key=None, refresh=False, kwargs=None):
    """Serve the library media-info datatable.

    Combines play counts/last-played from the local database with item media
    info from the Plex server (cached in a per-section or per-rating_key JSON
    file), then applies datatables search/sort/pagination from ``kwargs``.

    Returns a datatables-shaped dict; ``default_return`` on any failure.
    """
    # Fallback payload returned on validation or query failure.
    default_return = {'recordsFiltered': 0,
                      'recordsTotal': 0,
                      'draw': 0,
                      'data': 'null',
                      'error': 'Unable to execute database query.'}

    if not session.allow_session_library(section_id):
        return default_return

    # Input validation: need at least one numeric id.
    if section_id and not str(section_id).isdigit():
        logger.warn(u"PlexPy Libraries :: Datatable media info called but invalid section_id provided.")
        return default_return
    elif rating_key and not str(rating_key).isdigit():
        logger.warn(u"PlexPy Libraries :: Datatable media info called but invalid rating_key provided.")
        return default_return
    elif not section_id and not rating_key:
        logger.warn(u"PlexPy Libraries :: Datatable media info called but no input provided.")
        return default_return

    # Get the library details
    library_details = self.get_details(section_id=section_id)
    if library_details['section_id'] == None:
        logger.debug(u"PlexPy Libraries :: Library section_id %s not found." % section_id)
        return default_return

    if not section_type:
        section_type = library_details['section_type']

    # Get play counts from the database
    monitor_db = database.MonitorDatabase()

    # When history grouping is enabled, distinct reference_ids count as one play.
    if plexpy.CONFIG.GROUP_HISTORY_TABLES:
        count_by = 'reference_id'
    else:
        count_by = 'id'

    # Aggregate history rows at the level the table displays:
    # shows/artists by grandparent, seasons/albums by parent, else by item.
    if section_type == 'show' or section_type == 'artist':
        group_by = 'grandparent_rating_key'
    elif section_type == 'season' or section_type == 'album':
        group_by = 'parent_rating_key'
    else:
        group_by = 'rating_key'

    try:
        query = 'SELECT MAX(session_history.started) AS last_played, COUNT(DISTINCT session_history.%s) AS play_count, ' \
                'session_history.rating_key, session_history.parent_rating_key, session_history.grandparent_rating_key ' \
                'FROM session_history ' \
                'JOIN session_history_metadata ON session_history.id = session_history_metadata.id ' \
                'WHERE session_history_metadata.section_id = ? ' \
                'GROUP BY session_history.%s ' % (count_by, group_by)
        result = monitor_db.select(query, args=[section_id])
    except Exception as e:
        logger.warn(u"PlexPy Libraries :: Unable to execute database query for get_datatables_media_info2: %s." % e)
        return default_return

    # Index watch data by the grouping key (stringified to match the JSON cache).
    watched_list = {}
    for item in result:
        watched_list[str(item[group_by])] = {'last_played': item['last_played'],
                                             'play_count': item['play_count']}

    rows = []
    # Import media info cache from json file; IOError means no cache exists yet
    # and we fall through to a server refresh below.
    if rating_key:
        try:
            inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key))
            with open(inFilePath, 'r') as inFile:
                rows = json.load(inFile)
                library_count = len(rows)
        except IOError as e:
            #logger.debug(u"PlexPy Libraries :: No JSON file for rating_key %s." % rating_key)
            #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
            pass
    elif section_id:
        try:
            inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
            with open(inFilePath, 'r') as inFile:
                rows = json.load(inFile)
                library_count = len(rows)
        except IOError as e:
            #logger.debug(u"PlexPy Libraries :: No JSON file for library section_id %s." % section_id)
            #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
            pass

    # If no cache was imported, get all library children items
    # Previously known file sizes are reused so a refresh doesn't lose them.
    cached_items = {d['rating_key']: d['file_size'] for d in rows}

    if refresh or not rows:
        pms_connect = pmsconnect.PmsConnect()

        if rating_key:
            library_children = pms_connect.get_library_children_details(rating_key=rating_key,
                                                                        get_media_info=True)
        elif section_id:
            library_children = pms_connect.get_library_children_details(section_id=section_id,
                                                                        section_type=section_type,
                                                                        get_media_info=True)

        if library_children:
            library_count = library_children['library_count']
            # NOTE(review): 'childern_list' spelling must match the key produced
            # by pmsconnect — verify before "fixing" it here.
            children_list = library_children['childern_list']
        else:
            logger.warn(u"PlexPy Libraries :: Unable to get a list of library items.")
            return default_return

        new_rows = []
        for item in children_list:
            # Prefer the cached size; fall back to the freshly fetched one.
            cached_file_size = cached_items.get(item['rating_key'], None)
            file_size = cached_file_size if cached_file_size else item.get('file_size', '')

            row = {'section_id': library_details['section_id'],
                   'section_type': library_details['section_type'],
                   'added_at': item['added_at'],
                   'media_type': item['media_type'],
                   'rating_key': item['rating_key'],
                   'parent_rating_key': item['parent_rating_key'],
                   'grandparent_rating_key': item['grandparent_rating_key'],
                   'title': item['title'],
                   'year': item['year'],
                   'media_index': item['media_index'],
                   'parent_media_index': item['parent_media_index'],
                   'thumb': item['thumb'],
                   'container': item.get('container', ''),
                   'bitrate': item.get('bitrate', ''),
                   'video_codec': item.get('video_codec', ''),
                   'video_resolution': item.get('video_resolution', ''),
                   'video_framerate': item.get('video_framerate', ''),
                   'audio_codec': item.get('audio_codec', ''),
                   'audio_channels': item.get('audio_channels', ''),
                   'file_size': file_size
                   }
            new_rows.append(row)

        rows = new_rows
        if not rows:
            return default_return

    # Cache the media info to a json file
    if rating_key:
        try:
            outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key))
            with open(outFilePath, 'w') as outFile:
                json.dump(rows, outFile)
        except IOError as e:
            logger.debug(u"PlexPy Libraries :: Unable to create cache file for rating_key %s." % rating_key)
    elif section_id:
        try:
            outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
            with open(outFilePath, 'w') as outFile:
                json.dump(rows, outFile)
        except IOError as e:
            logger.debug(u"PlexPy Libraries :: Unable to create cache file for section_id %s." % section_id)

    # Update the last_played and play_count
    for item in rows:
        watched_item = watched_list.get(item['rating_key'], None)
        if watched_item:
            item['last_played'] = watched_item['last_played']
            item['play_count'] = watched_item['play_count']
        else:
            item['last_played'] = None
            item['play_count'] = None

    results = []

    # Get datatables JSON data
    if kwargs.get('json_data'):
        json_data = helpers.process_json_kwargs(json_kwargs=kwargs.get('json_data'))
        #print json_data

    # Search results — substring match across the columns datatables marks
    # searchable. (Py2-era: row.iteritems().)
    search_value = json_data['search']['value'].lower()
    if search_value:
        searchable_columns = [d['data'] for d in json_data['columns'] if d['searchable']]
        for row in rows:
            for k,v in row.iteritems():
                if k in searchable_columns and search_value in v.lower():
                    results.append(row)
                    break
    else:
        results = rows

    filtered_count = len(results)

    # Sort results: stable sort by title first, then apply the datatables
    # order list in reverse so the first order entry takes priority.
    results = sorted(results, key=lambda k: k['title'])
    sort_order = json_data['order']
    for order in reversed(sort_order):
        sort_key = json_data['columns'][int(order['column'])]['data']
        reverse = True if order['dir'] == 'desc' else False
        if rating_key and sort_key == 'title':
            # Inside a single item, "title" order means episode/track index.
            results = sorted(results, key=lambda k: helpers.cast_to_int(k['media_index']), reverse=reverse)
        elif sort_key == 'file_size' or sort_key == 'bitrate':
            # Numeric columns are stored as strings; compare as ints.
            results = sorted(results, key=lambda k: helpers.cast_to_int(k[sort_key]), reverse=reverse)
        else:
            results = sorted(results, key=lambda k: k[sort_key], reverse=reverse)

    total_file_size = sum([helpers.cast_to_int(d['file_size']) for d in results])

    # Paginate results
    results = results[json_data['start']:(json_data['start'] + json_data['length'])]

    filtered_file_size = sum([helpers.cast_to_int(d['file_size']) for d in results])

    # NOTE: 'dict' shadows the builtin here; kept as-is to avoid code changes.
    dict = {'recordsFiltered': filtered_count,
            'recordsTotal': library_count,
            'data': results,
            'draw': int(json_data['draw']),
            'filtered_file_size': filtered_file_size,
            'total_file_size': total_file_size
            }

    return dict
def process(self):
    """Process a Plex timeline event and maintain the recently-added queue.

    New library items are tracked in the module-global RECENTLY_ADDED_QUEUE
    (keyed by rating_key) and a delayed callback is (re)scheduled to flush the
    queue once the server has finished processing metadata.
    """
    if self.is_item():
        global RECENTLY_ADDED_QUEUE

        rating_key = self.get_rating_key()
        # cast_to_int returns 0 for missing ids; `or None` normalizes that to None.
        parent_rating_key = helpers.cast_to_int(self.timeline.get('parentItemID')) or None
        grandparent_rating_key = helpers.cast_to_int(self.timeline.get('rootItemID')) or None

        identifier = self.timeline.get('identifier')
        state_type = self.timeline.get('state')
        media_type = common.MEDIA_TYPE_VALUES.get(self.timeline.get('type'))
        section_id = helpers.cast_to_int(self.timeline.get('sectionID', 0))
        title = self.timeline.get('title', 'Unknown')
        metadata_state = self.timeline.get('metadataState')
        media_state = self.timeline.get('mediaState')  # read but not used in this handler
        queue_size = self.timeline.get('queueSize')

        # Return if it is not a library event (i.e. DVR EPG event)
        if identifier != 'com.plexapp.plugins.library':
            return

        # Add a new media item to the recently added queue
        if media_type and section_id > 0 and state_type == 0 and metadata_state == 'created':

            if media_type in ('episode', 'track'):
                # Link the item into parent and grandparent sets so a whole
                # season/show (or album/artist) can be notified as one unit.
                grandparent_set = RECENTLY_ADDED_QUEUE.get(grandparent_rating_key, set())
                grandparent_set.add(parent_rating_key)
                RECENTLY_ADDED_QUEUE[grandparent_rating_key] = grandparent_set

                parent_set = RECENTLY_ADDED_QUEUE.get(parent_rating_key, set())
                parent_set.add(rating_key)
                RECENTLY_ADDED_QUEUE[parent_rating_key] = parent_set

                # Leaf entry points back at its grandparent.
                RECENTLY_ADDED_QUEUE[rating_key] = {grandparent_rating_key}

                logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s, grandparent %s) "
                             "added to recently added queue."
                             % (title, str(rating_key), str(grandparent_rating_key)))

                # Schedule a callback to clear the recently added queue
                schedule_callback('rating_key-{}'.format(grandparent_rating_key),
                                  func=clear_recently_added_queue,
                                  args=[grandparent_rating_key],
                                  seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)

            elif media_type in ('season', 'album'):
                # Mid-level container: attach to its parent set only.
                parent_set = RECENTLY_ADDED_QUEUE.get(parent_rating_key, set())
                parent_set.add(rating_key)
                RECENTLY_ADDED_QUEUE[parent_rating_key] = parent_set

                logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s , parent %s) "
                             "added to recently added queue."
                             % (title, str(rating_key), str(parent_rating_key)))

                # Schedule a callback to clear the recently added queue
                schedule_callback('rating_key-{}'.format(parent_rating_key),
                                  func=clear_recently_added_queue,
                                  args=[parent_rating_key],
                                  seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)

            elif media_type in ('movie', 'show', 'artist'):
                # Top-level item: an (initially empty) set acts as the queue marker.
                queue_set = RECENTLY_ADDED_QUEUE.get(rating_key, set())
                RECENTLY_ADDED_QUEUE[rating_key] = queue_set

                logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s) "
                             "added to recently added queue."
                             % (title, str(rating_key)))

                # Schedule a callback to clear the recently added queue
                schedule_callback('rating_key-{}'.format(rating_key),
                                  func=clear_recently_added_queue,
                                  args=[rating_key],
                                  seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)

        # A movie, show, or artist is done processing
        elif media_type in ('movie', 'show', 'artist') and section_id > 0 and \
                state_type == 5 and metadata_state is None and queue_size is None and \
                rating_key in RECENTLY_ADDED_QUEUE:
            logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s) "
                         "done processing metadata."
                         % (title, str(rating_key)))

        # An item was deleted, make sure it is removed from the queue
        elif state_type == 9 and metadata_state == 'deleted':
            # Only drop entries whose set is empty (no pending children).
            if rating_key in RECENTLY_ADDED_QUEUE and not RECENTLY_ADDED_QUEUE[rating_key]:
                logger.debug("Tautulli TimelineHandler :: Library item %s "
                             "removed from recently added queue."
                             % str(rating_key))
                del_keys(rating_key)

                # Remove the callback if the item is removed
                schedule_callback('rating_key-{}'.format(rating_key), remove_job=True)
def write_session_history(self, session=None, import_metadata=None, is_import=False, import_ignore_interval=0):
    """Write a finished (or imported) session to the history tables.

    Applies the configured logging filters (ignore interval, skipped tracks,
    per-user/per-library keep_history), fetches item metadata, then upserts
    into session_history, session_history_media_info and
    session_history_metadata, grouping continued sessions via reference_id.

    Returns the session row id on success, False if user/library/metadata
    lookups fail.
    """
    section_id = session['section_id'] if not is_import else import_metadata['section_id']

    if not is_import:
        user_data = users.Users()
        user_details = user_data.get_details(user_id=session['user_id'])
        library_data = libraries.Libraries()
        library_details = library_data.get_details(section_id=section_id)

        # Return false if failed to retrieve user or library details
        if not user_details or not library_details:
            return False

    if session:
        logging_enabled = False

        # Reload json from raw stream info
        if session.get('raw_stream_info'):
            raw_stream_info = json.loads(session['raw_stream_info'])
            # Don't overwrite id, session_key, stopped, view_offset
            raw_stream_info.pop('id', None)
            raw_stream_info.pop('session_key', None)
            raw_stream_info.pop('stopped', None)
            raw_stream_info.pop('view_offset', None)
            session.update(raw_stream_info)

        # defaultdict(str) makes every missing field an empty string below.
        session = defaultdict(str, session)

        # Resolve the stop timestamp; fall back to "now" and, for live
        # (non-import) sessions, persist the stopped state.
        if is_import:
            if str(session['stopped']).isdigit():
                stopped = int(session['stopped'])
            else:
                stopped = helpers.timestamp()
        elif session['stopped']:
            stopped = int(session['stopped'])
        else:
            stopped = helpers.timestamp()
            self.set_session_state(session_key=session['session_key'],
                                   state='stopped',
                                   stopped=stopped)

        if not is_import:
            self.write_continued_session(user_id=session['user_id'],
                                         machine_id=session['machine_id'],
                                         media_type=session['media_type'],
                                         stopped=stopped)

        # Only movies, episodes, and tracks with a numeric ratingKey are logged.
        if str(session['rating_key']).isdigit() and session['media_type'] in ('movie', 'episode', 'track'):
            logging_enabled = True
        else:
            logger.debug("Tautulli ActivityProcessor :: Session %s ratingKey %s not logged. "
                         "Does not meet logging criteria. Media type is '%s'"
                         % (session['session_key'], session['rating_key'], session['media_type']))
            return session['id']

        # Actual watch time excludes pauses.
        real_play_time = stopped - helpers.cast_to_int(session['started']) - helpers.cast_to_int(session['paused_counter'])

        # Skip video plays shorter than the configured ignore interval.
        if not is_import and plexpy.CONFIG.LOGGING_IGNORE_INTERVAL:
            if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                    (real_play_time < int(plexpy.CONFIG.LOGGING_IGNORE_INTERVAL)):
                logging_enabled = False
                logger.debug("Tautulli ActivityProcessor :: Play duration for session %s ratingKey %s is %s secs "
                             "which is less than %s seconds, so we're not logging it."
                             % (session['session_key'], session['rating_key'],
                                str(real_play_time), plexpy.CONFIG.LOGGING_IGNORE_INTERVAL))
        # Skip tracks that look skipped (<15s played on a >=30s track).
        if not is_import and session['media_type'] == 'track':
            if real_play_time < 15 and helpers.cast_to_int(session['duration']) >= 30:
                logging_enabled = False
                logger.debug("Tautulli ActivityProcessor :: Play duration for session %s ratingKey %s is %s secs, "
                             "looks like it was skipped so we're not logging it"
                             % (session['session_key'], session['rating_key'], str(real_play_time)))
        elif is_import and import_ignore_interval:
            if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                    (real_play_time < int(import_ignore_interval)):
                logging_enabled = False
                logger.debug("Tautulli ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
                             "seconds, so we're not logging it."
                             % (session['rating_key'], str(real_play_time), import_ignore_interval))

        # Honor per-user / per-library keep_history settings.
        if not is_import and not user_details['keep_history']:
            logging_enabled = False
            logger.debug("Tautulli ActivityProcessor :: History logging for user '%s' is disabled."
                         % user_details['username'])
        elif not is_import and not library_details['keep_history']:
            logging_enabled = False
            logger.debug("Tautulli ActivityProcessor :: History logging for library '%s' is disabled."
                         % library_details['section_name'])

        if logging_enabled:

            # Fetch metadata first so we can return false if it fails
            if not is_import:
                logger.debug("Tautulli ActivityProcessor :: Fetching metadata for item ratingKey %s"
                             % session['rating_key'])
                pms_connect = pmsconnect.PmsConnect()
                if session['live']:
                    # Live TV metadata comes from the session cache.
                    metadata = pms_connect.get_metadata_details(rating_key=str(session['rating_key']),
                                                                cache_key=session['session_key'],
                                                                return_cache=True)
                else:
                    metadata = pms_connect.get_metadata_details(rating_key=str(session['rating_key']))
                if not metadata:
                    return False
                else:
                    media_info = {}
                    if 'media_info' in metadata and len(metadata['media_info']) > 0:
                        media_info = metadata['media_info'][0]
            else:
                metadata = import_metadata
                ## TODO: Fix media info from imports. Temporary media info from import session.
                media_info = session

            # logger.debug("Tautulli ActivityProcessor :: Attempting to write sessionKey %s to session_history table..."
            #              % session['session_key'])
            keys = {'id': None}
            values = {'started': session['started'],
                      'stopped': stopped,
                      'rating_key': session['rating_key'],
                      'parent_rating_key': session['parent_rating_key'],
                      'grandparent_rating_key': session['grandparent_rating_key'],
                      'media_type': session['media_type'],
                      'user_id': session['user_id'],
                      'user': session['user'],
                      'ip_address': session['ip_address'],
                      'paused_counter': session['paused_counter'],
                      'player': session['player'],
                      'product': session['product'],
                      'product_version': session['product_version'],
                      'platform': session['platform'],
                      'platform_version': session['platform_version'],
                      'profile': session['profile'],
                      'machine_id': session['machine_id'],
                      'bandwidth': session['bandwidth'],
                      'location': session['location'],
                      'quality_profile': session['quality_profile'],
                      'view_offset': session['view_offset'],
                      'secure': session['secure'],
                      'relayed': session['relayed']
                      }

            # logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history transaction..."
            #              % session['session_key'])
            self.db.upsert(table_name='session_history', key_dict=keys, value_dict=values)

            # Get the last insert row id
            last_id = self.db.last_insert_id()
            new_session = prev_session = None
            prev_progress_percent = media_watched_percent = 0

            if session['live']:
                # Check if we should group the session, select the last guid from the user
                query = 'SELECT session_history.id, session_history_metadata.guid, session_history.reference_id ' \
                        'FROM session_history ' \
                        'JOIN session_history_metadata ON session_history.id == session_history_metadata.id ' \
                        'WHERE session_history.user_id = ? ORDER BY session_history.id DESC LIMIT 1 '
                args = [session['user_id']]

                result = self.db.select(query=query, args=args)

                if len(result) > 0:
                    new_session = {'id': last_id,
                                   'guid': metadata['guid'],
                                   'reference_id': last_id}

                    prev_session = {'id': result[0]['id'],
                                    'guid': result[0]['guid'],
                                    'reference_id': result[0]['reference_id']}

            else:
                # Check if we should group the session, select the last two rows from the user
                query = 'SELECT id, rating_key, view_offset, reference_id FROM session_history ' \
                        'WHERE user_id = ? AND rating_key = ? ORDER BY id DESC LIMIT 2 '
                args = [session['user_id'], session['rating_key']]

                result = self.db.select(query=query, args=args)

                if len(result) > 1:
                    new_session = {'id': result[0]['id'],
                                   'rating_key': result[0]['rating_key'],
                                   'view_offset': result[0]['view_offset'],
                                   'reference_id': result[0]['reference_id']}

                    prev_session = {'id': result[1]['id'],
                                    'rating_key': result[1]['rating_key'],
                                    'view_offset': result[1]['view_offset'],
                                    'reference_id': result[1]['reference_id']}

                    watched_percent = {'movie': plexpy.CONFIG.MOVIE_WATCHED_PERCENT,
                                       'episode': plexpy.CONFIG.TV_WATCHED_PERCENT,
                                       'track': plexpy.CONFIG.MUSIC_WATCHED_PERCENT
                                       }
                    prev_progress_percent = helpers.get_percent(prev_session['view_offset'], session['duration'])
                    media_watched_percent = watched_percent.get(session['media_type'], 0)

            query = 'UPDATE session_history SET reference_id = ? WHERE id = ? '

            # If previous session view offset less than watched percent,
            # and new session view offset is greater,
            # then set the reference_id to the previous row,
            # else set the reference_id to the new id
            # (for live sessions the first `and` clause short-circuits False,
            #  so the guid comparison decides grouping instead)
            if prev_session is None and new_session is None:
                args = [last_id, last_id]
            elif prev_progress_percent < media_watched_percent and \
                    prev_session['view_offset'] <= new_session['view_offset'] or \
                    session['live'] and prev_session['guid'] == new_session['guid']:
                args = [prev_session['reference_id'], new_session['id']]
            else:
                args = [new_session['id'], new_session['id']]

            self.db.action(query=query, args=args)

            # logger.debug("Tautulli ActivityProcessor :: Successfully written history item, last id for session_history is %s"
            #              % last_id)

            # Write the session_history_media_info table

            # logger.debug("Tautulli ActivityProcessor :: Attempting to write to sessionKey %s session_history_media_info table..."
            #              % session['session_key'])
            keys = {'id': last_id}
            values = {'rating_key': session['rating_key'],
                      'video_decision': session['video_decision'],
                      'audio_decision': session['audio_decision'],
                      'transcode_decision': session['transcode_decision'],
                      'duration': session['duration'],
                      'container': session['container'],
                      'bitrate': session['bitrate'],
                      'width': session['width'],
                      'height': session['height'],
                      'video_bit_depth': session['video_bit_depth'],
                      'video_bitrate': session['video_bitrate'],
                      'video_codec': session['video_codec'],
                      'video_codec_level': session['video_codec_level'],
                      'video_width': session['video_width'],
                      'video_height': session['video_height'],
                      'video_resolution': session['video_resolution'],
                      'video_framerate': session['video_framerate'],
                      'video_scan_type': session['video_scan_type'],
                      'video_full_resolution': session['video_full_resolution'],
                      'video_dynamic_range': session['video_dynamic_range'],
                      'aspect_ratio': session['aspect_ratio'],
                      'audio_codec': session['audio_codec'],
                      'audio_bitrate': session['audio_bitrate'],
                      'audio_channels': session['audio_channels'],
                      'audio_language': session['audio_language'],
                      'audio_language_code': session['audio_language_code'],
                      'subtitle_codec': session['subtitle_codec'],
                      'transcode_protocol': session['transcode_protocol'],
                      'transcode_container': session['transcode_container'],
                      'transcode_video_codec': session['transcode_video_codec'],
                      'transcode_audio_codec': session['transcode_audio_codec'],
                      'transcode_audio_channels': session['transcode_audio_channels'],
                      'transcode_width': session['transcode_width'],
                      'transcode_height': session['transcode_height'],
                      'transcode_hw_requested': session['transcode_hw_requested'],
                      'transcode_hw_full_pipeline': session['transcode_hw_full_pipeline'],
                      'transcode_hw_decoding': session['transcode_hw_decoding'],
                      'transcode_hw_decode': session['transcode_hw_decode'],
                      'transcode_hw_decode_title': session['transcode_hw_decode_title'],
                      'transcode_hw_encoding': session['transcode_hw_encoding'],
                      'transcode_hw_encode': session['transcode_hw_encode'],
                      'transcode_hw_encode_title': session['transcode_hw_encode_title'],
                      'stream_container': session['stream_container'],
                      'stream_container_decision': session['stream_container_decision'],
                      'stream_bitrate': session['stream_bitrate'],
                      'stream_video_decision': session['stream_video_decision'],
                      'stream_video_bitrate': session['stream_video_bitrate'],
                      'stream_video_codec': session['stream_video_codec'],
                      'stream_video_codec_level': session['stream_video_codec_level'],
                      'stream_video_bit_depth': session['stream_video_bit_depth'],
                      'stream_video_height': session['stream_video_height'],
                      'stream_video_width': session['stream_video_width'],
                      'stream_video_resolution': session['stream_video_resolution'],
                      'stream_video_framerate': session['stream_video_framerate'],
                      'stream_video_scan_type': session['stream_video_scan_type'],
                      'stream_video_full_resolution': session['stream_video_full_resolution'],
                      'stream_video_dynamic_range': session['stream_video_dynamic_range'],
                      'stream_audio_decision': session['stream_audio_decision'],
                      'stream_audio_codec': session['stream_audio_codec'],
                      'stream_audio_bitrate': session['stream_audio_bitrate'],
                      'stream_audio_channels': session['stream_audio_channels'],
                      'stream_audio_language': session['stream_audio_language'],
                      'stream_audio_language_code': session['stream_audio_language_code'],
                      'stream_subtitle_decision': session['stream_subtitle_decision'],
                      'stream_subtitle_codec': session['stream_subtitle_codec'],
                      'stream_subtitle_container': session['stream_subtitle_container'],
                      'stream_subtitle_forced': session['stream_subtitle_forced'],
                      'subtitles': session['subtitles'],
                      'synced_version': session['synced_version'],
                      'synced_version_profile': session['synced_version_profile'],
                      'optimized_version': session['optimized_version'],
                      'optimized_version_profile': session['optimized_version_profile'],
                      'optimized_version_title': session['optimized_version_title']
                      }

            # logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history_media_info transaction..."
            #              % session['session_key'])
            self.db.upsert(table_name='session_history_media_info', key_dict=keys, value_dict=values)

            # Write the session_history_metadata table
            # People lists are stored as semicolon-delimited strings.
            directors = ";".join(metadata['directors'])
            writers = ";".join(metadata['writers'])
            actors = ";".join(metadata['actors'])
            genres = ";".join(metadata['genres'])
            labels = ";".join(metadata['labels'])

            # logger.debug("Tautulli ActivityProcessor :: Attempting to write to sessionKey %s session_history_metadata table..."
            #              % session['session_key'])
            keys = {'id': last_id}
            values = {'rating_key': session['rating_key'],
                      'parent_rating_key': session['parent_rating_key'],
                      'grandparent_rating_key': session['grandparent_rating_key'],
                      'title': session['title'],
                      'parent_title': session['parent_title'],
                      'grandparent_title': session['grandparent_title'],
                      'original_title': session['original_title'],
                      'full_title': session['full_title'],
                      'media_index': metadata['media_index'],
                      'parent_media_index': metadata['parent_media_index'],
                      'section_id': metadata['section_id'],
                      'thumb': metadata['thumb'],
                      'parent_thumb': metadata['parent_thumb'],
                      'grandparent_thumb': metadata['grandparent_thumb'],
                      'art': metadata['art'],
                      'media_type': session['media_type'],
                      'year': metadata['year'],
                      'originally_available_at': metadata['originally_available_at'],
                      'added_at': metadata['added_at'],
                      'updated_at': metadata['updated_at'],
                      'last_viewed_at': metadata['last_viewed_at'],
                      'content_rating': metadata['content_rating'],
                      'summary': metadata['summary'],
                      'tagline': metadata['tagline'],
                      'rating': metadata['rating'],
                      'duration': metadata['duration'],
                      'guid': metadata['guid'],
                      'directors': directors,
                      'writers': writers,
                      'actors': actors,
                      'genres': genres,
                      'studio': metadata['studio'],
                      'labels': labels,
                      'live': session['live'],
                      'channel_call_sign': media_info.get('channel_call_sign', ''),
                      'channel_identifier': media_info.get('channel_identifier', ''),
                      'channel_thumb': media_info.get('channel_thumb', '')
                      }

            # logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history_metadata transaction..."
            #              % session['session_key'])
            self.db.upsert(table_name='session_history_metadata', key_dict=keys, value_dict=values)

            # Return the session row id when the session is successfully written to the database
            return session['id']