def refresh_users():
    """Refresh the local 'users' table from the Plex.tv shared-users list.

    Pulls the full users list from Plex.tv, resolves each user's shared
    library sections (when a per-user server token is available), and
    upserts each user row into the database.

    Returns:
        bool: True when the Plex.tv lookup succeeded, False otherwise.
    """
    logger.info(u"Tautulli Users :: Requesting users list refresh...")
    result = plextv.PlexTV().get_full_users_list()
    monitor_db = database.MonitorDatabase()
    user_data = Users()

    if result:
        for item in result:
            shared_libraries = ''
            user_tokens = user_data.get_tokens(user_id=item['user_id'])
            if user_tokens and user_tokens['server_token']:
                # Query with the user's own server token so the returned
                # sections reflect that user's sharing restrictions.
                pms_connect = pmsconnect.PmsConnect(token=user_tokens['server_token'])
                library_details = pms_connect.get_server_children()

                if library_details:
                    # Stored as a ';'-delimited string of section ids.
                    shared_libraries = ';'.join(d['section_id'] for d in library_details['libraries_list'])
                else:
                    shared_libraries = ''

            control_value_dict = {"user_id": item['user_id']}
            new_value_dict = {"username": item['username'],
                              "thumb": item['thumb'],
                              "email": item['email'],
                              "is_admin": item['is_admin'],
                              "is_home_user": item['is_home_user'],
                              "is_allow_sync": item['is_allow_sync'],
                              "is_restricted": item['is_restricted'],
                              "shared_libraries": shared_libraries,
                              "filter_all": item['filter_all'],
                              "filter_movies": item['filter_movies'],
                              "filter_tv": item['filter_tv'],
                              "filter_music": item['filter_music'],
                              "filter_photos": item['filter_photos']
                              }

            # Check if we've set a custom avatar if so don't overwrite it.
            if item['user_id']:
                avatar_urls = monitor_db.select('SELECT thumb, custom_avatar_url '
                                                'FROM users WHERE user_id = ?',
                                                [item['user_id']])
                if avatar_urls:
                    # Only overwrite when no custom avatar is set, or when the
                    # custom avatar still mirrors the Plex.tv thumb.
                    if not avatar_urls[0]['custom_avatar_url'] or \
                            avatar_urls[0]['custom_avatar_url'] == avatar_urls[0]['thumb']:
                        new_value_dict['custom_avatar_url'] = item['thumb']
                else:
                    # No existing row: seed the custom avatar from the Plex.tv thumb.
                    new_value_dict['custom_avatar_url'] = item['thumb']

            monitor_db.upsert('users', new_value_dict, control_value_dict)

        logger.info(u"Tautulli Users :: Users list refreshed.")
        return True
    else:
        logger.warn(u"Tautulli Users :: Unable to refresh users list.")
        return False
def check_server_access():
    """Ping the PMS remote-access (external) endpoint and track failures.

    Increments the module-level ``ext_ping_count`` while the port is not
    mapped or the mapping is unreachable; queues 'on_extdown' when the
    configured threshold is hit and 'on_extup' once access recovers.
    """
    with monitor_lock:
        global ext_ping_count

        server_response = pmsconnect.PmsConnect().get_server_response()

        # Check for remote access
        if server_response:
            mapping_state = server_response['mapping_state']
            mapping_error = server_response['mapping_error']

            if mapping_state != 'mapped':
                # Port is not mapped at all.
                ext_ping_count += 1
                logger.warn(u"Tautulli Monitor :: Plex remote access port not mapped, ping attempt %s." \
                            % str(ext_ping_count))
            elif mapping_error == 'unreachable':
                # Port mapped, but the mapping does not work.
                ext_ping_count += 1
                logger.warn(u"Tautulli Monitor :: Plex remote access port mapped, but mapping failed, ping attempt %s." \
                            % str(ext_ping_count))
            else:
                # Healthy again: notify recovery if we had crossed the
                # threshold, then reset the counter.
                if ext_ping_count >= plexpy.CONFIG.REMOTE_ACCESS_PING_THRESHOLD:
                    logger.info(u"Tautulli Monitor :: Plex remote access is back up.")
                    plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extup'})
                ext_ping_count = 0

            # Fire the down notification exactly once, when the counter
            # first reaches the threshold.
            if ext_ping_count == plexpy.CONFIG.REMOTE_ACCESS_PING_THRESHOLD:
                plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extdown'})
def check_server_updates():
    """Check the Plex Media Server for available updates and notify agents."""
    with monitor_lock:
        logger.info(u"PlexPy Monitor :: Checking for PMS updates...")

        pms_connect = pmsconnect.PmsConnect()
        server_identity = pms_connect.get_server_identity()
        # NOTE: 'get_update_staus' is the (misspelled) name of the upstream
        # pmsconnect API; it cannot be renamed here.
        update_status = pms_connect.get_update_staus()

        if not (server_identity and update_status):
            return

        logger.info(u"PlexPy Monitor :: Current PMS version: %s", server_identity['version'])

        if update_status['state'] != 'available':
            logger.info(u"PlexPy Monitor :: No PMS update available.")
            return

        logger.info(u"PlexPy Monitor :: PMS update available version: %s",
                    update_status['version'])

        # Check if any notification agents have notifications enabled
        if any(d['on_pmsupdate'] for d in notifiers.available_notification_agents()):
            # Fire off notifications on a worker thread.
            threading.Thread(target=notification_handler.notify_timeline,
                             kwargs=dict(notify_action='pmsupdate')).start()
def get_plex_downloads(self):
    """Return Plex Media Server download/update info for the configured platform.

    Queries the server version and update channel, fetches the plex.tv
    downloads JSON, and returns a dict describing the matching release,
    including an 'update_available' flag.

    Returns:
        dict: Download/release details, or an empty dict on any failure.
    """
    logger.debug("Tautulli PlexTV :: Retrieving current server version.")

    pms_connect = pmsconnect.PmsConnect()
    pms_connect.set_server_version()
    update_channel = pms_connect.get_server_update_channel()

    logger.debug("Tautulli PlexTV :: Plex update channel is %s." % update_channel)
    plex_downloads = self.get_plextv_downloads(plexpass=(update_channel == 'beta'))

    try:
        available_downloads = json.loads(plex_downloads)
    except Exception:
        logger.warn("Tautulli PlexTV :: Unable to load JSON for get_plex_updates.")
        return {}

    # Get the updates for the platform
    pms_platform = common.PMS_PLATFORM_NAME_OVERRIDES.get(plexpy.CONFIG.PMS_PLATFORM,
                                                          plexpy.CONFIG.PMS_PLATFORM)
    # Bug fix: default missing 'computer'/'nas' sections to {} so a malformed
    # payload cannot raise AttributeError on the chained .get().
    platform_downloads = available_downloads.get('computer', {}).get(pms_platform) or \
        available_downloads.get('nas', {}).get(pms_platform)

    if not platform_downloads:
        logger.error("Tautulli PlexTV :: Unable to retrieve Plex updates: Could not match server platform: %s."
                     % pms_platform)
        return {}

    # Build zero-padded comparable integers from up to four dotted version parts.
    v_old = helpers.cast_to_int("".join(v.zfill(4) for v in
                                        plexpy.CONFIG.PMS_VERSION.split('-')[0].split('.')[:4]))
    v_new = helpers.cast_to_int("".join(v.zfill(4) for v in
                                        platform_downloads.get('version', '').split('-')[0].split('.')[:4]))

    if not v_old:
        logger.error("Tautulli PlexTV :: Unable to retrieve Plex updates: Invalid current server version: %s."
                     % plexpy.CONFIG.PMS_VERSION)
        return {}
    if not v_new:
        logger.error("Tautulli PlexTV :: Unable to retrieve Plex updates: Invalid new server version: %s."
                     % platform_downloads.get('version'))
        return {}

    # Get proper download
    releases = platform_downloads.get('releases', [{}])
    # Bug fix: use .get() so the [{}] fallback above cannot raise KeyError
    # while searching for the configured distro/build.
    release = next((r for r in releases if r.get('distro') == plexpy.CONFIG.PMS_UPDATE_DISTRO and
                    r.get('build') == plexpy.CONFIG.PMS_UPDATE_DISTRO_BUILD),
                   releases[0])

    download_info = {'update_available': v_new > v_old,
                     'platform': platform_downloads.get('name'),
                     'release_date': platform_downloads.get('release_date'),
                     'version': platform_downloads.get('version'),
                     'requirements': platform_downloads.get('requirements'),
                     'extra_info': platform_downloads.get('extra_info'),
                     'changelog_added': platform_downloads.get('items_added'),
                     'changelog_fixed': platform_downloads.get('items_fixed'),
                     'label': release.get('label'),
                     'distro': release.get('distro'),
                     'distro_build': release.get('build'),
                     'download_url': release.get('url'),
                     }

    return download_info
def get_metadata(self):
    """Return metadata details for this item's rating key, or None on failure."""
    details = pmsconnect.PmsConnect().get_metadata_details(self.get_rating_key())
    return details if details else None
def process(self):
    """Evaluate remote-access reachability and schedule up/down callbacks.

    Mirrors the server's remote-access state into plexpy.PLEX_REMOTE_ACCESS_UP
    and schedules/cancels the delayed 'on_extdown' callback on transitions.
    """
    # Check if remote access is enabled
    if not self.remote_access_enabled():
        return

    # Do nothing if remote access is still up and hasn't changed
    if self.is_reachable() and plexpy.PLEX_REMOTE_ACCESS_UP:
        return

    pms_connect = pmsconnect.PmsConnect()
    server_response = pms_connect.get_server_response()

    if server_response:
        # Waiting for port mapping
        if server_response['mapping_state'] == 'waiting':
            logger.warn("Tautulli ReachabilityHandler :: Remote access waiting for port mapping.")

        # Transition to down: previously up (or unknown) and a failure reason given.
        elif plexpy.PLEX_REMOTE_ACCESS_UP is not False and server_response['reason']:
            logger.warn("Tautulli ReachabilityHandler :: Remote access failed: %s"
                        % server_response['reason'])
            logger.info("Tautulli ReachabilityHandler :: Plex remote access is down.")

            plexpy.PLEX_REMOTE_ACCESS_UP = False

            # Delay the down callback so brief blips don't notify.
            if not ACTIVITY_SCHED.get_job('on_extdown'):
                logger.debug("Tautulli ReachabilityHandler :: Schedule remote access down callback in %d seconds.",
                             plexpy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD)
                schedule_callback('on_extdown', func=self.on_down, args=[server_response],
                                  seconds=plexpy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD)

        # Transition back to up: previously down and no failure reason.
        elif plexpy.PLEX_REMOTE_ACCESS_UP is False and not server_response['reason']:
            logger.info("Tautulli ReachabilityHandler :: Plex remote access is back up.")

            plexpy.PLEX_REMOTE_ACCESS_UP = True

            # If the down callback never fired, just cancel it; otherwise
            # announce the recovery.
            if ACTIVITY_SCHED.get_job('on_extdown'):
                logger.debug("Tautulli ReachabilityHandler :: Cancelling scheduled remote access down callback.")
                schedule_callback('on_extdown', remove_job=True)
            else:
                self.on_up(server_response)

    # No response at all: initialize the flag on the first check only.
    elif plexpy.PLEX_REMOTE_ACCESS_UP is None:
        plexpy.PLEX_REMOTE_ACCESS_UP = self.is_reachable()
def get_live_session(self):
    """Return the current-activity session matching this session key, or None.

    Bug fix: guard against a falsy/failed activity lookup before subscripting
    ``session_list['sessions']`` (the sibling get_live_session implementation
    in this file already has this check).
    """
    pms_connect = pmsconnect.PmsConnect()
    session_list = pms_connect.get_current_activity()

    if session_list:
        for session in session_list['sessions']:
            if int(session['session_key']) == self.get_session_key():
                return session
    return None
def get_metadata(self, skip_cache=False):
    """Return metadata details for this session's rating key, or None.

    skip_cache -- when True, bypass the session-keyed metadata cache.
    """
    cache_key = None if skip_cache else self.get_session_key()
    details = pmsconnect.PmsConnect().get_metadata_details(rating_key=self.get_rating_key(),
                                                           cache_key=cache_key)
    return details if details else None
def _getMetadata(self, rating_key='', **kwargs):
    """Fetch metadata for *rating_key* and store it on self.data.

    Returns the metadata dict on success; on failure sets self.msg and
    returns None.
    """
    pms_connect = pmsconnect.PmsConnect()
    result = pms_connect.get_metadata(rating_key, 'dict')

    if result:
        self.data = result
        return result
    else:
        # Fixed typo in the user-facing message: 'retrive' -> 'retrieve'.
        self.msg = 'Unable to retrieve metadata %s' % rating_key
        logger.warn('Unable to retrieve data.')
def get_live_session(self):
    """Return the matching live session from current activity, or None.

    Backfills the session's rating_key from the websocket data when the
    activity payload omits it (live TV sessions).
    """
    activity = pmsconnect.PmsConnect().get_current_activity()
    if not activity:
        return None

    for session in activity['sessions']:
        if int(session['session_key']) != self.get_session_key():
            continue
        # Live sessions don't have rating keys in sessions
        # Get it from the websocket data
        if not session['rating_key']:
            session['rating_key'] = self.get_rating_key()
        return session

    return None
def check_server_access():
    """Ping the PMS remote-access endpoint, tracking failures across calls.

    Uses module-level ``ext_ping_count``/``ext_ping_error`` to count failed
    checks and to suppress repeated log lines for the same mapping error.
    Queues 'on_extdown' when the failure count reaches the configured
    threshold and 'on_extup' once access recovers.
    """
    with monitor_lock:
        pms_connect = pmsconnect.PmsConnect()
        server_response = pms_connect.get_server_response()

        global ext_ping_count
        global ext_ping_error

        # Check for remote access
        if server_response:
            # Only log when the mapping error changed since the last check.
            log = (server_response['mapping_error'] != ext_ping_error)

            if server_response['reason']:
                ext_ping_count += 1
                ext_ping_error = server_response['mapping_error']
                if log:
                    logger.warn(u"Tautulli Monitor :: Remote access failed: %s, ping attempt %s."
                                % (server_response['reason'], str(ext_ping_count)))

            # Waiting for port mapping
            # NOTE(review): this branch does not increment ext_ping_count,
            # so 'waiting' states alone never trip the down threshold.
            elif server_response['mapping_state'] == 'waiting':
                ext_ping_error = server_response['mapping_error']
                if log:
                    logger.warn(u"Tautulli Monitor :: Remote access waiting for port mapping, ping attempt %s."
                                % str(ext_ping_count))

            # Reset external ping counter
            else:
                if ext_ping_count >= plexpy.CONFIG.REMOTE_ACCESS_PING_THRESHOLD:
                    logger.info(u"Tautulli Monitor :: Plex remote access is back up.")
                    plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extup',
                                             'remote_access_info': server_response})
                ext_ping_count = 0
                ext_ping_error = None

            # Fires exactly once, when the counter first hits the threshold.
            # NOTE(review): log prefix "Tautulli Monitor:" is inconsistent with
            # the ":: " style used elsewhere in this module.
            if ext_ping_count == plexpy.CONFIG.REMOTE_ACCESS_PING_THRESHOLD:
                logger.info(u"Tautulli Monitor: Plex remote access is down.")
                plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extdown',
                                         'remote_access_info': server_response})
def check_server_response():
    """Ping the PMS remote-access endpoint; notify after 3 failed attempts.

    Legacy PlexPy variant with a hard-coded threshold of 3 and per-agent
    notification dispatch via worker threads.
    """
    with monitor_lock:
        global ext_ping_count

        server_response = pmsconnect.PmsConnect().get_server_response()

        # Check for remote access
        if server_response:
            mapping_state = server_response['mapping_state']
            mapping_error = server_response['mapping_error']

            if mapping_state != 'mapped':
                # Port is not mapped.
                ext_ping_count += 1
                logger.warn(u"PlexPy Monitor :: Plex remote access port not mapped, ping attempt %s." \
                            % str(ext_ping_count))
            elif mapping_error == 'unreachable':
                # Port mapped but the mapping does not work.
                ext_ping_count += 1
                logger.warn(u"PlexPy Monitor :: Plex remote access port mapped, but mapping failed, ping attempt %s." \
                            % str(ext_ping_count))
            else:
                # Healthy again: notify recovery if we had been down, then reset.
                if ext_ping_count >= 3:
                    logger.info(u"PlexPy Monitor :: Plex remote access is back up.")
                    if any(d['on_extup'] for d in notifiers.available_notification_agents()):
                        threading.Thread(target=notification_handler.notify_timeline,
                                         kwargs=dict(notify_action='extup')).start()
                ext_ping_count = 0

            # Fire the down notification once, when the counter first hits 3.
            if ext_ping_count == 3:
                if any(d['on_extdown'] for d in notifiers.available_notification_agents()):
                    threading.Thread(target=notification_handler.notify_timeline,
                                     kwargs=dict(notify_action='extdown')).start()
def on_created(rating_key, **kwargs):
    """Handle a 'library item created' timeline event for *rating_key*.

    Looks up the item's metadata, queues an 'on_created' notification unless
    the item was already recorded, and records the item (plus any child
    keys) in the recently_added table.
    """
    pms_connect = pmsconnect.PmsConnect()
    metadata = pms_connect.get_metadata_details(rating_key)

    if metadata:
        # Bug fix: log only after the metadata check.  The original accessed
        # metadata['full_title'] before testing metadata, raising TypeError
        # whenever the lookup failed.
        logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s) added to Plex.",
                     metadata['full_title'], str(rating_key))

        notify = True

        # now = helpers.timestamp()
        #
        # if helpers.cast_to_int(metadata['added_at']) < now - 86400:  # Updated more than 24 hours ago
        #     logger.debug("Tautulli TimelineHandler :: Library item %s added more than 24 hours ago. Not notifying."
        #                  % str(rating_key))
        #     notify = False

        data_factory = datafactory.DataFactory()

        if 'child_keys' not in kwargs:
            # Deduplicate: skip items already seen as recently added.
            if data_factory.get_recently_added_item(rating_key):
                logger.debug("Tautulli TimelineHandler :: Library item %s added already. Not notifying again."
                             % str(rating_key))
                notify = False

        if notify:
            data = {'timeline_data': metadata, 'notify_action': 'on_created'}
            data.update(kwargs)
            plexpy.NOTIFY_QUEUE.put(data)

        # Record the item and any children so future events are deduplicated.
        all_keys = [rating_key]
        if 'child_keys' in kwargs:
            all_keys.extend(kwargs['child_keys'])

        for key in all_keys:
            data_factory.set_recently_added_item(key)

        logger.debug("Added %s items to the recently_added database table."
                     % str(len(all_keys)))
    else:
        logger.error("Tautulli TimelineHandler :: Unable to retrieve metadata for rating_key %s"
                     % str(rating_key))
def process(self):
    """Evaluate remote-access state and queue up/down notifications.

    Mirrors the server's reported state into plexpy.PLEX_REMOTE_ACCESS_UP and
    queues 'on_extdown'/'on_extup' on transitions.
    """
    # Remote access checks are pointless when the server has it disabled.
    if not self.remote_access_enabled():
        return

    # Still reachable and already flagged up: nothing changed.
    if self.is_reachable() and plexpy.PLEX_REMOTE_ACCESS_UP:
        return

    server_response = pmsconnect.PmsConnect().get_server_response()

    if not server_response:
        # No response at all: seed the flag on the very first check only.
        if plexpy.PLEX_REMOTE_ACCESS_UP is None:
            plexpy.PLEX_REMOTE_ACCESS_UP = self.is_reachable()
        return

    if server_response['mapping_state'] == 'waiting':
        # Port mapping still in progress; neither up nor down yet.
        logger.warn("Tautulli Monitor :: Remote access waiting for port mapping.")
        return

    if plexpy.PLEX_REMOTE_ACCESS_UP is not False and server_response['reason']:
        # Transition to down.
        logger.warn("Tautulli Monitor :: Remote access failed: %s"
                    % server_response['reason'])
        logger.info("Tautulli Monitor :: Plex remote access is down.")
        plexpy.PLEX_REMOTE_ACCESS_UP = False
        plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extdown',
                                 'remote_access_info': server_response})
    elif plexpy.PLEX_REMOTE_ACCESS_UP is False and not server_response['reason']:
        # Transition back to up.
        logger.info("Tautulli Monitor :: Plex remote access is back up.")
        plexpy.PLEX_REMOTE_ACCESS_UP = True
        plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extup',
                                 'remote_access_info': server_response})
def _getSync(self, machine_id=None, user_id=None, **kwargs):
    """Fetch synced items, defaulting to this server's machine identifier.

    Stores the result on self.data and returns it; on failure sets self.msg
    and returns None.
    """
    # The server identity is always queried, even when an explicit
    # machine_id is supplied (preserves the historical behaviour).
    server_id = pmsconnect.PmsConnect().get_server_identity()
    plex_tv = plextv.PlexTV()

    target_machine = machine_id or server_id['machine_identifier']
    result = plex_tv.get_synced_items(machine_id=target_machine, user_id=user_id)

    if result:
        self.data = result
        return result

    self.msg = 'Unable to retrieve sync data for user'
    logger.warn('Unable to retrieve sync data for user.')
def check_active_sessions(ws_request=False):
    """Reconcile the sessions table against the server's current activity.

    For each stream already in the DB: update play state (pause/resume),
    accumulate paused/buffer counters, fire watched/buffer notifications,
    and on stop write the session to history and remove it.  New sessions
    from the server are inserted.  Also tracks server-down pings via the
    module-level ``int_ping_count``.

    ws_request -- True when triggered by a websocket event; skips the
    interval-based paused_counter accumulation.
    """
    with monitor_lock:
        pms_connect = pmsconnect.PmsConnect()
        session_list = pms_connect.get_current_activity()
        monitor_db = database.MonitorDatabase()
        monitor_process = activity_processor.ActivityProcessor()
        # logger.debug(u"PlexPy Monitor :: Checking for active streams.")

        global int_ping_count

        if session_list:
            # Server responded: announce recovery if we had been down.
            if int_ping_count >= 3:
                logger.info(u"PlexPy Monitor :: The Plex Media Server is back up.")

                # Check if any notification agents have notifications enabled
                if any(d['on_intup'] for d in notifiers.available_notification_agents()):
                    # Fire off notifications
                    threading.Thread(target=notification_handler.notify_timeline,
                                     kwargs=dict(notify_action='intup')).start()
            int_ping_count = 0

            media_container = session_list['sessions']

            # Check our temp table for what we must do with the new streams
            db_streams = monitor_db.select('SELECT * FROM sessions')
            for stream in db_streams:
                if any(d['session_key'] == str(stream['session_key']) and d['rating_key'] == str(stream['rating_key'])
                       for d in media_container):
                    # The user's session is still active
                    for session in media_container:
                        if session['session_key'] == str(stream['session_key']) and \
                                session['rating_key'] == str(stream['rating_key']):
                            # The user is still playing the same media item
                            # Here we can check the play states
                            if session['state'] != stream['state']:
                                if session['state'] == 'paused':
                                    logger.debug(u"PlexPy Monitor :: Session %s has been paused." % stream['session_key'])

                                    # Check if any notification agents have notifications enabled
                                    if any(d['on_pause'] for d in notifiers.available_notification_agents()):
                                        # Push any notifications -
                                        # Push it on it's own thread so we don't hold up our db actions
                                        threading.Thread(target=notification_handler.notify,
                                                         kwargs=dict(stream_data=stream, notify_action='pause')).start()

                                if session['state'] == 'playing' and stream['state'] == 'paused':
                                    logger.debug(u"PlexPy Monitor :: Session %s has been resumed." % stream['session_key'])

                                    # Check if any notification agents have notifications enabled
                                    if any(d['on_resume'] for d in notifiers.available_notification_agents()):
                                        # Push any notifications -
                                        # Push it on it's own thread so we don't hold up our db actions
                                        threading.Thread(target=notification_handler.notify,
                                                         kwargs=dict(stream_data=stream, notify_action='resume')).start()

                            if stream['state'] == 'paused' and not ws_request:
                                # The stream is still paused so we need to increment the paused_counter
                                # Using the set config parameter as the interval, probably not the most accurate but
                                # it will have to do for now. If it's a websocket request don't use this method.
                                paused_counter = int(stream['paused_counter']) + plexpy.CONFIG.MONITORING_INTERVAL
                                monitor_db.action('UPDATE sessions SET paused_counter = ? '
                                                  'WHERE session_key = ? AND rating_key = ?',
                                                  [paused_counter, stream['session_key'], stream['rating_key']])

                            if session['state'] == 'buffering' and plexpy.CONFIG.BUFFER_THRESHOLD > 0:
                                # The stream is buffering so we need to increment the buffer_count
                                # We're going just increment on every monitor ping,
                                # would be difficult to keep track otherwise
                                monitor_db.action('UPDATE sessions SET buffer_count = buffer_count + 1 '
                                                  'WHERE session_key = ? AND rating_key = ?',
                                                  [stream['session_key'], stream['rating_key']])

                                # Check the current buffer count and last buffer to determine if we should notify
                                buffer_values = monitor_db.select('SELECT buffer_count, buffer_last_triggered '
                                                                  'FROM sessions '
                                                                  'WHERE session_key = ? AND rating_key = ?',
                                                                  [stream['session_key'], stream['rating_key']])

                                if buffer_values[0]['buffer_count'] >= plexpy.CONFIG.BUFFER_THRESHOLD:
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    # Our first buffer notification
                                    if buffer_values[0]['buffer_count'] == plexpy.CONFIG.BUFFER_THRESHOLD:
                                        logger.info(u"PlexPy Monitor :: User '%s' has triggered a buffer warning."
                                                    % stream['user'])
                                        # Set the buffer trigger time
                                        monitor_db.action('UPDATE sessions '
                                                          'SET buffer_last_triggered = strftime("%s","now") '
                                                          'WHERE session_key = ? AND rating_key = ?',
                                                          [stream['session_key'], stream['rating_key']])

                                        # Check if any notification agents have notifications enabled
                                        if any(d['on_buffer'] for d in notifiers.available_notification_agents()):
                                            # Push any notifications -
                                            # Push it on it's own thread so we don't hold up our db actions
                                            threading.Thread(target=notification_handler.notify,
                                                             kwargs=dict(stream_data=stream, notify_action='buffer')).start()
                                    else:
                                        # Subsequent buffer notifications after wait time
                                        if int(time.time()) > buffer_values[0]['buffer_last_triggered'] + \
                                                plexpy.CONFIG.BUFFER_WAIT:
                                            logger.info(u"PlexPy Monitor :: User '%s' has triggered multiple buffer warnings."
                                                        % stream['user'])
                                            # Set the buffer trigger time
                                            monitor_db.action('UPDATE sessions '
                                                              'SET buffer_last_triggered = strftime("%s","now") '
                                                              'WHERE session_key = ? AND rating_key = ?',
                                                              [stream['session_key'], stream['rating_key']])

                                            # Check if any notification agents have notifications enabled
                                            if any(d['on_buffer'] for d in notifiers.available_notification_agents()):
                                                # Push any notifications -
                                                # Push it on it's own thread so we don't hold up our db actions
                                                threading.Thread(target=notification_handler.notify,
                                                                 kwargs=dict(stream_data=stream, notify_action='buffer')).start()

                                logger.debug(u"PlexPy Monitor :: Session %s is buffering. Count is now %s. Last triggered %s."
                                             % (stream['session_key'],
                                                buffer_values[0]['buffer_count'],
                                                buffer_values[0]['buffer_last_triggered']))

                            # Check if the user has reached the offset in the media we defined as the "watched" percent
                            # Don't trigger if state is buffer as some clients push the progress to the end when
                            # buffering on start.
                            if session['view_offset'] and session['duration'] and session['state'] != 'buffering':
                                if helpers.get_percent(session['view_offset'],
                                                       session['duration']) > plexpy.CONFIG.NOTIFY_WATCHED_PERCENT:
                                    # Check if any notification agents have notifications enabled
                                    if any(d['on_watched'] for d in notifiers.available_notification_agents()):
                                        # Push any notifications -
                                        # Push it on it's own thread so we don't hold up our db actions
                                        threading.Thread(target=notification_handler.notify,
                                                         kwargs=dict(stream_data=stream, notify_action='watched')).start()

                else:
                    # The user has stopped playing a stream
                    if stream['state'] != 'stopped':
                        logger.debug(u"PlexPy Monitor :: Session %s has stopped." % stream['session_key'])

                        # Set the stream stop time
                        stream['stopped'] = int(time.time())
                        monitor_db.action('UPDATE sessions SET stopped = ?, state = ? '
                                          'WHERE session_key = ? AND rating_key = ?',
                                          [stream['stopped'], 'stopped', stream['session_key'], stream['rating_key']])

                    # Check if the user has reached the offset in the media we defined as the "watched" percent
                    if stream['view_offset'] and stream['duration']:
                        if helpers.get_percent(stream['view_offset'],
                                               stream['duration']) > plexpy.CONFIG.NOTIFY_WATCHED_PERCENT:
                            # Check if any notification agents have notifications enabled
                            if any(d['on_watched'] for d in notifiers.available_notification_agents()):
                                # Push any notifications -
                                # Push it on it's own thread so we don't hold up our db actions
                                threading.Thread(target=notification_handler.notify,
                                                 kwargs=dict(stream_data=stream, notify_action='watched')).start()

                    # Check if any notification agents have notifications enabled
                    if any(d['on_stop'] for d in notifiers.available_notification_agents()):
                        # Push any notifications - Push it on it's own thread so we don't hold up our db actions
                        threading.Thread(target=notification_handler.notify,
                                         kwargs=dict(stream_data=stream, notify_action='stop')).start()

                    # Write the item history on playback stop
                    success = monitor_process.write_session_history(session=stream)

                    if success:
                        # If session is written to the databaase successfully, remove the session from the session table
                        logger.debug(u"PlexPy Monitor :: Removing sessionKey %s ratingKey %s from session queue"
                                     % (stream['session_key'], stream['rating_key']))
                        monitor_db.action('DELETE FROM sessions WHERE session_key = ? AND rating_key = ?',
                                          [stream['session_key'], stream['rating_key']])
                    else:
                        # History write failed: retry on later passes up to the
                        # configured attempt limit, then drop the session.
                        stream['write_attempts'] += 1

                        if stream['write_attempts'] < plexpy.CONFIG.SESSION_DB_WRITE_ATTEMPTS:
                            logger.warn(u"PlexPy Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \
                                        "Will try again on the next pass. Write attempt %s."
                                        % (stream['session_key'], stream['rating_key'], str(stream['write_attempts'])))
                            monitor_db.action('UPDATE sessions SET write_attempts = ? '
                                              'WHERE session_key = ? AND rating_key = ?',
                                              [stream['write_attempts'], stream['session_key'], stream['rating_key']])
                        else:
                            logger.warn(u"PlexPy Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \
                                        "Removing session from the database. Write attempt %s."
                                        % (stream['session_key'], stream['rating_key'], str(stream['write_attempts'])))
                            logger.debug(u"PlexPy Monitor :: Removing sessionKey %s ratingKey %s from session queue"
                                         % (stream['session_key'], stream['rating_key']))
                            monitor_db.action('DELETE FROM sessions WHERE session_key = ? AND rating_key = ?',
                                              [stream['session_key'], stream['rating_key']])

            # Process the newly received session data
            for session in media_container:
                new_session = monitor_process.write_session(session)

                if new_session:
                    logger.debug(u"PlexPy Monitor :: Session %s has started with ratingKey %s."
                                 % (session['session_key'], session['rating_key']))
        else:
            logger.debug(u"PlexPy Monitor :: Unable to read session list.")

            int_ping_count += 1
            logger.warn(u"PlexPy Monitor :: Unable to get an internal response from the server, ping attempt %s." \
                        % str(int_ping_count))

        # Fires exactly once, when the failed-ping counter first reaches 3
        # (the counter is reset to 0 whenever the server responds).
        if int_ping_count == 3:
            # Check if any notification agents have notifications enabled
            if any(d['on_intdown'] for d in notifiers.available_notification_agents()):
                # Fire off notifications
                threading.Thread(target=notification_handler.notify_timeline,
                                 kwargs=dict(notify_action='intdown')).start()
def remote_access_enabled(self):
    """Return True if the PMS publishes itself to plex.tv (remote access on)."""
    pms_connect = pmsconnect.PmsConnect()
    publish_pref = pms_connect.get_server_pref(pref='PublishServerOnPlexOnlineKey')
    return helpers.bool_true(publish_pref)
def check_recently_added():
    """Poll the server's recently-added list and queue 'on_created' events.

    Only items whose 'added_at' falls inside the last monitoring interval
    (offset by the configured metadata-processing delay) are notified.
    Honors the per-library do_notify_created flag and the grouped
    (NOTIFY_GROUP_RECENTLY_ADDED) setting.
    """
    with monitor_lock:
        # add delay to allow for metadata processing
        delay = plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY
        time_threshold = int(time.time()) - delay
        time_interval = plexpy.CONFIG.MONITORING_INTERVAL

        pms_connect = pmsconnect.PmsConnect()
        recently_added_list = pms_connect.get_recently_added_details(count='10')

        library_data = libraries.Libraries()

        if recently_added_list:
            recently_added = recently_added_list['recently_added']

            for item in recently_added:
                library_details = library_data.get_details(section_id=item['section_id'])

                # Skip libraries where created-notifications are disabled.
                if not library_details['do_notify_created']:
                    continue

                metadata = []

                # Only consider items added within the last interval window.
                if 0 < time_threshold - int(item['added_at']) <= time_interval:
                    if item['media_type'] == 'movie':
                        metadata = pms_connect.get_metadata_details(item['rating_key'])
                        if metadata:
                            metadata = [metadata]
                        else:
                            logger.error(u"Tautulli Monitor :: Unable to retrieve metadata for rating_key %s" \
                                         % str(item['rating_key']))
                    else:
                        # Shows/artists: expand to child items.
                        metadata = pms_connect.get_metadata_children_details(item['rating_key'])
                        if not metadata:
                            logger.error(u"Tautulli Monitor :: Unable to retrieve children metadata for rating_key %s" \
                                         % str(item['rating_key']))

                if metadata:
                    if not plexpy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED:
                        # Ungrouped: notify every qualifying child item.
                        # NOTE(review): this inner loop rebinds 'item', shadowing
                        # the outer loop variable; benign here because the outer
                        # for-loop reassigns 'item' on its next iteration.
                        for item in metadata:
                            library_details = library_data.get_details(section_id=item['section_id'])

                            if 0 < time_threshold - int(item['added_at']) <= time_interval:
                                logger.debug(u"Tautulli Monitor :: Library item %s added to Plex."
                                             % str(item['rating_key']))
                                plexpy.NOTIFY_QUEUE.put({'timeline_data': item.copy(),
                                                         'notify_action': 'on_created'})
                    else:
                        # Grouped: notify once for the most recently added item.
                        item = max(metadata, key=lambda x: x['added_at'])

                        if 0 < time_threshold - int(item['added_at']) <= time_interval:
                            # Roll episodes/tracks up to the show/artist level.
                            if item['media_type'] == 'episode' or item['media_type'] == 'track':
                                metadata = pms_connect.get_metadata_details(item['grandparent_rating_key'])

                                if metadata:
                                    item = metadata
                                else:
                                    logger.error(u"Tautulli Monitor :: Unable to retrieve grandparent metadata for grandparent_rating_key %s" \
                                                 % str(item['rating_key']))

                            logger.debug(u"Tautulli Monitor :: Library item %s added to Plex."
                                         % str(item['rating_key']))

                            # Check if any notification agents have notifications enabled
                            plexpy.NOTIFY_QUEUE.put({'timeline_data': item.copy(),
                                                     'notify_action': 'on_created'})
def get_media_info_file_sizes(self, section_id=None, rating_key=None):
    """Calculate and cache total file sizes for cached media-info rows.

    Loads the media-info JSON cache for the given section (or single item),
    fills in missing 'file_size' totals by summing child metadata, and
    writes the cache back out.

    Returns:
        bool: True on success, False on invalid input or missing library.
    """
    if not session.allow_session_library(section_id):
        return False

    if section_id and not str(section_id).isdigit():
        logger.warn(u"PlexPy Libraries :: Datatable media info file size called by invalid section_id provided.")
        return False
    elif rating_key and not str(rating_key).isdigit():
        logger.warn(u"PlexPy Libraries :: Datatable media info file size called by invalid rating_key provided.")
        return False

    # Get the library details
    library_details = self.get_details(section_id=section_id)
    # Idiom fix: compare to None with 'is', not '=='.
    if library_details['section_id'] is None:
        logger.debug(u"PlexPy Libraries :: Library section_id %s not found." % section_id)
        return False

    # Photo libraries have no meaningful file-size totals here.
    if library_details['section_type'] == 'photo':
        return False

    rows = []
    # Import media info cache from json file; a missing cache file is
    # expected (IOError) and simply leaves rows empty.
    if rating_key:
        try:
            inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,
                                      'media_info_%s-%s.json' % (section_id, rating_key))
            with open(inFilePath, 'r') as inFile:
                rows = json.load(inFile)
        except IOError:
            pass
    elif section_id:
        logger.debug(u"PlexPy Libraries :: Getting file sizes for section_id %s." % section_id)
        try:
            inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,
                                      'media_info_%s.json' % section_id)
            with open(inFilePath, 'r') as inFile:
                rows = json.load(inFile)
        except IOError:
            pass

    # Get the total file size for each item
    pms_connect = pmsconnect.PmsConnect()

    for item in rows:
        if item['rating_key'] and not item['file_size']:
            file_size = 0

            child_metadata = pms_connect.get_metadata_children_details(rating_key=item['rating_key'],
                                                                       get_children=True,
                                                                       get_media_info=True)
            metadata_list = child_metadata['metadata']

            # Renamed loop variable: the original reused 'child_metadata',
            # shadowing the parent result dict.
            for child in metadata_list:
                file_size += helpers.cast_to_int(child.get('file_size', 0))

            item['file_size'] = file_size

    # Cache the media info to a json file
    if rating_key:
        try:
            outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,
                                       'media_info_%s-%s.json' % (section_id, rating_key))
            with open(outFilePath, 'w') as outFile:
                json.dump(rows, outFile)
        except IOError:
            logger.debug(u"PlexPy Libraries :: Unable to create cache file with file sizes for rating_key %s."
                         % rating_key)
    elif section_id:
        try:
            outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,
                                       'media_info_%s.json' % section_id)
            with open(outFilePath, 'w') as outFile:
                json.dump(rows, outFile)
        except IOError:
            logger.debug(u"PlexPy Libraries :: Unable to create cache file with file sizes for section_id %s."
                         % section_id)

    # Only section-level refreshes are logged (the original no-op'd for rating_key).
    if section_id and not rating_key:
        logger.debug(u"PlexPy Libraries :: File sizes updated for section_id %s." % section_id)

    return True
def get_datatables_media_info(self, section_id=None, section_type=None, rating_key=None, refresh=False, kwargs=None):
    """Build the datatables JSON payload of media info for a library.

    Combines play counts from the local database with a JSON-file cache of
    media info (refreshed from the PMS when missing or when refresh=True),
    then applies datatables search/sort/pagination from kwargs['json_data'].

    Returns a dict with 'recordsFiltered', 'recordsTotal', 'data', 'draw',
    'filtered_file_size' and 'total_file_size', or default_return on error.
    """
    default_return = {
        'recordsFiltered': 0,
        'recordsTotal': 0,
        'draw': 0,
        'data': 'null',
        'error': 'Unable to execute database query.'
    }

    if not session.allow_session_library(section_id):
        return default_return

    # Validate inputs: numeric ids, and at least one of section_id/rating_key.
    if section_id and not str(section_id).isdigit():
        logger.warn(
            u"PlexPy Libraries :: Datatable media info called but invalid section_id provided."
        )
        return default_return
    elif rating_key and not str(rating_key).isdigit():
        logger.warn(
            u"PlexPy Libraries :: Datatable media info called but invalid rating_key provided."
        )
        return default_return
    elif not section_id and not rating_key:
        logger.warn(
            u"PlexPy Libraries :: Datatable media info called but no input provided."
        )
        return default_return

    # Get the library details
    library_details = self.get_details(section_id=section_id)
    if library_details['section_id'] == None:
        logger.debug(
            u"PlexPy Libraries :: Library section_id %s not found." % section_id)
        return default_return

    if not section_type:
        section_type = library_details['section_type']

    # Get play counts from the database
    monitor_db = database.MonitorDatabase()

    # Grouped history counts by reference_id; otherwise each row counts.
    if plexpy.CONFIG.GROUP_HISTORY_TABLES:
        count_by = 'reference_id'
    else:
        count_by = 'id'

    # Aggregate at the right level of the hierarchy for the section type.
    if section_type == 'show' or section_type == 'artist':
        group_by = 'grandparent_rating_key'
    elif section_type == 'season' or section_type == 'album':
        group_by = 'parent_rating_key'
    else:
        group_by = 'rating_key'

    try:
        query = 'SELECT MAX(session_history.started) AS last_played, COUNT(DISTINCT session_history.%s) AS play_count, ' \
                'session_history.rating_key, session_history.parent_rating_key, session_history.grandparent_rating_key ' \
                'FROM session_history ' \
                'JOIN session_history_metadata ON session_history.id = session_history_metadata.id ' \
                'WHERE session_history_metadata.section_id = ? ' \
                'GROUP BY session_history.%s ' % (count_by, group_by)
        result = monitor_db.select(query, args=[section_id])
    except Exception as e:
        logger.warn(
            u"PlexPy Libraries :: Unable to execute database query for get_datatables_media_info2: %s."
            % e)
        return default_return

    # Index watch data by the grouping rating key for O(1) lookup below.
    watched_list = {}
    for item in result:
        watched_list[str(item[group_by])] = {
            'last_played': item['last_played'],
            'play_count': item['play_count']
        }

    rows = []
    # Import media info cache from json file; missing file leaves rows empty
    # and triggers a refresh from the PMS below.
    if rating_key:
        try:
            inFilePath = os.path.join(
                plexpy.CONFIG.CACHE_DIR,
                'media_info_%s-%s.json' % (section_id, rating_key))
            with open(inFilePath, 'r') as inFile:
                rows = json.load(inFile)
                library_count = len(rows)
        except IOError as e:
            #logger.debug(u"PlexPy Libraries :: No JSON file for rating_key %s." % rating_key)
            #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
            pass
    elif section_id:
        try:
            inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,
                                      'media_info_%s.json' % section_id)
            with open(inFilePath, 'r') as inFile:
                rows = json.load(inFile)
                library_count = len(rows)
        except IOError as e:
            #logger.debug(u"PlexPy Libraries :: No JSON file for library section_id %s." % section_id)
            #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
            pass

    # If no cache was imported, get all library children items.
    # Keep cached file sizes so a refresh doesn't re-fetch them.
    cached_items = {d['rating_key']: d['file_size']
                    for d in rows} if not refresh else {}

    if refresh or not rows:
        pms_connect = pmsconnect.PmsConnect()

        if rating_key:
            library_children = pms_connect.get_library_children_details(
                rating_key=rating_key, get_media_info=True)
        elif section_id:
            library_children = pms_connect.get_library_children_details(
                section_id=section_id,
                section_type=section_type,
                get_media_info=True)

        if library_children:
            library_count = library_children['library_count']
            # NOTE: 'childern_list' (sic) is the actual key returned by
            # pmsconnect; do not "fix" the spelling here.
            children_list = library_children['childern_list']
        else:
            logger.warn(
                u"PlexPy Libraries :: Unable to get a list of library items."
            )
            return default_return

        new_rows = []
        for item in children_list:
            # Prefer the previously cached file size over the fresh value.
            cached_file_size = cached_items.get(item['rating_key'], None)
            file_size = cached_file_size if cached_file_size else item.get(
                'file_size', '')

            row = {
                'section_id': library_details['section_id'],
                'section_type': library_details['section_type'],
                'added_at': item['added_at'],
                'media_type': item['media_type'],
                'rating_key': item['rating_key'],
                'parent_rating_key': item['parent_rating_key'],
                'grandparent_rating_key': item['grandparent_rating_key'],
                'title': item['title'],
                'year': item['year'],
                'media_index': item['media_index'],
                'parent_media_index': item['parent_media_index'],
                'thumb': item['thumb'],
                'container': item.get('container', ''),
                'bitrate': item.get('bitrate', ''),
                'video_codec': item.get('video_codec', ''),
                'video_resolution': item.get('video_resolution', ''),
                'video_framerate': item.get('video_framerate', ''),
                'audio_codec': item.get('audio_codec', ''),
                'audio_channels': item.get('audio_channels', ''),
                'file_size': file_size
            }
            new_rows.append(row)

        rows = new_rows
        if not rows:
            return default_return

        # Cache the media info to a json file
        if rating_key:
            try:
                outFilePath = os.path.join(
                    plexpy.CONFIG.CACHE_DIR,
                    'media_info_%s-%s.json' % (section_id, rating_key))
                with open(outFilePath, 'w') as outFile:
                    json.dump(rows, outFile)
            except IOError as e:
                logger.debug(
                    u"PlexPy Libraries :: Unable to create cache file for rating_key %s."
                    % rating_key)
        elif section_id:
            try:
                outFilePath = os.path.join(
                    plexpy.CONFIG.CACHE_DIR,
                    'media_info_%s.json' % section_id)
                with open(outFilePath, 'w') as outFile:
                    json.dump(rows, outFile)
            except IOError as e:
                logger.debug(
                    u"PlexPy Libraries :: Unable to create cache file for section_id %s."
                    % section_id)

    # Update the last_played and play_count
    for item in rows:
        watched_item = watched_list.get(item['rating_key'], None)
        if watched_item:
            item['last_played'] = watched_item['last_played']
            item['play_count'] = watched_item['play_count']
        else:
            item['last_played'] = None
            item['play_count'] = None

    results = []

    # Get datatables JSON data
    if kwargs.get('json_data'):
        json_data = helpers.process_json_kwargs(
            json_kwargs=kwargs.get('json_data'))
        #print json_data

        # Search results (substring match over searchable columns only).
        search_value = json_data['search']['value'].lower()
        if search_value:
            searchable_columns = [
                d['data'] for d in json_data['columns'] if d['searchable']
            ]
            for row in rows:
                # NOTE: Python 2 iteritems(); assumes searchable values are
                # strings (non-string values would break .lower()).
                for k, v in row.iteritems():
                    if k in searchable_columns and search_value in v.lower():
                        results.append(row)
                        break
        else:
            results = rows

        filtered_count = len(results)

        # Sort results: stable pre-sort by title, then apply the requested
        # column orders in reverse so the first column dominates.
        results = sorted(results, key=lambda k: k['title'])
        sort_order = json_data['order']
        for order in reversed(sort_order):
            sort_key = json_data['columns'][int(order['column'])]['data']
            reverse = True if order['dir'] == 'desc' else False
            if rating_key and sort_key == 'title':
                # Inside a single item, "title" order means episode/track index.
                results = sorted(
                    results,
                    key=lambda k: helpers.cast_to_int(k['media_index']),
                    reverse=reverse)
            elif sort_key == 'file_size' or sort_key == 'bitrate':
                results = sorted(
                    results,
                    key=lambda k: helpers.cast_to_int(k[sort_key]),
                    reverse=reverse)
            elif sort_key == 'video_resolution':
                # Normalize '4k' to '2160p' then strip the 'p' for numeric sort.
                results = sorted(
                    results,
                    key=lambda k: helpers.cast_to_int(k[sort_key].replace(
                        '4k', '2160p').rstrip('p')),
                    reverse=reverse)
            else:
                results = sorted(results, key=lambda k: k[sort_key],
                                 reverse=reverse)

        total_file_size = sum(
            [helpers.cast_to_int(d['file_size']) for d in results])

        # Paginate results
        results = results[json_data['start']:(json_data['start'] +
                                              json_data['length'])]

        filtered_file_size = sum(
            [helpers.cast_to_int(d['file_size']) for d in results])

    # NOTE(review): 'dict' shadows the builtin; kept byte-identical here.
    dict = {
        'recordsFiltered': filtered_count,
        'recordsTotal': library_count,
        'data': results,
        'draw': int(json_data['draw']),
        'filtered_file_size': filtered_file_size,
        'total_file_size': total_file_size
    }

    return dict
def update_section_ids():
    """Backfill missing section_id values in session_history_metadata.

    Sets plexpy.CONFIG.UPDATE_SECTION_IDS to -1 while running, 1 on a
    database failure, and 0 when finished or when nothing needs updating.
    Maps each history row's rating key to a section via the PMS library
    listings, then upserts the section_id per row.

    Returns True on completion, None when there is nothing to do or the
    initial queries fail.
    """
    plexpy.CONFIG.UPDATE_SECTION_IDS = -1

    db = database.MonitorDatabase()

    try:
        history_results = db.select(
            query='SELECT id, rating_key, grandparent_rating_key, media_type '
                  'FROM session_history_metadata WHERE section_id IS NULL')
        library_results = db.select(
            query='SELECT section_id, section_type FROM library_sections')
    except Exception as e:
        logger.warn(
            u"PlexPy Libraries :: Unable to execute database query for update_section_ids: %s."
            % e)
        logger.warn(
            u"PlexPy Libraries :: Unable to update section_id's in database.")
        plexpy.CONFIG.UPDATE_SECTION_IDS = 1
        plexpy.CONFIG.write()
        return None

    if not history_results:
        # Nothing to backfill; mark the task done.
        plexpy.CONFIG.UPDATE_SECTION_IDS = 0
        plexpy.CONFIG.write()
        return None

    logger.debug(u"PlexPy Libraries :: Updating section_id's in database.")

    # Build a rating_key -> section_id map from every non-photo library.
    rating_key_map = {}
    pms = pmsconnect.PmsConnect()

    for library in library_results:
        if library['section_type'] == 'photo':
            continue

        children = pms.get_library_children_details(
            section_id=library['section_id'],
            section_type=library['section_type'])

        if not children:
            logger.warn(
                u"PlexPy Libraries :: Unable to get a list of library items for section_id %s."
                % library['section_id'])
            continue

        # NOTE: 'childern_list' (sic) is the key pmsconnect actually returns.
        for child in children['childern_list']:
            rating_key_map[child['rating_key']] = child['section_id']

    failed_keys = set()
    for item in history_results:
        # Shows/episodes map through the grandparent; movies map directly.
        if item['media_type'] != 'movie':
            lookup_key = item['grandparent_rating_key']
        else:
            lookup_key = item['rating_key']

        mapped_section = rating_key_map.get(str(lookup_key), None)
        if not mapped_section:
            failed_keys.add(item['rating_key'])
            continue

        try:
            db.upsert('session_history_metadata',
                      key_dict={'id': item['id']},
                      value_dict={'section_id': mapped_section})
        except:
            failed_keys.add(item['rating_key'])

    if failed_keys:
        logger.info(
            u"PlexPy Libraries :: Updated all section_id's in database except for rating_keys: %s."
            % ', '.join(str(key) for key in failed_keys))
    else:
        logger.info(
            u"PlexPy Libraries :: Updated all section_id's in database.")

    plexpy.CONFIG.UPDATE_SECTION_IDS = 0
    plexpy.CONFIG.write()

    return True
def update_labels():
    """Backfill the labels column in session_history_metadata.

    Sets plexpy.CONFIG.UPDATE_LABELS to -1 while running, 1 on a database
    failure, and 0 when finished or when there are no libraries. For each
    non-photo library, collects the label titles attached to every item and
    writes them (semicolon-joined) to all matching history rows.

    Returns True on completion, None when there is nothing to do or the
    initial query fails.
    """
    plexpy.CONFIG.UPDATE_LABELS = -1
    monitor_db = database.MonitorDatabase()

    try:
        query = 'SELECT section_id, section_type FROM library_sections'
        library_results = monitor_db.select(query=query)
    except Exception as e:
        logger.warn(
            u"PlexPy Libraries :: Unable to execute database query for update_labels: %s."
            % e)
        logger.warn(
            u"PlexPy Libraries :: Unable to update labels in database.")
        plexpy.CONFIG.UPDATE_LABELS = 1
        plexpy.CONFIG.write()
        return None

    if not library_results:
        # No libraries; mark the task done.
        plexpy.CONFIG.UPDATE_LABELS = 0
        plexpy.CONFIG.write()
        return None

    logger.debug(u"PlexPy Libraries :: Updating labels in database.")

    # Map rating_key -> list of label titles across all libraries.
    key_mappings = {}

    pms_connect = pmsconnect.PmsConnect()
    for library in library_results:
        section_id = library['section_id']
        section_type = library['section_type']

        if section_type != 'photo':
            library_children = []
            library_labels = pms_connect.get_library_label_details(
                section_id=section_id)

            if library_labels:
                # Query the library once per label; every item returned for
                # that label gets the label's title appended.
                for label in library_labels:
                    library_children = pms_connect.get_library_children_details(
                        section_id=section_id,
                        section_type=section_type,
                        label_key=label['label_key'])

                    if library_children:
                        # NOTE: 'childern_list' (sic) is the actual key name.
                        children_list = library_children['childern_list']
                        # rating_key_list = [child['rating_key'] for child in children_list]

                        for rating_key in [
                                child['rating_key']
                                for child in children_list
                        ]:
                            if key_mappings.get(rating_key):
                                key_mappings[rating_key].append(
                                    label['label_title'])
                            else:
                                key_mappings[rating_key] = [
                                    label['label_title']
                                ]
                    else:
                        logger.warn(
                            u"PlexPy Libraries :: Unable to get a list of library items for section_id %s."
                            % section_id)

    error_keys = set()
    # Python 2 iteritems(); 'labels' is rebound from list to joined string.
    for rating_key, labels in key_mappings.iteritems():
        try:
            labels = ';'.join(labels)
            # Apply to the row itself and to any children/grandchildren rows.
            monitor_db.action(
                'UPDATE session_history_metadata SET labels = ? '
                'WHERE rating_key = ? OR parent_rating_key = ? OR grandparent_rating_key = ? ',
                args=[labels, rating_key, rating_key, rating_key])
        except:
            error_keys.add(rating_key)

    if error_keys:
        logger.info(
            u"PlexPy Libraries :: Updated all labels in database except for rating_keys: %s."
            % ', '.join(str(key) for key in error_keys))
    else:
        logger.info(u"PlexPy Libraries :: Updated all labels in database.")

    plexpy.CONFIG.UPDATE_LABELS = 0
    plexpy.CONFIG.write()

    return True
def write_session_history(self,
                          session=None,
                          import_metadata=None,
                          is_import=False,
                          import_ignore_interval=0):
    """Write a finished session to the session_history tables.

    Applies the logging criteria (media type, minimum play duration,
    user/library keep_history flags), then writes three rows keyed by the
    same id: session_history, session_history_media_info and
    session_history_metadata, and links grouped sessions via reference_id.

    Returns the session row id on success (or when the session is skipped
    by the criteria), False when user/library/metadata lookup fails.
    """
    section_id = session[
        'section_id'] if not is_import else import_metadata['section_id']

    if not is_import:
        user_data = users.Users()
        user_details = user_data.get_details(user_id=session['user_id'])

        library_data = libraries.Libraries()
        library_details = library_data.get_details(section_id=section_id)

        # Return false if failed to retrieve user or library details
        if not user_details or not library_details:
            return False

    if session:
        logging_enabled = False

        # Reload json from raw stream info
        if session.get('raw_stream_info'):
            raw_stream_info = json.loads(session['raw_stream_info'])
            # Don't overwrite id, session_key, stopped
            raw_stream_info.pop('id', None)
            raw_stream_info.pop('session_key', None)
            raw_stream_info.pop('stopped', None)
            session.update(raw_stream_info)

        # Missing keys default to '' so the big value dicts below never raise.
        session = defaultdict(str, session)

        # Determine the stop timestamp; live (non-import) sessions without a
        # stop time are force-stopped at "now".
        if is_import:
            if str(session['stopped']).isdigit():
                stopped = int(session['stopped'])
            else:
                stopped = int(time.time())
        elif session['stopped']:
            stopped = int(session['stopped'])
        else:
            stopped = int(time.time())
            self.set_session_state(session_key=session['session_key'],
                                   state='stopped',
                                   stopped=stopped)

        # Only movies, episodes and tracks with a numeric rating key qualify.
        if str(session['rating_key']).isdigit(
        ) and session['media_type'] in ('movie', 'episode', 'track'):
            logging_enabled = True
        else:
            logger.debug(
                u"Tautulli ActivityProcessor :: ratingKey %s not logged. Does not meet logging criteria. "
                u"Media type is '%s'" %
                (session['rating_key'], session['media_type']))
            return session['id']

        # Actual watched time excludes the paused time.
        if str(session['paused_counter']).isdigit():
            real_play_time = stopped - session['started'] - int(
                session['paused_counter'])
        else:
            real_play_time = stopped - session['started']

        if not is_import and plexpy.CONFIG.LOGGING_IGNORE_INTERVAL:
            if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                    (real_play_time < int(plexpy.CONFIG.LOGGING_IGNORE_INTERVAL)):
                logging_enabled = False
                logger.debug(
                    u"Tautulli ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
                    u"seconds, so we're not logging it." %
                    (session['rating_key'], str(real_play_time),
                     plexpy.CONFIG.LOGGING_IGNORE_INTERVAL))
        # Short tracks with enough duration look like skips; don't log them.
        if not is_import and session['media_type'] == 'track':
            if real_play_time < 15 and session['duration'] >= 30:
                logging_enabled = False
                logger.debug(
                    u"Tautulli ActivityProcessor :: Play duration for ratingKey %s is %s secs, "
                    u"looks like it was skipped so we're not logging it" %
                    (session['rating_key'], str(real_play_time)))
        elif is_import and import_ignore_interval:
            if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                    (real_play_time < int(import_ignore_interval)):
                logging_enabled = False
                logger.debug(
                    u"Tautulli ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
                    u"seconds, so we're not logging it." %
                    (session['rating_key'], str(real_play_time),
                     import_ignore_interval))

        # Respect per-user and per-library keep_history flags.
        if not is_import and not user_details['keep_history']:
            logging_enabled = False
            logger.debug(
                u"Tautulli ActivityProcessor :: History logging for user '%s' is disabled."
                % user_details['username'])
        elif not is_import and not library_details['keep_history']:
            logging_enabled = False
            logger.debug(
                u"Tautulli ActivityProcessor :: History logging for library '%s' is disabled."
                % library_details['section_name'])

        if logging_enabled:
            # Fetch metadata first so we can return false if it fails
            if not is_import:
                logger.debug(
                    u"Tautulli ActivityProcessor :: Fetching metadata for item ratingKey %s"
                    % session['rating_key'])
                pms_connect = pmsconnect.PmsConnect()
                metadata = pms_connect.get_metadata_details(
                    rating_key=str(session['rating_key']))
                if not metadata:
                    return False
                else:
                    media_info = {}
                    if 'media_info' in metadata and len(
                            metadata['media_info']) > 0:
                        media_info = metadata['media_info'][0]
            else:
                metadata = import_metadata
                ## TODO: Fix media info from imports. Temporary media info from import session.
                media_info = session

            # logger.debug(u"Tautulli ActivityProcessor :: Attempting to write sessionKey %s to session_history table..."
            #              % session['session_key'])
            keys = {'id': None}
            values = {
                'started': session['started'],
                'stopped': stopped,
                'rating_key': session['rating_key'],
                'parent_rating_key': session['parent_rating_key'],
                'grandparent_rating_key': session['grandparent_rating_key'],
                'media_type': session['media_type'],
                'user_id': session['user_id'],
                'user': session['user'],
                'ip_address': session['ip_address'],
                'paused_counter': session['paused_counter'],
                'player': session['player'],
                'product': session['product'],
                'product_version': session['product_version'],
                'platform': session['platform'],
                'platform_version': session['platform_version'],
                'profile': session['profile'],
                'machine_id': session['machine_id'],
                'bandwidth': session['bandwidth'],
                'location': session['location'],
                'quality_profile': session['quality_profile'],
                'view_offset': session['view_offset']
            }

            # logger.debug(u"Tautulli ActivityProcessor :: Writing sessionKey %s session_history transaction..."
            #              % session['session_key'])
            self.db.upsert(table_name='session_history',
                           key_dict=keys,
                           value_dict=values)

            # Check if we should group the session, select the last two rows from the user
            query = 'SELECT id, rating_key, view_offset, user_id, reference_id FROM session_history \
                WHERE user_id = ? ORDER BY id DESC LIMIT 2 '

            args = [session['user_id']]

            result = self.db.select(query=query, args=args)

            new_session = prev_session = None
            # Get the last insert row id
            last_id = self.db.last_insert_id()

            if len(result) > 1:
                new_session = {
                    'id': result[0]['id'],
                    'rating_key': result[0]['rating_key'],
                    'view_offset': result[0]['view_offset'],
                    'user_id': result[0]['user_id'],
                    'reference_id': result[0]['reference_id']
                }

                prev_session = {
                    'id': result[1]['id'],
                    'rating_key': result[1]['rating_key'],
                    'view_offset': result[1]['view_offset'],
                    'user_id': result[1]['user_id'],
                    'reference_id': result[1]['reference_id']
                }

            query = 'UPDATE session_history SET reference_id = ? WHERE id = ? '

            # If rating_key is the same in the previous session, then set the reference_id to the previous row, else set the reference_id to the new id
            if prev_session is None and new_session is None:
                args = [last_id, last_id]
            elif prev_session['rating_key'] == new_session[
                    'rating_key'] and prev_session[
                        'view_offset'] <= new_session['view_offset']:
                args = [prev_session['reference_id'], new_session['id']]
            else:
                args = [new_session['id'], new_session['id']]

            self.db.action(query=query, args=args)

            # logger.debug(u"Tautulli ActivityProcessor :: Successfully written history item, last id for session_history is %s"
            #              % last_id)

            # Write the session_history_media_info table
            # logger.debug(u"Tautulli ActivityProcessor :: Attempting to write to sessionKey %s session_history_media_info table..."
            #              % session['session_key'])
            keys = {'id': last_id}
            values = {
                'rating_key': session['rating_key'],
                'video_decision': session['video_decision'],
                'audio_decision': session['audio_decision'],
                'transcode_decision': session['transcode_decision'],
                'duration': session['duration'],
                'container': session['container'],
                'bitrate': session['bitrate'],
                'width': session['width'],
                'height': session['height'],
                'video_bit_depth': session['video_bit_depth'],
                'video_bitrate': session['video_bitrate'],
                'video_codec': session['video_codec'],
                'video_codec_level': session['video_codec_level'],
                'video_width': session['video_width'],
                'video_height': session['video_height'],
                'video_resolution': session['video_resolution'],
                'video_framerate': session['video_framerate'],
                'aspect_ratio': session['aspect_ratio'],
                'audio_codec': session['audio_codec'],
                'audio_bitrate': session['audio_bitrate'],
                'audio_channels': session['audio_channels'],
                'subtitle_codec': session['subtitle_codec'],
                'transcode_protocol': session['transcode_protocol'],
                'transcode_container': session['transcode_container'],
                'transcode_video_codec': session['transcode_video_codec'],
                'transcode_audio_codec': session['transcode_audio_codec'],
                'transcode_audio_channels':
                session['transcode_audio_channels'],
                'transcode_width': session['transcode_width'],
                'transcode_height': session['transcode_height'],
                'transcode_hw_requested': session['transcode_hw_requested'],
                'transcode_hw_full_pipeline':
                session['transcode_hw_full_pipeline'],
                'transcode_hw_decoding': session['transcode_hw_decoding'],
                'transcode_hw_decode': session['transcode_hw_decode'],
                'transcode_hw_decode_title':
                session['transcode_hw_decode_title'],
                'transcode_hw_encoding': session['transcode_hw_encoding'],
                'transcode_hw_encode': session['transcode_hw_encode'],
                'transcode_hw_encode_title':
                session['transcode_hw_encode_title'],
                'stream_container': session['stream_container'],
                'stream_container_decision':
                session['stream_container_decision'],
                'stream_bitrate': session['stream_bitrate'],
                'stream_video_decision': session['stream_video_decision'],
                'stream_video_bitrate': session['stream_video_bitrate'],
                'stream_video_codec': session['stream_video_codec'],
                'stream_video_codec_level':
                session['stream_video_codec_level'],
                'stream_video_bit_depth': session['stream_video_bit_depth'],
                'stream_video_height': session['stream_video_height'],
                'stream_video_width': session['stream_video_width'],
                'stream_video_resolution':
                session['stream_video_resolution'],
                'stream_video_framerate': session['stream_video_framerate'],
                'stream_audio_decision': session['stream_audio_decision'],
                'stream_audio_codec': session['stream_audio_codec'],
                'stream_audio_bitrate': session['stream_audio_bitrate'],
                'stream_audio_channels': session['stream_audio_channels'],
                'stream_subtitle_decision':
                session['stream_subtitle_decision'],
                'stream_subtitle_codec': session['stream_subtitle_codec'],
                'stream_subtitle_container':
                session['stream_subtitle_container'],
                'stream_subtitle_forced': session['stream_subtitle_forced'],
                'subtitles': session['subtitles'],
                'synced_version': session['synced_version'],
                'synced_version_profile': session['synced_version_profile'],
                'optimized_version': session['optimized_version'],
                'optimized_version_profile':
                session['optimized_version_profile'],
                'optimized_version_title': session['optimized_version_title']
            }

            # logger.debug(u"Tautulli ActivityProcessor :: Writing sessionKey %s session_history_media_info transaction..."
            #              % session['session_key'])
            self.db.upsert(table_name='session_history_media_info',
                           key_dict=keys,
                           value_dict=values)

            # Write the session_history_metadata table
            # Lists of people/tags are stored semicolon-joined.
            directors = ";".join(metadata['directors'])
            writers = ";".join(metadata['writers'])
            actors = ";".join(metadata['actors'])
            genres = ";".join(metadata['genres'])
            labels = ";".join(metadata['labels'])

            # logger.debug(u"Tautulli ActivityProcessor :: Attempting to write to sessionKey %s session_history_metadata table..."
            #              % session['session_key'])
            keys = {'id': last_id}
            values = {
                'rating_key': session['rating_key'],
                'parent_rating_key': session['parent_rating_key'],
                'grandparent_rating_key': session['grandparent_rating_key'],
                'title': session['title'],
                'parent_title': session['parent_title'],
                'grandparent_title': session['grandparent_title'],
                'full_title': session['full_title'],
                'media_index': metadata['media_index'],
                'parent_media_index': metadata['parent_media_index'],
                'section_id': metadata['section_id'],
                'thumb': metadata['thumb'],
                'parent_thumb': metadata['parent_thumb'],
                'grandparent_thumb': metadata['grandparent_thumb'],
                'art': metadata['art'],
                'media_type': session['media_type'],
                'year': metadata['year'],
                'originally_available_at':
                metadata['originally_available_at'],
                'added_at': metadata['added_at'],
                'updated_at': metadata['updated_at'],
                'last_viewed_at': metadata['last_viewed_at'],
                'content_rating': metadata['content_rating'],
                'summary': metadata['summary'],
                'tagline': metadata['tagline'],
                'rating': metadata['rating'],
                'duration': metadata['duration'],
                'guid': metadata['guid'],
                'directors': directors,
                'writers': writers,
                'actors': actors,
                'genres': genres,
                'studio': metadata['studio'],
                'labels': labels
            }

            # logger.debug(u"Tautulli ActivityProcessor :: Writing sessionKey %s session_history_metadata transaction..."
            #              % session['session_key'])
            self.db.upsert(table_name='session_history_metadata',
                           key_dict=keys,
                           value_dict=values)

        # Return the session row id when the session is successfully written to the database
        return session['id']
def refresh_libraries():
    """Refresh the library_sections table from the Plex server.

    Upserts one row per library section for the configured server,
    re-adds the Live TV pseudo-library, deactivates sections that no
    longer exist (or belong to another server), and seeds/extends the
    HOME_LIBRARY_CARDS config list with new section ids.

    Returns True on success, False when the server returns no sections,
    and None (implicit) when no PMS identifier is configured.
    """
    logger.info("Tautulli Libraries :: Requesting libraries list refresh...")

    server_id = plexpy.CONFIG.PMS_IDENTIFIER
    if not server_id:
        logger.error("Tautulli Libraries :: No PMS identifier, cannot refresh libraries. Verify server in settings.")
        return

    library_sections = pmsconnect.PmsConnect().get_library_details()

    if library_sections:
        monitor_db = database.MonitorDatabase()

        library_keys = []
        new_keys = []

        # Keep track of section_id to update is_active status
        section_ids = [common.LIVE_TV_SECTION_ID]  # Live TV library always considered active

        for section in library_sections:
            section_ids.append(helpers.cast_to_int(section['section_id']))

            section_keys = {'server_id': server_id,
                            'section_id': section['section_id']}
            section_values = {'server_id': server_id,
                              'section_id': section['section_id'],
                              'section_name': section['section_name'],
                              'section_type': section['section_type'],
                              'agent': section['agent'],
                              'thumb': section['thumb'],
                              'art': section['art'],
                              'count': section['count'],
                              'parent_count': section.get('parent_count', None),
                              'child_count': section.get('child_count', None),
                              'is_active': section['is_active']
                              }

            result = monitor_db.upsert('library_sections',
                                       key_dict=section_keys,
                                       value_dict=section_values)

            library_keys.append(section['section_id'])

            # upsert() reports 'insert' for sections seen for the first time.
            if result == 'insert':
                new_keys.append(section['section_id'])

        add_live_tv_library(refresh=True)

        # Deactivate rows for other servers or sections no longer present;
        # the IN-list placeholders are generated to match section_ids.
        query = 'UPDATE library_sections SET is_active = 0 WHERE server_id != ? OR ' \
                'section_id NOT IN ({})'.format(', '.join(['?'] * len(section_ids)))
        monitor_db.action(query=query,
                          args=[plexpy.CONFIG.PMS_IDENTIFIER] + section_ids)

        # First run: show every library on the home page; afterwards only
        # append newly discovered sections to the existing card list.
        if plexpy.CONFIG.HOME_LIBRARY_CARDS == ['first_run_wizard']:
            plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', library_keys)
            plexpy.CONFIG.write()
        else:
            new_keys = plexpy.CONFIG.HOME_LIBRARY_CARDS + new_keys
            plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', new_keys)
            plexpy.CONFIG.write()

        logger.info("Tautulli Libraries :: Libraries list refreshed.")
        return True
    else:
        logger.warn("Tautulli Libraries :: Unable to refresh libraries list.")
        return False
def _get_recently_added(self, media_type=None):
    """Collect items added to the server within this report's time window.

    Pages through the PMS recently-added feed (10 at a time) until a page
    contains items older than self.start_time, then filters to the
    configured libraries and groups results by media type: flat list for
    movies/videos, shows with nested season/episode groups, artists with
    nested albums.

    Returns the list of metadata dicts for the requested media_type.
    """
    from plexpy.notification_handler import format_group_index

    pms_connect = pmsconnect.PmsConnect()

    recently_added = []
    done = False
    start = 0

    # Page until a page yields fewer than 10 items newer than start_time,
    # which means we've reached items outside the window.
    while not done:
        recent_items = pms_connect.get_recently_added_details(
            start=str(start), count='10', media_type=media_type)
        filtered_items = [
            i for i in recent_items['recently_added']
            if self.start_time < helpers.cast_to_int(i['added_at'])
        ]
        if len(filtered_items) < 10:
            done = True
        else:
            start += 10

        recently_added.extend(filtered_items)

    if media_type in ('movie', 'other_video'):
        movie_list = []
        for item in recently_added:
            # Filter included libraries
            if item['section_id'] not in self.config['incl_libraries']:
                continue

            if self.start_time < helpers.cast_to_int(
                    item['added_at']) < self.end_time:
                movie_list.append(item)

        recently_added = movie_list

    if media_type == 'show':
        shows_list = []
        show_rating_keys = []
        for item in recently_added:
            # Filter included libraries
            if item['section_id'] not in self.config['incl_libraries']:
                continue

            # Resolve the show-level rating key regardless of whether the
            # added item was a show, season or episode.
            if item['media_type'] == 'show':
                show_rating_key = item['rating_key']
            elif item['media_type'] == 'season':
                show_rating_key = item['parent_rating_key']
            elif item['media_type'] == 'episode':
                show_rating_key = item['grandparent_rating_key']

            # Skip shows already processed from an earlier added item.
            if show_rating_key in show_rating_keys:
                continue

            show_metadata = pms_connect.get_metadata_details(
                show_rating_key, media_info=False)
            children = pms_connect.get_item_children(
                show_rating_key, get_grandchildren=True)
            filtered_children = [
                i for i in children['children_list']
                if self.start_time < helpers.cast_to_int(i['added_at']) <
                self.end_time
            ]
            # Pre-sort by season index: groupby below requires sorted input.
            filtered_children.sort(
                key=lambda x: helpers.cast_to_int(x['parent_media_index']))

            if not filtered_children:
                continue

            seasons = []
            for (index, title), children in groupby(
                    filtered_children,
                    key=lambda x: (x['parent_media_index'], x['parent_title'])):
                episodes = list(children)
                num, num00 = format_group_index([
                    helpers.cast_to_int(d['media_index']) for d in episodes
                ])

                seasons.append({
                    'media_index': index,
                    'title': title,
                    'episode_range': num00,
                    'episode_count': len(episodes),
                    'episode': episodes
                })

            num, num00 = format_group_index(
                [helpers.cast_to_int(d['media_index']) for d in seasons])

            show_metadata['season_range'] = num00
            show_metadata['season_count'] = len(seasons)
            show_metadata['season'] = seasons

            shows_list.append(show_metadata)
            show_rating_keys.append(show_rating_key)

        recently_added = shows_list

    if media_type == 'artist':
        artists_list = []
        artist_rating_keys = []
        for item in recently_added:
            # Filter included libraries
            if item['section_id'] not in self.config['incl_libraries']:
                continue

            # Resolve the artist-level rating key from artist/album/track.
            if item['media_type'] == 'artist':
                artist_rating_key = item['rating_key']
            elif item['media_type'] == 'album':
                artist_rating_key = item['parent_rating_key']
            elif item['media_type'] == 'track':
                artist_rating_key = item['grandparent_rating_key']

            # Skip artists already processed from an earlier added item.
            if artist_rating_key in artist_rating_keys:
                continue

            artist_metadata = pms_connect.get_metadata_details(
                artist_rating_key, media_info=False)
            children = pms_connect.get_item_children(artist_rating_key)
            filtered_children = [
                i for i in children['children_list']
                if self.start_time < helpers.cast_to_int(i['added_at']) <
                self.end_time
            ]
            filtered_children.sort(key=lambda x: x['added_at'])

            if not filtered_children:
                continue

            albums = []
            for a in filtered_children:
                album_metadata = pms_connect.get_metadata_details(
                    a['rating_key'], media_info=False)
                album_metadata['track_count'] = helpers.cast_to_int(
                    album_metadata['children_count'])
                albums.append(album_metadata)

            artist_metadata['album_count'] = len(albums)
            artist_metadata['album'] = albums

            artists_list.append(artist_metadata)
            artist_rating_keys.append(artist_rating_key)

        recently_added = artists_list

    return recently_added
def check_active_sessions(ws_request=False):
    """Poll the PMS for current activity and reconcile it with the sessions
    table in the database.

    For every tracked session:

    * still present on the server -> fire pause/resume notifications on
      state changes, accumulate ``paused_counter`` (interval polling only),
      track buffer warnings against the configured threshold, and fire
      ``on_watched`` once the view offset passes the watched percent;
    * no longer present -> mark it stopped, fire stop/watched
      notifications, and flush it into the history tables (retrying up to
      ``SESSION_DB_WRITE_ATTEMPTS`` times before giving up).

    Any server session not yet tracked is written to the sessions table.
    Runs entirely under ``monitor_lock`` to serialize with other monitor
    callbacks.

    :param ws_request: True when invoked from a websocket event rather than
        the timed monitor; suppresses the interval-based paused_counter
        increment, since websocket events have no fixed polling interval.
    """
    with monitor_lock:
        monitor_db = database.MonitorDatabase()
        monitor_process = activity_processor.ActivityProcessor()
        db_streams = monitor_process.get_sessions()

        # Clear the metadata cache
        for stream in db_streams:
            activity_handler.delete_metadata_cache(stream['session_key'])

        pms_connect = pmsconnect.PmsConnect()
        session_list = pms_connect.get_current_activity()

        logger.debug(u"Tautulli Monitor :: Checking for active streams.")

        if session_list:
            media_container = session_list['sessions']

            # Check our temp table for what we must do with the new streams
            for stream in db_streams:
                # DB values are matched as strings against the server's
                # string keys.
                if any(d['session_key'] == str(stream['session_key'])
                       and d['rating_key'] == str(stream['rating_key'])
                       for d in media_container):
                    # The user's session is still active
                    for session in media_container:
                        if session['session_key'] == str(stream['session_key']) and \
                                session['rating_key'] == str(stream['rating_key']):
                            # The user is still playing the same media item
                            # Here we can check the play states
                            if session['state'] != stream['state']:
                                if session['state'] == 'paused':
                                    logger.debug(
                                        u"Tautulli Monitor :: Session %s paused."
                                        % stream['session_key'])

                                    # stream.copy() gives the notifier its
                                    # own (shallow) snapshot of the dict.
                                    plexpy.NOTIFY_QUEUE.put({
                                        'stream_data': stream.copy(),
                                        'notify_action': 'on_pause'
                                    })

                                if session['state'] == 'playing' and stream[
                                        'state'] == 'paused':
                                    logger.debug(
                                        u"Tautulli Monitor :: Session %s resumed."
                                        % stream['session_key'])

                                    plexpy.NOTIFY_QUEUE.put({
                                        'stream_data': stream.copy(),
                                        'notify_action': 'on_resume'
                                    })

                            if stream['state'] == 'paused' and not ws_request:
                                # The stream is still paused so we need to increment the paused_counter
                                # Using the set config parameter as the interval, probably not the most accurate but
                                # it will have to do for now. If it's a websocket request don't use this method.
                                paused_counter = int(
                                    stream['paused_counter']
                                ) + plexpy.CONFIG.MONITORING_INTERVAL
                                monitor_db.action(
                                    'UPDATE sessions SET paused_counter = ? '
                                    'WHERE session_key = ? AND rating_key = ?',
                                    [
                                        paused_counter,
                                        stream['session_key'],
                                        stream['rating_key']
                                    ])

                            if session[
                                    'state'] == 'buffering' and plexpy.CONFIG.BUFFER_THRESHOLD > 0:
                                # The stream is buffering so we need to increment the buffer_count
                                # We're going just increment on every monitor ping,
                                # would be difficult to keep track otherwise
                                monitor_db.action(
                                    'UPDATE sessions SET buffer_count = buffer_count + 1 '
                                    'WHERE session_key = ? AND rating_key = ?',
                                    [
                                        stream['session_key'],
                                        stream['rating_key']
                                    ])

                                # Check the current buffer count and last buffer to determine if we should notify
                                buffer_values = monitor_db.select(
                                    'SELECT buffer_count, buffer_last_triggered '
                                    'FROM sessions '
                                    'WHERE session_key = ? AND rating_key = ?',
                                    [
                                        stream['session_key'],
                                        stream['rating_key']
                                    ])

                                if buffer_values[0][
                                        'buffer_count'] >= plexpy.CONFIG.BUFFER_THRESHOLD:
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    # Our first buffer notification
                                    if buffer_values[0][
                                            'buffer_count'] == plexpy.CONFIG.BUFFER_THRESHOLD:
                                        logger.info(
                                            u"Tautulli Monitor :: User '%s' has triggered a buffer warning."
                                            % stream['user'])
                                        # Set the buffer trigger time
                                        monitor_db.action(
                                            'UPDATE sessions '
                                            'SET buffer_last_triggered = strftime("%s","now") '
                                            'WHERE session_key = ? AND rating_key = ?',
                                            [
                                                stream['session_key'],
                                                stream['rating_key']
                                            ])

                                        plexpy.NOTIFY_QUEUE.put({
                                            'stream_data': stream.copy(),
                                            'notify_action': 'on_buffer'
                                        })
                                    else:
                                        # Subsequent buffer notifications after wait time
                                        if int(time.time()) > buffer_values[0]['buffer_last_triggered'] + \
                                                plexpy.CONFIG.BUFFER_WAIT:
                                            logger.info(
                                                u"Tautulli Monitor :: User '%s' has triggered multiple buffer warnings."
                                                % stream['user'])
                                            # Set the buffer trigger time
                                            monitor_db.action(
                                                'UPDATE sessions '
                                                'SET buffer_last_triggered = strftime("%s","now") '
                                                'WHERE session_key = ? AND rating_key = ?',
                                                [
                                                    stream['session_key'],
                                                    stream['rating_key']
                                                ])

                                            plexpy.NOTIFY_QUEUE.put({
                                                'stream_data': stream.copy(),
                                                'notify_action': 'on_buffer'
                                            })

                                logger.debug(
                                    u"Tautulli Monitor :: Session %s is buffering. Count is now %s. Last triggered %s."
                                    % (stream['session_key'],
                                       buffer_values[0]['buffer_count'],
                                       buffer_values[0]['buffer_last_triggered'])
                                )

                            # Check if the user has reached the offset in the media we defined as the "watched" percent
                            # Don't trigger if state is buffer as some clients push the progress to the end when
                            # buffering on start.
                            if session['state'] != 'buffering':
                                progress_percent = helpers.get_percent(
                                    session['view_offset'],
                                    session['duration'])
                                notify_states = notification_handler.get_notify_state(
                                    session=session)
                                # on_watched fires at most once per session:
                                # skip if it is already in the notify state.
                                if (session['media_type'] == 'movie' and progress_percent >= plexpy.CONFIG.MOVIE_WATCHED_PERCENT or
                                        session['media_type'] == 'episode' and progress_percent >= plexpy.CONFIG.TV_WATCHED_PERCENT or
                                        session['media_type'] == 'track' and progress_percent >= plexpy.CONFIG.MUSIC_WATCHED_PERCENT) \
                                        and not any(d['notify_action'] == 'on_watched' for d in notify_states):
                                    plexpy.NOTIFY_QUEUE.put({
                                        'stream_data': stream.copy(),
                                        'notify_action': 'on_watched'
                                    })

                else:
                    # The user has stopped playing a stream
                    if stream['state'] != 'stopped':
                        logger.debug(
                            u"Tautulli Monitor :: Session %s stopped."
                            % stream['session_key'])

                        if not stream['stopped']:
                            # Set the stream stop time
                            stream['stopped'] = int(time.time())
                            monitor_db.action(
                                'UPDATE sessions SET stopped = ?, state = ? '
                                'WHERE session_key = ? AND rating_key = ?',
                                [
                                    stream['stopped'], 'stopped',
                                    stream['session_key'],
                                    stream['rating_key']
                                ])

                        # Final watched check using the last known offset
                        # before the stream disappeared.
                        progress_percent = helpers.get_percent(
                            stream['view_offset'], stream['duration'])
                        notify_states = notification_handler.get_notify_state(
                            session=stream)
                        if (stream['media_type'] == 'movie' and progress_percent >= plexpy.CONFIG.MOVIE_WATCHED_PERCENT or
                                stream['media_type'] == 'episode' and progress_percent >= plexpy.CONFIG.TV_WATCHED_PERCENT or
                                stream['media_type'] == 'track' and progress_percent >= plexpy.CONFIG.MUSIC_WATCHED_PERCENT) \
                                and not any(d['notify_action'] == 'on_watched' for d in notify_states):
                            plexpy.NOTIFY_QUEUE.put({
                                'stream_data': stream.copy(),
                                'notify_action': 'on_watched'
                            })

                        plexpy.NOTIFY_QUEUE.put({
                            'stream_data': stream.copy(),
                            'notify_action': 'on_stop'
                        })

                    # Write the item history on playback stop
                    row_id = monitor_process.write_session_history(
                        session=stream)

                    if row_id:
                        # If session is written to the databaase successfully, remove the session from the session table
                        logger.debug(
                            u"Tautulli Monitor :: Removing sessionKey %s ratingKey %s from session queue"
                            % (stream['session_key'], stream['rating_key']))
                        monitor_process.delete_session(row_id=row_id)
                    else:
                        stream['write_attempts'] += 1

                        if stream[
                                'write_attempts'] < plexpy.CONFIG.SESSION_DB_WRITE_ATTEMPTS:
                            logger.warn(u"Tautulli Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \
                                        "Will try again on the next pass. Write attempt %s."
                                        % (stream['session_key'], stream['rating_key'], str(stream['write_attempts'])))
                            monitor_process.increment_write_attempts(
                                session_key=stream['session_key'])
                        else:
                            # Retries exhausted: drop the session so it does
                            # not block the queue forever.
                            logger.warn(u"Tautulli Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \
                                        "Removing session from the database. Write attempt %s."
                                        % (stream['session_key'], stream['rating_key'], str(stream['write_attempts'])))
                            logger.debug(
                                u"Tautulli Monitor :: Removing sessionKey %s ratingKey %s from session queue"
                                % (stream['session_key'], stream['rating_key']))
                            monitor_process.delete_session(
                                session_key=stream['session_key'])

            # Process the newly received session data
            for session in media_container:
                new_session = monitor_process.write_session(session)

                if new_session:
                    logger.debug(
                        u"Tautulli Monitor :: Session %s started by user %s with ratingKey %s."
                        % (session['session_key'], session['user_id'],
                           session['rating_key']))

        else:
            logger.debug(u"Tautulli Monitor :: Unable to read session list.")
def check_recently_added():
    """Fire 'created' timeline notifications for items recently added to Plex.

    Fetches the 10 most recent additions and, for each item added within the
    last monitoring interval (offset by ``NOTIFY_RECENTLY_ADDED_DELAY`` to
    let PMS finish metadata processing) in a library with
    ``do_notify_created`` enabled, either:

    * notifies per child item (movies, or each episode/track) when
      ``NOTIFY_RECENTLY_ADDED_GRANDPARENT`` is off, or
    * notifies once for the newest item, promoted to its grandparent
      (show/artist) metadata, when the setting is on.

    Notifications are dispatched on their own threads so the DB work is not
    blocked. Runs under ``monitor_lock``.
    """
    with monitor_lock:
        # add delay to allow for metadata processing
        delay = plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY
        time_threshold = int(time.time()) - delay
        time_interval = plexpy.CONFIG.MONITORING_INTERVAL

        pms_connect = pmsconnect.PmsConnect()
        recently_added_list = pms_connect.get_recently_added_details(
            count='10')

        library_data = libraries.Libraries()

        if recently_added_list:
            recently_added = recently_added_list['recently_added']

            for item in recently_added:
                library_details = library_data.get_details(
                    section_id=item['section_id'])

                if not library_details['do_notify_created']:
                    continue

                metadata = []

                # Only items that landed inside the last monitoring window
                # (after the processing delay) are considered.
                if 0 < time_threshold - int(item['added_at']) <= time_interval:
                    if item['media_type'] == 'movie':
                        metadata_list = pms_connect.get_metadata_details(
                            item['rating_key'])
                        if metadata_list:
                            metadata = [metadata_list['metadata']]
                        else:
                            logger.error(u"PlexPy Monitor :: Unable to retrieve metadata for rating_key %s" \
                                         % str(item['rating_key']))

                    else:
                        # Shows/artists: expand to their child items.
                        metadata_list = pms_connect.get_metadata_children_details(
                            item['rating_key'])
                        if metadata_list:
                            metadata = metadata_list['metadata']
                        else:
                            logger.error(u"PlexPy Monitor :: Unable to retrieve children metadata for rating_key %s" \
                                         % str(item['rating_key']))

                if metadata:
                    if not plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_GRANDPARENT:
                        # NOTE(review): this inner loop rebinds the outer
                        # loop variable 'item'; harmless because the outer
                        # 'for' reassigns it each iteration, but confusing.
                        for item in metadata:
                            library_details = library_data.get_details(
                                section_id=item['section_id'])

                            if 0 < time_threshold - int(
                                    item['added_at']) <= time_interval:
                                logger.debug(
                                    u"PlexPy Monitor :: Library item %s has been added to Plex."
                                    % str(item['rating_key']))
                                # Check if any notification agents have notifications enabled
                                if any(d['on_created'] for d in notifiers.
                                       available_notification_agents()):
                                    # Fire off notifications
                                    threading.Thread(
                                        target=notification_handler.notify_timeline,
                                        kwargs=dict(
                                            timeline_data=item,
                                            notify_action='created')).start()

                    else:
                        # Grandparent mode: only the most recently added
                        # child triggers a single notification.
                        item = max(metadata, key=lambda x: x['added_at'])

                        if 0 < time_threshold - int(
                                item['added_at']) <= time_interval:
                            if item['media_type'] == 'episode' or item[
                                    'media_type'] == 'track':
                                # Promote to the show/artist metadata.
                                metadata_list = pms_connect.get_metadata_details(
                                    item['grandparent_rating_key'])

                                if metadata_list:
                                    item = metadata_list['metadata']
                                else:
                                    logger.error(u"PlexPy Monitor :: Unable to retrieve grandparent metadata for grandparent_rating_key %s" \
                                                 % str(item['rating_key']))

                            logger.debug(
                                u"PlexPy Monitor :: Library item %s has been added to Plex."
                                % str(item['rating_key']))

                            # Check if any notification agents have notifications enabled
                            if any(d['on_created'] for d in
                                   notifiers.available_notification_agents()):
                                # Fire off notifications
                                threading.Thread(
                                    target=notification_handler.notify_timeline,
                                    kwargs=dict(
                                        timeline_data=item,
                                        notify_action='created')).start()
def write_session_history(self,
                          session=None,
                          import_metadata=None,
                          is_import=False,
                          import_ignore_interval=0):
    """Flush a finished session into the history tables.

    Applies the configured logging criteria (per-media-type logging toggles,
    minimum play duration, skipped-track heuristic, per-user and per-library
    keep_history flags) and, when the session qualifies, writes one row each
    to ``session_history``, ``session_history_media_info`` and
    ``session_history_metadata``, then groups the new row with the user's
    previous row (shared ``reference_id``) when it continues the same item.

    :param session: session dict to record (live session row, or an
        imported record when ``is_import`` is True).
    :param import_metadata: metadata dict to use instead of fetching from
        the PMS (import path only).
    :param is_import: True when called from the history importer; skips
        user/library lookups, live-session state updates and metadata
        fetches.
    :param import_ignore_interval: minimum play time (secs) for imported
        movie/episode records; shorter plays are skipped.
    :returns: True on success, False when user/library details or metadata
        could not be retrieved.
        NOTE(review): a non-qualifying session falls through the
        ``logging_enabled`` branch and returns None (falsy) — callers that
        retry on a falsy result will re-attempt such sessions; confirm
        intended.
    """
    section_id = session[
        'section_id'] if not is_import else import_metadata['section_id']

    if not is_import:
        user_data = users.Users()
        user_details = user_data.get_details(user_id=session['user_id'])
        library_data = libraries.Libraries()
        library_details = library_data.get_details(section_id=section_id)

        # Return false if failed to retrieve user or library details
        if not user_details or not library_details:
            return False

    if session:
        logging_enabled = False

        # Determine the stop time: trust a numeric 'stopped' value,
        # otherwise stamp "now" (and, for live sessions, persist the
        # stopped state back to the sessions table).
        if is_import:
            if str(session['stopped']).isdigit():
                stopped = int(session['stopped'])
            else:
                stopped = int(time.time())
        elif session['stopped']:
            stopped = int(session['stopped'])
        else:
            stopped = int(time.time())
            self.set_session_state(session_key=session['session_key'],
                                   state='stopped',
                                   stopped=stopped)

        # Per-media-type logging toggles; a non-numeric rating_key (e.g.
        # live TV) is never logged.
        if plexpy.CONFIG.MOVIE_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                session['media_type'] == 'movie':
            logging_enabled = True
        elif plexpy.CONFIG.TV_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                session['media_type'] == 'episode':
            logging_enabled = True
        elif plexpy.CONFIG.MUSIC_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                session['media_type'] == 'track':
            logging_enabled = True
        else:
            logger.debug(
                u"PlexPy ActivityProcessor :: ratingKey %s not logged. Does not meet logging criteria. "
                u"Media type is '%s'" %
                (session['rating_key'], session['media_type']))

        # Actual play time excludes time spent paused.
        if str(session['paused_counter']).isdigit():
            real_play_time = stopped - session['started'] - int(
                session['paused_counter'])
        else:
            real_play_time = stopped - session['started']

        if not is_import and plexpy.CONFIG.LOGGING_IGNORE_INTERVAL:
            if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                    (real_play_time < int(plexpy.CONFIG.LOGGING_IGNORE_INTERVAL)):
                logging_enabled = False
                logger.debug(
                    u"PlexPy ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
                    u"seconds, so we're not logging it." %
                    (session['rating_key'], str(real_play_time),
                     plexpy.CONFIG.LOGGING_IGNORE_INTERVAL))
        # Heuristic: a track played under 15s (of a >=30s duration) was
        # skipped, not listened to.
        if not is_import and session['media_type'] == 'track':
            if real_play_time < 15 and session['duration'] >= 30:
                logging_enabled = False
                logger.debug(
                    u"PlexPy ActivityProcessor :: Play duration for ratingKey %s is %s secs, "
                    u"looks like it was skipped so we're not logging it" %
                    (session['rating_key'], str(real_play_time)))
        elif is_import and import_ignore_interval:
            if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                    (real_play_time < int(import_ignore_interval)):
                logging_enabled = False
                logger.debug(
                    u"PlexPy ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
                    u"seconds, so we're not logging it." %
                    (session['rating_key'], str(real_play_time),
                     import_ignore_interval))

        # user_details/library_details only exist on the non-import path;
        # guarded by 'not is_import'.
        if not is_import and not user_details['keep_history']:
            logging_enabled = False
            logger.debug(
                u"PlexPy ActivityProcessor :: History logging for user '%s' is disabled."
                % user_details['username'])
        elif not is_import and not library_details['keep_history']:
            logging_enabled = False
            logger.debug(
                u"PlexPy ActivityProcessor :: History logging for library '%s' is disabled."
                % library_details['section_name'])

        if logging_enabled:
            # Fetch metadata first so we can return false if it fails
            if not is_import:
                logger.debug(
                    u"PlexPy ActivityProcessor :: Fetching metadata for item ratingKey %s"
                    % session['rating_key'])
                pms_connect = pmsconnect.PmsConnect()
                result = pms_connect.get_metadata_details(
                    rating_key=str(session['rating_key']))
                if result:
                    metadata = result['metadata']
                else:
                    return False
            else:
                metadata = import_metadata

            # logger.debug(u"PlexPy ActivityProcessor :: Attempting to write to session_history table...")
            query = 'INSERT INTO session_history (started, stopped, rating_key, parent_rating_key, ' \
                    'grandparent_rating_key, media_type, user_id, user, ip_address, paused_counter, player, ' \
                    'platform, machine_id, view_offset) VALUES ' \
                    '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'

            args = [
                session['started'], stopped, session['rating_key'],
                session['parent_rating_key'],
                session['grandparent_rating_key'], session['media_type'],
                session['user_id'], session['user'], session['ip_address'],
                session['paused_counter'], session['player'],
                session['platform'], session['machine_id'],
                session['view_offset']
            ]

            # logger.debug(u"PlexPy ActivityProcessor :: Writing session_history transaction...")
            self.db.action(query=query, args=args)

            # Check if we should group the session, select the last two rows from the user
            query = 'SELECT id, rating_key, view_offset, user_id, reference_id FROM session_history \
                WHERE user_id = ? ORDER BY id DESC LIMIT 2 '

            args = [session['user_id']]

            result = self.db.select(query=query, args=args)

            new_session = prev_session = last_id = None

            if len(result) > 1:
                # result[0] is the row just inserted; result[1] is the
                # user's previous session.
                new_session = {
                    'id': result[0]['id'],
                    'rating_key': result[0]['rating_key'],
                    'view_offset': result[0]['view_offset'],
                    'user_id': result[0]['user_id'],
                    'reference_id': result[0]['reference_id']
                }

                prev_session = {
                    'id': result[1]['id'],
                    'rating_key': result[1]['rating_key'],
                    'view_offset': result[1]['view_offset'],
                    'user_id': result[1]['user_id'],
                    'reference_id': result[1]['reference_id']
                }

            else:
                # Get the last insert row id
                result = self.db.select(
                    query='SELECT last_insert_rowid() AS last_id')
                last_id = result[0]['last_id'] if result else None

            query = 'UPDATE session_history SET reference_id = ? WHERE id = ? '
            # If rating_key is the same in the previous session, then set the reference_id to the previous row, else set the reference_id to the new id
            # NOTE(review): '== None' chained comparison works here (both
            # are either None or dicts) but 'is None' would be idiomatic.
            if prev_session == new_session == None:
                args = [last_id, last_id]
            elif prev_session['rating_key'] == new_session[
                    'rating_key'] and prev_session[
                        'view_offset'] <= new_session['view_offset']:
                args = [prev_session['reference_id'], new_session['id']]
            else:
                args = [new_session['id'], new_session['id']]

            self.db.action(query=query, args=args)

            # logger.debug(u"PlexPy ActivityProcessor :: Successfully written history item, last id for session_history is %s"
            #              % last_id)

            # Write the session_history_media_info table

            # Generate a combined transcode decision value
            if session['video_decision'] == 'transcode' or session[
                    'audio_decision'] == 'transcode':
                transcode_decision = 'transcode'
            elif session['video_decision'] == 'copy' or session[
                    'audio_decision'] == 'copy':
                transcode_decision = 'copy'
            else:
                transcode_decision = 'direct play'

            # logger.debug(u"PlexPy ActivityProcessor :: Attempting to write to session_history_media_info table...")
            # last_insert_rowid() ties this row to the session_history row
            # inserted above — do not reorder these statements.
            query = 'INSERT INTO session_history_media_info (id, rating_key, video_decision, audio_decision, ' \
                    'duration, width, height, container, video_codec, audio_codec, bitrate, video_resolution, ' \
                    'video_framerate, aspect_ratio, audio_channels, transcode_protocol, transcode_container, ' \
                    'transcode_video_codec, transcode_audio_codec, transcode_audio_channels, transcode_width, ' \
                    'transcode_height, transcode_decision) VALUES ' \
                    '(last_insert_rowid(), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'

            args = [
                session['rating_key'], session['video_decision'],
                session['audio_decision'], session['duration'],
                session['width'], session['height'], session['container'],
                session['video_codec'], session['audio_codec'],
                session['bitrate'], session['video_resolution'],
                session['video_framerate'], session['aspect_ratio'],
                session['audio_channels'], session['transcode_protocol'],
                session['transcode_container'],
                session['transcode_video_codec'],
                session['transcode_audio_codec'],
                session['transcode_audio_channels'],
                session['transcode_width'], session['transcode_height'],
                transcode_decision
            ]

            # logger.debug(u"PlexPy ActivityProcessor :: Writing session_history_media_info transaction...")
            self.db.action(query=query, args=args)

            # Write the session_history_metadata table
            directors = ";".join(metadata['directors'])
            writers = ";".join(metadata['writers'])
            actors = ";".join(metadata['actors'])
            genres = ";".join(metadata['genres'])
            labels = ";".join(metadata['labels'])

            # Build media item title
            if session['media_type'] == 'episode' or session[
                    'media_type'] == 'track':
                full_title = '%s - %s' % (metadata['grandparent_title'],
                                          metadata['title'])
            elif session['media_type'] == 'movie':
                full_title = metadata['title']
            else:
                full_title = metadata['title']

            # logger.debug(u"PlexPy ActivityProcessor :: Attempting to write to session_history_metadata table...")
            query = 'INSERT INTO session_history_metadata (id, rating_key, parent_rating_key, ' \
                    'grandparent_rating_key, title, parent_title, grandparent_title, full_title, media_index, ' \
                    'parent_media_index, section_id, thumb, parent_thumb, grandparent_thumb, art, media_type, ' \
                    'year, originally_available_at, added_at, updated_at, last_viewed_at, content_rating, ' \
                    'summary, tagline, rating, duration, guid, directors, writers, actors, genres, studio, labels) ' \
                    'VALUES (last_insert_rowid(), ' \
                    '?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'

            args = [
                session['rating_key'], session['parent_rating_key'],
                session['grandparent_rating_key'], session['title'],
                session['parent_title'], session['grandparent_title'],
                full_title, metadata['media_index'],
                metadata['parent_media_index'], metadata['section_id'],
                metadata['thumb'], metadata['parent_thumb'],
                metadata['grandparent_thumb'], metadata['art'],
                session['media_type'], metadata['year'],
                metadata['originally_available_at'], metadata['added_at'],
                metadata['updated_at'], metadata['last_viewed_at'],
                metadata['content_rating'], metadata['summary'],
                metadata['tagline'], metadata['rating'],
                metadata['duration'], metadata['guid'], directors, writers,
                actors, genres, metadata['studio'], labels
            ]

            # logger.debug(u"PlexPy ActivityProcessor :: Writing session_history_metadata transaction...")
            self.db.action(query=query, args=args)

            # Return true when the session is successfully written to the database
            return True