def check_recently_added():
    with monitor_lock:
        # Add delay to allow for metadata processing
        delay = plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY
        time_threshold = int(time.time()) - delay
        time_interval = plexpy.CONFIG.MONITORING_INTERVAL

        pms_connect = pmsconnect.PmsConnect()
        recently_added_list = pms_connect.get_recently_added_details(count='10')

        if recently_added_list:
            recently_added = recently_added_list['recently_added']

            for item in recently_added:
                metadata = []

                if 0 < time_threshold - int(item['added_at']) <= time_interval:
                    if item['media_type'] == 'movie':
                        metadata_list = pms_connect.get_metadata_details(item['rating_key'])
                        if metadata_list:
                            metadata = [metadata_list['metadata']]
                        else:
                            logger.error(u"PlexPy Monitor :: Unable to retrieve metadata for rating_key %s" \
                                         % str(item['rating_key']))

                    else:
                        metadata_list = pms_connect.get_metadata_children_details(item['rating_key'])
                        if metadata_list:
                            metadata = metadata_list['metadata']
                        else:
                            logger.error(u"PlexPy Monitor :: Unable to retrieve children metadata for rating_key %s" \
                                         % str(item['rating_key']))

                if metadata:
                    if not plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_GRANDPARENT:
                        for item in metadata:
                            if 0 < time_threshold - int(item['added_at']) <= time_interval:
                                logger.debug(u"PlexPy Monitor :: Library item %s has been added to Plex."
                                             % str(item['rating_key']))
                                # Fire off notifications
                                threading.Thread(target=notification_handler.notify_timeline,
                                                 kwargs=dict(timeline_data=item, notify_action='created')).start()

                    else:
                        item = max(metadata, key=lambda x: x['added_at'])

                        if 0 < time_threshold - int(item['added_at']) <= time_interval:
                            if item['media_type'] == 'episode' or item['media_type'] == 'track':
                                metadata_list = pms_connect.get_metadata_details(item['grandparent_rating_key'])
                                if metadata_list:
                                    item = metadata_list['metadata']
                                else:
                                    logger.error(u"PlexPy Monitor :: Unable to retrieve grandparent metadata for grandparent_rating_key %s" \
                                                 % str(item['rating_key']))

                            logger.debug(u"PlexPy Monitor :: Library item %s has been added to Plex."
                                         % str(item['rating_key']))
                            # Fire off notifications
                            threading.Thread(target=notification_handler.notify_timeline,
                                             kwargs=dict(timeline_data=item, notify_action='created')).start()
def check_server_updates():
    with monitor_lock:
        logger.info(u"PlexPy Monitor :: Checking for PMS updates...")

        pms_connect = pmsconnect.PmsConnect()
        server_identity = pms_connect.get_server_identity()
        update_status = pms_connect.get_update_staus()

        if server_identity and update_status:
            version = server_identity['version']
            logger.info(u"PlexPy Monitor :: Current PMS version: %s", version)

            if update_status['state'] == 'available':
                update_version = update_status['version']
                logger.info(u"PlexPy Monitor :: PMS update available version: %s", update_version)

                # Check if any notification agents have notifications enabled
                if any(d['on_pmsupdate'] for d in notifiers.available_notification_agents()):
                    # Fire off notifications
                    threading.Thread(target=notification_handler.notify_timeline,
                                     kwargs=dict(notify_action='pmsupdate')).start()
            else:
                logger.info(u"PlexPy Monitor :: No PMS update available.")
def pms_image_proxy(self, img='', width='0', height='0', fallback=None, **kwargs):
    if img != '':
        try:
            pms_connect = pmsconnect.PmsConnect()
            result = pms_connect.get_image(img, width, height)
            cherrypy.response.headers['Content-type'] = result[1]
            return result[0]
        except:
            logger.warn('Image proxy queried but errors occurred.')
            if fallback == 'poster':
                logger.info('Trying fallback image...')
                try:
                    fallback_image = open(self.interface_dir + common.DEFAULT_POSTER_THUMB, 'rb')
                    cherrypy.response.headers['Content-type'] = 'image/png'
                    return fallback_image
                except IOError as e:
                    logger.error('Unable to read fallback image. %s' % e)
            return None
def get_metadata(self):
    pms_connect = pmsconnect.PmsConnect()
    metadata_list = pms_connect.get_metadata_details(self.get_rating_key())

    if metadata_list:
        return metadata_list['metadata']

    return None
def get_current_activity_header(self, **kwargs):
    try:
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_current_activity()
    except IOError as e:
        return serve_template(templatename="current_activity_header.html", data=None)
def get_recently_added(self, count='0', **kwargs):
    try:
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_recently_added_details(count)
    except IOError as e:
        return serve_template(templatename="recently_added.html", data=None)
def get_live_session(self):
    pms_connect = pmsconnect.PmsConnect()
    session_list = pms_connect.get_current_activity()

    for session in session_list['sessions']:
        if int(session['session_key']) == self.get_session_key():
            return session

    return None
def get_servers(self, **kwargs):
    pms_connect = pmsconnect.PmsConnect()
    result = pms_connect.get_server_list(output_format='json')

    if result:
        cherrypy.response.headers['Content-type'] = 'application/json'
        return result
    else:
        logger.warn('Unable to retrieve data.')

def get_servers_info(self, **kwargs):
    pms_connect = pmsconnect.PmsConnect()
    result = pms_connect.get_servers_info()

    if result:
        cherrypy.response.headers['Content-type'] = 'application/json'
        return json.dumps(result)
    else:
        logger.warn('Unable to retrieve data.')

def get_metadata_xml(self, rating_key='', **kwargs):
    pms_connect = pmsconnect.PmsConnect()
    result = pms_connect.get_metadata(rating_key)

    if result:
        cherrypy.response.headers['Content-type'] = 'application/xml'
        return result
    else:
        logger.warn('Unable to retrieve data.')

def get_recently_added_json(self, count='0', **kwargs):
    pms_connect = pmsconnect.PmsConnect()
    result = pms_connect.get_recently_added(count, 'json')

    if result:
        cherrypy.response.headers['Content-type'] = 'application/json'
        return result
    else:
        logger.warn('Unable to retrieve data.')

def get_episode_list_json(self, rating_key='', **kwargs):
    pms_connect = pmsconnect.PmsConnect()
    result = pms_connect.get_episode_list(rating_key, 'json')

    if result:
        cherrypy.response.headers['Content-type'] = 'application/json'
        return result
    else:
        logger.warn('Unable to retrieve data.')

def get_pms_sessions_json(self, **kwargs):
    pms_connect = pmsconnect.PmsConnect()
    result = pms_connect.get_sessions('json')

    if result:
        cherrypy.response.headers['Content-type'] = 'application/json'
        return result
    else:
        logger.warn('Unable to retrieve data.')
        return False
def _getMetadata(self, rating_key='', **kwargs):
    pms_connect = pmsconnect.PmsConnect()
    result = pms_connect.get_metadata(rating_key, 'dict')

    if result:
        self.data = result
        return result
    else:
        self.msg = 'Unable to retrieve metadata %s' % rating_key
        logger.warn('Unable to retrieve data.')
def get_children(self, rating_key='', **kwargs):
    pms_connect = pmsconnect.PmsConnect()
    result = pms_connect.get_season_children(rating_key)

    if result:
        return serve_template(templatename="info_episode_list.html", data=result, title="Episode List")
    else:
        logger.warn('Unable to retrieve data.')
        return serve_template(templatename="info_episode_list.html", data=None, title="Episode List")

def info(self, rating_key='', **kwargs):
    pms_connect = pmsconnect.PmsConnect()
    result = pms_connect.get_metadata_details(rating_key)

    if result:
        return serve_template(templatename="info.html", data=result['metadata'], title="Info")
    else:
        logger.warn('Unable to retrieve data.')
        return serve_template(templatename="info.html", data=None, title="Info")
def get_server_resources(self, pms_identifier='', pms_ip='', pms_port=32400, include_https=True, **kwargs):
    logger.info(u"Tautulli PlexTV :: Requesting resources for server...")

    server = {'pms_ip': pms_ip,
              'pms_port': pms_port,
              }

    result = self.get_server_connections(pms_identifier=pms_identifier,
                                         pms_ip=pms_ip,
                                         pms_port=pms_port,
                                         include_https=include_https)

    if result:
        connections = result.pop('connections', [])
        presence = result.pop('pms_presence', 0)
        server.update(result)
    else:
        connections = []
        presence = 0

    if connections:
        # Get the connection with a matching address, otherwise use the first connection
        conn = next((c for c in connections if c['address'] == pms_ip and c['port'] == str(pms_port)),
                    connections[0])

        server['pms_is_remote'] = int(not int(conn['local']))
        server['pms_ssl'] = (1 if conn['protocol'] == 'https' else 0)

        pms_connect = pmsconnect.PmsConnect(url=conn['uri'])
        server['pms_ssl_pref'] = int(pms_connect.get_server_pref('secureConnections'))

    scheme = ('https' if server['pms_ssl'] else 'http')
    pms_url = '{scheme}://{hostname}:{port}'.format(scheme=scheme, hostname=pms_ip, port=pms_port)

    server['pms_url'] = pms_url
    server['pms_is_cloud'] = int(server['pms_is_cloud'])

    return server
def get_current_activity(self, **kwargs):
    try:
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_current_activity()
    except:
        return serve_template(templatename="current_activity.html", data=None)

    if result:
        return serve_template(templatename="current_activity.html", data=result)
    else:
        logger.warn('Unable to retrieve data.')
        return serve_template(templatename="current_activity.html", data=None)
def check_server_response():
    with monitor_lock:
        pms_connect = pmsconnect.PmsConnect()
        server_response = pms_connect.get_server_response()

        global int_ping_count
        global ext_ping_count

        # Check for internal server response
        if not server_response:
            int_ping_count += 1
            logger.warn(u"PlexPy Monitor :: Unable to get an internal response from the server, ping attempt %s." \
                        % str(int_ping_count))
        # Reset internal ping counter
        else:
            int_ping_count = 0

        # Check for remote access
        if server_response and plexpy.CONFIG.MONITOR_REMOTE_ACCESS:
            mapping_state = server_response['mapping_state']
            mapping_error = server_response['mapping_error']

            # Check if the port is mapped
            if not mapping_state == 'mapped':
                ext_ping_count += 1
                logger.warn(u"PlexPy Monitor :: Plex remote access port not mapped, ping attempt %s." \
                            % str(ext_ping_count))
            # Check if the port is open
            elif mapping_error == 'unreachable':
                ext_ping_count += 1
                logger.warn(u"PlexPy Monitor :: Plex remote access port mapped, but mapping failed, ping attempt %s." \
                            % str(ext_ping_count))
            # Reset external ping counter
            else:
                ext_ping_count = 0

        if int_ping_count == 3:
            # Fire off notifications
            threading.Thread(target=notification_handler.notify_timeline,
                             kwargs=dict(notify_action='intdown')).start()

        if ext_ping_count == 3:
            # Fire off notifications
            threading.Thread(target=notification_handler.notify_timeline,
                             kwargs=dict(notify_action='extdown')).start()
def _getSync(self, machine_id=None, user_id=None, **kwargs):
    pms_connect = pmsconnect.PmsConnect()
    server_id = pms_connect.get_server_identity()

    plex_tv = plextv.PlexTV()
    if not machine_id:
        result = plex_tv.get_synced_items(machine_id=server_id['machine_identifier'], user_id=user_id)
    else:
        result = plex_tv.get_synced_items(machine_id=machine_id, user_id=user_id)

    if result:
        self.data = result
        return result
    else:
        self.msg = 'Unable to retrieve sync data for user'
        logger.warn('Unable to retrieve sync data for user.')
def check_server_response():
    with monitor_lock:
        pms_connect = pmsconnect.PmsConnect()
        server_response = pms_connect.get_server_response()

        global ext_ping_count

        # Check for remote access
        if server_response:
            mapping_state = server_response['mapping_state']
            mapping_error = server_response['mapping_error']

            # Check if the port is mapped
            if not mapping_state == 'mapped':
                ext_ping_count += 1
                logger.warn(u"PlexPy Monitor :: Plex remote access port not mapped, ping attempt %s." \
                            % str(ext_ping_count))
            # Check if the port is open
            elif mapping_error == 'unreachable':
                ext_ping_count += 1
                logger.warn(u"PlexPy Monitor :: Plex remote access port mapped, but mapping failed, ping attempt %s." \
                            % str(ext_ping_count))
            # Reset external ping counter
            else:
                if ext_ping_count >= 3:
                    logger.info(u"PlexPy Monitor :: Plex remote access is back up.")

                    # Check if any notification agents have notifications enabled
                    if any(d['on_extup'] for d in notifiers.available_notification_agents()):
                        # Fire off notifications
                        threading.Thread(target=notification_handler.notify_timeline,
                                         kwargs=dict(notify_action='extup')).start()

                ext_ping_count = 0

        if ext_ping_count == 3:
            # Check if any notification agents have notifications enabled
            if any(d['on_extdown'] for d in notifiers.available_notification_agents()):
                # Fire off notifications
                threading.Thread(target=notification_handler.notify_timeline,
                                 kwargs=dict(notify_action='extdown')).start()
def get_sync(self, machine_id=None, user_id=None, **kwargs):
    pms_connect = pmsconnect.PmsConnect()
    server_id = pms_connect.get_server_identity()

    plex_tv = plextv.PlexTV()
    if not machine_id:
        result = plex_tv.get_synced_items(machine_id=server_id['machine_identifier'], user_id=user_id)
    else:
        result = plex_tv.get_synced_items(machine_id=machine_id, user_id=user_id)

    if result:
        output = {"data": result}
    else:
        logger.warn('Unable to retrieve sync data for user.')
        output = {"data": []}

    cherrypy.response.headers['Content-type'] = 'application/json'
    return json.dumps(output)
def write_session_history(self, session=None, import_metadata=None, is_import=False, import_ignore_interval=0): from plexpy import users user_data = users.Users() user_details = user_data.get_user_friendly_name(user=session['user']) if session: logging_enabled = False if is_import: if str(session['stopped']).isdigit(): stopped = int(session['stopped']) else: stopped = int(time.time()) else: stopped = int(time.time()) if plexpy.CONFIG.MOVIE_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \ session['media_type'] == 'movie': logging_enabled = True elif plexpy.CONFIG.TV_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \ session['media_type'] == 'episode': logging_enabled = True elif plexpy.CONFIG.MUSIC_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \ session['media_type'] == 'track': logging_enabled = True else: logger.debug( u"PlexPy ActivityProcessor :: ratingKey %s not logged. Does not meet logging criteria. " u"Media type is '%s'" % (session['rating_key'], session['media_type'])) if str(session['paused_counter']).isdigit(): real_play_time = stopped - session['started'] - int( session['paused_counter']) else: real_play_time = stopped - session['started'] if plexpy.CONFIG.LOGGING_IGNORE_INTERVAL and not is_import: if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \ (real_play_time < int(plexpy.CONFIG.LOGGING_IGNORE_INTERVAL)): logging_enabled = False logger.debug( u"PlexPy ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s " u"seconds, so we're not logging it." % (session['rating_key'], str(real_play_time), plexpy.CONFIG.LOGGING_IGNORE_INTERVAL)) if session['media_type'] == 'track' and not is_import: if real_play_time < 15 and session['duration'] >= 30: logging_enabled = False logger.debug( u"PlexPy ActivityProcessor :: Play duration for ratingKey %s is %s secs, " u"looks like it was skipped so we're not logging it" % (session['rating_key'], str(real_play_time))) elif is_import and import_ignore_interval: if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \ (real_play_time < int(import_ignore_interval)): logging_enabled = False logger.debug( u"PlexPy ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s " u"seconds, so we're not logging it." % (session['rating_key'], str(real_play_time), import_ignore_interval)) if not user_details['keep_history'] and not is_import: logging_enabled = False logger.debug( u"PlexPy ActivityProcessor :: History logging for user '%s' is disabled." 
% session['user']) if logging_enabled: # logger.debug(u"PlexPy ActivityProcessor :: Attempting to write to session_history table...") query = 'INSERT INTO session_history (started, stopped, rating_key, parent_rating_key, ' \ 'grandparent_rating_key, media_type, user_id, user, ip_address, paused_counter, player, ' \ 'platform, machine_id, view_offset) VALUES ' \ '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)' args = [ session['started'], stopped, session['rating_key'], session['parent_rating_key'], session['grandparent_rating_key'], session['media_type'], session['user_id'], session['user'], session['ip_address'], session['paused_counter'], session['player'], session['platform'], session['machine_id'], session['view_offset'] ] # logger.debug(u"PlexPy ActivityProcessor :: Writing session_history transaction...") self.db.action(query=query, args=args) # Check if we should group the session, select the last two rows from the user query = 'SELECT id, rating_key, user_id, reference_id FROM session_history \ WHERE user_id = ? ORDER BY id DESC LIMIT 2 ' args = [session['user_id']] result = self.db.select(query=query, args=args) new_session = { 'id': result[0][0], 'rating_key': result[0][1], 'user_id': result[0][2], 'reference_id': result[0][3] } if len(result) == 1: prev_session = None else: prev_session = { 'id': result[1][0], 'rating_key': result[1][1], 'user_id': result[1][2], 'reference_id': result[1][3] } query = 'UPDATE session_history SET reference_id = ? WHERE id = ? ' # If rating_key is the same in the previous session, then set the reference_id to the previous row, else set the reference_id to the new id if (prev_session is not None) and (prev_session['rating_key'] == new_session['rating_key']): args = [prev_session['reference_id'], new_session['id']] else: args = [new_session['id'], new_session['id']] self.db.action(query=query, args=args) # logger.debug(u"PlexPy ActivityProcessor :: Successfully written history item, last id for session_history is %s" # % last_id) # Write the session_history_media_info table # logger.debug(u"PlexPy ActivityProcessor :: Attempting to write to session_history_media_info table...") query = 'INSERT INTO session_history_media_info (id, rating_key, video_decision, audio_decision, ' \ 'duration, width, height, container, video_codec, audio_codec, bitrate, video_resolution, ' \ 'video_framerate, aspect_ratio, audio_channels, transcode_protocol, transcode_container, ' \ 'transcode_video_codec, transcode_audio_codec, transcode_audio_channels, transcode_width, ' \ 'transcode_height) VALUES ' \ '(last_insert_rowid(), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)' args = [ session['rating_key'], session['video_decision'], session['audio_decision'], session['duration'], session['width'], session['height'], session['container'], session['video_codec'], session['audio_codec'], session['bitrate'], session['video_resolution'], session['video_framerate'], session['aspect_ratio'], session['audio_channels'], session['transcode_protocol'], session['transcode_container'], session['transcode_video_codec'], session['transcode_audio_codec'], session['transcode_audio_channels'], session['transcode_width'], session['transcode_height'] ] # logger.debug(u"PlexPy ActivityProcessor :: Writing session_history_media_info transaction...") self.db.action(query=query, args=args) if not is_import: logger.debug( u"PlexPy ActivityProcessor :: Fetching metadata for item ratingKey %s" % session['rating_key']) pms_connect = pmsconnect.PmsConnect() result = 
pms_connect.get_metadata_details( rating_key=str(session['rating_key'])) metadata = result['metadata'] else: metadata = import_metadata # Write the session_history_metadata table directors = ";".join(metadata['directors']) writers = ";".join(metadata['writers']) actors = ";".join(metadata['actors']) genres = ";".join(metadata['genres']) # Build media item title if session['media_type'] == 'episode' or session[ 'media_type'] == 'track': full_title = '%s - %s' % (metadata['grandparent_title'], metadata['title']) elif session['media_type'] == 'movie': full_title = metadata['title'] else: full_title = metadata['title'] # logger.debug(u"PlexPy ActivityProcessor :: Attempting to write to session_history_metadata table...") query = 'INSERT INTO session_history_metadata (id, rating_key, parent_rating_key, ' \ 'grandparent_rating_key, title, parent_title, grandparent_title, full_title, media_index, ' \ 'parent_media_index, thumb, parent_thumb, grandparent_thumb, art, media_type, year, ' \ 'originally_available_at, added_at, updated_at, last_viewed_at, content_rating, summary, ' \ 'tagline, rating, duration, guid, directors, writers, actors, genres, studio) VALUES ' \ '(last_insert_rowid(), ' \ '?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)' args = [ session['rating_key'], session['parent_rating_key'], session['grandparent_rating_key'], session['title'], session['parent_title'], session['grandparent_title'], full_title, metadata['index'], metadata['parent_index'], metadata['thumb'], metadata['parent_thumb'], metadata['grandparent_thumb'], metadata['art'], session['media_type'], metadata['year'], metadata['originally_available_at'], metadata['added_at'], metadata['updated_at'], metadata['last_viewed_at'], metadata['content_rating'], metadata['summary'], metadata['tagline'], metadata['rating'], metadata['duration'], metadata['guid'], directors, writers, actors, genres, metadata['studio'] ] # logger.debug(u"PlexPy ActivityProcessor :: Writing session_history_metadata transaction...") self.db.action(query=query, args=args)
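# The reference_id grouping applied above can be hard to follow inside the SQL calls, so here
# is a minimal, self-contained sketch of the same rule (illustration only, not part of PlexPy;
# the function name assign_reference_id is made up for this sketch): a new history row joins
# the previous row's group when it is a repeat play of the same rating_key, otherwise it starts
# a new group by pointing at its own id.
def assign_reference_id(prev_row, new_row):
    """Return the reference_id for new_row given the most recent row for the same user."""
    if prev_row is not None and prev_row['rating_key'] == new_row['rating_key']:
        # Same item as the previous play: join the existing group.
        return prev_row['reference_id']
    # Different item (or first play for this user): start a new group keyed on this row's own id.
    return new_row['id']

# Example: two consecutive plays of rating_key 1234 collapse into one group.
# assign_reference_id({'id': 10, 'rating_key': 1234, 'reference_id': 10},
#                     {'id': 11, 'rating_key': 1234}) == 10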
def get_datatables_media_info(self, section_id=None, section_type=None, rating_key=None, refresh=False, kwargs=None): from plexpy import pmsconnect import json, os default_return = { 'recordsFiltered': 0, 'recordsTotal': 0, 'draw': 0, 'data': None, 'error': 'Unable to execute database query.' } if section_id and not str(section_id).isdigit(): logger.warn( u"PlexPy Libraries :: Datatable media info called by invalid section_id provided." ) return default_return elif rating_key and not str(rating_key).isdigit(): logger.warn( u"PlexPy Libraries :: Datatable media info called by invalid rating_key provided." ) return default_return # Get the library details library_details = self.get_details(section_id=section_id) if library_details['section_id'] == None: logger.debug( u"PlexPy Libraries :: Library section_id %s not found." % section_id) return default_return if not section_type: section_type = library_details['section_type'] # Get play counts from the database monitor_db = database.MonitorDatabase() if plexpy.CONFIG.GROUP_HISTORY_TABLES: count_by = 'reference_id' else: count_by = 'id' if section_type == 'show' or section_type == 'artist': group_by = 'grandparent_rating_key' elif section_type == 'season' or section_type == 'album': group_by = 'parent_rating_key' else: group_by = 'rating_key' try: query = 'SELECT MAX(session_history.started) AS last_played, COUNT(DISTINCT session_history.%s) AS play_count, ' \ 'session_history.rating_key, session_history.parent_rating_key, session_history.grandparent_rating_key ' \ 'FROM session_history ' \ 'JOIN session_history_metadata ON session_history.id = session_history_metadata.id ' \ 'WHERE session_history_metadata.section_id = ? ' \ 'GROUP BY session_history.%s ' % (count_by, group_by) result = monitor_db.select(query, args=[section_id]) except Exception as e: logger.warn( u"PlexPy Libraries :: Unable to execute database query for get_datatables_media_info2: %s." % e) return default_return watched_list = {} for item in result: watched_list[str(item[group_by])] = { 'last_played': item['last_played'], 'play_count': item['play_count'] } rows = [] # Import media info cache from json file if rating_key: try: inFilePath = os.path.join( plexpy.CONFIG.CACHE_DIR, 'media_info_%s-%s.json' % (section_id, rating_key)) with open(inFilePath, 'r') as inFile: rows = json.load(inFile) library_count = len(rows) except IOError as e: #logger.debug(u"PlexPy Libraries :: No JSON file for rating_key %s." % rating_key) #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key) pass elif section_id: try: inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR, 'media_info_%s.json' % section_id) with open(inFilePath, 'r') as inFile: rows = json.load(inFile) library_count = len(rows) except IOError as e: #logger.debug(u"PlexPy Libraries :: No JSON file for library section_id %s." % section_id) #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for section_id %s." 
% section_id) pass # If no cache was imported, get all library children items cached_items = {d['rating_key']: d['file_size'] for d in rows} if refresh or not rows: pms_connect = pmsconnect.PmsConnect() if rating_key: library_children = pms_connect.get_library_children_details( rating_key=rating_key, get_media_info=True) elif section_id: library_children = pms_connect.get_library_children_details( section_id=section_id, section_type=section_type, get_media_info=True) if library_children: library_count = library_children['library_count'] children_list = library_children['childern_list'] else: logger.warn( u"PlexPy Libraries :: Unable to get a list of library items." ) return default_return new_rows = [] for item in children_list: cached_file_size = cached_items.get(item['rating_key'], None) file_size = cached_file_size if cached_file_size else item.get( 'file_size', '') row = { 'section_id': library_details['section_id'], 'section_type': library_details['section_type'], 'added_at': item['added_at'], 'media_type': item['media_type'], 'rating_key': item['rating_key'], 'parent_rating_key': item['parent_rating_key'], 'grandparent_rating_key': item['grandparent_rating_key'], 'title': item['title'], 'year': item['year'], 'media_index': item['media_index'], 'parent_media_index': item['parent_media_index'], 'thumb': item['thumb'], 'container': item.get('container', ''), 'bitrate': item.get('bitrate', ''), 'video_codec': item.get('video_codec', ''), 'video_resolution': item.get('video_resolution', ''), 'video_framerate': item.get('video_framerate', ''), 'audio_codec': item.get('audio_codec', ''), 'audio_channels': item.get('audio_channels', ''), 'file_size': file_size } new_rows.append(row) rows = new_rows if not rows: return default_return # Cache the media info to a json file if rating_key: try: outFilePath = os.path.join( plexpy.CONFIG.CACHE_DIR, 'media_info_%s-%s.json' % (section_id, rating_key)) with open(outFilePath, 'w') as outFile: json.dump(rows, outFile) except IOError as e: logger.debug( u"PlexPy Libraries :: Unable to create cache file for rating_key %s." % rating_key) elif section_id: try: outFilePath = os.path.join( plexpy.CONFIG.CACHE_DIR, 'media_info_%s.json' % section_id) with open(outFilePath, 'w') as outFile: json.dump(rows, outFile) except IOError as e: logger.debug( u"PlexPy Libraries :: Unable to create cache file for section_id %s." 
% section_id) # Update the last_played and play_count for item in rows: watched_item = watched_list.get(item['rating_key'], None) if watched_item: item['last_played'] = watched_item['last_played'] item['play_count'] = watched_item['play_count'] else: item['last_played'] = None item['play_count'] = None results = [] # Get datatables JSON data if kwargs.get('json_data'): json_data = helpers.process_json_kwargs( json_kwargs=kwargs.get('json_data')) #print json_data # Search results search_value = json_data['search']['value'].lower() if search_value: searchable_columns = [ d['data'] for d in json_data['columns'] if d['searchable'] ] for row in rows: for k, v in row.iteritems(): if k in searchable_columns and search_value in v.lower(): results.append(row) break else: results = rows filtered_count = len(results) # Sort results results = sorted(results, key=lambda k: k['title']) sort_order = json_data['order'] for order in reversed(sort_order): sort_key = json_data['columns'][int(order['column'])]['data'] reverse = True if order['dir'] == 'desc' else False if rating_key and sort_key == 'title': results = sorted( results, key=lambda k: helpers.cast_to_int(k['media_index']), reverse=reverse) elif sort_key == 'file_size' or sort_key == 'bitrate': results = sorted( results, key=lambda k: helpers.cast_to_int(k[sort_key]), reverse=reverse) else: results = sorted(results, key=lambda k: k[sort_key], reverse=reverse) total_file_size = sum( [helpers.cast_to_int(d['file_size']) for d in results]) # Paginate results results = results[json_data['start']:(json_data['start'] + json_data['length'])] filtered_file_size = sum( [helpers.cast_to_int(d['file_size']) for d in results]) dict = { 'recordsFiltered': filtered_count, 'recordsTotal': library_count, 'data': results, 'draw': int(json_data['draw']), 'filtered_file_size': filtered_file_size, 'total_file_size': total_file_size } return dict
def check_active_sessions(ws_request=False): with monitor_lock: pms_connect = pmsconnect.PmsConnect() session_list = pms_connect.get_current_activity() monitor_db = database.MonitorDatabase() monitor_process = activity_processor.ActivityProcessor() # logger.debug(u"PlexPy Monitor :: Checking for active streams.") if session_list: media_container = session_list['sessions'] # Check our temp table for what we must do with the new streams db_streams = monitor_db.select( 'SELECT started, session_key, rating_key, media_type, title, parent_title, ' 'grandparent_title, user_id, user, friendly_name, ip_address, player, ' 'platform, machine_id, parent_rating_key, grandparent_rating_key, state, ' 'view_offset, duration, video_decision, audio_decision, width, height, ' 'container, video_codec, audio_codec, bitrate, video_resolution, ' 'video_framerate, aspect_ratio, audio_channels, transcode_protocol, ' 'transcode_container, transcode_video_codec, transcode_audio_codec, ' 'transcode_audio_channels, transcode_width, transcode_height, ' 'paused_counter, last_paused ' 'FROM sessions') for stream in db_streams: if any(d['session_key'] == str(stream['session_key']) and d['rating_key'] == str(stream['rating_key']) for d in media_container): # The user's session is still active for session in media_container: if session['session_key'] == str(stream['session_key']) and \ session['rating_key'] == str(stream['rating_key']): # The user is still playing the same media item # Here we can check the play states if session['state'] != stream['state']: if session['state'] == 'paused': # Push any notifications - # Push it on it's own thread so we don't hold up our db actions threading.Thread( target=notification_handler.notify, kwargs=dict( stream_data=stream, notify_action='pause')).start() if session['state'] == 'playing' and stream[ 'state'] == 'paused': # Push any notifications - # Push it on it's own thread so we don't hold up our db actions threading.Thread( target=notification_handler.notify, kwargs=dict( stream_data=stream, notify_action='resume')).start() if stream['state'] == 'paused' and not ws_request: # The stream is still paused so we need to increment the paused_counter # Using the set config parameter as the interval, probably not the most accurate but # it will have to do for now. If it's a websocket request don't use this method. paused_counter = int( stream['paused_counter'] ) + plexpy.CONFIG.MONITORING_INTERVAL monitor_db.action( 'UPDATE sessions SET paused_counter = ? ' 'WHERE session_key = ? AND rating_key = ?', [ paused_counter, stream['session_key'], stream['rating_key'] ]) if session[ 'state'] == 'buffering' and plexpy.CONFIG.BUFFER_THRESHOLD > 0: # The stream is buffering so we need to increment the buffer_count # We're going just increment on every monitor ping, # would be difficult to keep track otherwise monitor_db.action( 'UPDATE sessions SET buffer_count = buffer_count + 1 ' 'WHERE session_key = ? AND rating_key = ?', [ stream['session_key'], stream['rating_key'] ]) # Check the current buffer count and last buffer to determine if we should notify buffer_values = monitor_db.select( 'SELECT buffer_count, buffer_last_triggered ' 'FROM sessions ' 'WHERE session_key = ? 
AND rating_key = ?', [ stream['session_key'], stream['rating_key'] ]) if buffer_values[0][ 'buffer_count'] >= plexpy.CONFIG.BUFFER_THRESHOLD: # Push any notifications - # Push it on it's own thread so we don't hold up our db actions # Our first buffer notification if buffer_values[0][ 'buffer_count'] == plexpy.CONFIG.BUFFER_THRESHOLD: logger.info( u"PlexPy Monitor :: User '%s' has triggered a buffer warning." % stream['user']) # Set the buffer trigger time monitor_db.action( 'UPDATE sessions ' 'SET buffer_last_triggered = strftime("%s","now") ' 'WHERE session_key = ? AND rating_key = ?', [ stream['session_key'], stream['rating_key'] ]) threading.Thread( target=notification_handler.notify, kwargs=dict(stream_data=stream, notify_action='buffer' )).start() else: # Subsequent buffer notifications after wait time if int(time.time()) > buffer_values[0]['buffer_last_triggered'] + \ plexpy.CONFIG.BUFFER_WAIT: logger.info( u"PlexPy Monitor :: User '%s' has triggered multiple buffer warnings." % stream['user']) # Set the buffer trigger time monitor_db.action( 'UPDATE sessions ' 'SET buffer_last_triggered = strftime("%s","now") ' 'WHERE session_key = ? AND rating_key = ?', [ stream['session_key'], stream['rating_key'] ]) threading.Thread( target=notification_handler. notify, kwargs=dict( stream_data=stream, notify_action='buffer' )).start() logger.debug( u"PlexPy Monitor :: Stream buffering. Count is now %s. Last triggered %s." % (buffer_values[0][0], buffer_values[0][1])) # Check if the user has reached the offset in the media we defined as the "watched" percent # Don't trigger if state is buffer as some clients push the progress to the end when # buffering on start. if session['view_offset'] and session[ 'duration'] and session[ 'state'] != 'buffering': if helpers.get_percent( session['view_offset'], session['duration'] ) > plexpy.CONFIG.NOTIFY_WATCHED_PERCENT: # Push any notifications - # Push it on it's own thread so we don't hold up our db actions threading.Thread( target=notification_handler.notify, kwargs=dict( stream_data=stream, notify_action='watched')).start() else: # The user has stopped playing a stream logger.debug( u"PlexPy Monitor :: Removing sessionKey %s ratingKey %s from session queue" % (stream['session_key'], stream['rating_key'])) monitor_db.action( 'DELETE FROM sessions WHERE session_key = ? AND rating_key = ?', [stream['session_key'], stream['rating_key']]) # Check if the user has reached the offset in the media we defined as the "watched" percent if stream['view_offset'] and stream['duration']: if helpers.get_percent( stream['view_offset'], stream['duration'] ) > plexpy.CONFIG.NOTIFY_WATCHED_PERCENT: # Push any notifications - # Push it on it's own thread so we don't hold up our db actions threading.Thread( target=notification_handler.notify, kwargs=dict(stream_data=stream, notify_action='watched')).start() # Push any notifications - Push it on it's own thread so we don't hold up our db actions threading.Thread(target=notification_handler.notify, kwargs=dict( stream_data=stream, notify_action='stop')).start() # Write the item history on playback stop monitor_process.write_session_history(session=stream) # Process the newly received session data for session in media_container: monitor_process.write_session(session) else: logger.debug(u"PlexPy Monitor :: Unable to read session list.")
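# A minimal sketch of the "watched" threshold check used in the monitor above (illustration
# only; the real code goes through helpers.get_percent and the configured
# plexpy.CONFIG.NOTIFY_WATCHED_PERCENT value, and the 85 default here is just for the example).
def is_watched(view_offset, duration, watched_percent=85):
    """Return True when playback progress has passed the watched percentage."""
    try:
        view_offset = float(view_offset)
        duration = float(duration)
    except (TypeError, ValueError):
        return False
    if duration <= 0:
        return False
    return (view_offset / duration) * 100 > watched_percent

# Example: 50 minutes into a 55 minute episode (both in milliseconds) counts as watched.
# is_watched(50 * 60 * 1000, 55 * 60 * 1000) == True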
def get_media_info_file_sizes(self, section_id=None, rating_key=None): from plexpy import pmsconnect import json, os if section_id and not str(section_id).isdigit(): logger.warn( u"PlexPy Libraries :: Datatable media info file size called by invalid section_id provided." ) return False elif rating_key and not str(rating_key).isdigit(): logger.warn( u"PlexPy Libraries :: Datatable media info file size called by invalid rating_key provided." ) return False # Get the library details library_details = self.get_details(section_id=section_id) if library_details['section_id'] == None: logger.debug( u"PlexPy Libraries :: Library section_id %s not found." % section_id) return False if library_details['section_type'] == 'photo': return False rows = [] # Import media info cache from json file if rating_key: #logger.debug(u"PlexPy Libraries :: Getting file sizes for rating_key %s." % rating_key) try: inFilePath = os.path.join( plexpy.CONFIG.CACHE_DIR, 'media_info_%s-%s.json' % (section_id, rating_key)) with open(inFilePath, 'r') as inFile: rows = json.load(inFile) except IOError as e: #logger.debug(u"PlexPy Libraries :: No JSON file for rating_key %s." % rating_key) #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key) pass elif section_id: logger.debug( u"PlexPy Libraries :: Getting file sizes for section_id %s." % section_id) try: inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR, 'media_info_%s.json' % section_id) with open(inFilePath, 'r') as inFile: rows = json.load(inFile) except IOError as e: #logger.debug(u"PlexPy Libraries :: No JSON file for library section_id %s." % section_id) #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id) pass # Get the total file size for each item pms_connect = pmsconnect.PmsConnect() for item in rows: if item['rating_key'] and not item['file_size']: file_size = 0 child_metadata = pms_connect.get_metadata_children_details( rating_key=item['rating_key'], get_children=True, get_media_info=True) metadata_list = child_metadata['metadata'] for child_metadata in metadata_list: file_size += helpers.cast_to_int( child_metadata.get('file_size', 0)) item['file_size'] = file_size # Cache the media info to a json file if rating_key: try: outFilePath = os.path.join( plexpy.CONFIG.CACHE_DIR, 'media_info_%s-%s.json' % (section_id, rating_key)) with open(outFilePath, 'w') as outFile: json.dump(rows, outFile) except IOError as e: logger.debug( u"PlexPy Libraries :: Unable to create cache file with file sizes for rating_key %s." % rating_key) elif section_id: try: outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR, 'media_info_%s.json' % section_id) with open(outFilePath, 'w') as outFile: json.dump(rows, outFile) except IOError as e: logger.debug( u"PlexPy Libraries :: Unable to create cache file with file sizes for section_id %s." % section_id) if rating_key: #logger.debug(u"PlexPy Libraries :: File sizes updated for rating_key %s." % rating_key) pass elif section_id: logger.debug( u"PlexPy Libraries :: File sizes updated for section_id %s." % section_id) return True
def check_active_sessions(ws_request=False): with monitor_lock: pms_connect = pmsconnect.PmsConnect() session_list = pms_connect.get_current_activity() monitor_db = database.MonitorDatabase() monitor_process = activity_processor.ActivityProcessor() # logger.debug(u"PlexPy Monitor :: Checking for active streams.") global int_ping_count if session_list: if int_ping_count >= 3: logger.info( u"PlexPy Monitor :: The Plex Media Server is back up.") # Check if any notification agents have notifications enabled if any(d['on_intup'] for d in notifiers.available_notification_agents()): # Fire off notifications threading.Thread( target=notification_handler.notify_timeline, kwargs=dict(notify_action='intup')).start() int_ping_count = 0 media_container = session_list['sessions'] # Check our temp table for what we must do with the new streams db_streams = monitor_db.select('SELECT * FROM sessions') for stream in db_streams: if any(d['session_key'] == str(stream['session_key']) and d['rating_key'] == str(stream['rating_key']) for d in media_container): # The user's session is still active for session in media_container: if session['session_key'] == str(stream['session_key']) and \ session['rating_key'] == str(stream['rating_key']): # The user is still playing the same media item # Here we can check the play states if session['state'] != stream['state']: if session['state'] == 'paused': logger.debug( u"PlexPy Monitor :: Session %s has been paused." % stream['session_key']) # Check if any notification agents have notifications enabled if any(d['on_pause'] for d in notifiers. available_notification_agents()): # Push any notifications - # Push it on it's own thread so we don't hold up our db actions threading.Thread( target=notification_handler.notify, kwargs=dict(stream_data=stream, notify_action='pause' )).start() if session['state'] == 'playing' and stream[ 'state'] == 'paused': logger.debug( u"PlexPy Monitor :: Session %s has been resumed." % stream['session_key']) # Check if any notification agents have notifications enabled if any(d['on_resume'] for d in notifiers. available_notification_agents()): # Push any notifications - # Push it on it's own thread so we don't hold up our db actions threading.Thread( target=notification_handler.notify, kwargs=dict(stream_data=stream, notify_action='resume' )).start() if stream['state'] == 'paused' and not ws_request: # The stream is still paused so we need to increment the paused_counter # Using the set config parameter as the interval, probably not the most accurate but # it will have to do for now. If it's a websocket request don't use this method. paused_counter = int( stream['paused_counter'] ) + plexpy.CONFIG.MONITORING_INTERVAL monitor_db.action( 'UPDATE sessions SET paused_counter = ? ' 'WHERE session_key = ? AND rating_key = ?', [ paused_counter, stream['session_key'], stream['rating_key'] ]) if session[ 'state'] == 'buffering' and plexpy.CONFIG.BUFFER_THRESHOLD > 0: # The stream is buffering so we need to increment the buffer_count # We're going just increment on every monitor ping, # would be difficult to keep track otherwise monitor_db.action( 'UPDATE sessions SET buffer_count = buffer_count + 1 ' 'WHERE session_key = ? AND rating_key = ?', [ stream['session_key'], stream['rating_key'] ]) # Check the current buffer count and last buffer to determine if we should notify buffer_values = monitor_db.select( 'SELECT buffer_count, buffer_last_triggered ' 'FROM sessions ' 'WHERE session_key = ? 
AND rating_key = ?', [ stream['session_key'], stream['rating_key'] ]) if buffer_values[0][ 'buffer_count'] >= plexpy.CONFIG.BUFFER_THRESHOLD: # Push any notifications - # Push it on it's own thread so we don't hold up our db actions # Our first buffer notification if buffer_values[0][ 'buffer_count'] == plexpy.CONFIG.BUFFER_THRESHOLD: logger.info( u"PlexPy Monitor :: User '%s' has triggered a buffer warning." % stream['user']) # Set the buffer trigger time monitor_db.action( 'UPDATE sessions ' 'SET buffer_last_triggered = strftime("%s","now") ' 'WHERE session_key = ? AND rating_key = ?', [ stream['session_key'], stream['rating_key'] ]) # Check if any notification agents have notifications enabled if any(d['on_buffer'] for d in notifiers. available_notification_agents( )): # Push any notifications - # Push it on it's own thread so we don't hold up our db actions threading.Thread( target=notification_handler. notify, kwargs=dict( stream_data=stream, notify_action='buffer' )).start() else: # Subsequent buffer notifications after wait time if int(time.time()) > buffer_values[0]['buffer_last_triggered'] + \ plexpy.CONFIG.BUFFER_WAIT: logger.info( u"PlexPy Monitor :: User '%s' has triggered multiple buffer warnings." % stream['user']) # Set the buffer trigger time monitor_db.action( 'UPDATE sessions ' 'SET buffer_last_triggered = strftime("%s","now") ' 'WHERE session_key = ? AND rating_key = ?', [ stream['session_key'], stream['rating_key'] ]) # Check if any notification agents have notifications enabled if any(d['on_buffer'] for d in notifiers. available_notification_agents( )): # Push any notifications - # Push it on it's own thread so we don't hold up our db actions threading.Thread( target=notification_handler .notify, kwargs=dict( stream_data=stream, notify_action='buffer' )).start() logger.debug( u"PlexPy Monitor :: Session %s is buffering. Count is now %s. Last triggered %s." % (stream['session_key'], buffer_values[0]['buffer_count'], buffer_values[0]['buffer_last_triggered']) ) # Check if the user has reached the offset in the media we defined as the "watched" percent # Don't trigger if state is buffer as some clients push the progress to the end when # buffering on start. if session['view_offset'] and session[ 'duration'] and session[ 'state'] != 'buffering': if helpers.get_percent( session['view_offset'], session['duration'] ) > plexpy.CONFIG.NOTIFY_WATCHED_PERCENT: # Check if any notification agents have notifications enabled if any(d['on_watched'] for d in notifiers. available_notification_agents()): # Push any notifications - # Push it on it's own thread so we don't hold up our db actions threading.Thread( target=notification_handler.notify, kwargs=dict(stream_data=stream, notify_action='watched' )).start() else: # The user has stopped playing a stream if stream['state'] != 'stopped': logger.debug( u"PlexPy Monitor :: Session %s has stopped." % stream['session_key']) # Set the stream stop time stream['stopped'] = int(time.time()) monitor_db.action( 'UPDATE sessions SET stopped = ?, state = ? ' 'WHERE session_key = ? AND rating_key = ?', [ stream['stopped'], 'stopped', stream['session_key'], stream['rating_key'] ]) # Check if the user has reached the offset in the media we defined as the "watched" percent if stream['view_offset'] and stream['duration']: if helpers.get_percent( stream['view_offset'], stream['duration'] ) > plexpy.CONFIG.NOTIFY_WATCHED_PERCENT: # Check if any notification agents have notifications enabled if any(d['on_watched'] for d in notifiers. 
available_notification_agents()): # Push any notifications - # Push it on it's own thread so we don't hold up our db actions threading.Thread( target=notification_handler.notify, kwargs=dict( stream_data=stream, notify_action='watched')).start() # Check if any notification agents have notifications enabled if any(d['on_stop'] for d in notifiers.available_notification_agents()): # Push any notifications - Push it on it's own thread so we don't hold up our db actions threading.Thread( target=notification_handler.notify, kwargs=dict(stream_data=stream, notify_action='stop')).start() # Write the item history on playback stop success = monitor_process.write_session_history( session=stream) if success: # If session is written to the databaase successfully, remove the session from the session table logger.debug( u"PlexPy Monitor :: Removing sessionKey %s ratingKey %s from session queue" % (stream['session_key'], stream['rating_key'])) monitor_db.action( 'DELETE FROM sessions WHERE session_key = ? AND rating_key = ?', [stream['session_key'], stream['rating_key']]) else: logger.warn(u"PlexPy Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \ "Will try again on the next pass." % (stream['session_key'], stream['rating_key'])) # Process the newly received session data for session in media_container: new_session = monitor_process.write_session(session) if new_session: logger.debug(u"PlexPy Monitor :: Session %s has started." % session['session_key']) else: logger.debug(u"PlexPy Monitor :: Unable to read session list.") int_ping_count += 1 logger.warn(u"PlexPy Monitor :: Unable to get an internal response from the server, ping attempt %s." \ % str(int_ping_count)) if int_ping_count == 3: # Check if any notification agents have notifications enabled if any(d['on_intdown'] for d in notifiers.available_notification_agents()): # Fire off notifications threading.Thread(target=notification_handler.notify_timeline, kwargs=dict(notify_action='intdown')).start()
def check_active_sessions():
    with monitor_lock:
        pms_connect = pmsconnect.PmsConnect()
        session_list = pms_connect.get_current_activity()
        monitor_db = MonitorDatabase()

        if session_list['stream_count'] != '0':
            media_container = session_list['sessions']
            active_streams = []

            for session in media_container:
                session_key = session['session_key']
                rating_key = session['rating_key']
                media_type = session['type']
                friendly_name = session['friendly_name']
                platform = session['player']
                title = session['title']
                parent_title = session['parent_title']
                grandparent_title = session['grandparent_title']

                write_session = monitor_db.write_session_key(session_key, rating_key, media_type)

                if write_session == 'insert':
                    # User started playing a stream :: We notify here
                    if media_type == 'track' or media_type == 'episode':
                        item_title = grandparent_title + ' - ' + title
                    elif media_type == 'movie':
                        item_title = title
                    else:
                        item_title = title

                    logger.info('%s (%s) started playing %s' % (friendly_name, platform, item_title))
                    pushmessage = '%s (%s) started playing %s' % (friendly_name, platform, item_title)
                    notification_handler.push_nofitications(pushmessage, 'PlexPy Playback started', 'Playback Started')

                keys = {'session_key': session_key, 'rating_key': rating_key}
                active_streams.append(keys)

            # Check our temp table for what we must do with the new stream
            db_streams = monitor_db.select('SELECT session_key, rating_key, media_type FROM sessions')
            for result in db_streams:
                if any(d['session_key'] == str(result[0]) for d in active_streams):
                    # The user's session is still active
                    if any(d['rating_key'] == str(result[1]) for d in active_streams):
                        # The user is still playing the same media item
                        # Here we can check the play states
                        pass
                    else:
                        # The user has stopped playing a stream
                        monitor_db.action('DELETE FROM sessions WHERE session_key = ? AND rating_key = ?',
                                          [result[0], result[1]])
                else:
                    # The user's session is no longer active
                    monitor_db.action('DELETE FROM sessions WHERE session_key = ?', [result[0]])
        else:
            # No sessions exist
            # monitor_db.action('DELETE FROM sessions')
            pass
def build_notify_text(session=None, timeline=None, state=None): import re # Get the server name server_name = plexpy.CONFIG.PMS_NAME # Get the server uptime plex_tv = plextv.PlexTV() server_times = plex_tv.get_server_times() if server_times: updated_at = server_times[0]['updated_at'] server_uptime = helpers.human_duration( int(time.time() - helpers.cast_to_float(updated_at))) else: logger.error(u"PlexPy Notifier :: Unable to retrieve server uptime.") server_uptime = 'N/A' # Get metadata feed for item if session: rating_key = session['rating_key'] elif timeline: rating_key = timeline['rating_key'] pms_connect = pmsconnect.PmsConnect() metadata_list = pms_connect.get_metadata_details(rating_key=rating_key) if metadata_list: metadata = metadata_list['metadata'] else: logger.error( u"PlexPy Notifier :: Unable to retrieve metadata for rating_key %s" % str(rating_key)) return [] # Check for exclusion tags if metadata['media_type'] == 'movie': # Regex pattern to remove the text in the tags we don't want pattern = re.compile('<tv>[^>]+.</tv>|<music>[^>]+.</music>', re.IGNORECASE) elif metadata['media_type'] == 'show' or metadata[ 'media_type'] == 'episode': # Regex pattern to remove the text in the tags we don't want pattern = re.compile('<movie>[^>]+.</movie>|<music>[^>]+.</music>', re.IGNORECASE) elif metadata['media_type'] == 'artist' or metadata[ 'media_type'] == 'track': # Regex pattern to remove the text in the tags we don't want pattern = re.compile('<tv>[^>]+.</tv>|<movie>[^>]+.</movie>', re.IGNORECASE) else: pattern = None if metadata['media_type'] == 'movie' \ or metadata['media_type'] == 'show' or metadata['media_type'] == 'episode' \ or metadata['media_type'] == 'artist' or metadata['media_type'] == 'track' \ and pattern: # Remove the unwanted tags and strip any unmatch tags too. 
on_start_subject = strip_tag( re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_START_SUBJECT_TEXT)) on_start_body = strip_tag( re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_START_BODY_TEXT)) on_stop_subject = strip_tag( re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_STOP_SUBJECT_TEXT)) on_stop_body = strip_tag( re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_STOP_BODY_TEXT)) on_pause_subject = strip_tag( re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_PAUSE_SUBJECT_TEXT)) on_pause_body = strip_tag( re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_PAUSE_BODY_TEXT)) on_resume_subject = strip_tag( re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_RESUME_SUBJECT_TEXT)) on_resume_body = strip_tag( re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_RESUME_BODY_TEXT)) on_buffer_subject = strip_tag( re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_BUFFER_SUBJECT_TEXT)) on_buffer_body = strip_tag( re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_BUFFER_BODY_TEXT)) on_watched_subject = strip_tag( re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_WATCHED_SUBJECT_TEXT)) on_watched_body = strip_tag( re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_WATCHED_BODY_TEXT)) on_created_subject = strip_tag( re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_CREATED_SUBJECT_TEXT)) on_created_body = strip_tag( re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_CREATED_BODY_TEXT)) else: on_start_subject = plexpy.CONFIG.NOTIFY_ON_START_SUBJECT_TEXT on_start_body = plexpy.CONFIG.NOTIFY_ON_START_BODY_TEXT on_stop_subject = plexpy.CONFIG.NOTIFY_ON_STOP_SUBJECT_TEXT on_stop_body = plexpy.CONFIG.NOTIFY_ON_STOP_BODY_TEXT on_pause_subject = plexpy.CONFIG.NOTIFY_ON_PAUSE_SUBJECT_TEXT on_pause_body = plexpy.CONFIG.NOTIFY_ON_PAUSE_BODY_TEXT on_resume_subject = plexpy.CONFIG.NOTIFY_ON_RESUME_SUBJECT_TEXT on_resume_body = plexpy.CONFIG.NOTIFY_ON_RESUME_BODY_TEXT on_buffer_subject = plexpy.CONFIG.NOTIFY_ON_BUFFER_SUBJECT_TEXT on_buffer_body = plexpy.CONFIG.NOTIFY_ON_BUFFER_BODY_TEXT on_watched_subject = plexpy.CONFIG.NOTIFY_ON_WATCHED_SUBJECT_TEXT on_watched_body = plexpy.CONFIG.NOTIFY_ON_WATCHED_BODY_TEXT on_created_subject = plexpy.CONFIG.NOTIFY_ON_CREATED_SUBJECT_TEXT on_created_body = plexpy.CONFIG.NOTIFY_ON_CREATED_BODY_TEXT # Create a title if metadata['media_type'] == 'episode' or metadata['media_type'] == 'track': full_title = '%s - %s' % (metadata['grandparent_title'], metadata['title']) else: full_title = metadata['title'] duration = helpers.convert_milliseconds_to_minutes(metadata['duration']) # Default values video_decision = '' audio_decision = '' transcode_decision = '' stream_duration = 0 view_offset = 0 user = '' platform = '' player = '' ip_address = 'N/A' # Session values if session: # Generate a combined transcode decision value video_decision = session['video_decision'].title() audio_decision = session['audio_decision'].title() if session['video_decision'] == 'transcode' or session[ 'audio_decision'] == 'transcode': transcode_decision = 'Transcode' elif session['video_decision'] == 'copy' or session[ 'audio_decision'] == 'copy': transcode_decision = 'Direct Stream' else: transcode_decision = 'Direct Play' if state != 'play': if session['paused_counter']: stream_duration = int( (time.time() - helpers.cast_to_float(session['started']) - helpers.cast_to_float(session['paused_counter'])) / 60) else: stream_duration = int( (time.time() - helpers.cast_to_float(session['started'])) / 60) view_offset = helpers.convert_milliseconds_to_minutes( session['view_offset']) user = session['friendly_name'] platform = session['platform'] player = session['player'] ip_address = 
session['ip_address'] if session['ip_address'] else 'N/A' progress_percent = helpers.get_percent(view_offset, duration) available_params = { 'server_name': server_name, 'server_uptime': server_uptime, 'user': user, 'platform': platform, 'player': player, 'ip_address': ip_address, 'media_type': metadata['media_type'], 'title': full_title, 'show_name': metadata['grandparent_title'], 'episode_name': metadata['title'], 'artist_name': metadata['grandparent_title'], 'album_name': metadata['parent_title'], 'track_name': metadata['title'], 'season_num': metadata['parent_index'].zfill(1), 'season_num00': metadata['parent_index'].zfill(2), 'episode_num': metadata['index'].zfill(1), 'episode_num00': metadata['index'].zfill(2), 'video_decision': video_decision, 'audio_decision': audio_decision, 'transcode_decision': transcode_decision, 'year': metadata['year'], 'studio': metadata['studio'], 'content_rating': metadata['content_rating'], 'directors': ', '.join(metadata['directors']), 'writers': ', '.join(metadata['writers']), 'actors': ', '.join(metadata['actors']), 'genres': ', '.join(metadata['genres']), 'summary': metadata['summary'], 'tagline': metadata['tagline'], 'rating': metadata['rating'], 'duration': duration, 'stream_duration': stream_duration, 'remaining_duration': duration - view_offset, 'progress': view_offset, 'progress_percent': progress_percent } # Default subject text subject_text = 'PlexPy (%s)' % server_name if state == 'play': # Default body text body_text = '%s (%s) is watching %s' % (session['friendly_name'], session['player'], full_title) if on_start_subject and on_start_body: try: subject_text = unicode(on_start_subject).format( **available_params) except LookupError, e: logger.error( u"PlexPy Notifier :: Unable to parse field %s in notification subject. Using fallback." % e) except:
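# The custom notification subject/body above is filled in with str.format(**available_params);
# a minimal sketch of that substitution step (illustration only; render_notify_text is a made-up
# name for this sketch, and the field names are taken from the available_params dict built above).
def render_notify_text(template, params, fallback):
    """Substitute {placeholders} in a user-defined template, falling back on unknown fields."""
    try:
        return unicode(template).format(**params)
    except LookupError:
        # A placeholder that is not in available_params falls back to the default text,
        # mirroring the LookupError handling in build_notify_text above.
        return fallback

# Example:
# render_notify_text('{user} ({player}) is watching {title}',
#                    {'user': 'alice', 'player': 'Roku', 'title': 'Some Movie'},
#                    'PlexPy (server)')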
def write_session_history(self, session=None, import_metadata=None, is_import=False, import_ignore_interval=0): from plexpy import datafactory data_factory = datafactory.DataFactory() user_details = data_factory.get_user_friendly_name( user=session['user']) if session: logging_enabled = False if is_import: if str(session['stopped']).isdigit(): stopped = session['stopped'] else: stopped = int(time.time()) else: stopped = int(time.time()) if plexpy.CONFIG.VIDEO_LOGGING_ENABLE and \ (session['media_type'] == 'movie' or session['media_type'] == 'episode'): logging_enabled = True elif plexpy.CONFIG.MUSIC_LOGGING_ENABLE and \ session['media_type'] == 'track': logging_enabled = True else: logger.debug( u"PlexPy Monitor :: ratingKey %s not logged. Does not meet logging criteria. " u"Media type is '%s'" % (session['rating_key'], session['media_type'])) if plexpy.CONFIG.LOGGING_IGNORE_INTERVAL and not is_import: if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \ (int(stopped) - session['started'] < int(plexpy.CONFIG.LOGGING_IGNORE_INTERVAL)): logging_enabled = False logger.debug( u"PlexPy Monitor :: Play duration for ratingKey %s is %s secs which is less than %s " u"seconds, so we're not logging it." % (session['rating_key'], str(int(stopped) - session['started']), plexpy.CONFIG.LOGGING_IGNORE_INTERVAL)) elif is_import and import_ignore_interval: if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \ (int(stopped) - session['started'] < int(import_ignore_interval)): logging_enabled = False logger.debug( u"PlexPy Monitor :: Play duration for ratingKey %s is %s secs which is less than %s " u"seconds, so we're not logging it." % (session['rating_key'], str(int(stopped) - session['started']), import_ignore_interval)) if not user_details['keep_history'] and not is_import: logging_enabled = False logger.debug( u"PlexPy Monitor :: History logging for user '%s' is disabled." 
% session['user']) if logging_enabled: # logger.debug(u"PlexPy Monitor :: Attempting to write to session_history table...") query = 'INSERT INTO session_history (started, stopped, rating_key, parent_rating_key, ' \ 'grandparent_rating_key, media_type, user_id, user, ip_address, paused_counter, player, ' \ 'platform, machine_id, view_offset) VALUES ' \ '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)' args = [ session['started'], stopped, session['rating_key'], session['parent_rating_key'], session['grandparent_rating_key'], session['media_type'], session['user_id'], session['user'], session['ip_address'], session['paused_counter'], session['player'], session['platform'], session['machine_id'], session['view_offset'] ] # logger.debug(u"PlexPy Monitor :: Writing session_history transaction...") self.db.action(query=query, args=args) # logger.debug(u"PlexPy Monitor :: Successfully written history item, last id for session_history is %s" # % last_id) # Write the session_history_media_info table # logger.debug(u"PlexPy Monitor :: Attempting to write to session_history_media_info table...") query = 'INSERT INTO session_history_media_info (id, rating_key, video_decision, audio_decision, ' \ 'duration, width, height, container, video_codec, audio_codec, bitrate, video_resolution, ' \ 'video_framerate, aspect_ratio, audio_channels, transcode_protocol, transcode_container, ' \ 'transcode_video_codec, transcode_audio_codec, transcode_audio_channels, transcode_width, ' \ 'transcode_height) VALUES ' \ '(last_insert_rowid(), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)' args = [ session['rating_key'], session['video_decision'], session['audio_decision'], session['duration'], session['width'], session['height'], session['container'], session['video_codec'], session['audio_codec'], session['bitrate'], session['video_resolution'], session['video_framerate'], session['aspect_ratio'], session['audio_channels'], session['transcode_protocol'], session['transcode_container'], session['transcode_video_codec'], session['transcode_audio_codec'], session['transcode_audio_channels'], session['transcode_width'], session['transcode_height'] ] # logger.debug(u"PlexPy Monitor :: Writing session_history_media_info transaction...") self.db.action(query=query, args=args) if not is_import: logger.debug( u"PlexPy Monitor :: Fetching metadata for item ratingKey %s" % session['rating_key']) pms_connect = pmsconnect.PmsConnect() result = pms_connect.get_metadata_details( rating_key=str(session['rating_key'])) metadata = result['metadata'] else: metadata = import_metadata # Write the session_history_metadata table directors = ";".join(metadata['directors']) writers = ";".join(metadata['writers']) actors = ";".join(metadata['actors']) genres = ";".join(metadata['genres']) # Build media item title if session['media_type'] == 'episode' or session[ 'media_type'] == 'track': full_title = '%s - %s' % (metadata['grandparent_title'], metadata['title']) elif session['media_type'] == 'movie': full_title = metadata['title'] else: full_title = metadata['title'] # logger.debug(u"PlexPy Monitor :: Attempting to write to session_history_metadata table...") query = 'INSERT INTO session_history_metadata (id, rating_key, parent_rating_key, ' \ 'grandparent_rating_key, title, parent_title, grandparent_title, full_title, media_index, ' \ 'parent_media_index, thumb, parent_thumb, grandparent_thumb, art, media_type, year, ' \ 'originally_available_at, added_at, updated_at, last_viewed_at, content_rating, summary, ' \ 'rating, duration, 
guid, directors, writers, actors, genres, studio) VALUES ' \ '(last_insert_rowid(), ' \ '?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)' args = [ session['rating_key'], session['parent_rating_key'], session['grandparent_rating_key'], session['title'], session['parent_title'], session['grandparent_title'], full_title, metadata['index'], metadata['parent_index'], metadata['thumb'], metadata['parent_thumb'], metadata['grandparent_thumb'], metadata['art'], session['media_type'], metadata['year'], metadata['originally_available_at'], metadata['added_at'], metadata['updated_at'], metadata['last_viewed_at'], metadata['content_rating'], metadata['summary'], metadata['rating'], metadata['duration'], metadata['guid'], directors, writers, actors, genres, metadata['studio'] ] # logger.debug(u"PlexPy Monitor :: Writing session_history_metadata transaction...") self.db.action(query=query, args=args)