def checkGithub():
    """Check GitHub for the latest Plex:CS commit and count commits behind.

    Side effects: updates plexcs.LATEST_VERSION and plexcs.COMMITS_BEHIND.

    Returns:
        The latest commit SHA, or plexcs.CURRENT_VERSION when GitHub could
        not be reached.
    """
    plexcs.COMMITS_BEHIND = 0

    # Get the latest version available from github
    logger.info('Retrieving latest version information from GitHub')
    url = 'https://api.github.com/repos/%s/plex-cs/commits/%s' % (
        plexcs.CONFIG.GIT_USER, plexcs.CONFIG.GIT_BRANCH)
    # Fix: isinstance() instead of `type(x) == dict` (idiomatic, and accepts
    # dict subclasses).
    version = request.request_json(url, timeout=20,
                                   validator=lambda x: isinstance(x, dict))

    if version is None:
        logger.warn(
            'Could not get the latest version from GitHub. Are you running a local development version?'
        )
        return plexcs.CURRENT_VERSION

    plexcs.LATEST_VERSION = version['sha']
    logger.debug("Latest version is %s", plexcs.LATEST_VERSION)

    # See how many commits behind we are
    if not plexcs.CURRENT_VERSION:
        logger.info(
            'You are running an unknown version of Plex:CS. Run the updater to identify your version'
        )
        return plexcs.LATEST_VERSION

    if plexcs.LATEST_VERSION == plexcs.CURRENT_VERSION:
        logger.info('Plex:CS is up to date')
        return plexcs.LATEST_VERSION

    logger.info('Comparing currently installed version with latest GitHub version')
    # 404 is whitelisted -- presumably the compare endpoint 404s when one of
    # the SHAs is unknown to GitHub (local-only commits).
    url = 'https://api.github.com/repos/%s/plex-cs/compare/%s...%s' % (
        plexcs.CONFIG.GIT_USER, plexcs.LATEST_VERSION, plexcs.CURRENT_VERSION)
    commits = request.request_json(url, timeout=20, whitelist_status_code=404,
                                   validator=lambda x: isinstance(x, dict))

    if commits is None:
        logger.warn('Could not get commits behind from GitHub.')
        return plexcs.LATEST_VERSION

    try:
        plexcs.COMMITS_BEHIND = int(commits['behind_by'])
        logger.debug("In total, %d commits behind", plexcs.COMMITS_BEHIND)
    except KeyError:
        logger.info(
            'Cannot compare versions. Are you running a local development version?'
        )
        plexcs.COMMITS_BEHIND = 0

    if plexcs.COMMITS_BEHIND > 0:
        logger.info('New version is available. You are %s commits behind'
                    % plexcs.COMMITS_BEHIND)
    elif plexcs.COMMITS_BEHIND == 0:
        logger.info('Plex:CS is up to date')

    return plexcs.LATEST_VERSION
def set_user_profile_url(self, user=None, user_id=None, profile_url=None):
    """Store a custom avatar URL for a user, matched by user_id and/or username.

    An empty or whitespace-only profile_url is normalized to None, which
    clears the custom avatar.
    """
    # Fix: the original called profile_url.strip() unconditionally inside each
    # branch, which raises AttributeError when profile_url is None (its
    # default). Normalize once, None-safely.
    if profile_url is not None and profile_url.strip() == '':
        profile_url = None

    if user_id:
        monitor_db = database.MonitorDatabase()
        control_value_dict = {"user_id": user_id}
        new_value_dict = {"custom_avatar_url": profile_url}
        try:
            monitor_db.upsert('users', new_value_dict, control_value_dict)
        except Exception as e:
            logger.debug(u"Uncaught exception %s" % e)
    if user:
        monitor_db = database.MonitorDatabase()
        control_value_dict = {"username": user}
        new_value_dict = {"custom_avatar_url": profile_url}
        try:
            monitor_db.upsert('users', new_value_dict, control_value_dict)
        except Exception as e:
            logger.debug(u"Uncaught exception %s" % e)
def set_user_friendly_name(self, user=None, user_id=None, friendly_name=None,
                           do_notify=0, keep_history=1):
    """Set a user's friendly name plus notify/history flags.

    The user row is matched by user_id when given, and/or by username.
    An empty or whitespace-only friendly_name is stored as None.
    """
    # Fix: the original called friendly_name.strip() unconditionally, which
    # raises AttributeError when friendly_name is None (its default).
    if friendly_name is not None and friendly_name.strip() == '':
        friendly_name = None

    if user_id:
        monitor_db = database.MonitorDatabase()
        control_value_dict = {"user_id": user_id}
        new_value_dict = {"friendly_name": friendly_name,
                          "do_notify": do_notify,
                          "keep_history": keep_history}
        try:
            monitor_db.upsert('users', new_value_dict, control_value_dict)
        except Exception as e:
            logger.debug(u"Uncaught exception %s" % e)
    if user:
        monitor_db = database.MonitorDatabase()
        control_value_dict = {"username": user}
        new_value_dict = {"friendly_name": friendly_name,
                          "do_notify": do_notify,
                          "keep_history": keep_history}
        try:
            monitor_db.upsert('users', new_value_dict, control_value_dict)
        except Exception as e:
            logger.debug(u"Uncaught exception %s" % e)
def on_stop(self, force_stop=False):
    """Finalize a stopped session: clear pause, notify, write history, clean up.

    Args:
        force_stop: when True, skip the final state/viewOffset update (used
            when the stop event carries no usable timeline state).
    """
    if self.is_valid_session():
        logger.debug(u"Plex:CS ActivityHandler :: Session %s has stopped."
                     % str(self.get_session_key()))

        # Set the session last_paused timestamp
        ap = activity_processor.ActivityProcessor()
        ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None)

        # Update the session state and viewOffset
        # Set force_stop to true to disable the state set
        if not force_stop:
            ap.set_session_state(session_key=self.get_session_key(),
                                 state=self.timeline["state"],
                                 view_offset=self.timeline["viewOffset"])

        # Retrieve the session data from our temp table
        db_session = ap.get_session_by_key(session_key=self.get_session_key())

        # Fire off notifications
        threading.Thread(target=notification_handler.notify,
                         kwargs=dict(stream_data=db_session, notify_action="stop")).start()

        # Write it to the history table.
        # Fix: reuse the existing ActivityProcessor instead of constructing a
        # redundant second instance (the original created `monitor_proc`).
        ap.write_session_history(session=db_session)

        # Remove the session from our temp session table
        ap.delete_session(session_key=self.get_session_key())
def clear_history_tables():
    """Irreversibly wipe every session-history table, then compact the db file."""
    logger.debug(u"Plex:CS Database :: Deleting all session_history records... No turning back now bub.")
    db = MonitorDatabase()
    db.action('DELETE FROM session_history')
    db.action('DELETE FROM session_history_media_info')
    db.action('DELETE FROM session_history_metadata')
    # Reclaim the freed pages on disk.
    db.action('VACUUM;')
def check_recently_added():
    # Poll the PMS for recently added library items and fire 'created'
    # notifications for anything added within the last monitoring interval.
    with monitor_lock:
        # add delay to allow for metadata processing
        delay = plexcs.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY
        time_threshold = int(time.time()) - delay
        time_interval = plexcs.CONFIG.MONITORING_INTERVAL

        pms_connect = pmsconnect.PmsConnect()
        recently_added_list = pms_connect.get_recently_added_details(count='10')

        if recently_added_list:
            recently_added = recently_added_list['recently_added']

            for item in recently_added:
                metadata = []

                # Only consider items whose added_at falls inside the last
                # monitoring interval (after the processing delay).
                if 0 < time_threshold - int(item['added_at']) <= time_interval:
                    if item['media_type'] == 'movie':
                        metadata_list = pms_connect.get_metadata_details(item['rating_key'])
                        if metadata_list:
                            metadata = [metadata_list['metadata']]
                        else:
                            logger.error(u"Plex:CS Monitor :: Unable to retrieve metadata for rating_key %s" \
                                % str(item['rating_key']))
                    else:
                        # Non-movie items (shows/artists): expand to children
                        # (episodes/tracks) and evaluate each one.
                        metadata_list = pms_connect.get_metadata_children_details(item['rating_key'])
                        if metadata_list:
                            metadata = metadata_list['metadata']
                        else:
                            logger.error(u"Plex:CS Monitor :: Unable to retrieve children metadata for rating_key %s" \
                                % str(item['rating_key']))

                if metadata:
                    if not plexcs.CONFIG.NOTIFY_RECENTLY_ADDED_GRANDPARENT:
                        # Notify once per individual child item.
                        # NOTE(review): this inner loop reuses the name `item`,
                        # shadowing the outer loop variable -- behavior relies
                        # on the outer `item` not being used again afterwards;
                        # confirm before refactoring.
                        for item in metadata:
                            if 0 < time_threshold - int(item['added_at']) <= time_interval:
                                logger.debug(u"Plex:CS Monitor :: Library item %s has been added to Plex."
                                             % str(item['rating_key']))
                                # Fire off notifications
                                threading.Thread(target=notification_handler.notify_timeline,
                                                 kwargs=dict(timeline_data=item, notify_action='created')).start()
                    else:
                        # Grandparent mode: notify once for the newest child's
                        # grandparent (e.g. the show/artist) instead of each item.
                        item = max(metadata, key=lambda x: x['added_at'])

                        if 0 < time_threshold - int(item['added_at']) <= time_interval:
                            if item['media_type'] == 'episode' or item['media_type'] == 'track':
                                metadata_list = pms_connect.get_metadata_details(item['grandparent_rating_key'])
                                if metadata_list:
                                    item = metadata_list['metadata']
                                else:
                                    logger.error(u"Plex:CS Monitor :: Unable to retrieve grandparent metadata for grandparent_rating_key %s" \
                                        % str(item['rating_key']))

                            logger.debug(u"Plex:CS Monitor :: Library item %s has been added to Plex."
                                         % str(item['rating_key']))
                            # Fire off notifications
                            threading.Thread(target=notification_handler.notify_timeline,
                                             kwargs=dict(timeline_data=item, notify_action='created')).start()
def run():
    # Websocket listener thread: connects to the PMS notification socket,
    # processes messages until the connection is lost for good, then falls
    # back to interval polling.
    from websocket import create_connection

    uri = 'ws://%s:%s/:/websockets/notifications' % (
        plexcs.CONFIG.PMS_IP,
        plexcs.CONFIG.PMS_PORT
    )

    # Set authentication token (if one is available)
    if plexcs.CONFIG.PMS_TOKEN:
        uri += '?X-Plex-Token=' + plexcs.CONFIG.PMS_TOKEN

    ws_connected = False
    reconnects = 0

    # Try an open the websocket connection - if it fails after 15 retries fallback to polling
    while not ws_connected and reconnects <= 15:
        try:
            logger.info(u'Plex:CS WebSocket :: Opening websocket, connection attempt %s.' % str(reconnects + 1))
            ws = create_connection(uri)
            reconnects = 0
            ws_connected = True
            logger.info(u'Plex:CS WebSocket :: Ready')
        except IOError as e:
            logger.error(u'Plex:CS WebSocket :: %s.' % e)
            reconnects += 1
            time.sleep(5)

    # Main receive loop: each iteration blocks in receive() until a frame
    # arrives, then dispatches it via process().
    while ws_connected:
        try:
            process(*receive(ws))

            # successfully received data, reset reconnects counter
            reconnects = 0
        except websocket.WebSocketConnectionClosedException:
            # Connection dropped mid-stream: retry up to 15 times before
            # giving up entirely.
            if reconnects <= 15:
                reconnects += 1

                # Sleep 5 between connection attempts
                if reconnects > 1:
                    time.sleep(5)

                logger.warn(u'Plex:CS WebSocket :: Connection has closed, reconnecting...')
                try:
                    ws = create_connection(uri)
                except IOError as e:
                    logger.info(u'Plex:CS WebSocket :: %s.' % e)
            else:
                ws_connected = False
                break

    if not ws_connected:
        # Permanent failure: switch the scheduler over to polling mode.
        logger.error(u'Plex:CS WebSocket :: Connection unavailable, falling back to polling.')
        plexcs.POLLING_FAILOVER = True
        plexcs.initialize_scheduler()

    logger.debug(u'Plex:CS WebSocket :: Leaving thread.')
def clear_history_tables():
    """Delete every stored watch-history record and vacuum the database.

    Destructive and not reversible.
    """
    logger.debug(
        u"Plex:CS Database :: Deleting all session_history records... No turning back now bub."
    )
    history_db = MonitorDatabase()
    history_db.action('DELETE FROM session_history')
    history_db.action('DELETE FROM session_history_media_info')
    history_db.action('DELETE FROM session_history_metadata')
    history_db.action('VACUUM;')
def on_created(self):
    """Fire 'created' timeline notifications for a newly added library item."""
    if not self.is_item():
        return

    logger.debug(u"Plex:CS TimelineHandler :: Library item %s has been added to Plex."
                 % str(self.get_rating_key()))

    # Fire off notifications
    threading.Thread(target=notification_handler.notify_timeline,
                     kwargs=dict(timeline_data=self.get_metadata(),
                                 notify_action="created")).start()
def on_start(self):
    """Handle a newly started session: notify agents and persist the session."""
    if not self.is_valid_session():
        return

    logger.debug(u"Plex:CS ActivityHandler :: Session %s has started."
                 % str(self.get_session_key()))

    # Fire off notifications
    threading.Thread(target=notification_handler.notify,
                     kwargs=dict(stream_data=self.get_live_session(),
                                 notify_action="play")).start()

    # Write the new session to our temp session table
    self.update_db_session()
def checkGithub():
    """Query GitHub for the newest Plex:CS commit and count commits behind.

    Side effects: updates plexcs.LATEST_VERSION and plexcs.COMMITS_BEHIND.

    Returns:
        The latest commit SHA, or plexcs.CURRENT_VERSION when GitHub is
        unreachable.
    """
    plexcs.COMMITS_BEHIND = 0

    # Get the latest version available from github
    logger.info("Retrieving latest version information from GitHub")
    url = "https://api.github.com/repos/%s/plex-cs/commits/%s" % (plexcs.CONFIG.GIT_USER, plexcs.CONFIG.GIT_BRANCH)
    # Fix: isinstance() instead of `type(x) == dict` (idiomatic, accepts subclasses).
    version = request.request_json(url, timeout=20, validator=lambda x: isinstance(x, dict))

    if version is None:
        logger.warn("Could not get the latest version from GitHub. Are you running a local development version?")
        return plexcs.CURRENT_VERSION

    plexcs.LATEST_VERSION = version["sha"]
    logger.debug("Latest version is %s", plexcs.LATEST_VERSION)

    # See how many commits behind we are
    if not plexcs.CURRENT_VERSION:
        logger.info("You are running an unknown version of Plex:CS. Run the updater to identify your version")
        return plexcs.LATEST_VERSION

    if plexcs.LATEST_VERSION == plexcs.CURRENT_VERSION:
        logger.info("Plex:CS is up to date")
        return plexcs.LATEST_VERSION

    logger.info("Comparing currently installed version with latest GitHub version")
    # 404 is whitelisted -- presumably compare 404s when one SHA is unknown
    # to GitHub (e.g. a local-only commit).
    url = "https://api.github.com/repos/%s/plex-cs/compare/%s...%s" % (
        plexcs.CONFIG.GIT_USER,
        plexcs.LATEST_VERSION,
        plexcs.CURRENT_VERSION,
    )
    commits = request.request_json(url, timeout=20, whitelist_status_code=404,
                                   validator=lambda x: isinstance(x, dict))

    if commits is None:
        logger.warn("Could not get commits behind from GitHub.")
        return plexcs.LATEST_VERSION

    try:
        plexcs.COMMITS_BEHIND = int(commits["behind_by"])
        logger.debug("In total, %d commits behind", plexcs.COMMITS_BEHIND)
    except KeyError:
        logger.info("Cannot compare versions. Are you running a local development version?")
        plexcs.COMMITS_BEHIND = 0

    if plexcs.COMMITS_BEHIND > 0:
        logger.info("New version is available. You are %s commits behind" % plexcs.COMMITS_BEHIND)
    elif plexcs.COMMITS_BEHIND == 0:
        logger.info("Plex:CS is up to date")

    return plexcs.LATEST_VERSION
def server_message(response):
    """
    Extract server message from response and log in to logger with DEBUG level.

    Some servers return extra information in the result. Try to parse it for
    debugging purpose. Messages are limited to 150 characters, since it may
    return the whole page in case of normal web page URLs
    """
    message = None

    # First attempt is to 'read' the response as HTML
    # Fix: default to "" so a missing Content-Type header cannot raise
    # TypeError (the original did `in None`).
    if "text/html" in response.headers.get("content-type", ""):
        soup = None

        try:
            soup = BeautifulSoup(response.content, "html5lib")
        except Exception:
            pass

        # Fix: the original used `soup` even when BeautifulSoup raised,
        # producing a NameError; only parse when the soup was built.
        if soup is not None:
            # Find body and cleanup common tags to grab content, which probably
            # contains the message.
            message = soup.find("body")
            elements = ("header", "script", "footer", "nav", "input", "textarea")

            for element in elements:
                for tag in soup.find_all(element):
                    tag.replaceWith("")

            message = message.text if message else soup.text
            message = message.strip()

    # Second attempt is to just take the response
    if message is None:
        # NOTE(review): under Python 3 response.content is bytes, so the
        # "..." concatenation below would need a decode -- this codebase
        # appears to target Python 2; confirm before porting.
        message = response.content.strip()

    if message:
        # Truncate message if it is too long.
        if len(message) > 150:
            message = message[:150] + "..."

        logger.debug("Server responded with message: %s", message)
def process(opcode, data):
    """Decode one websocket frame and dispatch 'playing' events.

    Args:
        opcode: websocket frame opcode; ignored unless listed in opcode_data.
        data: raw JSON payload.

    Returns:
        True when the message was dispatched (or was a non-'playing' type),
        False when it was skipped or malformed.
    """
    from plexcs import activity_handler

    if opcode not in opcode_data:
        return False

    try:
        info = json.loads(data)
    except Exception as ex:
        logger.warn(u'Plex:CS WebSocket :: Error decoding message from websocket: %s' % ex)
        logger.debug(data)
        return False

    # Fix: renamed from `type`, which shadowed the builtin.
    msg_type = info.get('type')

    if not msg_type:
        return False

    if msg_type == 'playing':
        # logger.debug('%s.playing %s' % (name, info))
        # Fix: the original wrapped only info.get() in a bare `except:` while
        # the real failure mode -- a missing or empty '_children' list -- made
        # time_line[0] blow up *outside* the try. Guard the list instead.
        time_line = info.get('_children')
        if not time_line:
            logger.debug(u"Plex:CS WebSocket :: Session found but unable to get timeline data.")
            return False

        activity = activity_handler.ActivityHandler(timeline=time_line[0])
        activity.process()

    #if msg_type == 'timeline':
    #    time_line = info.get('_children')
    #    if not time_line:
    #        logger.debug(u"Plex:CS WebSocket :: Timeline event found but unable to get timeline data.")
    #        return False
    #    activity = activity_handler.TimelineHandler(timeline=time_line[0])
    #    activity.process()

    return True
def on_pause(self):
    """Record a pause event: stamp the pause time, sync state, and notify."""
    if not self.is_valid_session():
        return

    logger.debug(u"Plex:CS ActivityHandler :: Session %s has been paused."
                 % str(self.get_session_key()))

    ap = activity_processor.ActivityProcessor()

    # Set the session last_paused timestamp
    ap.set_session_last_paused(session_key=self.get_session_key(),
                               timestamp=int(time.time()))

    # Update the session state and viewOffset
    ap.set_session_state(session_key=self.get_session_key(),
                         state=self.timeline["state"],
                         view_offset=self.timeline["viewOffset"])

    # Retrieve the session data from our temp table
    db_session = ap.get_session_by_key(session_key=self.get_session_key())

    # Fire off notifications
    threading.Thread(target=notification_handler.notify,
                     kwargs=dict(stream_data=db_session,
                                 notify_action="pause")).start()
def notify_timeline(timeline_data=None, notify_action=None):
    """Dispatch timeline ('created') or server up/down notifications."""
    if timeline_data and notify_action:
        media_type = timeline_data['media_type']

        # Is notification enabled for this media type?
        enabled = ((media_type == 'movie' and plexcs.CONFIG.MOVIE_NOTIFY_ENABLE)
                   or (media_type in ('show', 'episode') and plexcs.CONFIG.TV_NOTIFY_ENABLE)
                   or (media_type in ('artist', 'track') and plexcs.CONFIG.MUSIC_NOTIFY_ENABLE))

        if enabled:
            for agent in notifiers.available_notification_agents():
                if agent['on_created'] and notify_action == 'created':
                    # Build and send notification
                    notify_strings = build_notify_text(timeline=timeline_data, state=notify_action)
                    notifiers.send_notification(config_id=agent['id'],
                                                subject=notify_strings[0],
                                                body=notify_strings[1])
                    # Set the notification state in the db
                    set_notify_state(session=timeline_data, state=notify_action, agent_info=agent)

    elif not timeline_data and notify_action:
        # Server-level events (no timeline payload).
        for agent in notifiers.available_notification_agents():
            if agent['on_extdown'] and notify_action == 'extdown':
                notify_strings = build_server_notify_text(state=notify_action)
                notifiers.send_notification(config_id=agent['id'],
                                            subject=notify_strings[0],
                                            body=notify_strings[1])
            if agent['on_intdown'] and notify_action == 'intdown':
                notify_strings = build_server_notify_text(state=notify_action)
                notifiers.send_notification(config_id=agent['id'],
                                            subject=notify_strings[0],
                                            body=notify_strings[1])
    else:
        logger.debug(u"Plex:CS Notifier :: Notify timeline called but incomplete data received.")
def set_user_friendly_name(self, user=None, user_id=None, friendly_name=None,
                           do_notify=0, keep_history=1):
    """Set a user's friendly name plus notify/history flags.

    The user row is matched by user_id when given, and/or by username.
    An empty or whitespace-only friendly_name is stored as None.
    """
    # Fix: the original called friendly_name.strip() unconditionally, which
    # raises AttributeError when friendly_name is None (its default).
    if friendly_name is not None and friendly_name.strip() == '':
        friendly_name = None

    if user_id:
        monitor_db = database.MonitorDatabase()
        control_value_dict = {"user_id": user_id}
        new_value_dict = {
            "friendly_name": friendly_name,
            "do_notify": do_notify,
            "keep_history": keep_history
        }
        try:
            monitor_db.upsert('users', new_value_dict, control_value_dict)
        except Exception as e:
            logger.debug(u"Uncaught exception %s" % e)
    if user:
        monitor_db = database.MonitorDatabase()
        control_value_dict = {"username": user}
        new_value_dict = {
            "friendly_name": friendly_name,
            "do_notify": do_notify,
            "keep_history": keep_history
        }
        try:
            monitor_db.upsert('users', new_value_dict, control_value_dict)
        except Exception as e:
            logger.debug(u"Uncaught exception %s" % e)
def notify_timeline(timeline_data=None, notify_action=None):
    """Dispatch timeline ('created') or server up/down notifications."""
    if timeline_data and notify_action:
        media_type = timeline_data["media_type"]

        # Per-media-type notification toggles.
        movie_on = media_type == "movie" and plexcs.CONFIG.MOVIE_NOTIFY_ENABLE
        tv_on = media_type in ("show", "episode") and plexcs.CONFIG.TV_NOTIFY_ENABLE
        music_on = media_type in ("artist", "track") and plexcs.CONFIG.MUSIC_NOTIFY_ENABLE

        if movie_on or tv_on or music_on:
            for agent in notifiers.available_notification_agents():
                if agent["on_created"] and notify_action == "created":
                    # Build and send notification
                    notify_strings = build_notify_text(timeline=timeline_data, state=notify_action)
                    notifiers.send_notification(
                        config_id=agent["id"], subject=notify_strings[0], body=notify_strings[1]
                    )
                    # Set the notification state in the db
                    set_notify_state(session=timeline_data, state=notify_action, agent_info=agent)

    elif not timeline_data and notify_action:
        # Server-level events carry no timeline payload.
        for agent in notifiers.available_notification_agents():
            if agent["on_extdown"] and notify_action == "extdown":
                notify_strings = build_server_notify_text(state=notify_action)
                notifiers.send_notification(config_id=agent["id"], subject=notify_strings[0], body=notify_strings[1])
            if agent["on_intdown"] and notify_action == "intdown":
                notify_strings = build_server_notify_text(state=notify_action)
                notifiers.send_notification(config_id=agent["id"], subject=notify_strings[0], body=notify_strings[1])
    else:
        logger.debug(u"Plex:CS Notifier :: Notify timeline called but incomplete data received.")
def runGit(args):
    # Run a git command, trying each candidate git binary in turn, and
    # return a (output, err) tuple. output is None when no candidate worked.
    if plexcs.CONFIG.GIT_PATH:
        # Quote the configured path so paths with spaces survive shell=True.
        git_locations = ['"' + plexcs.CONFIG.GIT_PATH + '"']
    else:
        git_locations = ['git']

    if platform.system().lower() == 'darwin':
        git_locations.append('/usr/local/git/bin/git')

    output = err = None

    for cur_git in git_locations:
        cmd = cur_git + ' ' + args

        try:
            logger.debug('Trying to execute: "' + cmd + '" with shell in ' + plexcs.PROG_DIR)
            # NOTE(review): shell=True with a user-configured GIT_PATH allows
            # shell injection via config; acceptable only if config is trusted.
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                 shell=True, cwd=plexcs.PROG_DIR)
            # stderr is merged into stdout above, so `err` is always None here.
            output, err = p.communicate()
            # NOTE(review): under Python 3 `output` would be bytes and the
            # substring checks below would raise; this code assumes Python 2.
            output = output.strip()
            logger.debug('Git output: ' + output)
        except OSError:
            logger.debug('Command failed: %s', cmd)
            continue

        # Windows emits "not recognized...", POSIX shells emit "not found".
        if 'not found' in output or "not recognized as an internal or external command" in output:
            logger.debug('Unable to find git with command ' + cmd)
            output = None
        elif 'fatal:' in output or err:
            logger.error(
                'Git returned bad info. Are you sure this is a git installation?'
            )
            output = None
        elif output:
            # Got usable output; stop trying other locations.
            break

    return (output, err)
def import_users():
    """Populate the users table from distinct users found in session_history.

    Uses INSERT OR IGNORE so existing user rows are left untouched.
    """
    from plexcs import database

    logger.debug(u"Plex:CS Importer :: Importing PlexWatch Users...")
    monitor_db = database.MonitorDatabase()

    # user_id 1 is excluded -- presumably the server owner/admin pseudo-user
    # in PlexWatch data; confirm against the PlexWatch schema.
    query = 'INSERT OR IGNORE INTO users (user_id, username) ' \
            'SELECT user_id, user ' \
            'FROM session_history WHERE user_id != 1 GROUP BY user_id'

    try:
        monitor_db.action(query)
        logger.debug(u"Plex:CS Importer :: Users imported.")
    except Exception:
        # Fix: narrowed from a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt.
        logger.debug(u"Plex:CS Importer :: Failed to import users.")
def runGit(args):
    """Execute a git command, trying each known git location until one works.

    Returns:
        (output, err) tuple; output is None when every candidate binary
        failed or reported an error.
    """
    if plexcs.CONFIG.GIT_PATH:
        candidates = ['"' + plexcs.CONFIG.GIT_PATH + '"']
    else:
        candidates = ["git"]

    if platform.system().lower() == "darwin":
        candidates.append("/usr/local/git/bin/git")

    output = err = None

    for candidate in candidates:
        cmd = candidate + " " + args

        try:
            logger.debug('Trying to execute: "' + cmd + '" with shell in ' + plexcs.PROG_DIR)
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                    shell=True, cwd=plexcs.PROG_DIR)
            output, err = proc.communicate()
            output = output.strip()
            logger.debug("Git output: " + output)
        except OSError:
            logger.debug("Command failed: %s", cmd)
            continue

        # Shell-level "command missing" messages mean this candidate is a dud.
        if "not found" in output or "not recognized as an internal or external command" in output:
            logger.debug("Unable to find git with command " + cmd)
            output = None
        elif "fatal:" in output or err:
            logger.error("Git returned bad info. Are you sure this is a git installation?")
            output = None
        elif output:
            break

    return (output, err)
def on_buffer(self):
    # Track buffer events for a session and fire a 'buffer' notification
    # once the configured threshold/wait conditions are met.
    if self.is_valid_session():
        logger.debug(u"Plex:CS ActivityHandler :: Session %s is buffering." % self.get_session_key())
        ap = activity_processor.ActivityProcessor()
        db_stream = ap.get_session_by_key(session_key=self.get_session_key())

        # Increment our buffer count
        ap.increment_session_buffer_count(session_key=self.get_session_key())

        # Get our current buffer count
        current_buffer_count = ap.get_session_buffer_count(self.get_session_key())
        logger.debug(u"Plex:CS ActivityHandler :: Session %s buffer count is %s." %
                     (self.get_session_key(), current_buffer_count))

        # Get our last triggered time
        buffer_last_triggered = ap.get_session_buffer_trigger_time(self.get_session_key())

        # time_since_last_trigger stays 0 when no notification has fired yet.
        time_since_last_trigger = 0
        if buffer_last_triggered:
            logger.debug(u"Plex:CS ActivityHandler :: Session %s buffer last triggered at %s." %
                         (self.get_session_key(), buffer_last_triggered))
            time_since_last_trigger = int(time.time()) - int(buffer_last_triggered)

        # NOTE(review): `and` binds tighter than `or`, so this reads as
        # (count >= threshold AND never triggered) OR (wait elapsed) -- i.e.
        # once triggered, it re-fires on wait expiry regardless of the
        # threshold. Confirm this is the intended semantics before changing.
        if (current_buffer_count >= plexcs.CONFIG.BUFFER_THRESHOLD and time_since_last_trigger == 0
                or time_since_last_trigger >= plexcs.CONFIG.BUFFER_WAIT):
            ap.set_session_buffer_trigger_time(session_key=self.get_session_key())
            threading.Thread(target=notification_handler.notify,
                             kwargs=dict(stream_data=db_stream, notify_action="buffer")).start()
def import_from_plexwatch(database=None, table_name=None, import_ignore_interval=0):
    # Import watch history from a PlexWatch SQLite database file into the
    # Plex:CS history tables. Monitoring is disabled for the duration and
    # re-enabled at the end. Returns None on any fatal validation error.
    #
    # NOTE(review): the parameter `database` shadows the imported `database`
    # module name used elsewhere in this module -- here it is a file path.
    try:
        connection = sqlite3.connect(database, timeout=20)
        connection.row_factory = sqlite3.Row
    except sqlite3.OperationalError:
        logger.error('Plex:CS Importer :: Invalid filename.')
        return None
    except ValueError:
        logger.error('Plex:CS Importer :: Invalid filename.')
        return None

    # Sanity-check that the chosen table looks like PlexWatch data.
    try:
        connection.execute('SELECT ratingKey from %s' % table_name)
    except sqlite3.OperationalError:
        logger.error('Plex:CS Importer :: Database specified does not contain the required fields.')
        return None

    logger.debug(u"Plex:CS Importer :: PlexWatch data import in progress...")

    # Re-scheduling the jobs with all-zero intervals effectively disables them.
    logger.debug(u"Plex:CS Importer :: Disabling monitoring while import in progress.")
    plexcs.schedule_job(activity_pinger.check_active_sessions, 'Check for active sessions',
                        hours=0, minutes=0, seconds=0)
    plexcs.schedule_job(activity_pinger.check_recently_added, 'Check for recently added items',
                        hours=0, minutes=0, seconds=0)

    ap = activity_processor.ActivityProcessor()
    user_data = users.Users()

    # Get the latest friends list so we can pull user id's
    try:
        plextv.refresh_users()
    except:
        logger.debug(u"Plex:CS Importer :: Unable to refresh the users list. Aborting import.")
        return None

    # Map PlexWatch columns onto Plex:CS session_history fields; fields not
    # stored by PlexWatch are selected as null and filled from the xml blob.
    query = 'SELECT time AS started, ' \
            'stopped, ' \
            'cast(ratingKey as text) AS rating_key, ' \
            'null AS user_id, ' \
            'user, ' \
            'ip_address, ' \
            'paused_counter, ' \
            'platform AS player, ' \
            'null AS platform, ' \
            'null as machine_id, ' \
            'parentRatingKey as parent_rating_key, ' \
            'grandparentRatingKey as grandparent_rating_key, ' \
            'null AS media_type, ' \
            'null AS view_offset, ' \
            'xml, ' \
            'rating as content_rating,' \
            'summary,' \
            'title AS full_title,' \
            '(case when orig_title_ep = "" then orig_title else ' \
            'orig_title_ep end) as title,' \
            '(case when orig_title_ep != "" then orig_title else ' \
            'null end) as grandparent_title ' \
            'FROM ' + table_name + ' ORDER BY id'

    result = connection.execute(query)

    for row in result:
        # Extract the xml from the Plexwatch db xml field.
        extracted_xml = extract_plexwatch_xml(row['xml'])

        # If we get back None from our xml extractor skip over the record and log error.
        if not extracted_xml:
            logger.error(u"Plex:CS Importer :: Skipping record with ratingKey %s due to malformed xml."
                         % str(row['rating_key']))
            continue

        # Skip line if we don't have a ratingKey to work with
        if not row['rating_key']:
            # NOTE(review): "ratingRey" in this log message is a typo for
            # "ratingKey" (runtime string, left untouched here).
            logger.error(u"Plex:CS Importer :: Skipping record due to null ratingRey.")
            continue

        # If the user_id no longer exists in the friends list, pull it from the xml.
        if user_data.get_user_id(user=row['user']):
            user_id = user_data.get_user_id(user=row['user'])
        else:
            user_id = extracted_xml['user_id']

        session_history = {'started': row['started'],
                           'stopped': row['stopped'],
                           'rating_key': row['rating_key'],
                           'title': row['title'],
                           'parent_title': extracted_xml['parent_title'],
                           'grandparent_title': row['grandparent_title'],
                           'user_id': user_id,
                           'user': row['user'],
                           'ip_address': row['ip_address'],
                           'paused_counter': row['paused_counter'],
                           'player': row['player'],
                           'platform': extracted_xml['platform'],
                           'machine_id': extracted_xml['machine_id'],
                           'parent_rating_key': row['parent_rating_key'],
                           'grandparent_rating_key': row['grandparent_rating_key'],
                           'media_type': extracted_xml['media_type'],
                           'view_offset': extracted_xml['view_offset'],
                           'video_decision': extracted_xml['video_decision'],
                           'audio_decision': extracted_xml['audio_decision'],
                           'duration': extracted_xml['duration'],
                           'width': extracted_xml['width'],
                           'height': extracted_xml['height'],
                           'container': extracted_xml['container'],
                           'video_codec': extracted_xml['video_codec'],
                           'audio_codec': extracted_xml['audio_codec'],
                           'bitrate': extracted_xml['bitrate'],
                           'video_resolution': extracted_xml['video_resolution'],
                           'video_framerate': extracted_xml['video_framerate'],
                           'aspect_ratio': extracted_xml['aspect_ratio'],
                           'audio_channels': extracted_xml['audio_channels'],
                           'transcode_protocol': extracted_xml['transcode_protocol'],
                           'transcode_container': extracted_xml['transcode_container'],
                           'transcode_video_codec': extracted_xml['transcode_video_codec'],
                           'transcode_audio_codec': extracted_xml['transcode_audio_codec'],
                           'transcode_audio_channels': extracted_xml['transcode_audio_channels'],
                           'transcode_width': extracted_xml['transcode_width'],
                           'transcode_height': extracted_xml['transcode_height']
                           }

        session_history_metadata = {'rating_key': helpers.latinToAscii(row['rating_key']),
                                    'parent_rating_key': row['parent_rating_key'],
                                    'grandparent_rating_key': row['grandparent_rating_key'],
                                    'title': row['title'],
                                    'parent_title': extracted_xml['parent_title'],
                                    'grandparent_title': row['grandparent_title'],
                                    'index': extracted_xml['media_index'],
                                    'parent_index': extracted_xml['parent_media_index'],
                                    'thumb': extracted_xml['thumb'],
                                    'parent_thumb': extracted_xml['parent_thumb'],
                                    'grandparent_thumb': extracted_xml['grandparent_thumb'],
                                    'art': extracted_xml['art'],
                                    'media_type': extracted_xml['media_type'],
                                    'year': extracted_xml['year'],
                                    'originally_available_at': extracted_xml['originally_available_at'],
                                    'added_at': extracted_xml['added_at'],
                                    'updated_at': extracted_xml['updated_at'],
                                    'last_viewed_at': extracted_xml['last_viewed_at'],
                                    'content_rating': row['content_rating'],
                                    'summary': row['summary'],
                                    'tagline': extracted_xml['tagline'],
                                    'rating': extracted_xml['rating'],
                                    'duration': extracted_xml['duration'],
                                    'guid': extracted_xml['guid'],
                                    'directors': extracted_xml['directors'],
                                    'writers': extracted_xml['writers'],
                                    'actors': extracted_xml['actors'],
                                    'genres': extracted_xml['genres'],
                                    'studio': extracted_xml['studio'],
                                    'full_title': row['full_title']
                                    }

        # On older versions of PMS, "clip" items were still classified as "movie" and had bad ratingKey values
        # Just make sure that the ratingKey is indeed an integer
        if session_history_metadata['rating_key'].isdigit():
            ap.write_session_history(session=session_history,
                                     import_metadata=session_history_metadata,
                                     is_import=True,
                                     import_ignore_interval=import_ignore_interval)
        else:
            logger.debug(u"Plex:CS Importer :: Item has bad rating_key: %s"
                         % session_history_metadata['rating_key'])

    logger.debug(u"Plex:CS Importer :: PlexWatch data import complete.")
    import_users()

    logger.debug(u"Plex:CS Importer :: Re-enabling monitoring.")
    plexcs.initialize_scheduler()
def request_response(url, method="get", auto_raise=True,
                     whitelist_status_code=None, lock=fake_lock, **kwargs):
    """
    Convenient wrapper for `requests.get', which will capture the exceptions
    and log them. On success, the Response object is returned. In case of a
    exception, None is returned.

    Additionally, there is support for rate limiting. To use this feature,
    supply a tuple of (lock, request_limit). The lock is used to make sure no
    other request with the same lock is executed. The request limit is the
    minimal time between two requests (and so 1/request_limit is the number of
    requests per seconds).
    """
    # Convert whitelist_status_code to a list if needed
    if whitelist_status_code and type(whitelist_status_code) != list:
        whitelist_status_code = [whitelist_status_code]

    # Disable verification of SSL certificates if requested. Note: this could
    # pose a security issue!
    kwargs["verify"] = bool(plexcs.CONFIG.VERIFY_SSL_CERT)

    # Map method to the request.XXX method. This is a simple hack, but it
    # allows requests to apply more magic per method. See lib/requests/api.py.
    request_method = getattr(requests, method.lower())

    try:
        # Request URL and wait for response
        with lock:
            logger.debug(
                "Requesting URL via %s method: %s", method.upper(), url)
            response = request_method(url, **kwargs)

        # If status code != OK, then raise exception, except if the status code
        # is white listed.
        if whitelist_status_code and auto_raise:
            if response.status_code not in whitelist_status_code:
                try:
                    response.raise_for_status()
                except:
                    # Log before re-raising so the non-whitelisted status is
                    # visible even when the caller swallows the exception.
                    logger.debug(
                        "Response status code %d is not white "
                        "listed, raised exception", response.status_code)
                    raise
        elif auto_raise:
            response.raise_for_status()

        return response
    # Every handler below logs and falls through, so the function implicitly
    # returns None on any request failure.
    except requests.exceptions.SSLError as e:
        if kwargs["verify"]:
            logger.error(
                "Unable to connect to remote host because of a SSL error. "
                "It is likely that your system cannot verify the validity"
                "of the certificate. The remote certificate is either "
                "self-signed, or the remote server uses SNI. See the wiki for "
                "more information on this topic.")
        else:
            logger.error(
                "SSL error raised during connection, with certificate "
                "verification turned off: %s", e)
    except requests.ConnectionError:
        logger.error(
            "Unable to connect to remote host. Check if the remote "
            "host is up and running.")
    except requests.Timeout:
        logger.error(
            "Request timed out. The remote host did not respond timely.")
    except requests.HTTPError as e:
        if e.response is not None:
            # Classify the failure by status-code range for the log message.
            if e.response.status_code >= 500:
                cause = "remote server error"
            elif e.response.status_code >= 400:
                cause = "local client error"
            else:
                # I don't think we will end up here, but for completeness
                cause = "unknown"

            logger.error(
                "Request raise HTTP error with status code %d (%s).",
                e.response.status_code, cause)

            # Debug response
            if plexcs.VERBOSE:
                server_message(e.response)
        else:
            logger.error("Request raised HTTP error.")
    except requests.RequestException as e:
        logger.error("Request raised exception: %s", e)
def import_from_plexwatch(database=None, table_name=None, import_ignore_interval=0):
    """Import play history from a PlexWatch SQLite database into Plex:CS.

    Monitoring jobs are paused for the duration of the import and re-enabled
    at the end. Returns None on any abort condition; otherwise returns nothing
    after a completed import.

    :param database: path to the PlexWatch SQLite file.
    :param table_name: PlexWatch table to read ('processed' or 'grouped').
    :param import_ignore_interval: passed through to the session writer to
        skip very short sessions.
    """
    try:
        connection = sqlite3.connect(database, timeout=20)
        connection.row_factory = sqlite3.Row
    except sqlite3.OperationalError:
        logger.error('Plex:CS Importer :: Invalid filename.')
        return None
    except ValueError:
        logger.error('Plex:CS Importer :: Invalid filename.')
        return None

    try:
        # NOTE(review): table_name is interpolated directly into SQL. It comes
        # from the import UI rather than anonymous users, but parameterizing is
        # not possible for identifiers — keep the value validated upstream.
        connection.execute('SELECT ratingKey from %s' % table_name)
    except sqlite3.OperationalError:
        logger.error(
            'Plex:CS Importer :: Database specified does not contain the required fields.'
        )
        return None

    logger.debug(u"Plex:CS Importer :: PlexWatch data import in progress...")

    # Suspend the pollers (zero interval unschedules the jobs) so the monitor
    # does not race the importer on the sessions tables.
    logger.debug(
        u"Plex:CS Importer :: Disabling monitoring while import in progress.")
    plexcs.schedule_job(activity_pinger.check_active_sessions,
                        'Check for active sessions',
                        hours=0, minutes=0, seconds=0)
    plexcs.schedule_job(activity_pinger.check_recently_added,
                        'Check for recently added items',
                        hours=0, minutes=0, seconds=0)

    ap = activity_processor.ActivityProcessor()
    user_data = users.Users()

    # Get the latest friends list so we can pull user id's
    try:
        plextv.refresh_users()
    except Exception:
        # Narrowed from a bare except so Ctrl-C/SystemExit still propagate.
        logger.debug(
            u"Plex:CS Importer :: Unable to refresh the users list. Aborting import."
        )
        return None

    # Map PlexWatch columns onto the Plex:CS session_history schema; fields
    # PlexWatch never stored are selected as null and filled from the xml blob.
    query = 'SELECT time AS started, ' \
            'stopped, ' \
            'cast(ratingKey as text) AS rating_key, ' \
            'null AS user_id, ' \
            'user, ' \
            'ip_address, ' \
            'paused_counter, ' \
            'platform AS player, ' \
            'null AS platform, ' \
            'null as machine_id, ' \
            'parentRatingKey as parent_rating_key, ' \
            'grandparentRatingKey as grandparent_rating_key, ' \
            'null AS media_type, ' \
            'null AS view_offset, ' \
            'xml, ' \
            'rating as content_rating,' \
            'summary,' \
            'title AS full_title,' \
            '(case when orig_title_ep = "" then orig_title else ' \
            'orig_title_ep end) as title,' \
            '(case when orig_title_ep != "" then orig_title else ' \
            'null end) as grandparent_title ' \
            'FROM ' + table_name + ' ORDER BY id'

    result = connection.execute(query)

    for row in result:
        # Extract the xml from the Plexwatch db xml field.
        extracted_xml = extract_plexwatch_xml(row['xml'])

        # If we get back None from our xml extractor skip over the record and log error.
        if not extracted_xml:
            logger.error(
                u"Plex:CS Importer :: Skipping record with ratingKey %s due to malformed xml."
                % str(row['rating_key']))
            continue

        # Skip line if we don't have a ratingKey to work with
        if not row['rating_key']:
            # BUGFIX: message previously read "null ratingRey".
            logger.error(
                u"Plex:CS Importer :: Skipping record due to null ratingKey.")
            continue

        # If the user_id no longer exists in the friends list, pull it from the xml.
        if user_data.get_user_id(user=row['user']):
            user_id = user_data.get_user_id(user=row['user'])
        else:
            user_id = extracted_xml['user_id']

        session_history = {'started': row['started'],
                           'stopped': row['stopped'],
                           'rating_key': row['rating_key'],
                           'title': row['title'],
                           'parent_title': extracted_xml['parent_title'],
                           'grandparent_title': row['grandparent_title'],
                           'user_id': user_id,
                           'user': row['user'],
                           'ip_address': row['ip_address'],
                           'paused_counter': row['paused_counter'],
                           'player': row['player'],
                           'platform': extracted_xml['platform'],
                           'machine_id': extracted_xml['machine_id'],
                           'parent_rating_key': row['parent_rating_key'],
                           'grandparent_rating_key': row['grandparent_rating_key'],
                           'media_type': extracted_xml['media_type'],
                           'view_offset': extracted_xml['view_offset'],
                           'video_decision': extracted_xml['video_decision'],
                           'audio_decision': extracted_xml['audio_decision'],
                           'duration': extracted_xml['duration'],
                           'width': extracted_xml['width'],
                           'height': extracted_xml['height'],
                           'container': extracted_xml['container'],
                           'video_codec': extracted_xml['video_codec'],
                           'audio_codec': extracted_xml['audio_codec'],
                           'bitrate': extracted_xml['bitrate'],
                           'video_resolution': extracted_xml['video_resolution'],
                           'video_framerate': extracted_xml['video_framerate'],
                           'aspect_ratio': extracted_xml['aspect_ratio'],
                           'audio_channels': extracted_xml['audio_channels'],
                           'transcode_protocol': extracted_xml['transcode_protocol'],
                           'transcode_container': extracted_xml['transcode_container'],
                           'transcode_video_codec': extracted_xml['transcode_video_codec'],
                           'transcode_audio_codec': extracted_xml['transcode_audio_codec'],
                           'transcode_audio_channels': extracted_xml['transcode_audio_channels'],
                           'transcode_width': extracted_xml['transcode_width'],
                           'transcode_height': extracted_xml['transcode_height']
                           }

        session_history_metadata = {'rating_key': helpers.latinToAscii(row['rating_key']),
                                    'parent_rating_key': row['parent_rating_key'],
                                    'grandparent_rating_key': row['grandparent_rating_key'],
                                    'title': row['title'],
                                    'parent_title': extracted_xml['parent_title'],
                                    'grandparent_title': row['grandparent_title'],
                                    'index': extracted_xml['media_index'],
                                    'parent_index': extracted_xml['parent_media_index'],
                                    'thumb': extracted_xml['thumb'],
                                    'parent_thumb': extracted_xml['parent_thumb'],
                                    'grandparent_thumb': extracted_xml['grandparent_thumb'],
                                    'art': extracted_xml['art'],
                                    'media_type': extracted_xml['media_type'],
                                    'year': extracted_xml['year'],
                                    'originally_available_at': extracted_xml['originally_available_at'],
                                    'added_at': extracted_xml['added_at'],
                                    'updated_at': extracted_xml['updated_at'],
                                    'last_viewed_at': extracted_xml['last_viewed_at'],
                                    'content_rating': row['content_rating'],
                                    'summary': row['summary'],
                                    'tagline': extracted_xml['tagline'],
                                    'rating': extracted_xml['rating'],
                                    'duration': extracted_xml['duration'],
                                    'guid': extracted_xml['guid'],
                                    'directors': extracted_xml['directors'],
                                    'writers': extracted_xml['writers'],
                                    'actors': extracted_xml['actors'],
                                    'genres': extracted_xml['genres'],
                                    'studio': extracted_xml['studio'],
                                    'full_title': row['full_title']
                                    }

        # On older versions of PMS, "clip" items were still classified as "movie" and had bad ratingKey values
        # Just make sure that the ratingKey is indeed an integer
        if session_history_metadata['rating_key'].isdigit():
            ap.write_session_history(session=session_history,
                                     import_metadata=session_history_metadata,
                                     is_import=True,
                                     import_ignore_interval=import_ignore_interval)
        else:
            logger.debug(u"Plex:CS Importer :: Item has bad rating_key: %s"
                         % session_history_metadata['rating_key'])

    logger.debug(u"Plex:CS Importer :: PlexWatch data import complete.")
    import_users()

    logger.debug(u"Plex:CS Importer :: Re-enabling monitoring.")
    plexcs.initialize_scheduler()
def generate_uuid():
    """Create a fresh random identifier as a 32-character lowercase hex string."""
    logger.debug(u"Generating UUID...")
    fresh = uuid.uuid4()
    return fresh.hex
def dbcheck():
    """Create the Plex:CS schema if missing and apply in-place migrations.

    Opens DB_FILE directly (not via MonitorDatabase), issues CREATE TABLE IF
    NOT EXISTS for every table, then probes each newer column with a SELECT
    and ALTERs it in when the probe raises sqlite3.OperationalError. Safe to
    run on every startup.
    """
    conn_db = sqlite3.connect(DB_FILE)
    c_db = conn_db.cursor()

    # sessions table :: This is a temp table that logs currently active sessions
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS sessions (id INTEGER PRIMARY KEY AUTOINCREMENT, '
        'session_key INTEGER, rating_key INTEGER, media_type TEXT, started INTEGER, '
        'paused_counter INTEGER DEFAULT 0, state TEXT, user_id INTEGER, user TEXT, friendly_name TEXT, '
        'ip_address TEXT, machine_id TEXT, player TEXT, platform TEXT, title TEXT, parent_title TEXT, '
        'grandparent_title TEXT, parent_rating_key INTEGER, grandparent_rating_key INTEGER, '
        'view_offset INTEGER DEFAULT 0, duration INTEGER, video_decision TEXT, audio_decision TEXT, '
        'width INTEGER, height INTEGER, container TEXT, video_codec TEXT, audio_codec TEXT, '
        'bitrate INTEGER, video_resolution TEXT, video_framerate TEXT, aspect_ratio TEXT, '
        'audio_channels INTEGER, transcode_protocol TEXT, transcode_container TEXT, '
        'transcode_video_codec TEXT, transcode_audio_codec TEXT, transcode_audio_channels INTEGER,'
        'transcode_width INTEGER, transcode_height INTEGER, buffer_count INTEGER DEFAULT 0, '
        'buffer_last_triggered INTEGER, last_paused INTEGER)'
    )

    # session_history table :: This is a history table which logs essential stream details
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS session_history (id INTEGER PRIMARY KEY AUTOINCREMENT, reference_id INTEGER, '
        'started INTEGER, stopped INTEGER, rating_key INTEGER, user_id INTEGER, user TEXT, '
        'ip_address TEXT, paused_counter INTEGER DEFAULT 0, player TEXT, platform TEXT, machine_id TEXT, '
        'parent_rating_key INTEGER, grandparent_rating_key INTEGER, media_type TEXT, view_offset INTEGER DEFAULT 0)'
    )

    # session_history_media_info table :: This is a table which logs each session's media info
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS session_history_media_info (id INTEGER PRIMARY KEY, '
        'rating_key INTEGER, video_decision TEXT, audio_decision TEXT, duration INTEGER DEFAULT 0, width INTEGER, '
        'height INTEGER, container TEXT, video_codec TEXT, audio_codec TEXT, bitrate INTEGER, video_resolution TEXT, '
        'video_framerate TEXT, aspect_ratio TEXT, audio_channels INTEGER, transcode_protocol TEXT, '
        'transcode_container TEXT, transcode_video_codec TEXT, transcode_audio_codec TEXT, '
        'transcode_audio_channels INTEGER, transcode_width INTEGER, transcode_height INTEGER)'
    )

    # session_history_metadata table :: This is a table which logs each session's media metadata
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS session_history_metadata (id INTEGER PRIMARY KEY, '
        'rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, '
        'title TEXT, parent_title TEXT, grandparent_title TEXT, full_title TEXT, media_index INTEGER, '
        'parent_media_index INTEGER, thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, art TEXT, media_type TEXT, '
        'year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, last_viewed_at INTEGER, '
        'content_rating TEXT, summary TEXT, tagline TEXT, rating TEXT, duration INTEGER DEFAULT 0, guid TEXT, '
        'directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT)'
    )

    # users table :: This table keeps record of the friends list
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY AUTOINCREMENT, '
        'user_id INTEGER DEFAULT NULL UNIQUE, username TEXT NOT NULL UNIQUE, '
        'friendly_name TEXT, thumb TEXT, email TEXT, custom_avatar_url TEXT, is_home_user INTEGER DEFAULT NULL, '
        'is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL, do_notify INTEGER DEFAULT 1, '
        'keep_history INTEGER DEFAULT 1, deleted_user INTEGER DEFAULT 0)'
    )

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT started from sessions')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table sessions.")
        c_db.execute('ALTER TABLE sessions ADD COLUMN started INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN paused_counter INTEGER DEFAULT 0')
        c_db.execute('ALTER TABLE sessions ADD COLUMN state TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN user TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN machine_id TEXT')

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT title from sessions')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table sessions.")
        c_db.execute('ALTER TABLE sessions ADD COLUMN title TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN parent_title TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN grandparent_title TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN friendly_name TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN player TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN user_id INTEGER')

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT ip_address from sessions')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table sessions.")
        c_db.execute('ALTER TABLE sessions ADD COLUMN ip_address TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN platform TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN parent_rating_key INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN grandparent_rating_key INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN view_offset INTEGER DEFAULT 0')
        c_db.execute('ALTER TABLE sessions ADD COLUMN duration INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN video_decision TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN audio_decision TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN width INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN height INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN container TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN video_codec TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN audio_codec TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN bitrate INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN video_resolution TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN video_framerate TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN aspect_ratio TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN audio_channels INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_protocol TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_container TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_video_codec TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_audio_codec TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_audio_channels INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_width INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_height INTEGER')

    # Upgrade session_history_metadata table from earlier versions
    try:
        c_db.execute('SELECT full_title from session_history_metadata')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table session_history_metadata.")
        c_db.execute('ALTER TABLE session_history_metadata ADD COLUMN full_title TEXT')

    # Upgrade session_history_metadata table from earlier versions
    try:
        c_db.execute('SELECT tagline from session_history_metadata')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table session_history_metadata.")
        c_db.execute('ALTER TABLE session_history_metadata ADD COLUMN tagline TEXT')

    # notify_log table :: This is a table which logs notifications sent
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS notify_log (id INTEGER PRIMARY KEY AUTOINCREMENT, '
        'session_key INTEGER, rating_key INTEGER, user_id INTEGER, user TEXT, '
        'agent_id INTEGER, agent_name TEXT, on_play INTEGER, on_stop INTEGER, on_watched INTEGER, '
        'on_pause INTEGER, on_resume INTEGER, on_buffer INTEGER, on_created INTEGER)'
    )

    # Upgrade users table from earlier versions
    try:
        c_db.execute('SELECT do_notify from users')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table users.")
        c_db.execute('ALTER TABLE users ADD COLUMN do_notify INTEGER DEFAULT 1')

    # Upgrade users table from earlier versions
    try:
        c_db.execute('SELECT keep_history from users')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table users.")
        c_db.execute('ALTER TABLE users ADD COLUMN keep_history INTEGER DEFAULT 1')

    # Upgrade notify_log table from earlier versions
    try:
        c_db.execute('SELECT on_pause from notify_log')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table notify_log.")
        c_db.execute('ALTER TABLE notify_log ADD COLUMN on_pause INTEGER')
        c_db.execute('ALTER TABLE notify_log ADD COLUMN on_resume INTEGER')
        c_db.execute('ALTER TABLE notify_log ADD COLUMN on_buffer INTEGER')

    # Upgrade notify_log table from earlier versions
    try:
        c_db.execute('SELECT on_created from notify_log')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table notify_log.")
        c_db.execute('ALTER TABLE notify_log ADD COLUMN on_created INTEGER')

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT buffer_count from sessions')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table sessions.")
        c_db.execute('ALTER TABLE sessions ADD COLUMN buffer_count INTEGER DEFAULT 0')
        c_db.execute('ALTER TABLE sessions ADD COLUMN buffer_last_triggered INTEGER')

    # Upgrade users table from earlier versions
    try:
        c_db.execute('SELECT custom_avatar_url from users')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table users.")
        c_db.execute('ALTER TABLE users ADD COLUMN custom_avatar_url TEXT')

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT last_paused from sessions')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table sessions.")
        c_db.execute('ALTER TABLE sessions ADD COLUMN last_paused INTEGER')

    # Add "Local" user to database as default unauthenticated user.
    # BUGFIX: the guard previously checked username = "******" (a scrubbed
    # literal), which never matched the inserted "Local" row, so the INSERT
    # re-ran on every startup against the UNIQUE username column.
    result = c_db.execute('SELECT id FROM users WHERE username = "Local"')
    if not result.fetchone():
        logger.debug(u'User "Local" does not exist. Adding user.')
        c_db.execute('INSERT INTO users (user_id, username) VALUES (0, "Local")')

    # Upgrade session_history table from earlier versions
    try:
        c_db.execute('SELECT reference_id from session_history')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table session_history.")
        c_db.execute('ALTER TABLE session_history ADD COLUMN reference_id INTEGER DEFAULT 0')
        # Set reference_id to the first row where (user_id = previous row, rating_key != previous row) and user_id = user_id
        c_db.execute(
            'UPDATE session_history '
            'SET reference_id = (SELECT (CASE '
            'WHEN (SELECT MIN(id) FROM session_history WHERE id > ( '
            'SELECT MAX(id) FROM session_history '
            'WHERE (user_id = t1.user_id AND rating_key <> t1.rating_key AND id < t1.id)) AND user_id = t1.user_id) IS NULL '
            'THEN (SELECT MIN(id) FROM session_history WHERE (user_id = t1.user_id)) '
            'ELSE (SELECT MIN(id) FROM session_history WHERE id > ( '
            'SELECT MAX(id) FROM session_history '
            'WHERE (user_id = t1.user_id AND rating_key <> t1.rating_key AND id < t1.id)) AND user_id = t1.user_id) END) '
            'FROM session_history AS t1 '
            'WHERE t1.id = session_history.id) '
        )

    # Upgrade users table from earlier versions
    try:
        c_db.execute('SELECT deleted_user from users')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table users.")
        c_db.execute('ALTER TABLE users ADD COLUMN deleted_user INTEGER DEFAULT 0')

    conn_db.commit()
    c_db.close()
def check_active_sessions(ws_request=False):
    """Reconcile the temp `sessions` table against current PMS activity.

    For each stream already in the table: update pause counters and buffer
    counts, fire pause/resume/buffer/watched notifications on state changes,
    and on disappearance write it to history and notify 'stop'. New sessions
    from the server are then written into the table. When `ws_request` is
    True the pause counter is not incremented here (the websocket path tracks
    pause time itself). Tracks consecutive failed server pings in the
    module-global `int_ping_count` and notifies 'intdown' on the third miss.
    """
    with monitor_lock:
        pms_connect = pmsconnect.PmsConnect()
        session_list = pms_connect.get_current_activity()
        monitor_db = database.MonitorDatabase()
        monitor_process = activity_processor.ActivityProcessor()
        # logger.debug(u"Plex:CS Monitor :: Checking for active streams.")

        global int_ping_count

        if session_list:
            int_ping_count = 0

            media_container = session_list['sessions']

            # Check our temp table for what we must do with the new streams
            db_streams = monitor_db.select('SELECT started, session_key, rating_key, media_type, title, parent_title, '
                                           'grandparent_title, user_id, user, friendly_name, ip_address, player, '
                                           'platform, machine_id, parent_rating_key, grandparent_rating_key, state, '
                                           'view_offset, duration, video_decision, audio_decision, width, height, '
                                           'container, video_codec, audio_codec, bitrate, video_resolution, '
                                           'video_framerate, aspect_ratio, audio_channels, transcode_protocol, '
                                           'transcode_container, transcode_video_codec, transcode_audio_codec, '
                                           'transcode_audio_channels, transcode_width, transcode_height, '
                                           'paused_counter, last_paused '
                                           'FROM sessions')
            for stream in db_streams:
                # NOTE: server sessions carry string keys, db rows may be ints —
                # hence the str() coercion on the db side of each comparison.
                if any(d['session_key'] == str(stream['session_key']) and d['rating_key'] == str(stream['rating_key'])
                       for d in media_container):
                    # The user's session is still active
                    for session in media_container:
                        if session['session_key'] == str(stream['session_key']) and \
                                session['rating_key'] == str(stream['rating_key']):
                            # The user is still playing the same media item
                            # Here we can check the play states
                            if session['state'] != stream['state']:
                                if session['state'] == 'paused':
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    threading.Thread(target=notification_handler.notify,
                                                     kwargs=dict(stream_data=stream, notify_action='pause')).start()
                                if session['state'] == 'playing' and stream['state'] == 'paused':
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    threading.Thread(target=notification_handler.notify,
                                                     kwargs=dict(stream_data=stream, notify_action='resume')).start()
                            if stream['state'] == 'paused' and not ws_request:
                                # The stream is still paused so we need to increment the paused_counter
                                # Using the set config parameter as the interval, probably not the most accurate but
                                # it will have to do for now. If it's a websocket request don't use this method.
                                paused_counter = int(stream['paused_counter']) + plexcs.CONFIG.MONITORING_INTERVAL
                                monitor_db.action('UPDATE sessions SET paused_counter = ? '
                                                  'WHERE session_key = ? AND rating_key = ?',
                                                  [paused_counter, stream['session_key'], stream['rating_key']])
                            if session['state'] == 'buffering' and plexcs.CONFIG.BUFFER_THRESHOLD > 0:
                                # The stream is buffering so we need to increment the buffer_count
                                # We're going just increment on every monitor ping,
                                # would be difficult to keep track otherwise
                                monitor_db.action('UPDATE sessions SET buffer_count = buffer_count + 1 '
                                                  'WHERE session_key = ? AND rating_key = ?',
                                                  [stream['session_key'], stream['rating_key']])

                                # Check the current buffer count and last buffer to determine if we should notify
                                # NOTE(review): buffer_values[0] assumes the row updated above is always
                                # returned here — verify MonitorDatabase.select never yields [] for it.
                                buffer_values = monitor_db.select('SELECT buffer_count, buffer_last_triggered '
                                                                  'FROM sessions '
                                                                  'WHERE session_key = ? AND rating_key = ?',
                                                                  [stream['session_key'], stream['rating_key']])

                                if buffer_values[0]['buffer_count'] >= plexcs.CONFIG.BUFFER_THRESHOLD:
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    # Our first buffer notification
                                    if buffer_values[0]['buffer_count'] == plexcs.CONFIG.BUFFER_THRESHOLD:
                                        logger.info(u"Plex:CS Monitor :: User '%s' has triggered a buffer warning."
                                                    % stream['user'])
                                        # Set the buffer trigger time
                                        monitor_db.action('UPDATE sessions '
                                                          'SET buffer_last_triggered = strftime("%s","now") '
                                                          'WHERE session_key = ? AND rating_key = ?',
                                                          [stream['session_key'], stream['rating_key']])

                                        threading.Thread(target=notification_handler.notify,
                                                         kwargs=dict(stream_data=stream, notify_action='buffer')).start()
                                    else:
                                        # Subsequent buffer notifications after wait time
                                        if int(time.time()) > buffer_values[0]['buffer_last_triggered'] + \
                                                plexcs.CONFIG.BUFFER_WAIT:
                                            logger.info(u"Plex:CS Monitor :: User '%s' has triggered multiple buffer warnings."
                                                        % stream['user'])
                                            # Set the buffer trigger time
                                            monitor_db.action('UPDATE sessions '
                                                              'SET buffer_last_triggered = strftime("%s","now") '
                                                              'WHERE session_key = ? AND rating_key = ?',
                                                              [stream['session_key'], stream['rating_key']])

                                            threading.Thread(target=notification_handler.notify,
                                                             kwargs=dict(stream_data=stream, notify_action='buffer')).start()

                                logger.debug(u"Plex:CS Monitor :: Stream buffering. Count is now %s. Last triggered %s."
                                             % (buffer_values[0]['buffer_count'],
                                                buffer_values[0]['buffer_last_triggered']))

                            # Check if the user has reached the offset in the media we defined as the "watched" percent
                            # Don't trigger if state is buffer as some clients push the progress to the end when
                            # buffering on start.
                            if session['view_offset'] and session['duration'] and session['state'] != 'buffering':
                                if helpers.get_percent(session['view_offset'],
                                                       session['duration']) > plexcs.CONFIG.NOTIFY_WATCHED_PERCENT:
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    threading.Thread(target=notification_handler.notify,
                                                     kwargs=dict(stream_data=stream, notify_action='watched')).start()

                else:
                    # The user has stopped playing a stream
                    logger.debug(u"Plex:CS Monitor :: Removing sessionKey %s ratingKey %s from session queue"
                                 % (stream['session_key'], stream['rating_key']))
                    monitor_db.action('DELETE FROM sessions WHERE session_key = ? AND rating_key = ?',
                                      [stream['session_key'], stream['rating_key']])

                    # Check if the user has reached the offset in the media we defined as the "watched" percent
                    if stream['view_offset'] and stream['duration']:
                        if helpers.get_percent(stream['view_offset'],
                                               stream['duration']) > plexcs.CONFIG.NOTIFY_WATCHED_PERCENT:
                            # Push any notifications -
                            # Push it on it's own thread so we don't hold up our db actions
                            threading.Thread(target=notification_handler.notify,
                                             kwargs=dict(stream_data=stream, notify_action='watched')).start()

                    # Push any notifications - Push it on it's own thread so we don't hold up our db actions
                    threading.Thread(target=notification_handler.notify,
                                     kwargs=dict(stream_data=stream, notify_action='stop')).start()

                    # Write the item history on playback stop
                    monitor_process.write_session_history(session=stream)

            # Process the newly received session data
            for session in media_container:
                monitor_process.write_session(session)
        else:
            logger.debug(u"Plex:CS Monitor :: Unable to read session list.")

            int_ping_count += 1
            logger.warn(u"Plex:CS Monitor :: Unable to get an internal response from the server, ping attempt %s." \
                        % str(int_ping_count))

        # Exactly on the third consecutive failed ping, notify that the
        # internal server connection is down.
        if int_ping_count == 3:
            # Fire off notifications
            threading.Thread(target=notification_handler.notify_timeline,
                             kwargs=dict(notify_action='intdown')).start()
def make_request(self,
                 uri=None,
                 proto='HTTP',
                 request_type='GET',
                 headers=None,
                 output_format='raw',
                 return_type=False,
                 no_token=False):
    """Issue an HTTP(S) request against the Plex server and format the reply.

    :param uri: request path; returns None immediately if falsy.
    :param proto: 'HTTP' or 'HTTPS' (case-insensitive).
    :param request_type: one of GET/POST/PUT/DELETE; anything else aborts.
    :param headers: optional dict of request headers.
    :param output_format: 'raw', 'dict', 'json' or 'xml' — how the body is
        converted before returning.
    :param return_type: when True, return (output, content_type) instead of
        just the output.
    :param no_token: skip appending the X-Plex-Token query parameter.
    :returns: formatted response body (optionally with content type), or
        None on any failure.
    """
    valid_request_types = ['GET', 'POST', 'PUT', 'DELETE']

    if request_type.upper() not in valid_request_types:
        logger.debug(u"HTTP request made but unsupported request type given.")
        return None

    if uri:
        if proto.upper() == 'HTTPS':
            if not self.ssl_verify and hasattr(ssl, '_create_unverified_context'):
                # Deliberately unverified TLS when the user disabled
                # certificate checking — warn loudly every time.
                context = ssl._create_unverified_context()
                handler = HTTPSConnection(host=self.host, port=self.port, timeout=10, context=context)
                logger.warn(u"Plex:CS HTTP Handler :: Unverified HTTPS request made. This connection is not secure.")
            else:
                handler = HTTPSConnection(host=self.host, port=self.port, timeout=10)
        else:
            handler = HTTPConnection(host=self.host, port=self.port, timeout=10)

        token_string = ''
        if not no_token:
            # Append the auth token with '&' if a query string already exists.
            # NOTE(review): find('?') > 0 treats a '?' at position 0 as absent —
            # presumably uris never start with '?'; confirm against callers.
            if uri.find('?') > 0:
                token_string = '&X-Plex-Token=' + self.token
            else:
                token_string = '?X-Plex-Token=' + self.token

        try:
            if headers:
                handler.request(request_type, uri + token_string, headers=headers)
            else:
                handler.request(request_type, uri + token_string)
            response = handler.getresponse()
            request_status = response.status
            request_content = response.read()
            content_type = response.getheader('content-type')
        except IOError as e:
            logger.warn(u"Failed to access uri endpoint %s with error %s" % (uri, e))
            return None
        except Exception as e:
            # BUGFIX: a trailing bare `except:` used to follow this handler; it
            # could only ever catch BaseExceptions (KeyboardInterrupt,
            # SystemExit) and silently swallowed them, so it was removed.
            logger.warn(u"Failed to access uri endpoint %s. Is your server maybe accepting SSL connections only? %s" % (uri, e))
            return None

        if request_status == 200:
            try:
                if output_format == 'dict':
                    output = helpers.convert_xml_to_dict(request_content)
                elif output_format == 'json':
                    output = helpers.convert_xml_to_json(request_content)
                elif output_format == 'xml':
                    output = helpers.parse_xml(request_content)
                else:
                    output = request_content

                if return_type:
                    return output, content_type

                return output
            except Exception as e:
                # BUGFIX: message grammar ("Failed format response" -> "Failed to format response").
                logger.warn(u"Failed to format response from uri %s to %s error %s" % (uri, output_format, e))
                return None
        else:
            logger.warn(u"Failed to access uri endpoint %s. Status code %r" % (uri, request_status))
            return None
    else:
        # BUGFIX: typo "enpoint" -> "endpoint".
        logger.debug(u"HTTP request made but no endpoint given.")
        return None
def find_session_ip(self, rating_key=None, machine_id=None):
    """Dig through the tail of the PMS log for the client IP of a stream.

    First pass looks for lines carrying both the session machine id and the
    rating key (typical for transcode sessions). If that fails, wait five
    seconds for the log to catch up and fall back to matching timeline
    requests by rating key only (this fallback can pick the wrong address if
    two users start the same item simultaneously). Returns the first
    non-localhost IPv4 found, or None.
    """
    needle_rk = 'ratingKey=' + rating_key
    needle_meta = 'metadata%2F' + rating_key
    needle_session = 'session=' + machine_id

    def first_public_ipv4(candidate):
        # First IPv4 on the line is the logged client address; skip localhost.
        hits = re.findall(r'[0-9]+(?:\.[0-9]+){3}', candidate)
        if hits and hits[0] != '127.0.0.1':
            return hits[0]
        return None

    logger.debug(u"Plex:CS ActivityProcessor :: Requesting log lines...")
    tail = log_reader.get_log_tail(window=5000, parsed=False)

    # Pass 1: newest-first scan for a line naming both machine id and rating key.
    for entry in reversed(tail):
        if needle_session in entry and (needle_rk in entry or needle_meta in entry):
            address = first_public_ipv4(entry)
            if address:
                logger.debug(u"Plex:CS ActivityProcessor :: Matched IP address (%s) for stream ratingKey %s "
                             u"and machineIdentifier %s."
                             % (address, rating_key, machine_id))
                return address

    logger.debug(u"Plex:CS ActivityProcessor :: Unable to find IP address on first pass. "
                 u"Attempting fallback check in 5 seconds...")

    # Give the log a moment to catch up, then re-read the tail.
    time.sleep(5)
    logger.debug(u"Plex:CS ActivityProcessor :: Requesting log lines...")
    tail = log_reader.get_log_tail(window=5000, parsed=False)

    # Pass 2: fall back to timeline GETs that mention the rating key.
    for entry in reversed(tail):
        if 'GET /:/timeline' in entry and (needle_rk in entry or needle_meta in entry):
            address = first_public_ipv4(entry)
            if address:
                logger.debug(u"Plex:CS ActivityProcessor :: Matched IP address (%s) for stream ratingKey %s." %
                             (address, rating_key))
                return address

    logger.debug(u"Plex:CS ActivityProcessor :: Unable to find IP address on fallback search. Not logging IP address.")
    return None
def notify(stream_data=None, notify_action=None):
    """Dispatch a notification for a session event to every enabled agent.

    stream_data   -- session dict (expects at least 'user', 'media_type',
                     'view_offset', 'duration' keys)
    notify_action -- one of 'play', 'stop', 'pause', 'resume', 'buffer',
                     'watched'

    Respects the per-user ``do_notify`` flag and the per-media-type enable
    switches in the config. Each sent notification is also recorded in the
    notify log via set_notify_state().
    """
    from plexcs import users

    if not (stream_data and notify_action):
        logger.debug(u"Plex:CS Notifier :: Notify called but incomplete data received.")
        return

    # Check if notifications enabled for user
    user_data = users.Users()
    user_details = user_data.get_user_friendly_name(user=stream_data['user'])
    if not user_details['do_notify']:
        return

    def _notify_agent(agent):
        # Build and send the notification, then record the state in the db.
        notify_strings = build_notify_text(session=stream_data, state=notify_action)
        notifiers.send_notification(config_id=agent['id'],
                                    subject=notify_strings[0],
                                    body=notify_strings[1])
        set_notify_state(session=stream_data, state=notify_action, agent_info=agent)

    media_type = stream_data['media_type']

    if (media_type == 'movie' and plexcs.CONFIG.MOVIE_NOTIFY_ENABLE) \
            or (media_type == 'episode' and plexcs.CONFIG.TV_NOTIFY_ENABLE):
        progress_percent = helpers.get_percent(stream_data['view_offset'],
                                               stream_data['duration'])

        for agent in notifiers.available_notification_agents():
            if agent['on_play'] and notify_action == 'play':
                _notify_agent(agent)
            elif agent['on_stop'] and notify_action == 'stop' \
                    and (plexcs.CONFIG.NOTIFY_CONSECUTIVE
                         or progress_percent < plexcs.CONFIG.NOTIFY_WATCHED_PERCENT):
                _notify_agent(agent)
            elif agent['on_pause'] and notify_action == 'pause' \
                    and (plexcs.CONFIG.NOTIFY_CONSECUTIVE or progress_percent < 99):
                _notify_agent(agent)
            elif agent['on_resume'] and notify_action == 'resume' \
                    and (plexcs.CONFIG.NOTIFY_CONSECUTIVE or progress_percent < 99):
                _notify_agent(agent)
            elif agent['on_buffer'] and notify_action == 'buffer':
                _notify_agent(agent)
            elif agent['on_watched'] and notify_action == 'watched':
                # Get the current states for notifications from our db
                notify_states = get_notify_state(session=stream_data)
                # If there is nothing in the notify_log for our agent id but
                # it is enabled we should notify
                if not any(d['agent_id'] == agent['id'] for d in notify_states):
                    _notify_agent(agent)
                else:
                    # Check in our notify log if the notification has already been sent
                    for notify_state in notify_states:
                        if not notify_state['on_watched'] and notify_state['agent_id'] == agent['id']:
                            _notify_agent(agent)

    elif media_type == 'track' and plexcs.CONFIG.MUSIC_NOTIFY_ENABLE:
        # Music sessions notify unconditionally on every supported action.
        for agent in notifiers.available_notification_agents():
            if agent['on_play'] and notify_action == 'play':
                _notify_agent(agent)
            elif agent['on_stop'] and notify_action == 'stop':
                _notify_agent(agent)
            elif agent['on_pause'] and notify_action == 'pause':
                _notify_agent(agent)
            elif agent['on_resume'] and notify_action == 'resume':
                _notify_agent(agent)
            elif agent['on_buffer'] and notify_action == 'buffer':
                _notify_agent(agent)

    elif media_type == 'clip':
        # Clips (trailers, extras) are deliberately ignored.
        pass
    else:
        #logger.debug(u"Plex:CS Notifier :: Notify called with unsupported media type.")
        pass
def write_session_history(self, session=None, import_metadata=None, is_import=False, import_ignore_interval=0):
    """Persist a finished stream session to the history tables.

    Writes one row each to session_history, session_history_media_info and
    session_history_metadata, and groups consecutive plays of the same item
    by the user via reference_id.

    session                -- session dict to log (keys used: started, stopped,
                              rating_key, media_type, user, user_id, etc.)
    import_metadata        -- pre-fetched metadata dict, used only when is_import
    is_import              -- True when called from the database importer
    import_ignore_interval -- minimum play duration (secs) for imported items
    """
    from plexcs import users

    user_data = users.Users()
    user_details = user_data.get_user_friendly_name(user=session['user'])

    if session:
        logging_enabled = False

        # Determine the stop time: imports may carry their own 'stopped'
        # timestamp; otherwise fall back to "now".
        if is_import:
            if str(session['stopped']).isdigit():
                stopped = int(session['stopped'])
            else:
                stopped = int(time.time())
        else:
            stopped = int(time.time())

        # A session qualifies for logging only if its media type's logging
        # switch is on and the rating key is numeric.
        if plexcs.CONFIG.MOVIE_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                session['media_type'] == 'movie':
            logging_enabled = True
        elif plexcs.CONFIG.TV_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                session['media_type'] == 'episode':
            logging_enabled = True
        elif plexcs.CONFIG.MUSIC_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                session['media_type'] == 'track':
            logging_enabled = True
        else:
            logger.debug(u"Plex:CS ActivityProcessor :: ratingKey %s not logged. Does not meet logging criteria. "
                         u"Media type is '%s'" % (session['rating_key'], session['media_type']))

        # Actual watch time = wall time minus time spent paused (if known).
        if str(session['paused_counter']).isdigit():
            real_play_time = stopped - session['started'] - int(session['paused_counter'])
        else:
            real_play_time = stopped - session['started']

        # Skip very short video plays when the ignore-interval option is set.
        if plexcs.CONFIG.LOGGING_IGNORE_INTERVAL and not is_import:
            if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                    (real_play_time < int(plexcs.CONFIG.LOGGING_IGNORE_INTERVAL)):
                logging_enabled = False
                logger.debug(u"Plex:CS ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
                             u"seconds, so we're not logging it."
                             % (session['rating_key'], str(real_play_time),
                                plexcs.CONFIG.LOGGING_IGNORE_INTERVAL))
        # Heuristic: a track played under 15s (with duration >= 30s) was skipped.
        if session['media_type'] == 'track' and not is_import:
            if real_play_time < 15 and session['duration'] >= 30:
                logging_enabled = False
                logger.debug(u"Plex:CS ActivityProcessor :: Play duration for ratingKey %s is %s secs, "
                             u"looks like it was skipped so we're not logging it"
                             % (session['rating_key'], str(real_play_time)))
        elif is_import and import_ignore_interval:
            # Same short-play filter, but using the importer's threshold.
            if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                    (real_play_time < int(import_ignore_interval)):
                logging_enabled = False
                logger.debug(u"Plex:CS ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
                             u"seconds, so we're not logging it."
                             % (session['rating_key'], str(real_play_time), import_ignore_interval))

        # Per-user opt-out overrides everything except imports.
        if not user_details['keep_history'] and not is_import:
            logging_enabled = False
            logger.debug(u"Plex:CS ActivityProcessor :: History logging for user '%s' is disabled."
                         % session['user'])

        if logging_enabled:
            # logger.debug(u"Plex:CS ActivityProcessor :: Attempting to write to session_history table...")
            query = 'INSERT INTO session_history (started, stopped, rating_key, parent_rating_key, ' \
                    'grandparent_rating_key, media_type, user_id, user, ip_address, paused_counter, player, ' \
                    'platform, machine_id, view_offset) VALUES ' \
                    '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'

            args = [session['started'], stopped, session['rating_key'], session['parent_rating_key'],
                    session['grandparent_rating_key'], session['media_type'], session['user_id'],
                    session['user'], session['ip_address'], session['paused_counter'], session['player'],
                    session['platform'], session['machine_id'], session['view_offset']]

            # logger.debug(u"Plex:CS ActivityProcessor :: Writing session_history transaction...")
            self.db.action(query=query, args=args)

            # Check if we should group the session, select the last two rows from the user
            query = 'SELECT id, rating_key, user_id, reference_id FROM session_history \
                     WHERE user_id = ? ORDER BY id DESC LIMIT 2 '

            args = [session['user_id']]

            result = self.db.select(query=query, args=args)

            # result[0] is the row just inserted; result[1] (if any) is the
            # user's previous play.
            new_session = {'id': result[0]['id'],
                           'rating_key': result[0]['rating_key'],
                           'user_id': result[0]['user_id'],
                           'reference_id': result[0]['reference_id']}

            if len(result) == 1:
                prev_session = None
            else:
                prev_session = {'id': result[1]['id'],
                                'rating_key': result[1]['rating_key'],
                                'user_id': result[1]['user_id'],
                                'reference_id': result[1]['reference_id']}

            query = 'UPDATE session_history SET reference_id = ? WHERE id = ? '
            # If rating_key is the same in the previous session, then set the reference_id to the previous row, else set the reference_id to the new id
            if (prev_session is not None) and (prev_session['rating_key'] == new_session['rating_key']):
                args = [prev_session['reference_id'], new_session['id']]
            else:
                args = [new_session['id'], new_session['id']]

            self.db.action(query=query, args=args)

            # logger.debug(u"Plex:CS ActivityProcessor :: Successfully written history item, last id for session_history is %s"
            #              % last_id)

            # Write the session_history_media_info table
            # NOTE(review): the two INSERTs below key on last_insert_rowid()
            # to reuse the session_history row id. This only holds if the
            # intervening UPDATE runs on the same connection (UPDATE does not
            # change last_insert_rowid) and 'id' is each table's INTEGER
            # PRIMARY KEY — confirm against the schema.
            # logger.debug(u"Plex:CS ActivityProcessor :: Attempting to write to session_history_media_info table...")
            query = 'INSERT INTO session_history_media_info (id, rating_key, video_decision, audio_decision, ' \
                    'duration, width, height, container, video_codec, audio_codec, bitrate, video_resolution, ' \
                    'video_framerate, aspect_ratio, audio_channels, transcode_protocol, transcode_container, ' \
                    'transcode_video_codec, transcode_audio_codec, transcode_audio_channels, transcode_width, ' \
                    'transcode_height) VALUES ' \
                    '(last_insert_rowid(), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'

            args = [session['rating_key'], session['video_decision'], session['audio_decision'],
                    session['duration'], session['width'], session['height'], session['container'],
                    session['video_codec'], session['audio_codec'], session['bitrate'],
                    session['video_resolution'],
                    session['video_framerate'], session['aspect_ratio'],
                    session['audio_channels'], session['transcode_protocol'],
                    session['transcode_container'], session['transcode_video_codec'],
                    session['transcode_audio_codec'], session['transcode_audio_channels'],
                    session['transcode_width'], session['transcode_height']]

            # logger.debug(u"Plex:CS ActivityProcessor :: Writing session_history_media_info transaction...")
            self.db.action(query=query, args=args)

            # Live sessions fetch fresh metadata from the server; imports
            # use the metadata supplied by the caller.
            if not is_import:
                logger.debug(u"Plex:CS ActivityProcessor :: Fetching metadata for item ratingKey %s"
                             % session['rating_key'])
                pms_connect = pmsconnect.PmsConnect()
                result = pms_connect.get_metadata_details(rating_key=str(session['rating_key']))
                metadata = result['metadata']
            else:
                metadata = import_metadata

            # Write the session_history_metadata table
            directors = ";".join(metadata['directors'])
            writers = ";".join(metadata['writers'])
            actors = ";".join(metadata['actors'])
            genres = ";".join(metadata['genres'])

            # Build media item title
            if session['media_type'] == 'episode' or session['media_type'] == 'track':
                full_title = '%s - %s' % (metadata['grandparent_title'], metadata['title'])
            elif session['media_type'] == 'movie':
                full_title = metadata['title']
            else:
                full_title = metadata['title']

            # logger.debug(u"Plex:CS ActivityProcessor :: Attempting to write to session_history_metadata table...")
            query = 'INSERT INTO session_history_metadata (id, rating_key, parent_rating_key, ' \
                    'grandparent_rating_key, title, parent_title, grandparent_title, full_title, media_index, ' \
                    'parent_media_index, thumb, parent_thumb, grandparent_thumb, art, media_type, year, ' \
                    'originally_available_at, added_at, updated_at, last_viewed_at, content_rating, summary, ' \
                    'tagline, rating, duration, guid, directors, writers, actors, genres, studio) VALUES ' \
                    '(last_insert_rowid(), ' \
                    '?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'

            args = [session['rating_key'], session['parent_rating_key'],
                    session['grandparent_rating_key'], session['title'],
                    session['parent_title'], session['grandparent_title'], full_title,
                    metadata['index'], metadata['parent_index'], metadata['thumb'],
                    metadata['parent_thumb'], metadata['grandparent_thumb'], metadata['art'],
                    session['media_type'], metadata['year'], metadata['originally_available_at'],
                    metadata['added_at'], metadata['updated_at'], metadata['last_viewed_at'],
                    metadata['content_rating'], metadata['summary'], metadata['tagline'],
                    metadata['rating'], metadata['duration'], metadata['guid'],
                    directors, writers, actors, genres, metadata['studio']]

            # logger.debug(u"Plex:CS ActivityProcessor :: Writing session_history_metadata transaction...")
            self.db.action(query=query, args=args)
def notify(stream_data=None, notify_action=None):
    """Send a notification for a session event to all enabled agents.

    stream_data   -- session dict (expects at least "user", "media_type",
                     "view_offset", "duration" keys)
    notify_action -- "play", "stop", "pause", "resume", "buffer" or "watched"

    Honors the per-user "do_notify" flag and the per-media-type enable
    switches; every sent notification is recorded via set_notify_state().
    """
    from plexcs import users

    if not (stream_data and notify_action):
        logger.debug(u"Plex:CS Notifier :: Notify called but incomplete data received.")
        return

    # Check if notifications enabled for user
    user_data = users.Users()
    user_details = user_data.get_user_friendly_name(user=stream_data["user"])
    if not user_details["do_notify"]:
        return

    def _dispatch(agent):
        # Build and send the notification, then record the state in the db.
        notify_strings = build_notify_text(session=stream_data, state=notify_action)
        notifiers.send_notification(
            config_id=agent["id"], subject=notify_strings[0], body=notify_strings[1]
        )
        set_notify_state(session=stream_data, state=notify_action, agent_info=agent)

    media_type = stream_data["media_type"]

    if (media_type == "movie" and plexcs.CONFIG.MOVIE_NOTIFY_ENABLE) or (
        media_type == "episode" and plexcs.CONFIG.TV_NOTIFY_ENABLE
    ):
        progress_percent = helpers.get_percent(stream_data["view_offset"], stream_data["duration"])

        for agent in notifiers.available_notification_agents():
            if agent["on_play"] and notify_action == "play":
                _dispatch(agent)
            elif (
                agent["on_stop"]
                and notify_action == "stop"
                and (plexcs.CONFIG.NOTIFY_CONSECUTIVE
                     or progress_percent < plexcs.CONFIG.NOTIFY_WATCHED_PERCENT)
            ):
                _dispatch(agent)
            elif (
                agent["on_pause"]
                and notify_action == "pause"
                and (plexcs.CONFIG.NOTIFY_CONSECUTIVE or progress_percent < 99)
            ):
                _dispatch(agent)
            elif (
                agent["on_resume"]
                and notify_action == "resume"
                and (plexcs.CONFIG.NOTIFY_CONSECUTIVE or progress_percent < 99)
            ):
                _dispatch(agent)
            elif agent["on_buffer"] and notify_action == "buffer":
                _dispatch(agent)
            elif agent["on_watched"] and notify_action == "watched":
                # Get the current states for notifications from our db
                notify_states = get_notify_state(session=stream_data)
                # If there is nothing in the notify_log for our agent id but
                # it is enabled we should notify
                if not any(d["agent_id"] == agent["id"] for d in notify_states):
                    _dispatch(agent)
                else:
                    # Check in our notify log if the notification has already been sent
                    for notify_state in notify_states:
                        if not notify_state["on_watched"] and (notify_state["agent_id"] == agent["id"]):
                            _dispatch(agent)

    elif media_type == "track" and plexcs.CONFIG.MUSIC_NOTIFY_ENABLE:
        # Music sessions notify on every supported action without the
        # progress-percent gating used for video.
        for agent in notifiers.available_notification_agents():
            if agent["on_play"] and notify_action == "play":
                _dispatch(agent)
            elif agent["on_stop"] and notify_action == "stop":
                _dispatch(agent)
            elif agent["on_pause"] and notify_action == "pause":
                _dispatch(agent)
            elif agent["on_resume"] and notify_action == "resume":
                _dispatch(agent)
            elif agent["on_buffer"] and notify_action == "buffer":
                _dispatch(agent)

    elif media_type == "clip":
        # Clips (trailers, extras) are deliberately ignored.
        pass
    else:
        # logger.debug(u"Plex:CS Notifier :: Notify called with unsupported media type.")
        pass
def write_session_history(self, session=None, import_metadata=None, is_import=False, import_ignore_interval=0):
    """Write a completed stream session to the history tables.

    Inserts rows into session_history, session_history_media_info and
    session_history_metadata, and groups consecutive plays of the same
    item by the same user through the reference_id column.

    session                -- session dict to log
    import_metadata        -- metadata dict supplied by the importer (used
                              only when is_import is True)
    is_import              -- True when called from the database importer
    import_ignore_interval -- minimum play duration (secs) for imported items
    """
    from plexcs import users

    user_data = users.Users()
    user_details = user_data.get_user_friendly_name(user=session['user'])

    if session:
        logging_enabled = False

        # Stop time: imports may provide their own 'stopped' value;
        # otherwise use the current time.
        if is_import:
            if str(session['stopped']).isdigit():
                stopped = int(session['stopped'])
            else:
                stopped = int(time.time())
        else:
            stopped = int(time.time())

        # Logging requires the media type's config switch to be on and a
        # numeric rating key.
        if plexcs.CONFIG.MOVIE_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                session['media_type'] == 'movie':
            logging_enabled = True
        elif plexcs.CONFIG.TV_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                session['media_type'] == 'episode':
            logging_enabled = True
        elif plexcs.CONFIG.MUSIC_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                session['media_type'] == 'track':
            logging_enabled = True
        else:
            logger.debug(
                u"Plex:CS ActivityProcessor :: ratingKey %s not logged. Does not meet logging criteria. "
                u"Media type is '%s'" % (session['rating_key'], session['media_type']))

        # Effective watch time excludes any time spent paused (if tracked).
        if str(session['paused_counter']).isdigit():
            real_play_time = stopped - session['started'] - int(session['paused_counter'])
        else:
            real_play_time = stopped - session['started']

        # Drop very short video plays when the ignore-interval option is set.
        if plexcs.CONFIG.LOGGING_IGNORE_INTERVAL and not is_import:
            if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                    (real_play_time < int(plexcs.CONFIG.LOGGING_IGNORE_INTERVAL)):
                logging_enabled = False
                logger.debug(
                    u"Plex:CS ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
                    u"seconds, so we're not logging it."
                    % (session['rating_key'], str(real_play_time),
                       plexcs.CONFIG.LOGGING_IGNORE_INTERVAL))
        # Heuristic: a track played < 15s (with duration >= 30s) was skipped.
        if session['media_type'] == 'track' and not is_import:
            if real_play_time < 15 and session['duration'] >= 30:
                logging_enabled = False
                logger.debug(
                    u"Plex:CS ActivityProcessor :: Play duration for ratingKey %s is %s secs, "
                    u"looks like it was skipped so we're not logging it"
                    % (session['rating_key'], str(real_play_time)))
        elif is_import and import_ignore_interval:
            # Same short-play filter using the importer's threshold instead.
            if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                    (real_play_time < int(import_ignore_interval)):
                logging_enabled = False
                logger.debug(
                    u"Plex:CS ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
                    u"seconds, so we're not logging it."
                    % (session['rating_key'], str(real_play_time), import_ignore_interval))

        # Per-user history opt-out applies to live sessions only.
        if not user_details['keep_history'] and not is_import:
            logging_enabled = False
            logger.debug(
                u"Plex:CS ActivityProcessor :: History logging for user '%s' is disabled."
                % session['user'])

        if logging_enabled:
            # logger.debug(u"Plex:CS ActivityProcessor :: Attempting to write to session_history table...")
            query = 'INSERT INTO session_history (started, stopped, rating_key, parent_rating_key, ' \
                    'grandparent_rating_key, media_type, user_id, user, ip_address, paused_counter, player, ' \
                    'platform, machine_id, view_offset) VALUES ' \
                    '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
            args = [
                session['started'], stopped, session['rating_key'],
                session['parent_rating_key'], session['grandparent_rating_key'],
                session['media_type'], session['user_id'], session['user'],
                session['ip_address'], session['paused_counter'],
                session['player'], session['platform'], session['machine_id'],
                session['view_offset']
            ]

            # logger.debug(u"Plex:CS ActivityProcessor :: Writing session_history transaction...")
            self.db.action(query=query, args=args)

            # Check if we should group the session, select the last two rows from the user
            query = 'SELECT id, rating_key, user_id, reference_id FROM session_history \
                     WHERE user_id = ? ORDER BY id DESC LIMIT 2 '
            args = [session['user_id']]
            result = self.db.select(query=query, args=args)

            # result[0] is the row just inserted; result[1] (if present) is
            # the user's previous play.
            new_session = {
                'id': result[0]['id'],
                'rating_key': result[0]['rating_key'],
                'user_id': result[0]['user_id'],
                'reference_id': result[0]['reference_id']
            }

            if len(result) == 1:
                prev_session = None
            else:
                prev_session = {
                    'id': result[1]['id'],
                    'rating_key': result[1]['rating_key'],
                    'user_id': result[1]['user_id'],
                    'reference_id': result[1]['reference_id']
                }

            query = 'UPDATE session_history SET reference_id = ? WHERE id = ? '
            # If rating_key is the same in the previous session, then set the reference_id to the previous row, else set the reference_id to the new id
            if (prev_session is not None) and (prev_session['rating_key'] == new_session['rating_key']):
                args = [prev_session['reference_id'], new_session['id']]
            else:
                args = [new_session['id'], new_session['id']]

            self.db.action(query=query, args=args)

            # logger.debug(u"Plex:CS ActivityProcessor :: Successfully written history item, last id for session_history is %s"
            #              % last_id)

            # Write the session_history_media_info table
            # NOTE(review): both INSERTs below use last_insert_rowid() to
            # carry the session_history row id. That holds only if everything
            # runs on one connection (UPDATE does not reset last_insert_rowid)
            # and 'id' is each table's INTEGER PRIMARY KEY — confirm schema.
            # logger.debug(u"Plex:CS ActivityProcessor :: Attempting to write to session_history_media_info table...")
            query = 'INSERT INTO session_history_media_info (id, rating_key, video_decision, audio_decision, ' \
                    'duration, width, height, container, video_codec, audio_codec, bitrate, video_resolution, ' \
                    'video_framerate, aspect_ratio, audio_channels, transcode_protocol, transcode_container, ' \
                    'transcode_video_codec, transcode_audio_codec, transcode_audio_channels, transcode_width, ' \
                    'transcode_height) VALUES ' \
                    '(last_insert_rowid(), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
            args = [
                session['rating_key'], session['video_decision'],
                session['audio_decision'], session['duration'],
                session['width'], session['height'], session['container'],
                session['video_codec'], session['audio_codec'],
                session['bitrate'], session['video_resolution'],
                session['video_framerate'], session['aspect_ratio'],
                session['audio_channels'], session['transcode_protocol'],
                session['transcode_container'], session['transcode_video_codec'],
                session['transcode_audio_codec'],
                session['transcode_audio_channels'],
                session['transcode_width'], session['transcode_height']
            ]

            # logger.debug(u"Plex:CS ActivityProcessor :: Writing session_history_media_info transaction...")
            self.db.action(query=query, args=args)

            # Live sessions fetch fresh metadata from the server; imports use
            # the caller-supplied metadata.
            if not is_import:
                logger.debug(
                    u"Plex:CS ActivityProcessor :: Fetching metadata for item ratingKey %s"
                    % session['rating_key'])
                pms_connect = pmsconnect.PmsConnect()
                result = pms_connect.get_metadata_details(
                    rating_key=str(session['rating_key']))
                metadata = result['metadata']
            else:
                metadata = import_metadata

            # Write the session_history_metadata table
            directors = ";".join(metadata['directors'])
            writers = ";".join(metadata['writers'])
            actors = ";".join(metadata['actors'])
            genres = ";".join(metadata['genres'])

            # Build media item title
            if session['media_type'] == 'episode' or session[
                    'media_type'] == 'track':
                full_title = '%s - %s' % (metadata['grandparent_title'],
                                          metadata['title'])
            elif session['media_type'] == 'movie':
                full_title = metadata['title']
            else:
                full_title = metadata['title']

            # logger.debug(u"Plex:CS ActivityProcessor :: Attempting to write to session_history_metadata table...")
            query = 'INSERT INTO session_history_metadata (id, rating_key, parent_rating_key, ' \
                    'grandparent_rating_key, title, parent_title, grandparent_title, full_title, media_index, ' \
                    'parent_media_index, thumb, parent_thumb, grandparent_thumb, art, media_type, year, ' \
                    'originally_available_at, added_at, updated_at, last_viewed_at, content_rating, summary, ' \
                    'tagline, rating, duration, guid, directors, writers, actors, genres, studio) VALUES ' \
                    '(last_insert_rowid(), ' \
                    '?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
            args = [
                session['rating_key'], session['parent_rating_key'],
                session['grandparent_rating_key'], session['title'],
                session['parent_title'], session['grandparent_title'],
                full_title, metadata['index'], metadata['parent_index'],
                metadata['thumb'], metadata['parent_thumb'],
                metadata['grandparent_thumb'], metadata['art'],
                session['media_type'], metadata['year'],
                metadata['originally_available_at'], metadata['added_at'],
                metadata['updated_at'], metadata['last_viewed_at'],
                metadata['content_rating'], metadata['summary'],
                metadata['tagline'], metadata['rating'], metadata['duration'],
                metadata['guid'], directors, writers, actors, genres,
                metadata['studio']
            ]

            # logger.debug(u"Plex:CS ActivityProcessor :: Writing session_history_metadata transaction...")
            self.db.action(query=query, args=args)
def find_session_ip(self, rating_key=None, machine_id=None):
    """Locate the client IP address for a stream from the Plex log tail.

    Pass 1 matches lines carrying both the session machine id and the
    rating key (typical for transcode sessions). If that finds nothing,
    sleep 5 seconds so the log can catch up, then pass 2 matches
    ``GET /:/timeline`` requests for the rating key alone.

    Returns the IPv4 address string, or None when no address is found.

    NOTE(review): the fallback pass can return the wrong IP address if more
    than one user starts watching the same media item around the same time.
    """
    # Substrings identifying a log line for this session.
    rating_key_line = 'ratingKey=' + rating_key
    rating_key_line_2 = 'metadata%2F' + rating_key
    machine_id_line = 'session=' + machine_id

    # Hoisted out of the loops; currently only checking for ipv4 addresses.
    ipv4_pattern = re.compile(r'[0-9]+(?:\.[0-9]+){3}')

    def _first_usable_ip(line):
        # The logged IP will always be the first match and we don't want
        # localhost entries.
        found = ipv4_pattern.findall(line)
        if found and found[0] != '127.0.0.1':
            return found[0]
        return None

    logger.debug(u"Plex:CS ActivityProcessor :: Requesting log lines...")
    log_lines = log_reader.get_log_tail(window=5000, parsed=False)

    for line in reversed(log_lines):
        # We're good if we find a line with both machine id and rating key.
        # This is usually when there is a transcode session.
        if machine_id_line in line and (rating_key_line in line or rating_key_line_2 in line):
            candidate = _first_usable_ip(line)
            if candidate:
                logger.debug(
                    u"Plex:CS ActivityProcessor :: Matched IP address (%s) for stream ratingKey %s "
                    u"and machineIdentifier %s."
                    % (candidate, rating_key, machine_id))
                return candidate

    logger.debug(
        u"Plex:CS ActivityProcessor :: Unable to find IP address on first pass. "
        u"Attempting fallback check in 5 seconds...")

    # Wait for the log to catch up and read in new lines
    time.sleep(5)

    logger.debug(u"Plex:CS ActivityProcessor :: Requesting log lines...")
    log_lines = log_reader.get_log_tail(window=5000, parsed=False)

    for line in reversed(log_lines):
        if 'GET /:/timeline' in line and (rating_key_line in line or rating_key_line_2 in line):
            candidate = _first_usable_ip(line)
            if candidate:
                logger.debug(
                    u"Plex:CS ActivityProcessor :: Matched IP address (%s) for stream ratingKey %s."
                    % (candidate, rating_key))
                return candidate

    logger.debug(
        u"Plex:CS ActivityProcessor :: Unable to find IP address on fallback search. Not logging IP address."
    )
    return None
def _getLogs(self, sort='', search='', order='desc', regex='', **kwargs):
    """
    Returns the log

    Supports optional slicing (kwargs 'start'/'end'), sorting by a field,
    substring search, case-insensitive regex filtering, and ordering.

    Returns [{"response": {"msg": "Hey", "result": "success"}, "data": [{"time": "29-sept.2015", "thread: "MainThread", "msg: "Called x from y", "loglevel": "DEBUG" } ] } ]
    """
    logfile = os.path.join(plexcs.CONFIG.LOG_DIR, 'plexcs.log')
    templog = []
    start = int(kwargs.get('start', 0))
    end = int(kwargs.get('end', 0))

    if regex:
        logger.debug('Filtering log using regex %s' % regex)
        # Bug fix: previously a literal 'u' was prepended to the pattern
        # ('u' + regex), silently corrupting every regex filter.
        reg = re.compile(regex, flags=re.I)

    # Use a context manager so the log file handle is always closed.
    with open(logfile, 'r') as f:
        log_file_lines = f.readlines()

    for line in log_file_lines:
        temp_loglevel_and_time = None
        try:
            temp_loglevel_and_time = line.split('- ')
            loglvl = temp_loglevel_and_time[1].split(' :')[0].strip()
            tl_tread = line.split(' :: ')
            if loglvl is None:
                msg = line.replace('\n', '')
            else:
                msg = line.split(' : ')[1].replace('\n', '')
                thread = tl_tread[1].split(' : ')[0]
        except IndexError:
            # We assume this is a traceback: append it to the previous
            # entry's message. Guard against a traceback-shaped first line.
            if templog:
                templog[-1]['msg'] += line.replace('\n', '')
            continue

        if len(line) > 1 and temp_loglevel_and_time is not None and loglvl in line:
            d = {
                'time': temp_loglevel_and_time[0],
                'loglevel': loglvl,
                'msg': msg.replace('\n', ''),
                'thread': thread
            }
            templog.append(d)

    if end > 0:
        logger.debug('Slicing the log from %s to %s' % (start, end))
        templog = templog[start:end]

    if sort:
        logger.debug('Sorting log based on %s' % sort)
        templog = sorted(templog, key=lambda k: k[sort])

    if search:
        logger.debug('Searching log values for %s' % search)
        tt = [d for d in templog for k, v in d.items()
              if search.lower() in v.lower()]
        # Only replace the working set if the search matched something.
        if len(tt):
            templog = tt

    if regex:
        tt = []
        for entry in templog:
            stringdict = ' '.join('{}{}'.format(k, v) for k, v in entry.items())
            if reg.search(stringdict):
                tt.append(entry)
        if len(tt):
            templog = tt

    if order == 'desc':
        templog = templog[::-1]

    self.data = templog
    return templog
def _getLogs(self, sort='', search='', order='desc', regex='', **kwargs):
    """
    Returns the log

    Optional kwargs 'start'/'end' slice the parsed entries; 'sort' orders
    by a field; 'search' does a substring filter; 'regex' filters entries
    case-insensitively; order='desc' reverses the result.

    Returns [{"response": {"msg": "Hey", "result": "success"}, "data": [{"time": "29-sept.2015", "thread: "MainThread", "msg: "Called x from y", "loglevel": "DEBUG" } ] } ]
    """
    logfile = os.path.join(plexcs.CONFIG.LOG_DIR, 'plexcs.log')
    templog = []
    start = int(kwargs.get('start', 0))
    end = int(kwargs.get('end', 0))

    if regex:
        logger.debug('Filtering log using regex %s' % regex)
        # Bug fix: the pattern used to be compiled as 'u' + regex, which
        # prepended a literal 'u' and broke every user-supplied regex.
        reg = re.compile(regex, flags=re.I)

    # Context manager ensures the file handle is closed (was leaked before).
    with open(logfile, 'r') as f:
        raw_lines = f.readlines()

    for line in raw_lines:
        temp_loglevel_and_time = None
        try:
            temp_loglevel_and_time = line.split('- ')
            loglvl = temp_loglevel_and_time[1].split(' :')[0].strip()
            tl_tread = line.split(' :: ')
            if loglvl is None:
                msg = line.replace('\n', '')
            else:
                msg = line.split(' : ')[1].replace('\n', '')
                thread = tl_tread[1].split(' : ')[0]
        except IndexError:
            # We assume this is a traceback; fold it into the previous
            # entry's message. Skip if there is no previous entry yet.
            if templog:
                templog[-1]['msg'] += line.replace('\n', '')
            continue

        if len(line) > 1 and temp_loglevel_and_time is not None and loglvl in line:
            templog.append({
                'time': temp_loglevel_and_time[0],
                'loglevel': loglvl,
                'msg': msg.replace('\n', ''),
                'thread': thread
            })

    if end > 0:
        logger.debug('Slicing the log from %s to %s' % (start, end))
        templog = templog[start:end]

    if sort:
        logger.debug('Sorting log based on %s' % sort)
        templog = sorted(templog, key=lambda k: k[sort])

    if search:
        logger.debug('Searching log values for %s' % search)
        tt = [d for d in templog for k, v in d.items() if search.lower() in v.lower()]
        # Keep the unfiltered set when nothing matched (original behavior).
        if len(tt):
            templog = tt

    if regex:
        tt = []
        for entry in templog:
            stringdict = ' '.join('{}{}'.format(k, v) for k, v in entry.items())
            if reg.search(stringdict):
                tt.append(entry)
        if len(tt):
            templog = tt

    if order == 'desc':
        templog = templog[::-1]

    self.data = templog
    return templog