def uploadToImgur(imgPath, imgTitle=''):
    """Upload the image file at imgPath to Imgur and return the image URL.

    imgTitle, when given, is used as both the Imgur title and filename.
    Returns an empty string on any failure.
    """
    from plexpy import logger

    client_id = '743b1a443ccd2b0'
    img_url = ''

    try:
        with open(imgPath, 'rb') as imgFile:
            img = imgFile.read()
    except IOError as e:
        logger.error(u"PlexPy Helpers :: Unable to read image file for Imgur: %s" % e)
        return img_url

    headers = {'Authorization': 'Client-ID %s' % client_id}
    data = {'type': 'base64', 'image': base64.b64encode(img)}
    if imgTitle:
        data['title'] = imgTitle
        data['name'] = imgTitle + '.jpg'

    request = urllib2.Request('https://api.imgur.com/3/image', headers=headers,
                              data=urllib.urlencode(data))
    response = urllib2.urlopen(request)
    response = json.loads(response.read())

    if response.get('status') == 200:
        logger.debug(u"PlexPy Helpers :: Image uploaded to Imgur.")
        img_url = response.get('data').get('link', '')
    elif response.get('status') >= 400 and response.get('status') < 500:
        # BUG FIX: at this point 'response' is the parsed JSON dict, not an
        # HTTP response object, so it has no .reason attribute (the old code
        # raised AttributeError here). Report the Imgur API error field instead.
        # NOTE(review): urllib2.urlopen raises HTTPError on 4xx statuses, so
        # this branch may only be reachable via unusual server replies — confirm.
        err = response.get('data', {}).get('error', 'unknown error')
        logger.warn(u"PlexPy Helpers :: Unable to upload image to Imgur: %s" % err)
    else:
        logger.warn(u"PlexPy Helpers :: Unable to upload image to Imgur.")

    return img_url
def notify(self, subject=None, message=None):
    """Push a notification through NotifyMyAndroid (NMA)."""
    title = "PlexPy"
    api = plexpy.CONFIG.NMA_APIKEY
    nma_priority = plexpy.CONFIG.NMA_PRIORITY

    logger.debug(u"NMA title: " + title)
    logger.debug(u"NMA API: " + api)
    logger.debug(u"NMA Priority: " + str(nma_priority))

    event = subject
    logger.debug(u"NMA event: " + event)
    logger.debug(u"NMA message: " + message)

    p = pynma.PyNMA()
    keys = api.split(",")
    p.addkey(keys)

    # More than one API key means we must push in batch mode.
    batch = len(keys) > 1

    response = p.push(title, event, message, priority=nma_priority, batch_mode=batch)

    if response[api][u"code"] == u"200":
        return True

    logger.error(u"Could not send notification to NotifyMyAndroid")
    return False
def notify_timeline(timeline_data=None, notify_action=None):
    """Fire 'created' notifications for a timeline item, or server up/down
    notifications when no timeline data is supplied."""
    if timeline_data and notify_action:
        media_type = timeline_data['media_type']

        # Honour the per-media-type notification toggles from the config.
        notify_enabled = (
            (media_type == 'movie' and plexpy.CONFIG.MOVIE_NOTIFY_ENABLE) or
            (media_type in ('show', 'episode') and plexpy.CONFIG.TV_NOTIFY_ENABLE) or
            (media_type in ('artist', 'track') and plexpy.CONFIG.MUSIC_NOTIFY_ENABLE))

        if notify_enabled:
            for agent in notifiers.available_notification_agents():
                if agent['on_created'] and notify_action == 'created':
                    # Build and send notification
                    notify_strings = build_notify_text(timeline=timeline_data, state=notify_action)
                    notifiers.send_notification(config_id=agent['id'],
                                                subject=notify_strings[0],
                                                body=notify_strings[1])
                    # Set the notification state in the db
                    set_notify_state(session=timeline_data, state=notify_action, agent_info=agent)

    elif not timeline_data and notify_action:
        for agent in notifiers.available_notification_agents():
            if agent['on_extdown'] and notify_action == 'extdown':
                # Build and send notification
                notify_strings = build_server_notify_text(state=notify_action)
                notifiers.send_notification(config_id=agent['id'],
                                            subject=notify_strings[0],
                                            body=notify_strings[1])
            if agent['on_intdown'] and notify_action == 'intdown':
                # Build and send notification
                notify_strings = build_server_notify_text(state=notify_action)
                notifiers.send_notification(config_id=agent['id'],
                                            subject=notify_strings[0],
                                            body=notify_strings[1])
    else:
        logger.debug(u"PlexPy Notifier :: Notify timeline called but incomplete data received.")
def notify_timeline(timeline_data=None, notify_action=None):
    """Fire 'created' notifications for a timeline item, or server state
    (ext/int up/down) notifications when no timeline data is supplied."""
    if timeline_data and notify_action:
        for agent in notifiers.available_notification_agents():
            if agent["on_created"] and notify_action == "created":
                # Build and send notification
                notify_strings = build_notify_text(timeline=timeline_data, state=notify_action)
                notifiers.send_notification(config_id=agent["id"],
                                            subject=notify_strings[0],
                                            body=notify_strings[1],
                                            notify_action=notify_action,
                                            script_args=notify_strings[2])
                # Set the notification state in the db
                set_notify_state(session=timeline_data, state=notify_action, agent_info=agent)

    elif not timeline_data and notify_action:
        # Server state events: each agent flag pairs with its matching action.
        server_events = (("on_extdown", "extdown"),
                         ("on_intdown", "intdown"),
                         ("on_extup", "extup"),
                         ("on_intup", "intup"))

        for agent in notifiers.available_notification_agents():
            for flag, action in server_events:
                if agent[flag] and notify_action == action:
                    # Build and send notification
                    notify_strings = build_server_notify_text(state=notify_action)
                    notifiers.send_notification(config_id=agent["id"],
                                                subject=notify_strings[0],
                                                body=notify_strings[1],
                                                notify_action=notify_action,
                                                script_args=notify_strings[2])
    else:
        logger.debug(u"PlexPy NotificationHandler :: Notify timeline called but incomplete data received.")
def check_recently_added():
    """Poll the PMS for recently added library items and fire 'created'
    notifications for anything added within the last monitoring interval
    (offset by the configured metadata-processing delay).
    """
    with monitor_lock:
        # add delay to allow for metadata processing
        delay = plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY
        time_threshold = int(time.time()) - delay
        time_interval = plexpy.CONFIG.MONITORING_INTERVAL

        pms_connect = pmsconnect.PmsConnect()
        recently_added_list = pms_connect.get_recently_added_details(count='10')

        if recently_added_list:
            recently_added = recently_added_list['recently_added']

            for item in recently_added:
                metadata = []

                if 0 < time_threshold - int(item['added_at']) <= time_interval:
                    if item['media_type'] == 'movie':
                        metadata_list = pms_connect.get_metadata_details(item['rating_key'])
                        if metadata_list:
                            metadata = [metadata_list['metadata']]
                        else:
                            logger.error(u"PlexPy Monitor :: Unable to retrieve metadata for rating_key %s" \
                                         % str(item['rating_key']))
                    else:
                        metadata_list = pms_connect.get_metadata_children_details(item['rating_key'])
                        if metadata_list:
                            metadata = metadata_list['metadata']
                        else:
                            logger.error(u"PlexPy Monitor :: Unable to retrieve children metadata for rating_key %s" \
                                         % str(item['rating_key']))

                if metadata:
                    if not plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_GRANDPARENT:
                        # BUG FIX: the inner loop previously rebound 'item',
                        # clobbering the outer loop variable; use a new name.
                        for child in metadata:
                            if 0 < time_threshold - int(child['added_at']) <= time_interval:
                                logger.debug(u"PlexPy Monitor :: Library item %s has been added to Plex." % str(child['rating_key']))

                                # Fire off notifications
                                threading.Thread(target=notification_handler.notify_timeline,
                                                 kwargs=dict(timeline_data=child, notify_action='created')).start()
                    else:
                        # Notify only once per grandparent (e.g. show/artist),
                        # using the newest child to decide whether to fire.
                        newest = max(metadata, key=lambda x: x['added_at'])

                        if 0 < time_threshold - int(newest['added_at']) <= time_interval:
                            if newest['media_type'] == 'episode' or newest['media_type'] == 'track':
                                metadata_list = pms_connect.get_metadata_details(newest['grandparent_rating_key'])
                                if metadata_list:
                                    newest = metadata_list['metadata']
                                else:
                                    logger.error(u"PlexPy Monitor :: Unable to retrieve grandparent metadata for grandparent_rating_key %s" \
                                                 % str(newest['rating_key']))

                            logger.debug(u"PlexPy Monitor :: Library item %s has been added to Plex." % str(newest['rating_key']))

                            # Fire off notifications
                            threading.Thread(target=notification_handler.notify_timeline,
                                             kwargs=dict(timeline_data=newest, notify_action='created')).start()
def toggleVerbose(self):
    """Flip verbose logging, re-initialize the logger, and return to the logs page."""
    verbose = not plexpy.VERBOSE
    plexpy.VERBOSE = verbose

    logger.initLogger(console=not plexpy.QUIET,
                      log_dir=plexpy.CONFIG.LOG_DIR,
                      verbose=verbose)
    logger.info("Verbose toggled, set to %s", verbose)
    logger.debug("If you read this message, debug logging is available")

    raise cherrypy.HTTPRedirect("logs")
def on_created(self):
    """Handle a new library item appearing on the timeline."""
    if not self.is_item():
        return

    logger.debug(u"PlexPy TimelineHandler :: Library item %s has been added to Plex." % str(self.get_rating_key()))

    # Fire off notifications
    notify_kwargs = dict(timeline_data=self.get_metadata(), notify_action='created')
    threading.Thread(target=notification_handler.notify_timeline, kwargs=notify_kwargs).start()
def on_stop(self, force_stop=False):
    """Finalize a stopped session: record final state, fire 'stop'
    notifications, write history, and drop the temp session row.

    force_stop=True skips the state/viewOffset update.
    """
    if not self.is_valid_session():
        return

    logger.debug(u"PlexPy ActivityHandler :: Session %s has stopped." % str(self.get_session_key()))

    # Set the session last_paused timestamp
    ap = activity_processor.ActivityProcessor()
    ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None)

    # Update the session state and viewOffset
    # Set force_stop to true to disable the state set
    if not force_stop:
        ap.set_session_state(session_key=self.get_session_key(),
                             state=self.timeline["state"],
                             view_offset=self.timeline["viewOffset"])

    # Retrieve the session data from our temp table
    db_session = ap.get_session_by_key(session_key=self.get_session_key())

    # Fire off notifications
    threading.Thread(target=notification_handler.notify,
                     kwargs=dict(stream_data=db_session, notify_action="stop")).start()

    # Write it to the history table
    monitor_proc = activity_processor.ActivityProcessor()
    monitor_proc.write_session_history(session=db_session)

    # Remove the session from our temp session table
    ap.delete_session(session_key=self.get_session_key())
def clear_history_tables():
    """Delete all session history rows and reclaim the file space with VACUUM."""
    logger.debug(u"PlexPy Database :: Deleting all session_history records... No turning back now bub.")

    monitor_db = MonitorDatabase()
    for statement in ('DELETE FROM session_history',
                      'DELETE FROM session_history_media_info',
                      'DELETE FROM session_history_metadata',
                      'VACUUM;'):
        monitor_db.action(statement)
def get_season_children(self, rating_key=''):
    """Return the episode list for the season identified by rating_key.

    Returns None when the XML cannot be parsed; otherwise a dict with
    'episode_count', 'title' (when episodes exist) and 'episode_list'.
    """
    episode_data = self.get_episode_list(rating_key, output_format='xml')
    episode_list = []

    xml_head = episode_data.getElementsByTagName('MediaContainer')
    if not xml_head:
        logger.warn("Error parsing XML for Plex session data.")
        return None

    for container in xml_head:
        # An explicit size of '0' means the season has no episodes.
        if container.getAttribute('size') == '0':
            logger.debug(u"No episode data.")
            return {'episode_count': '0',
                    'episode_list': []
                    }

        for video in container.getElementsByTagName('Video'):
            episode_list.append({'rating_key': helpers.get_xml_attr(video, 'ratingKey'),
                                 'index': helpers.get_xml_attr(video, 'index'),
                                 'title': helpers.get_xml_attr(video, 'title'),
                                 'thumb': helpers.get_xml_attr(video, 'thumb')
                                 })

    return {'episode_count': helpers.get_xml_attr(xml_head[0], 'size'),
            'title': helpers.get_xml_attr(xml_head[0], 'title2'),
            'episode_list': episode_list
            }
def process(opcode, data):
    """Handle one websocket frame: decode the JSON payload and hand
    'playing' timeline messages to the ActivityHandler.

    Returns False when the frame is skipped or malformed, True otherwise.
    """
    from plexpy import activity_handler

    if opcode not in opcode_data:
        return False

    try:
        info = json.loads(data)
    except Exception as ex:
        logger.warn(u'PlexPy WebSocket :: Error decoding message from websocket: %s' % ex)
        logger.debug(data)
        return False

    # Renamed from 'type' to avoid shadowing the builtin.
    msg_type = info.get('type')

    if not msg_type:
        return False

    if msg_type == 'playing':
        # logger.debug('%s.playing %s' % (name, info))
        time_line = info.get('_children')

        # BUG FIX: the old code wrapped dict.get in try/except (which never
        # raises) and then indexed time_line[0] unguarded, crashing when
        # '_children' was missing or empty.
        if not time_line:
            logger.debug(u"PlexPy WebSocket :: Session found but unable to get timeline data.")
            return False

        activity = activity_handler.ActivityHandler(timeline=time_line[0])
        activity.process()

    return True
def notify(self, artist=None, album=None, snatched=None):
    """Send a snatch or download-complete notification through NotifyMyAndroid."""
    title = 'PlexPy'
    api = plexpy.CONFIG.NMA_APIKEY
    nma_priority = plexpy.CONFIG.NMA_PRIORITY

    logger.debug(u"NMA title: " + title)
    logger.debug(u"NMA API: " + api)
    logger.debug(u"NMA Priority: " + str(nma_priority))

    # Snatched items get a different event/message pair than completed albums.
    if snatched:
        event = snatched + " snatched!"
        message = "PlexPy has snatched: " + snatched
    else:
        event = artist + ' - ' + album + ' complete!'
        message = "PlexPy has downloaded and postprocessed: " + artist + ' [' + album + ']'

    logger.debug(u"NMA event: " + event)
    logger.debug(u"NMA message: " + message)

    p = pynma.PyNMA()
    keys = api.split(',')
    p.addkey(keys)

    # Multiple API keys require batch mode.
    batch = len(keys) > 1

    response = p.push(title, event, message, priority=nma_priority, batch_mode=batch)

    if response[api][u'code'] == u'200':
        return True

    logger.error(u'Could not send notification to NotifyMyAndroid')
    return False
def on_stop(self, force_stop=False):
    """Handle a session stop event.

    Records the final session state, fires 'stop' notifications when any
    agent has them enabled, writes the session to the history table, and
    removes it from the temp session table.

    force_stop=True skips the state/viewOffset update (presumably used
    when the session vanished without a clean stop — TODO confirm caller).
    """
    if self.is_valid_session():
        logger.debug(u"PlexPy ActivityHandler :: Session %s has stopped." % str(self.get_session_key()))

        # Set the session last_paused timestamp
        ap = activity_processor.ActivityProcessor()
        ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None)

        # Update the session state and viewOffset
        # Set force_stop to true to disable the state set
        if not force_stop:
            ap.set_session_state(session_key=self.get_session_key(),
                                 state=self.timeline['state'],
                                 view_offset=self.timeline['viewOffset'],
                                 stopped=int(time.time()))

        # Retrieve the session data from our temp table
        db_session = ap.get_session_by_key(session_key=self.get_session_key())

        # Check if any notification agents have notifications enabled
        if any(d['on_stop'] for d in notifiers.available_notification_agents()):
            # Fire off notifications
            threading.Thread(target=notification_handler.notify,
                             kwargs=dict(stream_data=db_session, notify_action='stop')).start()

        # Write it to the history table
        monitor_proc = activity_processor.ActivityProcessor()
        monitor_proc.write_session_history(session=db_session)

        # Remove the session from our temp session table
        logger.debug(u"PlexPy ActivityHandler :: Removing session %s from session queue" % str(self.get_session_key()))
        ap.delete_session(session_key=self.get_session_key())
def create_https_certificates(ssl_cert, ssl_key):
    """Create a self-signed HTTPS certificate and store in it in 'ssl_cert'
    and 'ssl_key'. Method assumes pyOpenSSL is installed.

    This code is stolen from SickBeard (http://github.com/midgetspy/Sick-Beard).
    """
    from plexpy import logger
    from OpenSSL import crypto
    from certgen import createKeyPair, createSelfSignedCertificate, TYPE_RSA

    serial = int(time.time())

    # Build the subjectAltName entries from the configured domains and IPs.
    alt_name_parts = ['DNS:' + d.strip() for d in plexpy.CONFIG.HTTPS_DOMAIN.split(',') if d]
    alt_name_parts += ['IP:' + d.strip() for d in plexpy.CONFIG.HTTPS_IP.split(',') if d]
    altNames = ','.join(alt_name_parts)

    # Create the self-signed PlexPy certificate
    logger.debug(u"Generating self-signed SSL certificate.")
    pkey = createKeyPair(TYPE_RSA, 2048)
    # Validity window: now to ten years out.
    cert = createSelfSignedCertificate(("PlexPy", pkey), serial, (0, 60 * 60 * 24 * 365 * 10), altNames)

    # Save the key and certificate to disk
    try:
        with open(ssl_cert, "w") as fp:
            fp.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
        with open(ssl_key, "w") as fp:
            fp.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
    except IOError as e:
        logger.error("Error creating SSL key and certificate: %s", e)
        return False

    return True
def on_start(self):
    """Handle a freshly started session: fire 'play' notifications and
    write the session to the temp session table."""
    if not (self.is_valid_session() and self.get_live_session()):
        return

    logger.debug(u"PlexPy ActivityHandler :: Session %s has started." % str(self.get_session_key()))

    # Fire off notifications
    threading.Thread(target=notification_handler.notify,
                     kwargs=dict(stream_data=self.get_live_session(), notify_action='play')).start()

    # Write the new session to our temp session table
    self.update_db_session()
def check_db_tables():
    """Ensure the plexpy_users table exists, creating it if necessary."""
    query = 'CREATE TABLE IF NOT EXISTS plexpy_users (id INTEGER PRIMARY KEY AUTOINCREMENT, ' \
            'user_id INTEGER DEFAULT NULL UNIQUE, username TEXT NOT NULL UNIQUE, ' \
            'friendly_name TEXT, thumb TEXT, email TEXT, is_home_user INTEGER DEFAULT NULL, ' \
            'is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL)'
    try:
        myDB = db.DBConnection()
        myDB.action(query)
    except Exception as e:
        # Narrowed from a bare 'except:' and surfaced the actual error
        # instead of swallowing it silently.
        logger.debug(u"Unable to create users table: %s" % e)
def on_start(self):
    """Handle a freshly started session: notify enabled agents and persist
    the session to the temp session table."""
    if not (self.is_valid_session() and self.get_live_session()):
        return

    logger.debug(u"PlexPy ActivityHandler :: Session %s has started." % str(self.get_session_key()))

    # Check if any notification agents have notifications enabled
    if any(agent['on_play'] for agent in notifiers.available_notification_agents()):
        # Fire off notifications
        threading.Thread(target=notification_handler.notify,
                         kwargs=dict(stream_data=self.get_live_session(), notify_action='play')).start()

    # Write the new session to our temp session table
    self.update_db_session()
def set_user_profile_url(self, user=None, user_id=None, profile_url=None):
    """Store a custom avatar URL for user_id; a blank or missing URL clears it."""
    if user_id:
        # BUG FIX: the old code called profile_url.strip() unconditionally,
        # which raised AttributeError when profile_url was None (the default).
        if not profile_url or profile_url.strip() == '':
            profile_url = None

        monitor_db = database.MonitorDatabase()

        control_value_dict = {"user_id": user_id}
        new_value_dict = {"custom_avatar_url": profile_url}
        try:
            monitor_db.upsert('users', new_value_dict, control_value_dict)
        except Exception as e:
            logger.debug(u"Uncaught exception %s" % e)
def set_user_friendly_name(self, user=None, user_id=None, friendly_name=None):
    """Store a friendly display name for user_id; blank or missing clears it."""
    if user_id:
        # BUG FIX: the old code called friendly_name.strip() unconditionally,
        # which raised AttributeError when friendly_name was None (the default).
        if not friendly_name or friendly_name.strip() == '':
            friendly_name = None

        monitor_db = database.MonitorDatabase()

        control_value_dict = {"user_id": user_id}
        new_value_dict = {"friendly_name": friendly_name}
        try:
            monitor_db.upsert('users', new_value_dict, control_value_dict)
        except Exception as e:
            logger.debug(u"Uncaught exception %s" % e)
def make_request(
    self, uri=None, proto="HTTP", request_type="GET", headers=None, output_format="raw", return_type=False
):
    # Perform an HTTP(S) request against self.host:self.port, appending the
    # X-Plex-Token query parameter, and convert the response body to the
    # requested output_format ('dict', 'json', 'xml', or raw bytes).
    # Returns None on failure; returns (output, content_type) when
    # return_type is truthy, otherwise just the output.
    valid_request_types = ["GET", "POST", "PUT", "DELETE"]

    if request_type.upper() not in valid_request_types:
        logger.debug(u"HTTP request made but unsupported request type given.")
        return None

    if uri:
        if proto.upper() == "HTTPS":
            handler = HTTPSConnection(self.host, self.port, timeout=10)
        else:
            handler = HTTPConnection(self.host, self.port, timeout=10)

        # Choose the query-string separator for the auth token depending on
        # whether the uri already carries parameters.
        if uri.find("?") > 0:
            token_string = "&X-Plex-Token=" + self.token
        else:
            token_string = "?X-Plex-Token=" + self.token

        try:
            if headers:
                handler.request(request_type, uri + token_string, headers=headers)
            else:
                handler.request(request_type, uri + token_string)
            response = handler.getresponse()
            request_status = response.status
            request_content = response.read()
            content_type = response.getheader("content-type")
        except IOError, e:
            logger.warn(u"Failed to access uri endpoint %s with error %s" % (uri, e))
            return None

        if request_status == 200:
            # Convert the raw body into the caller's preferred representation.
            if output_format == "dict":
                output = helpers.convert_xml_to_dict(request_content)
            elif output_format == "json":
                output = helpers.convert_xml_to_json(request_content)
            elif output_format == "xml":
                output = helpers.parse_xml(request_content)
            else:
                output = request_content

            if return_type:
                return output, content_type

            return output
        else:
            logger.warn(u"Failed to access uri endpoint %s. Status code %r" % (uri, request_status))
            return None
def delete_datatable_media_info_cache(self, section_id=None):
    """Delete the cached media-info JSON files for a library section.

    Returns a status message string, or None when an unexpected error occurs.
    """
    import os

    try:
        # BUG FIX: guard against None before calling .isdigit() — the old
        # code raised AttributeError (silently absorbed by the except).
        if section_id and section_id.isdigit():
            cache_dir = plexpy.CONFIG.CACHE_DIR
            prefix = 'media_info-%s' % section_id

            # Plain loop for the side effect (was a throwaway list comprehension).
            for f in os.listdir(cache_dir):
                if f.startswith(prefix) and f.endswith('.json'):
                    os.remove(os.path.join(cache_dir, f))

            logger.debug(u"PlexPy Libraries :: Deleted media info table cache for section_id %s." % section_id)
            return 'Deleted media info table cache for library with id %s.' % section_id
        else:
            return 'Unable to delete media info table cache, section_id not valid.'
    except Exception as e:
        logger.warn(u"PlexPy Libraries :: Unable to delete media info table cache: %s." % e)
def get_ip(host):
    """Resolve host to an IPv4 address string.

    If host is already a valid dotted-quad address it is returned as-is;
    otherwise a DNS lookup is attempted. Returns '' on failure.
    """
    from plexpy import logger
    ip_address = ''

    try:
        # Fast path: host is already a valid IPv4 address.
        socket.inet_aton(host)
        ip_address = host
    except socket.error:
        try:
            ip_address = socket.gethostbyname(host)
            logger.debug(u"IP Checker :: Resolved %s to %s." % (host, ip_address))
        except socket.error:
            # Narrowed from a bare 'except:' — gethostbyname failures raise
            # socket.gaierror, a subclass of socket.error.
            logger.error(u"IP Checker :: Bad IP or hostname provided.")

    return ip_address
def get_plex_auth(self):
    """Sign in to plex.tv with the stored username/password.

    Returns the raw response body on success (HTTP 201), False otherwise.
    """
    base64string = base64.encodestring('%s:%s' % (self.username, self.password)).replace('\n', '')

    headers = {'Content-Type': 'application/xml; charset=utf-8',
               'Content-Length': '0',
               'X-Plex-Device-Name': 'PlexPy',
               'X-Plex-Product': 'PlexPy',
               'X-Plex-Version': 'v0.1 dev',
               'X-Plex-Client-Identifier': 'f0864d3531d75b19fa9204eaea456515e2502017',
               'Authorization': 'Basic %s' % base64string + ":"
               }

    http_handler = HTTPSConnection("plex.tv")
    http_handler.request("POST", '/users/sign_in.xml', headers=headers)

    response = http_handler.getresponse()
    request_status = response.status
    request_body = response.read()

    logger.debug(u"Plex.tv response status: %r" % request_status)
    logger.debug(u"Plex.tv response headers: %r" % response.getheaders())
    logger.debug(u"Plex.tv content type: %r" % response.getheader('content-type'))
    logger.debug(u"Plex.tv response body: %r" % request_body)

    if request_status == 201:
        logger.info(u"Plex.tv connection successful.")
        return request_body

    if 400 <= request_status < 500:
        logger.info(u"Plex.tv request failed: %s" % response.reason)
        return False

    logger.info(u"Plex.tv notification failed serverside.")
    return False
def checkGithub(): plexpy.COMMITS_BEHIND = 0 # Get the latest version available from github logger.info("Retrieving latest version information from GitHub") url = "https://api.github.com/repos/%s/plexpy/commits/%s" % (plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_BRANCH) version = request.request_json(url, timeout=20, validator=lambda x: type(x) == dict) if version is None: logger.warn("Could not get the latest version from GitHub. Are you running a local development version?") return plexpy.CURRENT_VERSION plexpy.LATEST_VERSION = version["sha"] logger.debug("Latest version is %s", plexpy.LATEST_VERSION) # See how many commits behind we are if not plexpy.CURRENT_VERSION: logger.info("You are running an unknown version of PlexPy. Run the updater to identify your version") return plexpy.LATEST_VERSION if plexpy.LATEST_VERSION == plexpy.CURRENT_VERSION: logger.info("PlexPy is up to date") return plexpy.LATEST_VERSION logger.info("Comparing currently installed version with latest GitHub version") url = "https://api.github.com/repos/%s/plexpy/compare/%s...%s" % ( plexpy.CONFIG.GIT_USER, plexpy.LATEST_VERSION, plexpy.CURRENT_VERSION, ) commits = request.request_json(url, timeout=20, whitelist_status_code=404, validator=lambda x: type(x) == dict) if commits is None: logger.warn("Could not get commits behind from GitHub.") return plexpy.LATEST_VERSION try: plexpy.COMMITS_BEHIND = int(commits["behind_by"]) logger.debug("In total, %d commits behind", plexpy.COMMITS_BEHIND) except KeyError: logger.info("Cannot compare versions. Are you running a local development version?") plexpy.COMMITS_BEHIND = 0 if plexpy.COMMITS_BEHIND > 0: logger.info("New version is available. You are %s commits behind" % plexpy.COMMITS_BEHIND) elif plexpy.COMMITS_BEHIND == 0: logger.info("PlexPy is up to date") return plexpy.LATEST_VERSION
def make_request(self, uri=None, proto='HTTP', request_type='GET', headers=None, output_format='raw',
                 return_type=False, no_token=False, timeout=20):
    # Perform an HTTP(S) request against self.host:self.port, optionally
    # appending the X-Plex-Token parameter (skipped when no_token is set).
    # Returns None on any failure.
    # NOTE(review): this view ends after the except handlers; the success
    # path that consumes request_status/request_content is not visible here.
    valid_request_types = ['GET', 'POST', 'PUT', 'DELETE']

    if request_type.upper() not in valid_request_types:
        logger.debug(u"HTTP request made but unsupported request type given.")
        return None

    if uri:
        if proto.upper() == 'HTTPS':
            # Optionally skip certificate verification (py2.7.9+ only, where
            # ssl._create_unverified_context exists).
            if not self.ssl_verify and hasattr(ssl, '_create_unverified_context'):
                context = ssl._create_unverified_context()
                handler = HTTPSConnection(host=self.host, port=self.port, timeout=timeout, context=context)
                logger.warn(u"PlexPy HTTP Handler :: Unverified HTTPS request made. This connection is not secure.")
            else:
                handler = HTTPSConnection(host=self.host, port=self.port, timeout=timeout)
        else:
            handler = HTTPConnection(host=self.host, port=self.port, timeout=timeout)

        token_string = ''
        if not no_token:
            # Choose the query-string separator depending on whether the uri
            # already carries parameters.
            if uri.find('?') > 0:
                token_string = '&X-Plex-Token=' + self.token
            else:
                token_string = '?X-Plex-Token=' + self.token

        try:
            if headers:
                handler.request(request_type, uri + token_string, headers=headers)
            else:
                handler.request(request_type, uri + token_string)
            response = handler.getresponse()
            request_status = response.status
            request_content = response.read()
            content_type = response.getheader('content-type')
        except IOError, e:
            logger.warn(u"Failed to access uri endpoint %s with error %s" % (uri, e))
            return None
        except Exception, e:
            logger.warn(u"Failed to access uri endpoint %s. Is your server maybe accepting SSL connections only? %s" % (uri, e))
            return None
def user(self, user=None, user_id=None):
    """Render the user page for the given user_id or username."""
    user_data = users.Users()
    # BUG FIX: user_details was previously left undefined (NameError at the
    # serve_template call) whenever the lookup below raised; default to None.
    user_details = None

    if user_id:
        try:
            user_details = user_data.get_user_details(user_id=user_id)
        except Exception:
            # Narrowed from a bare 'except:'.
            logger.warn("Unable to retrieve friendly name for user_id %s " % user_id)
    elif user:
        try:
            user_details = user_data.get_user_details(user=user)
        except Exception:
            logger.warn("Unable to retrieve friendly name for user %s " % user)
    else:
        logger.debug(u"User page requested but no parameters received.")
        raise cherrypy.HTTPRedirect("home")

    return serve_template(templatename="user.html", title="User", data=user_details)
def delete_duplicate_libraries(self):
    """Delete library_sections rows whose server_id differs from the current PMS."""
    from plexpy import plextv

    db_conn = database.MonitorDatabase()

    # Refresh the PMS_URL to make sure the server_id is updated
    plextv.get_real_pms_url()
    server_id = plexpy.CONFIG.PMS_IDENTIFIER

    try:
        logger.debug(u"PlexPy Libraries :: Deleting libraries where server_id does not match %s." % server_id)
        db_conn.action('DELETE FROM library_sections WHERE server_id != ?', [server_id])
        return 'Deleted duplicate libraries from the database.'
    except Exception as e:
        logger.warn(u"PlexPy Libraries :: Unable to delete duplicate libraries: %s." % e)
def send_notification(config_id, subject, body):
    """Route a notification to the agent selected by the numeric config_id."""
    if not config_id:
        logger.debug(u"PlexPy Notifier :: Notification requested but no agent id received.")
        return

    # Lazy dispatch table: each agent is only instantiated when its id matches.
    dispatch = {
        0: lambda: GROWL().notify(message=body, event=subject),
        1: lambda: PROWL().notify(message=body, event=subject),
        2: lambda: XBMC().notify(subject=subject, message=body),
        3: lambda: Plex().notify(subject=subject, message=body),
        4: lambda: NMA().notify(subject=subject, message=body),
        5: lambda: PUSHALOT().notify(message=body, event=subject),
        6: lambda: PUSHBULLET().notify(message=body, subject=subject),
        7: lambda: PUSHOVER().notify(message=body, event=subject),
        8: lambda: OSX_NOTIFY().notify(title=subject, text=body),
        9: lambda: BOXCAR().notify(title=subject, message=body),
        10: lambda: Email().notify(subject=subject, message=body),
        11: lambda: TwitterNotifier().notify(subject=subject, message=body),
    }

    action = dispatch.get(int(config_id))
    if action is not None:
        action()
    else:
        logger.debug(u"PlexPy Notifier :: Unknown agent id received.")
def on_pause(self):
    """Handle a paused session: record the pause time and fire notifications."""
    if not self.is_valid_session():
        return

    logger.debug(u"PlexPy ActivityHandler :: Session %s has been paused." % str(self.get_session_key()))

    # Set the session last_paused timestamp
    ap = activity_processor.ActivityProcessor()
    ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=int(time.time()))

    # Update the session state and viewOffset
    ap.set_session_state(session_key=self.get_session_key(),
                         state=self.timeline['state'],
                         view_offset=self.timeline['viewOffset'])

    # Retrieve the session data from our temp table
    db_session = ap.get_session_by_key(session_key=self.get_session_key())

    # Fire off notifications
    threading.Thread(target=notification_handler.notify,
                     kwargs=dict(stream_data=db_session, notify_action='pause')).start()
def make_request(self, uri=None, proto='HTTP', request_type='GET', headers=None, output_format='raw',
                 return_type=False, no_token=False):
    # Perform an HTTP(S) request against self.host:self.port, optionally
    # appending the X-Plex-Token parameter (skipped when no_token is set).
    # Returns None on any failure.
    # NOTE(review): this view ends after the except handlers; the success
    # path that consumes request_status/request_content is not visible here.
    valid_request_types = ['GET', 'POST', 'PUT', 'DELETE']

    if request_type.upper() not in valid_request_types:
        logger.debug(u"HTTP request made but unsupported request type given.")
        return None

    if uri:
        if proto.upper() == 'HTTPS':
            handler = HTTPSConnection(self.host, self.port, timeout=10)
        else:
            handler = HTTPConnection(self.host, self.port, timeout=10)

        token_string = ''
        if not no_token:
            # Choose the query-string separator depending on whether the uri
            # already carries parameters.
            if uri.find('?') > 0:
                token_string = '&X-Plex-Token=' + self.token
            else:
                token_string = '?X-Plex-Token=' + self.token

        try:
            if headers:
                handler.request(request_type, uri + token_string, headers=headers)
            else:
                handler.request(request_type, uri + token_string)
            response = handler.getresponse()
            request_status = response.status
            request_content = response.read()
            content_type = response.getheader('content-type')
        except IOError, e:
            logger.warn(u"Failed to access uri endpoint %s with error %s" % (uri, e))
            return None
        except Exception, e:
            # Captured exception is not logged here; the message only hints at
            # a likely SSL mismatch.
            logger.warn(u"Failed to access uri endpoint %s. Is your server maybe accepting SSL connections only?" % uri)
            return None
def import_from_plexwatch(database=None, table_name=None, import_ignore_interval=0):
    # Import a PlexWatch SQLite database into Tautulli's session history.
    # Each PlexWatch row carries a raw XML blob that is re-parsed to recover
    # the media/stream details PlexWatch did not store as columns.
    try:
        connection = sqlite3.connect(database, timeout=20)
        # Row factory gives name-based access to the selected columns.
        connection.row_factory = sqlite3.Row
    except sqlite3.OperationalError:
        logger.error(u"Tautulli Importer :: Invalid filename.")
        return None
    except ValueError:
        logger.error(u"Tautulli Importer :: Invalid filename.")
        return None

    try:
        # Quick schema probe: bail out early if this is not a PlexWatch table.
        connection.execute('SELECT ratingKey from %s' % table_name)
    except sqlite3.OperationalError:
        logger.error(
            u"Tautulli Importer :: Database specified does not contain the required fields."
        )
        return None

    logger.debug(u"Tautulli Importer :: PlexWatch data import in progress...")

    ap = activity_processor.ActivityProcessor()
    user_data = users.Users()

    # Get the latest friends list so we can pull user id's
    try:
        users.refresh_users()
    except:
        logger.debug(
            u"Tautulli Importer :: Unable to refresh the users list. Aborting import."
        )
        return None

    # Map PlexWatch columns onto Tautulli's history column names; fields
    # PlexWatch lacks are selected as null and later filled from the XML.
    query = 'SELECT time AS started, ' \
            'stopped, ' \
            'cast(ratingKey as text) AS rating_key, ' \
            'null AS user_id, ' \
            'user, ' \
            'ip_address, ' \
            'paused_counter, ' \
            'platform AS player, ' \
            'null AS platform, ' \
            'null as machine_id, ' \
            'parentRatingKey as parent_rating_key, ' \
            'grandparentRatingKey as grandparent_rating_key, ' \
            'null AS media_type, ' \
            'null AS view_offset, ' \
            'xml, ' \
            'rating as content_rating,' \
            'summary,' \
            'title AS full_title,' \
            '(case when orig_title_ep = "" then orig_title else ' \
            'orig_title_ep end) as title,' \
            '(case when orig_title_ep != "" then orig_title else ' \
            'null end) as grandparent_title ' \
            'FROM ' + table_name + ' ORDER BY id'

    result = connection.execute(query)

    for row in result:
        # Extract the xml from the Plexwatch db xml field.
        extracted_xml = extract_plexwatch_xml(row['xml'])

        # If we get back None from our xml extractor skip over the record and log error.
        if not extracted_xml:
            logger.error(
                u"Tautulli Importer :: Skipping record with ratingKey %s due to malformed xml."
                % str(row['rating_key']))
            continue

        # Skip line if we don't have a ratingKey to work with
        if not row['rating_key']:
            logger.error(
                u"Tautulli Importer :: Skipping record due to null ratingKey.")
            continue

        # If the user_id no longer exists in the friends list, pull it from the xml.
        if user_data.get_user_id(user=row['user']):
            user_id = user_data.get_user_id(user=row['user'])
        else:
            user_id = extracted_xml['user_id']

        # Stream-level facts for the session_history table.
        session_history = {
            'started': row['started'],
            'stopped': row['stopped'],
            'rating_key': row['rating_key'],
            'title': row['title'],
            'parent_title': extracted_xml['parent_title'],
            'grandparent_title': row['grandparent_title'],
            'original_title': extracted_xml['original_title'],
            'full_title': row['full_title'],
            'user_id': user_id,
            'user': row['user'],
            'ip_address': row['ip_address'] if row['ip_address'] else extracted_xml['ip_address'],
            'paused_counter': row['paused_counter'],
            'player': row['player'],
            'platform': extracted_xml['platform'],
            'machine_id': extracted_xml['machine_id'],
            'parent_rating_key': row['parent_rating_key'],
            'grandparent_rating_key': row['grandparent_rating_key'],
            'media_type': extracted_xml['media_type'],
            'view_offset': extracted_xml['view_offset'],
            'video_decision': extracted_xml['video_decision'],
            'audio_decision': extracted_xml['audio_decision'],
            'transcode_decision': extracted_xml['transcode_decision'],
            'duration': extracted_xml['duration'],
            'width': extracted_xml['width'],
            'height': extracted_xml['height'],
            'container': extracted_xml['container'],
            'video_codec': extracted_xml['video_codec'],
            'audio_codec': extracted_xml['audio_codec'],
            'bitrate': extracted_xml['bitrate'],
            'video_resolution': extracted_xml['video_resolution'],
            'video_framerate': extracted_xml['video_framerate'],
            'aspect_ratio': extracted_xml['aspect_ratio'],
            'audio_channels': extracted_xml['audio_channels'],
            'transcode_protocol': extracted_xml['transcode_protocol'],
            'transcode_container': extracted_xml['transcode_container'],
            'transcode_video_codec': extracted_xml['transcode_video_codec'],
            'transcode_audio_codec': extracted_xml['transcode_audio_codec'],
            'transcode_audio_channels': extracted_xml['transcode_audio_channels'],
            'transcode_width': extracted_xml['transcode_width'],
            'transcode_height': extracted_xml['transcode_height']
        }

        # Item-level facts for the session_history_metadata table.
        session_history_metadata = {
            'rating_key': helpers.latinToAscii(row['rating_key']),
            'parent_rating_key': row['parent_rating_key'],
            'grandparent_rating_key': row['grandparent_rating_key'],
            'title': row['title'],
            'parent_title': extracted_xml['parent_title'],
            'grandparent_title': row['grandparent_title'],
            'original_title': extracted_xml['original_title'],
            'media_index': extracted_xml['media_index'],
            'parent_media_index': extracted_xml['parent_media_index'],
            'thumb': extracted_xml['thumb'],
            'parent_thumb': extracted_xml['parent_thumb'],
            'grandparent_thumb': extracted_xml['grandparent_thumb'],
            'art': extracted_xml['art'],
            'media_type': extracted_xml['media_type'],
            'year': extracted_xml['year'],
            'originally_available_at': extracted_xml['originally_available_at'],
            'added_at': extracted_xml['added_at'],
            'updated_at': extracted_xml['updated_at'],
            'last_viewed_at': extracted_xml['last_viewed_at'],
            'content_rating': row['content_rating'],
            'summary': row['summary'],
            'tagline': extracted_xml['tagline'],
            'rating': extracted_xml['rating'],
            'duration': extracted_xml['duration'],
            'guid': extracted_xml['guid'],
            'section_id': extracted_xml['section_id'],
            'directors': extracted_xml['directors'],
            'writers': extracted_xml['writers'],
            'actors': extracted_xml['actors'],
            'genres': extracted_xml['genres'],
            'studio': extracted_xml['studio'],
            'labels': extracted_xml['labels'],
            'full_title': row['full_title'],
            'width': extracted_xml['width'],
            'height': extracted_xml['height'],
            'container': extracted_xml['container'],
            'video_codec': extracted_xml['video_codec'],
            'audio_codec': extracted_xml['audio_codec'],
            'bitrate': extracted_xml['bitrate'],
            'video_resolution': extracted_xml['video_resolution'],
            'video_framerate': extracted_xml['video_framerate'],
            'aspect_ratio': extracted_xml['aspect_ratio'],
            'audio_channels': extracted_xml['audio_channels']
        }

        # On older versions of PMS, "clip" items were still classified as "movie" and had bad ratingKey values
        # Just make sure that the ratingKey is indeed an integer
        if session_history_metadata['rating_key'].isdigit():
            ap.write_session_history(
                session=session_history,
                import_metadata=session_history_metadata,
                is_import=True,
                import_ignore_interval=import_ignore_interval)
        else:
            logger.debug(u"Tautulli Importer :: Item has bad rating_key: %s"
                         % session_history_metadata['rating_key'])

    logger.debug(u"Tautulli Importer :: PlexWatch data import complete.")
    import_users()
class Users(object):
    # Data-access helpers for the users table: datatable queries and
    # per-user settings updates.

    def __init__(self):
        pass

    def get_user_list(self, kwargs=None):
        # Server-side-processed datatable of all non-deleted users, one row
        # per user with total play count and the latest session's details.
        data_tables = datatables.DataTables()

        # Hide users flagged as deleted.
        custom_where = ['users.deleted_user', 0]

        columns = [
            'session_history.id',
            'users.user_id as user_id',
            'users.custom_avatar_url as user_thumb',
            '(case when users.friendly_name is null then users.username else \
             users.friendly_name end) as friendly_name',
            'MAX(session_history.started) as last_seen',
            'session_history.ip_address as ip_address',
            'COUNT(session_history.id) as plays',
            'session_history.platform as platform',
            'session_history.player as player',
            'session_history_metadata.full_title as last_watched',
            'session_history_metadata.thumb',
            'session_history_metadata.parent_thumb',
            'session_history_metadata.grandparent_thumb',
            'session_history_metadata.media_type',
            'session_history.rating_key as rating_key',
            'session_history_media_info.video_decision',
            'users.username as user',
            'users.do_notify as do_notify',
            'users.keep_history as keep_history'
        ]
        try:
            query = data_tables.ssp_query(
                table_name='users',
                columns=columns,
                custom_where=[custom_where],
                group_by=['users.user_id'],
                join_types=[
                    'LEFT OUTER JOIN', 'LEFT OUTER JOIN', 'LEFT OUTER JOIN'
                ],
                join_tables=[
                    'session_history', 'session_history_metadata',
                    'session_history_media_info'
                ],
                join_evals=[
                    ['session_history.user_id', 'users.user_id'],
                    ['session_history.id', 'session_history_metadata.id'],
                    ['session_history.id', 'session_history_media_info.id']
                ],
                kwargs=kwargs)
        except:
            # NOTE(review): bare except hides the underlying DB error.
            logger.warn("Unable to execute database query.")
            return {
                'recordsFiltered': 0,
                'recordsTotal': 0,
                'draw': 0,
                'data': 'null',
                'error': 'Unable to execute database query.'
            }

        users = query['result']

        rows = []
        for item in users:
            # Episodes prefer the season (parent) thumb, falling back to the
            # show (grandparent) thumb; other media use their own thumb.
            if item["media_type"] == 'episode' and item["parent_thumb"]:
                thumb = item["parent_thumb"]
            elif item["media_type"] == 'episode':
                thumb = item["grandparent_thumb"]
            else:
                thumb = item["thumb"]

            # Fall back to the default avatar when no custom one is set.
            if not item['user_thumb'] or item['user_thumb'] == '':
                user_thumb = common.DEFAULT_USER_THUMB
            else:
                user_thumb = item['user_thumb']

            # Rename Mystery platform names
            platform = common.PLATFORM_NAME_OVERRIDES.get(
                item["platform"], item["platform"])

            row = {
                "id": item['id'],
                "plays": item['plays'],
                "last_seen": item['last_seen'],
                "friendly_name": item['friendly_name'],
                "ip_address": item['ip_address'],
                "platform": platform,
                "player": item["player"],
                "last_watched": item['last_watched'],
                "thumb": thumb,
                "media_type": item['media_type'],
                "rating_key": item['rating_key'],
                "video_decision": item['video_decision'],
                "user_thumb": user_thumb,
                "user": item["user"],
                "user_id": item['user_id'],
                "do_notify": helpers.checked(item['do_notify']),
                "keep_history": helpers.checked(item['keep_history'])
            }

            rows.append(row)

        # NOTE(review): local name shadows the builtin 'dict'.
        dict = {
            'recordsFiltered': query['filteredCount'],
            'recordsTotal': query['totalCount'],
            'data': rows,
            'draw': query['draw']
        }

        return dict

    def get_user_unique_ips(self, kwargs=None, custom_where=None):
        # Datatable of distinct IP addresses seen for a user, with the last
        # session's details per IP.
        data_tables = datatables.DataTables()

        # Change custom_where column name due to ambiguous column name after JOIN
        custom_where[0][0] = 'custom_user_id' if custom_where[0][
            0] == 'user_id' else custom_where[0][0]

        columns = [
            'session_history.id',
            'session_history.started as last_seen',
            'session_history.ip_address as ip_address',
            'COUNT(session_history.id) as play_count',
            'session_history.platform as platform',
            'session_history.player as player',
            'session_history_metadata.full_title as last_watched',
            'session_history_metadata.thumb',
            'session_history_metadata.parent_thumb',
            'session_history_metadata.grandparent_thumb',
            'session_history_metadata.media_type',
            'session_history.rating_key as rating_key',
            'session_history_media_info.video_decision',
            'session_history.user as user',
            'session_history.user_id as custom_user_id',
            '(case when users.friendly_name is null then users.username else \
             users.friendly_name end) as friendly_name'
        ]

        try:
            query = data_tables.ssp_query(
                table_name='session_history',
                columns=columns,
                custom_where=custom_where,
                group_by=['ip_address'],
                join_types=['JOIN', 'JOIN', 'JOIN'],
                join_tables=[
                    'users', 'session_history_metadata',
                    'session_history_media_info'
                ],
                join_evals=[
                    ['session_history.user_id', 'users.user_id'],
                    ['session_history.id', 'session_history_metadata.id'],
                    ['session_history.id', 'session_history_media_info.id']
                ],
                kwargs=kwargs)
        except:
            # NOTE(review): bare except hides the underlying DB error.
            logger.warn("Unable to execute database query.")
            return {
                'recordsFiltered': 0,
                'recordsTotal': 0,
                'draw': 0,
                'data': 'null',
                'error': 'Unable to execute database query.'
            }

        results = query['result']

        rows = []
        for item in results:
            # Same episode thumb fallback as get_user_list above.
            if item["media_type"] == 'episode' and item["parent_thumb"]:
                thumb = item["parent_thumb"]
            elif item["media_type"] == 'episode':
                thumb = item["grandparent_thumb"]
            else:
                thumb = item["thumb"]

            # Rename Mystery platform names
            platform = common.PLATFORM_NAME_OVERRIDES.get(
                item["platform"], item["platform"])

            row = {
                "id": item['id'],
                "last_seen": item['last_seen'],
                "ip_address": item['ip_address'],
                "play_count": item['play_count'],
                "platform": platform,
                "player": item['player'],
                "last_watched": item['last_watched'],
                "thumb": thumb,
                "media_type": item['media_type'],
                "rating_key": item['rating_key'],
                "video_decision": item['video_decision'],
                "friendly_name": item['friendly_name']
            }

            rows.append(row)

        # NOTE(review): local name shadows the builtin 'dict'.
        dict = {
            'recordsFiltered': query['filteredCount'],
            'recordsTotal': query['totalCount'],
            'data': rows,
            'draw': query['draw']
        }

        return dict

    # TODO: The getter and setter for this needs to become a config getter/setter for more than just friendlyname
    def set_user_friendly_name(self, user=None, user_id=None, friendly_name=None, do_notify=0, keep_history=1):
        # Upsert per-user settings, keyed by user_id when available,
        # otherwise by username.
        if user_id:
            # An empty friendly name is stored as NULL so the username shows.
            # NOTE(review): assumes friendly_name is a string; None would
            # raise on .strip() -- confirm callers always pass a string.
            if friendly_name.strip() == '':
                friendly_name = None

            monitor_db = database.MonitorDatabase()

            control_value_dict = {"user_id": user_id}
            new_value_dict = {
                "friendly_name": friendly_name,
                "do_notify": do_notify,
                "keep_history": keep_history
            }
            try:
                monitor_db.upsert('users', new_value_dict, control_value_dict)
            except Exception, e:
                logger.debug(u"Uncaught exception %s" % e)
        if user:
            if friendly_name.strip() == '':
                friendly_name = None

            monitor_db = database.MonitorDatabase()

            control_value_dict = {"username": user}
            new_value_dict = {
                "friendly_name": friendly_name,
                "do_notify": do_notify,
                "keep_history": keep_history
            }
            try:
                monitor_db.upsert('users', new_value_dict, control_value_dict)
            except Exception, e:
                logger.debug(u"Uncaught exception %s" % e)
def check_rclone_status(server, kwargs=None):
    """
    If rclone returns the PID, rclone is alive.
    Then copy a test file to a temporary directory accessing
    the test file thru the mount directory.

    returns:    True = Alive and well
                False = Not functioning
    """
    if kwargs:
        # Ad-hoc settings supplied by the caller (e.g. a settings-page test).
        user = kwargs['rclone_user']
        password = kwargs['rclone_pass']
        testFile = kwargs['rclone_testfile']
        mountPath = kwargs['rclone_mountdir']
        tmpDir = kwargs['rclone_tmpdir']
        port = kwargs['rclone_port']
        ssl = bool_int(kwargs['rclone_ssl'])
        hostname = (kwargs['rclone_ssl_hostname']
                    if ssl else server.CONFIG.PMS_IP)
    else:
        # Saved per-server configuration.
        user = server.CONFIG.RCLONE_USER
        password = server.CONFIG.RCLONE_PASS
        testFile = server.CONFIG.RCLONE_TESTFILE
        mountPath = server.CONFIG.RCLONE_MOUNTDIR
        tmpDir = server.CONFIG.RCLONE_TMPDIR
        port = server.CONFIG.RCLONE_PORT
        ssl = server.CONFIG.RCLONE_SSL
        hostname = (server.CONFIG.RCLONE_SSL_HOSTNAME
                    if ssl else server.CONFIG.PMS_IP)

    scheme = ('https' if ssl else 'http')
    url = '{scheme}://{hostname}:{port}'.format(scheme=scheme,
                                                hostname=hostname,
                                                port=port)
    status = False
    try:
        """
        Test if we get back a pid.
        """
        uri = '/core/pid'
        response = requests.post(url + uri, timeout=10, auth=(user, password))
        if response.status_code == requests.codes.ok:
            if 'pid' in response.json():
                status = True
        """
        Copy the testfile from the mount path to the temp directory.
        """
        if status:
            status = False
            # NOTE(review): mountPath/testFile/tmpDir are not URL-encoded;
            # names containing '&', '?' or spaces would break the query string.
            uri = '/operations/copyfile?srcFs=' + mountPath + \
                  '&srcRemote=' + testFile + \
                  '&dstFs=' + tmpDir + \
                  '&dstRemote=' + testFile
            response = requests.post(url + uri, timeout=20, auth=(user, password))
            if response.status_code == requests.codes.ok:
                status = True
        """
        Delete the testfile from the temp location.
        """
        if status:
            status = False
            uri = '/operations/deletefile?fs=' + tmpDir + '&remote=' + testFile
            response = requests.post(url + uri, timeout=10, auth=(user, password))
            if response.status_code == requests.codes.ok:
                status = True
        if status:
            # Only notify on the down -> up transition.
            if server.rclone_status == False:
                plexpy.NOTIFY_QUEUE.put({
                    'notify_action': 'on_rcloneup',
                    'server_id': server.CONFIG.ID
                })
        else:
            # Deliberately funnel a non-exception failure (bad status code /
            # missing pid) into the common RequestException handler below.
            raise requests.exceptions.RequestException
    except requests.exceptions.RequestException as e:
        logger.debug(
            u"Tautulli Monitor :: %s: rClone mount not responding. %s" %
            (server.CONFIG.PMS_NAME, e))
        logger.debug(u"Tautulli Monitor :: %s: rClone uri: %s" %
                     (server.CONFIG.PMS_NAME, uri))
        # Only notify on the up -> down transition.
        if server.rclone_status == True:
            plexpy.NOTIFY_QUEUE.put({
                'notify_action': 'on_rclonedown',
                'server_id': server.CONFIG.ID
            })

    with server.monitor_lock:
        server.rclone_status = status
def check_recently_added():
    """Poll the PMS recently-added list and fire 'created' notifications for
    items whose added_at timestamp falls within the last monitoring interval.

    Runs under monitor_lock so it cannot overlap other monitor passes.
    """
    with monitor_lock:
        # add delay to allow for metadata processing
        delay = plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY
        time_threshold = int(time.time()) - delay
        time_interval = plexpy.CONFIG.MONITORING_INTERVAL

        pms_connect = pmsconnect.PmsConnect()
        recently_added_list = pms_connect.get_recently_added_details(
            count='10')

        if recently_added_list:
            recently_added = recently_added_list['recently_added']

            for item in recently_added:
                # BUGFIX: reset metadata for every item.  Previously it was
                # only assigned in the success branches, so a failed lookup
                # raised NameError on the first item or silently reused the
                # previous item's metadata (duplicate notifications).
                metadata = []

                if item['media_type'] == 'movie':
                    metadata_list = pms_connect.get_metadata_details(
                        item['rating_key'])
                    if metadata_list:
                        metadata = [metadata_list['metadata']]
                    else:
                        logger.error(u"PlexPy Monitor :: Unable to retrieve metadata for rating_key %s" \
                            % str(item['rating_key']))
                else:
                    metadata_list = pms_connect.get_metadata_children_details(
                        item['rating_key'])
                    if metadata_list:
                        metadata = metadata_list['metadata']
                    else:
                        logger.error(u"PlexPy Monitor :: Unable to retrieve children metadata for rating_key %s" \
                            % str(item['rating_key']))

                if metadata:
                    if not plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_GRANDPARENT:
                        # Notify once for each newly added child item.
                        # (renamed from 'item' to stop shadowing the outer loop)
                        for new_item in metadata:
                            if 0 < int(new_item['added_at']) - time_threshold <= time_interval:
                                logger.debug(
                                    u"PlexPy Monitor :: Library item %s has been added to Plex."
                                    % str(new_item['rating_key']))
                                # Fire off notifications
                                threading.Thread(
                                    target=notification_handler.notify_timeline,
                                    kwargs=dict(
                                        timeline_data=new_item,
                                        notify_action='created')).start()
                    else:
                        # Notify only once per grandparent (show/artist),
                        # using the most recently added child as reference.
                        item = max(metadata, key=lambda x: x['added_at'])

                        if 0 < int(item['added_at']) - time_threshold <= time_interval:
                            if item['media_type'] == 'episode' or item['media_type'] == 'track':
                                metadata_list = pms_connect.get_metadata_details(
                                    item['grandparent_rating_key'])
                                if metadata_list:
                                    item = metadata_list['metadata']
                                else:
                                    logger.error(u"PlexPy Monitor :: Unable to retrieve grandparent metadata for grandparent_rating_key %s" \
                                        % str(item['rating_key']))

                            logger.debug(
                                u"PlexPy Monitor :: Library item %s has been added to Plex."
                                % str(item['rating_key']))
                            # Fire off notifications
                            threading.Thread(
                                target=notification_handler.notify_timeline,
                                kwargs=dict(timeline_data=item,
                                            notify_action='created')).start()
def get_media_info_file_sizes(self, section_id=None, rating_key=None):
    # Fill in missing total file sizes in the cached media-info JSON for a
    # library section (or a single rating key) and rewrite the cache file.
    from plexpy import pmsconnect
    import json, os

    if section_id and not str(section_id).isdigit():
        logger.warn(
            u"PlexPy Libraries :: Datatable media info file size called by invalid section_id provided."
        )
        return False
    elif rating_key and not str(rating_key).isdigit():
        logger.warn(
            u"PlexPy Libraries :: Datatable media info file size called by invalid rating_key provided."
        )
        return False

    # Get the library details
    library_details = self.get_details(section_id=section_id)
    if library_details['section_id'] == None:
        logger.debug(
            u"PlexPy Libraries :: Library section_id %s not found." %
            section_id)
        return False
    if library_details['section_type'] == 'photo':
        # Photo libraries have no file-size datatable.
        return False

    rows = []
    # Import media info cache from json file
    if rating_key:
        #logger.debug(u"PlexPy Libraries :: Getting file sizes for rating_key %s." % rating_key)
        try:
            inFilePath = os.path.join(
                plexpy.CONFIG.CACHE_DIR,
                'media_info_%s-%s.json' % (section_id, rating_key))
            with open(inFilePath, 'r') as inFile:
                rows = json.load(inFile)
        except IOError as e:
            # Missing cache file is expected on first run; proceed with
            # an empty row set.
            #logger.debug(u"PlexPy Libraries :: No JSON file for rating_key %s." % rating_key)
            #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
            pass
    elif section_id:
        logger.debug(
            u"PlexPy Libraries :: Getting file sizes for section_id %s." %
            section_id)
        try:
            inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,
                                      'media_info_%s.json' % section_id)
            with open(inFilePath, 'r') as inFile:
                rows = json.load(inFile)
        except IOError as e:
            #logger.debug(u"PlexPy Libraries :: No JSON file for library section_id %s." % section_id)
            #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
            pass

    # Get the total file size for each item
    pms_connect = pmsconnect.PmsConnect()

    for item in rows:
        if item['rating_key'] and not item['file_size']:
            file_size = 0

            child_metadata = pms_connect.get_metadata_children_details(
                rating_key=item['rating_key'],
                get_children=True,
                get_media_info=True)
            metadata_list = child_metadata['metadata']

            # Sum sizes across all children (e.g. every episode file).
            for child_metadata in metadata_list:
                file_size += helpers.cast_to_int(
                    child_metadata.get('file_size', 0))

            item['file_size'] = file_size

    # Cache the media info to a json file
    if rating_key:
        try:
            outFilePath = os.path.join(
                plexpy.CONFIG.CACHE_DIR,
                'media_info_%s-%s.json' % (section_id, rating_key))
            with open(outFilePath, 'w') as outFile:
                json.dump(rows, outFile)
        except IOError as e:
            logger.debug(
                u"PlexPy Libraries :: Unable to create cache file with file sizes for rating_key %s."
                % rating_key)
    elif section_id:
        try:
            outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,
                                       'media_info_%s.json' % section_id)
            with open(outFilePath, 'w') as outFile:
                json.dump(rows, outFile)
        except IOError as e:
            logger.debug(
                u"PlexPy Libraries :: Unable to create cache file with file sizes for section_id %s."
                % section_id)

    if rating_key:
        #logger.debug(u"PlexPy Libraries :: File sizes updated for rating_key %s." % rating_key)
        pass
    elif section_id:
        logger.debug(
            u"PlexPy Libraries :: File sizes updated for section_id %s." %
            section_id)

    return True
def signin(self, username=None, password=None, token=None, remember_me='0', admin_login='******', *args, **kwargs):
    # Authenticate a sign-in POST and, on success, issue a JWT session
    # cookie scoped to this server's UUID.
    # NOTE(review): the admin_login default '******' looks like a redacted
    # literal (it is compared against '1' below, suggesting '0'/'1' flags) --
    # confirm against the upstream source before relying on it.
    if cherrypy.request.method != 'POST':
        cherrypy.response.status = 405
        return {'status': 'error', 'message': 'Sign in using POST.'}

    error_message = {'status': 'error', 'message': 'Invalid credentials.'}

    valid_login, user_details, user_group = check_credentials(
        username=username,
        password=password,
        token=token,
        admin_login=admin_login,
        headers=kwargs)

    if valid_login:
        # Long-lived token only when "remember me" was ticked.
        time_delta = timedelta(
            days=30) if remember_me == '1' else timedelta(minutes=60)
        expiry = datetime.utcnow() + time_delta

        payload = {
            'user_id': user_details['user_id'],
            'user': user_details['username'],
            'user_group': user_group,
            'access_level': user_details['access_level'],
            'exp': expiry
        }

        jwt_token = jwt.encode(payload,
                               plexpy.CONFIG.JWT_SECRET,
                               algorithm=JWT_ALGORITHM)

        self.on_login(username=user_details['username'],
                      user_id=user_details['user_id'],
                      user_group=user_group,
                      success=True,
                      oauth=bool(token))

        # Cookie name is suffixed with the server UUID so multiple instances
        # on one host do not clobber each other's sessions.
        jwt_cookie = JWT_COOKIE_NAME + plexpy.CONFIG.PMS_UUID
        cherrypy.response.cookie[jwt_cookie] = jwt_token
        cherrypy.response.cookie[jwt_cookie]['expires'] = int(
            time_delta.total_seconds())
        cherrypy.response.cookie[jwt_cookie]['path'] = '/'

        cherrypy.request.login = payload
        cherrypy.response.status = 200
        return {
            'status': 'success',
            'token': jwt_token,
            'uuid': plexpy.CONFIG.PMS_UUID
        }
    elif admin_login == '1' and username:
        self.on_login(username=username)
        logger.debug(
            u"Tautulli WebAuth :: Invalid admin login attempt from '%s'." %
            username)
        cherrypy.response.status = 401
        return error_message
    elif username:
        self.on_login(username=username)
        logger.debug(
            u"Tautulli WebAuth :: Invalid user login attempt from '%s'." %
            username)
        cherrypy.response.status = 401
        return error_message
    elif token:
        # NOTE(review): '******' here also looks like a redacted value --
        # verify what username upstream records for failed OAuth attempts.
        self.on_login(username='******', oauth=True)
        logger.debug(
            u"Tautulli WebAuth :: Invalid Plex OAuth login attempt.")
        cherrypy.response.status = 401
        return error_message
else: handler.request(request_type, uri + token_string) response = handler.getresponse() request_status = response.status request_content = response.read() content_type = response.getheader('content-type') except IOError, e: logger.warn(u"Failed to access uri endpoint %s with error %s" % (uri, e)) return None if request_status == 200: if output_format == 'dict': output = helpers.convert_xml_to_dict(request_content) elif output_format == 'json': output = helpers.convert_xml_to_json(request_content) elif output_format == 'xml': output = helpers.parse_xml(request_content) else: output = request_content if return_type: return output, content_type return output else: logger.warn(u"Failed to access uri endpoint %s. Status code %r" % (uri, request_status)) return [] else: logger.debug(u"HTTP request made but no enpoint given.") return None
try: monitor_db.upsert('users', new_value_dict, control_value_dict) except Exception, e: logger.debug(u"Uncaught exception %s" % e) if user: if profile_url.strip() == '': profile_url = None monitor_db = database.MonitorDatabase() control_value_dict = {"username": user} new_value_dict = {"thumb": profile_url} try: monitor_db.upsert('users', new_value_dict, control_value_dict) except Exception, e: logger.debug(u"Uncaught exception %s" % e) def get_user_friendly_name(self, user=None, user_id=None): if user_id: monitor_db = database.MonitorDatabase() query = 'select username, ' \ '(CASE WHEN friendly_name IS NULL THEN username ELSE friendly_name END),' \ 'do_notify, keep_history, thumb ' \ 'FROM users WHERE user_id = ?' result = monitor_db.select(query, args=[user_id]) if result: user_detail = {'user_id': user_id, 'user': result[0][0], 'friendly_name': result[0][1], 'thumb': result[0][4], 'do_notify': helpers.checked(result[0][2]),
def dbcheck():
    """Create the PlexPy sqlite schema if missing and migrate older databases.

    Opens DB_FILE directly since this runs at startup before the rest of the
    app is initialised.  Each "Upgrade ... from earlier versions" stanza
    probes for a column with a SELECT and, on sqlite3.OperationalError
    (column missing), ALTERs the table.  sqlite only supports ADD COLUMN, so
    constraint changes are done via a temp-table copy/rename instead.
    """
    conn_db = sqlite3.connect(DB_FILE)
    c_db = conn_db.cursor()

    # sessions table :: This is a temp table that logs currently active sessions
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS sessions (id INTEGER PRIMARY KEY AUTOINCREMENT, '
        'session_key INTEGER, rating_key INTEGER, section_id INTEGER, media_type TEXT, started INTEGER, stopped INTEGER, '
        'paused_counter INTEGER DEFAULT 0, state TEXT, user_id INTEGER, user TEXT, friendly_name TEXT, '
        'ip_address TEXT, machine_id TEXT, player TEXT, platform TEXT, title TEXT, parent_title TEXT, '
        'grandparent_title TEXT, parent_rating_key INTEGER, grandparent_rating_key INTEGER, '
        'view_offset INTEGER DEFAULT 0, duration INTEGER, video_decision TEXT, audio_decision TEXT, '
        'width INTEGER, height INTEGER, container TEXT, video_codec TEXT, audio_codec TEXT, '
        'bitrate INTEGER, video_resolution TEXT, video_framerate TEXT, aspect_ratio TEXT, '
        'audio_channels INTEGER, transcode_protocol TEXT, transcode_container TEXT, '
        'transcode_video_codec TEXT, transcode_audio_codec TEXT, transcode_audio_channels INTEGER,'
        'transcode_width INTEGER, transcode_height INTEGER, buffer_count INTEGER DEFAULT 0, '
        'buffer_last_triggered INTEGER, last_paused INTEGER)')

    # session_history table :: This is a history table which logs essential stream details
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS session_history (id INTEGER PRIMARY KEY AUTOINCREMENT, reference_id INTEGER, '
        'started INTEGER, stopped INTEGER, rating_key INTEGER, user_id INTEGER, user TEXT, '
        'ip_address TEXT, paused_counter INTEGER DEFAULT 0, player TEXT, platform TEXT, machine_id TEXT, '
        'parent_rating_key INTEGER, grandparent_rating_key INTEGER, media_type TEXT, view_offset INTEGER DEFAULT 0)')

    # session_history_media_info table :: This is a table which logs each session's media info
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS session_history_media_info (id INTEGER PRIMARY KEY, rating_key INTEGER, '
        'video_decision TEXT, audio_decision TEXT, transcode_decision TEXT, duration INTEGER DEFAULT 0, width INTEGER, '
        'height INTEGER, container TEXT, video_codec TEXT, audio_codec TEXT, bitrate INTEGER, video_resolution TEXT, '
        'video_framerate TEXT, aspect_ratio TEXT, audio_channels INTEGER, transcode_protocol TEXT, '
        'transcode_container TEXT, transcode_video_codec TEXT, transcode_audio_codec TEXT, '
        'transcode_audio_channels INTEGER, transcode_width INTEGER, transcode_height INTEGER)')

    # session_history_metadata table :: This is a table which logs each session's media metadata
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS session_history_metadata (id INTEGER PRIMARY KEY, '
        'rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, '
        'title TEXT, parent_title TEXT, grandparent_title TEXT, full_title TEXT, media_index INTEGER, '
        'parent_media_index INTEGER, section_id INTEGER, thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, '
        'art TEXT, media_type TEXT, year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, '
        'last_viewed_at INTEGER, content_rating TEXT, summary TEXT, tagline TEXT, rating TEXT, '
        'duration INTEGER DEFAULT 0, guid TEXT, directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT)')

    # users table :: This table keeps record of the friends list
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY AUTOINCREMENT, '
        'user_id INTEGER DEFAULT NULL UNIQUE, username TEXT NOT NULL, friendly_name TEXT, '
        'thumb TEXT, custom_avatar_url TEXT, email TEXT, is_home_user INTEGER DEFAULT NULL, '
        'is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL, do_notify INTEGER DEFAULT 1, '
        'keep_history INTEGER DEFAULT 1, deleted_user INTEGER DEFAULT 0)')

    # notify_log table :: This is a table which logs notifications sent
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS notify_log (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, '
        'session_key INTEGER, rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, '
        'user_id INTEGER, user TEXT, agent_id INTEGER, agent_name TEXT, notify_action TEXT, '
        'subject_text TEXT, body_text TEXT, script_args TEXT, poster_url TEXT)')

    # library_sections table :: This table keeps record of the servers library sections
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS library_sections (id INTEGER PRIMARY KEY AUTOINCREMENT, '
        'server_id TEXT, section_id INTEGER, section_name TEXT, section_type TEXT, '
        'thumb TEXT, custom_thumb_url TEXT, art TEXT, count INTEGER, parent_count INTEGER, child_count INTEGER, '
        'do_notify INTEGER DEFAULT 1, do_notify_created INTEGER DEFAULT 1, keep_history INTEGER DEFAULT 1, '
        'deleted_section INTEGER DEFAULT 0, UNIQUE(server_id, section_id))')

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT started FROM sessions')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table sessions.")
        c_db.execute('ALTER TABLE sessions ADD COLUMN started INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN paused_counter INTEGER DEFAULT 0')
        c_db.execute('ALTER TABLE sessions ADD COLUMN state TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN user TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN machine_id TEXT')

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT title FROM sessions')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table sessions.")
        c_db.execute('ALTER TABLE sessions ADD COLUMN title TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN parent_title TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN grandparent_title TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN friendly_name TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN player TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN user_id INTEGER')

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT ip_address FROM sessions')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table sessions.")
        c_db.execute('ALTER TABLE sessions ADD COLUMN ip_address TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN platform TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN parent_rating_key INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN grandparent_rating_key INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN view_offset INTEGER DEFAULT 0')
        c_db.execute('ALTER TABLE sessions ADD COLUMN duration INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN video_decision TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN audio_decision TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN width INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN height INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN container TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN video_codec TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN audio_codec TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN bitrate INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN video_resolution TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN video_framerate TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN aspect_ratio TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN audio_channels INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_protocol TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_container TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_video_codec TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_audio_codec TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_audio_channels INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_width INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_height INTEGER')

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT buffer_count FROM sessions')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table sessions.")
        c_db.execute('ALTER TABLE sessions ADD COLUMN buffer_count INTEGER DEFAULT 0')
        c_db.execute('ALTER TABLE sessions ADD COLUMN buffer_last_triggered INTEGER')

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT last_paused FROM sessions')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table sessions.")
        c_db.execute('ALTER TABLE sessions ADD COLUMN last_paused INTEGER')

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT section_id FROM sessions')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table sessions.")
        c_db.execute('ALTER TABLE sessions ADD COLUMN section_id INTEGER')

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT stopped FROM sessions')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table sessions.")
        c_db.execute('ALTER TABLE sessions ADD COLUMN stopped INTEGER')

    # Upgrade session_history table from earlier versions
    try:
        c_db.execute('SELECT reference_id FROM session_history')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table session_history.")
        c_db.execute('ALTER TABLE session_history ADD COLUMN reference_id INTEGER DEFAULT 0')
        # Set reference_id to the first row where (user_id = previous row, rating_key != previous row) and user_id = user_id
        c_db.execute(
            'UPDATE session_history '
            'SET reference_id = (SELECT (CASE '
            'WHEN (SELECT MIN(id) FROM session_history WHERE id > ( '
            'SELECT MAX(id) FROM session_history '
            'WHERE (user_id = t1.user_id AND rating_key <> t1.rating_key AND id < t1.id)) AND user_id = t1.user_id) IS NULL '
            'THEN (SELECT MIN(id) FROM session_history WHERE (user_id = t1.user_id)) '
            'ELSE (SELECT MIN(id) FROM session_history WHERE id > ( '
            'SELECT MAX(id) FROM session_history '
            'WHERE (user_id = t1.user_id AND rating_key <> t1.rating_key AND id < t1.id)) AND user_id = t1.user_id) END) '
            'FROM session_history AS t1 '
            'WHERE t1.id = session_history.id) ')

    # Upgrade session_history_metadata table from earlier versions
    try:
        c_db.execute('SELECT full_title FROM session_history_metadata')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table session_history_metadata.")
        c_db.execute('ALTER TABLE session_history_metadata ADD COLUMN full_title TEXT')

    # Upgrade session_history_metadata table from earlier versions
    try:
        c_db.execute('SELECT tagline FROM session_history_metadata')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table session_history_metadata.")
        c_db.execute('ALTER TABLE session_history_metadata ADD COLUMN tagline TEXT')

    # Upgrade session_history_metadata table from earlier versions
    try:
        c_db.execute('SELECT section_id FROM session_history_metadata')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table session_history_metadata.")
        c_db.execute('ALTER TABLE session_history_metadata ADD COLUMN section_id INTEGER')

    # Upgrade session_history_media_info table from earlier versions
    try:
        c_db.execute('SELECT transcode_decision FROM session_history_media_info')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table session_history_media_info.")
        c_db.execute('ALTER TABLE session_history_media_info ADD COLUMN transcode_decision TEXT')
        # Back-fill the combined decision from the per-stream decisions.
        c_db.execute(
            'UPDATE session_history_media_info SET transcode_decision = (CASE '
            'WHEN video_decision = "transcode" OR audio_decision = "transcode" THEN "transcode" '
            'WHEN video_decision = "copy" OR audio_decision = "copy" THEN "copy" '
            'WHEN video_decision = "direct play" OR audio_decision = "direct play" THEN "direct play" END)')

    # Upgrade users table from earlier versions
    try:
        c_db.execute('SELECT do_notify FROM users')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table users.")
        c_db.execute('ALTER TABLE users ADD COLUMN do_notify INTEGER DEFAULT 1')

    # Upgrade users table from earlier versions
    try:
        c_db.execute('SELECT keep_history FROM users')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table users.")
        c_db.execute('ALTER TABLE users ADD COLUMN keep_history INTEGER DEFAULT 1')

    # Upgrade users table from earlier versions
    try:
        c_db.execute('SELECT custom_avatar_url FROM users')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table users.")
        c_db.execute('ALTER TABLE users ADD COLUMN custom_avatar_url TEXT')

    # Upgrade users table from earlier versions
    try:
        c_db.execute('SELECT deleted_user FROM users')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table users.")
        c_db.execute('ALTER TABLE users ADD COLUMN deleted_user INTEGER DEFAULT 0')

    # Upgrade notify_log table from earlier versions
    try:
        c_db.execute('SELECT poster_url FROM notify_log')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table notify_log.")
        c_db.execute('ALTER TABLE notify_log ADD COLUMN poster_url TEXT')

    # Upgrade notify_log table from earlier versions (populate table with data from notify_log)
    try:
        c_db.execute('SELECT timestamp FROM notify_log')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table notify_log.")
        c_db.execute(
            'CREATE TABLE IF NOT EXISTS notify_log_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, '
            'session_key INTEGER, rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, '
            'user_id INTEGER, user TEXT, agent_id INTEGER, agent_name TEXT, notify_action TEXT, '
            'subject_text TEXT, body_text TEXT, script_args TEXT, poster_url TEXT)')
        # Explode the old one-row-per-session layout (one column per action)
        # into one row per (session, action), keyed by the old on_* timestamps.
        c_db.execute(
            'INSERT INTO notify_log_temp (session_key, rating_key, user_id, user, agent_id, agent_name, '
            'poster_url, timestamp, notify_action) '
            'SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, timestamp, '
            'notify_action FROM notify_log_temp '
            'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, '
            'on_play, "play" FROM notify_log WHERE on_play '
            'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, '
            'on_stop, "stop" FROM notify_log WHERE on_stop '
            'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, '
            'on_watched, "watched" FROM notify_log WHERE on_watched '
            'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, '
            'on_pause, "pause" FROM notify_log WHERE on_pause '
            'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, '
            'on_resume, "resume" FROM notify_log WHERE on_resume '
            'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, '
            'on_buffer, "buffer" FROM notify_log WHERE on_buffer '
            'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, '
            'on_created, "created" FROM notify_log WHERE on_created '
            'ORDER BY timestamp ')
        c_db.execute('DROP TABLE notify_log')
        c_db.execute('ALTER TABLE notify_log_temp RENAME TO notify_log')

    # Upgrade library_sections table from earlier versions (remove UNIQUE constraint on section_id)
    try:
        result = c_db.execute(
            'SELECT SQL FROM sqlite_master WHERE type="table" AND name="library_sections"').fetchone()
        if 'section_id INTEGER UNIQUE' in result[0]:
            logger.debug(u"Altering database. Removing unique constraint on section_id from library_sections table.")
            c_db.execute(
                'CREATE TABLE library_sections_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, '
                'server_id TEXT, section_id INTEGER, section_name TEXT, section_type TEXT, '
                'thumb TEXT, custom_thumb_url TEXT, art TEXT, count INTEGER, parent_count INTEGER, child_count INTEGER, '
                'do_notify INTEGER DEFAULT 1, do_notify_created INTEGER DEFAULT 1, keep_history INTEGER DEFAULT 1, '
                'deleted_section INTEGER DEFAULT 0, UNIQUE(server_id, section_id))')
            c_db.execute(
                'INSERT INTO library_sections_temp (id, server_id, section_id, section_name, section_type, '
                'thumb, custom_thumb_url, art, count, parent_count, child_count, do_notify, do_notify_created, '
                'keep_history, deleted_section) '
                'SELECT id, server_id, section_id, section_name, section_type, '
                'thumb, custom_thumb_url, art, count, parent_count, child_count, do_notify, do_notify_created, '
                'keep_history, deleted_section '
                'FROM library_sections')
            c_db.execute('DROP TABLE library_sections')
            c_db.execute('ALTER TABLE library_sections_temp RENAME TO library_sections')
    except sqlite3.OperationalError:
        logger.warn(u"Unable to remove section_id unique constraint from library_sections.")
        try:
            c_db.execute('DROP TABLE library_sections_temp')
        except:
            pass

    # Upgrade library_sections table from earlier versions (remove duplicated libraries)
    try:
        result = c_db.execute('SELECT * FROM library_sections WHERE server_id = ""')
        # BUGFIX: sqlite3 cursors report rowcount == -1 for SELECT statements,
        # so the old `result.rowcount > 0` check never fired and the
        # duplicates were never cleaned up.  Fetch a row to test instead.
        if result.fetchone():
            logger.debug(u"Altering database. Removing duplicate libraries from library_sections table.")
            c_db.execute('DELETE FROM library_sections WHERE server_id = ""')
    except sqlite3.OperationalError:
        logger.warn(u"Unable to remove duplicate libraries from library_sections table.")

    # Upgrade users table from earlier versions (remove UNIQUE constraint on username)
    try:
        result = c_db.execute(
            'SELECT SQL FROM sqlite_master WHERE type="table" AND name="users"').fetchone()
        if 'username TEXT NOT NULL UNIQUE' in result[0]:
            logger.debug(u"Altering database. Removing unique constraint on username from users table.")
            c_db.execute(
                'CREATE TABLE users_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, '
                'user_id INTEGER DEFAULT NULL UNIQUE, username TEXT NOT NULL, friendly_name TEXT, '
                'thumb TEXT, custom_avatar_url TEXT, email TEXT, is_home_user INTEGER DEFAULT NULL, '
                'is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL, do_notify INTEGER DEFAULT 1, '
                'keep_history INTEGER DEFAULT 1, deleted_user INTEGER DEFAULT 0)')
            c_db.execute(
                'INSERT INTO users_temp (id, user_id, username, friendly_name, thumb, custom_avatar_url, '
                'email, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history, deleted_user) '
                'SELECT id, user_id, username, friendly_name, thumb, custom_avatar_url, '
                'email, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history, deleted_user '
                'FROM users')
            c_db.execute('DROP TABLE users')
            c_db.execute('ALTER TABLE users_temp RENAME TO users')
    except sqlite3.OperationalError:
        logger.warn(u"Unable to remove username unique constraint from users.")
        try:
            c_db.execute('DROP TABLE users_temp')
        except:
            pass

    # Add "Local" user to database as default unauthenticated user.
    # BUGFIX: the guard must look up the same username that is inserted below
    # ("Local"); with a mismatched literal the guard never finds the row, the
    # INSERT of user_id 0 is retried on every startup, and it fails against
    # the UNIQUE constraint on user_id.
    result = c_db.execute('SELECT id FROM users WHERE username = "Local"')
    if not result.fetchone():
        logger.debug(u'User "Local" does not exist. Adding user.')
        c_db.execute('INSERT INTO users (user_id, username) VALUES (0, "Local")')

    conn_db.commit()
    c_db.close()
def write_session_history(self, session=None, import_metadata=None, is_import=False, import_ignore_interval=0):
    """Write a finished stream session to the history tables.

    Used both for live sessions (is_import=False; metadata is fetched from
    the Plex server) and for database imports (is_import=True; metadata is
    supplied in ``import_metadata``).  Applies the configured logging
    filters (media type, minimum play duration, per-user/per-library
    keep_history) before writing.

    Returns True once the session is written; False when user/library/
    metadata lookups fail.  When logging is filtered out, no explicit value
    is returned (falls through to None).

    NOTE(review): ``session`` is assumed to be a dict with the keys used
    below (rating_key, media_type, started, stopped, ...) -- its exact
    schema comes from the caller and is not visible here.
    """
    from plexpy import users, libraries

    # Imports carry their own section_id in the import metadata.
    section_id = session['section_id'] if not is_import else import_metadata['section_id']

    if not is_import:
        user_data = users.Users()
        user_details = user_data.get_details(user_id=session['user_id'])
        library_data = libraries.Libraries()
        library_details = library_data.get_details(section_id=section_id)

        # Return false if failed to retrieve user or library details
        if not user_details or not library_details:
            return False

    if session:
        logging_enabled = False

        # Determine the stop time, falling back to "now" when absent; for
        # live sessions the fallback also marks the session stopped.
        if is_import:
            if str(session['stopped']).isdigit():
                stopped = int(session['stopped'])
            else:
                stopped = int(time.time())
        elif session['stopped']:
            stopped = int(session['stopped'])
        else:
            stopped = int(time.time())
            self.set_session_state(session_key=session['session_key'],
                                   state='stopped',
                                   stopped=stopped)

        # Only log media types that are enabled in the settings and have a
        # numeric ratingKey.
        if plexpy.CONFIG.MOVIE_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                session['media_type'] == 'movie':
            logging_enabled = True
        elif plexpy.CONFIG.TV_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                session['media_type'] == 'episode':
            logging_enabled = True
        elif plexpy.CONFIG.MUSIC_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                session['media_type'] == 'track':
            logging_enabled = True
        else:
            logger.debug(u"PlexPy ActivityProcessor :: ratingKey %s not logged. Does not meet logging criteria. "
                         u"Media type is '%s'" % (session['rating_key'], session['media_type']))

        # Actual play time excludes any paused time.
        if str(session['paused_counter']).isdigit():
            real_play_time = stopped - session['started'] - int(session['paused_counter'])
        else:
            real_play_time = stopped - session['started']

        # Skip video plays shorter than the configured ignore interval.
        if not is_import and plexpy.CONFIG.LOGGING_IGNORE_INTERVAL:
            if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                    (real_play_time < int(plexpy.CONFIG.LOGGING_IGNORE_INTERVAL)):
                logging_enabled = False
                logger.debug(u"PlexPy ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
                             u"seconds, so we're not logging it." %
                             (session['rating_key'], str(real_play_time),
                              plexpy.CONFIG.LOGGING_IGNORE_INTERVAL))
        # Heuristic for music: <15s play of a >=30s track counts as a skip.
        if not is_import and session['media_type'] == 'track':
            if real_play_time < 15 and session['duration'] >= 30:
                logging_enabled = False
                logger.debug(u"PlexPy ActivityProcessor :: Play duration for ratingKey %s is %s secs, "
                             u"looks like it was skipped so we're not logging it" %
                             (session['rating_key'], str(real_play_time)))
        elif is_import and import_ignore_interval:
            # Imports honour the ignore interval supplied by the importer.
            if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                    (real_play_time < int(import_ignore_interval)):
                logging_enabled = False
                logger.debug(u"PlexPy ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
                             u"seconds, so we're not logging it." %
                             (session['rating_key'], str(real_play_time),
                              import_ignore_interval))

        # Per-user / per-library history switches (live sessions only).
        if not is_import and not user_details['keep_history']:
            logging_enabled = False
            logger.debug(u"PlexPy ActivityProcessor :: History logging for user '%s' is disabled." %
                         user_details['username'])
        elif not is_import and not library_details['keep_history']:
            logging_enabled = False
            logger.debug(u"PlexPy ActivityProcessor :: History logging for library '%s' is disabled." %
                         library_details['section_name'])

        if logging_enabled:
            # Fetch metadata first so we can return false if it fails
            if not is_import:
                logger.debug(u"PlexPy ActivityProcessor :: Fetching metadata for item ratingKey %s" %
                             session['rating_key'])
                pms_connect = pmsconnect.PmsConnect()
                result = pms_connect.get_metadata_details(rating_key=str(session['rating_key']))
                if result:
                    metadata = result['metadata']
                else:
                    return False
            else:
                metadata = import_metadata

            # logger.debug(u"PlexPy ActivityProcessor :: Attempting to write to session_history table...")
            query = 'INSERT INTO session_history (started, stopped, rating_key, parent_rating_key, ' \
                    'grandparent_rating_key, media_type, user_id, user, ip_address, paused_counter, player, ' \
                    'platform, machine_id, view_offset) VALUES ' \
                    '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
            args = [session['started'], stopped,
                    session['rating_key'], session['parent_rating_key'], session['grandparent_rating_key'],
                    session['media_type'], session['user_id'], session['user'], session['ip_address'],
                    session['paused_counter'], session['player'], session['platform'],
                    session['machine_id'], session['view_offset']]

            # logger.debug(u"PlexPy ActivityProcessor :: Writing session_history transaction...")
            self.db.action(query=query, args=args)

            # Check if we should group the session, select the last two rows from the user
            query = 'SELECT id, rating_key, view_offset, user_id, reference_id FROM session_history \
                     WHERE user_id = ? ORDER BY id DESC LIMIT 2 '
            args = [session['user_id']]

            result = self.db.select(query=query, args=args)

            new_session = {'id': result[0]['id'],
                           'rating_key': result[0]['rating_key'],
                           'view_offset': result[0]['view_offset'],
                           'user_id': result[0]['user_id'],
                           'reference_id': result[0]['reference_id']}

            if len(result) == 1:
                # First history row for this user; nothing to group with.
                prev_session = None
            else:
                prev_session = {'id': result[1]['id'],
                                'rating_key': result[1]['rating_key'],
                                'view_offset': result[1]['view_offset'],
                                'user_id': result[1]['user_id'],
                                'reference_id': result[1]['reference_id']}

            query = 'UPDATE session_history SET reference_id = ? WHERE id = ? '
            # If rating_key is the same in the previous session, then set the reference_id to the previous row, else set the reference_id to the new id
            if (prev_session is not None) and (prev_session['rating_key'] == new_session['rating_key']
                                               and prev_session['view_offset'] <= new_session['view_offset']):
                args = [prev_session['reference_id'], new_session['id']]
            else:
                args = [new_session['id'], new_session['id']]

            self.db.action(query=query, args=args)

            # logger.debug(u"PlexPy ActivityProcessor :: Successfully written history item, last id for session_history is %s"
            #              % last_id)

            # Write the session_history_media_info table

            # Generate a combined transcode decision value
            if session['video_decision'] == 'transcode' or session['audio_decision'] == 'transcode':
                transcode_decision = 'transcode'
            elif session['video_decision'] == 'copy' or session['audio_decision'] == 'copy':
                transcode_decision = 'copy'
            else:
                transcode_decision = 'direct play'

            # logger.debug(u"PlexPy ActivityProcessor :: Attempting to write to session_history_media_info table...")
            # last_insert_rowid() ties this row to the session_history row
            # inserted above (shared primary key).
            query = 'INSERT INTO session_history_media_info (id, rating_key, video_decision, audio_decision, ' \
                    'duration, width, height, container, video_codec, audio_codec, bitrate, video_resolution, ' \
                    'video_framerate, aspect_ratio, audio_channels, transcode_protocol, transcode_container, ' \
                    'transcode_video_codec, transcode_audio_codec, transcode_audio_channels, transcode_width, ' \
                    'transcode_height, transcode_decision) VALUES ' \
                    '(last_insert_rowid(), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
            args = [session['rating_key'], session['video_decision'], session['audio_decision'],
                    session['duration'], session['width'], session['height'], session['container'],
                    session['video_codec'], session['audio_codec'], session['bitrate'],
                    session['video_resolution'], session['video_framerate'], session['aspect_ratio'],
                    session['audio_channels'], session['transcode_protocol'], session['transcode_container'],
                    session['transcode_video_codec'], session['transcode_audio_codec'],
                    session['transcode_audio_channels'], session['transcode_width'],
                    session['transcode_height'], transcode_decision]

            # logger.debug(u"PlexPy ActivityProcessor :: Writing session_history_media_info transaction...")
            self.db.action(query=query, args=args)

            # Write the session_history_metadata table
            # Lists are stored flattened as ";"-separated strings.
            directors = ";".join(metadata['directors'])
            writers = ";".join(metadata['writers'])
            actors = ";".join(metadata['actors'])
            genres = ";".join(metadata['genres'])

            # Build media item title
            if session['media_type'] == 'episode' or session['media_type'] == 'track':
                full_title = '%s - %s' % (metadata['grandparent_title'], metadata['title'])
            elif session['media_type'] == 'movie':
                full_title = metadata['title']
            else:
                full_title = metadata['title']

            # logger.debug(u"PlexPy ActivityProcessor :: Attempting to write to session_history_metadata table...")
            query = 'INSERT INTO session_history_metadata (id, rating_key, parent_rating_key, ' \
                    'grandparent_rating_key, title, parent_title, grandparent_title, full_title, media_index, ' \
                    'parent_media_index, section_id, thumb, parent_thumb, grandparent_thumb, art, media_type, ' \
                    'year, originally_available_at, added_at, updated_at, last_viewed_at, content_rating, ' \
                    'summary, tagline, rating, duration, guid, directors, writers, actors, genres, studio) VALUES ' \
                    '(last_insert_rowid(), ' \
                    '?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
            args = [session['rating_key'], session['parent_rating_key'], session['grandparent_rating_key'],
                    session['title'], session['parent_title'], session['grandparent_title'], full_title,
                    metadata['media_index'], metadata['parent_media_index'], metadata['section_id'],
                    metadata['thumb'], metadata['parent_thumb'], metadata['grandparent_thumb'],
                    metadata['art'], session['media_type'], metadata['year'],
                    metadata['originally_available_at'], metadata['added_at'], metadata['updated_at'],
                    metadata['last_viewed_at'], metadata['content_rating'], metadata['summary'],
                    metadata['tagline'], metadata['rating'], metadata['duration'], metadata['guid'],
                    directors, writers, actors, genres, metadata['studio']]

            # logger.debug(u"PlexPy ActivityProcessor :: Writing session_history_metadata transaction...")
            self.db.action(query=query, args=args)

            # Return true when the session is successfully written to the database
            return True
class DataFactory(object):
    """
    Retrieve and process data from the monitor database
    """

    def __init__(self):
        pass

    @staticmethod
    def _empty_result():
        """Return the DataTables-shaped payload used when a query fails."""
        return {'recordsFiltered': 0,
                'recordsTotal': 0,
                'draw': 0,
                'data': 'null',
                'error': 'Unable to execute database query.'}

    def get_user_list(self, kwargs=None):
        """Return a DataTables result dict of users with play counts,
        last-seen timestamps and last-known IP addresses.

        kwargs: DataTables request parameters forwarded to ssp_query.
        """
        data_tables = datatables.DataTables()

        columns = ['users.user_id as user_id',
                   'users.thumb as thumb',
                   '(case when users.friendly_name is null then users.username else '
                   'users.friendly_name end) as friendly_name',
                   'MAX(session_history.started) as last_seen',
                   'session_history.ip_address as ip_address',
                   'COUNT(session_history.id) as plays',
                   'users.username as user'
                   ]
        try:
            query = data_tables.ssp_query(table_name='users',
                                          columns=columns,
                                          custom_where=[],
                                          group_by=['users.user_id'],
                                          join_types=['LEFT OUTER JOIN'],
                                          join_tables=['session_history'],
                                          join_evals=[['session_history.user_id', 'users.user_id']],
                                          kwargs=kwargs)
        except Exception:
            # Narrowed from a bare except; still best-effort with logging.
            logger.warn("Unable to execute database query.")
            return self._empty_result()

        users = query['result']

        rows = []
        for item in users:
            # Fall back to the default avatar when the user has no thumb.
            user_thumb = item['thumb'] if item['thumb'] else common.DEFAULT_USER_THUMB

            rows.append({"plays": item['plays'],
                         "last_seen": item['last_seen'],
                         "friendly_name": item["friendly_name"],
                         "ip_address": item["ip_address"],
                         "thumb": user_thumb,
                         "user": item["user"],
                         "user_id": item['user_id']
                         })

        # Renamed from 'dict' to avoid shadowing the builtin.
        output = {'recordsFiltered': query['filteredCount'],
                  'recordsTotal': query['totalCount'],
                  'data': rows,
                  'draw': query['draw']
                  }

        return output

    def get_history(self, kwargs=None, custom_where=None):
        """Return a DataTables result dict of watch history rows, joined
        against users, metadata and media info.

        kwargs: DataTables request parameters forwarded to ssp_query.
        custom_where: extra WHERE clauses for ssp_query.
        """
        data_tables = datatables.DataTables()

        columns = ['session_history.id',
                   'session_history.started as date',
                   '(CASE WHEN users.friendly_name IS NULL THEN session_history'
                   '.user ELSE users.friendly_name END) as friendly_name',
                   'session_history.player',
                   'session_history.ip_address',
                   'session_history_metadata.full_title as full_title',
                   'session_history.started',
                   'session_history.paused_counter',
                   'session_history.stopped',
                   # Wall-clock seconds between start and stop, minus paused time.
                   'round((julianday(datetime(session_history.stopped, "unixepoch", "localtime")) - '
                   'julianday(datetime(session_history.started, "unixepoch", "localtime"))) * 86400) - '
                   '(CASE WHEN session_history.paused_counter IS NULL THEN 0 '
                   'ELSE session_history.paused_counter END) as duration',
                   # Percent watched; NULLs coerced to avoid division by zero.
                   '((CASE WHEN session_history.view_offset IS NULL THEN 0.1 ELSE '
                   'session_history.view_offset * 1.0 END) / '
                   '(CASE WHEN session_history_metadata.duration IS NULL THEN 1.0 ELSE '
                   'session_history_metadata.duration * 1.0 END) * 100) as percent_complete',
                   'session_history.grandparent_rating_key as grandparent_rating_key',
                   'session_history.rating_key as rating_key',
                   'session_history.user',
                   'session_history_metadata.media_type',
                   'session_history_media_info.video_decision',
                   'session_history.user_id as user_id'
                   ]
        try:
            query = data_tables.ssp_query(table_name='session_history',
                                          columns=columns,
                                          custom_where=custom_where,
                                          group_by=[],
                                          join_types=['LEFT OUTER JOIN', 'JOIN', 'JOIN'],
                                          join_tables=['users',
                                                       'session_history_metadata',
                                                       'session_history_media_info'],
                                          join_evals=[['session_history.user_id', 'users.user_id'],
                                                      ['session_history.id', 'session_history_metadata.id'],
                                                      ['session_history.id', 'session_history_media_info.id']],
                                          kwargs=kwargs)
        except Exception:
            logger.warn("Unable to execute database query.")
            return self._empty_result()

        history = query['result']

        rows = []
        for item in history:
            rows.append({"id": item['id'],
                         "date": item['date'],
                         "friendly_name": item['friendly_name'],
                         "player": item["player"],
                         "ip_address": item["ip_address"],
                         "full_title": item["full_title"],
                         "started": item["started"],
                         "paused_counter": item["paused_counter"],
                         "stopped": item["stopped"],
                         "duration": item["duration"],
                         "percent_complete": item["percent_complete"],
                         "grandparent_rating_key": item["grandparent_rating_key"],
                         "rating_key": item["rating_key"],
                         "user": item["user"],
                         "media_type": item["media_type"],
                         "video_decision": item["video_decision"],
                         "user_id": item["user_id"]
                         })

        output = {'recordsFiltered': query['filteredCount'],
                  'recordsTotal': query['totalCount'],
                  'data': rows,
                  'draw': query['draw']
                  }

        return output

    def get_user_unique_ips(self, kwargs=None, custom_where=None):
        """Return a DataTables result dict of unique IP addresses seen for a
        user, with play counts and the last watched title per IP.
        """
        data_tables = datatables.DataTables()

        columns = ['session_history.started as last_seen',
                   'session_history.ip_address as ip_address',
                   'COUNT(session_history.id) as play_count',
                   'session_history.player as platform',
                   'session_history_metadata.full_title as last_watched',
                   'session_history.user as user',
                   'session_history.user_id as user_id'
                   ]
        try:
            query = data_tables.ssp_query(table_name='session_history',
                                          columns=columns,
                                          custom_where=custom_where,
                                          group_by=['ip_address'],
                                          join_types=['JOIN'],
                                          join_tables=['session_history_metadata'],
                                          join_evals=[['session_history.id', 'session_history_metadata.id']],
                                          kwargs=kwargs)
        except Exception:
            logger.warn("Unable to execute database query.")
            return self._empty_result()

        results = query['result']

        rows = []
        for item in results:
            rows.append({"last_seen": item['last_seen'],
                         "ip_address": item['ip_address'],
                         "play_count": item['play_count'],
                         "platform": item['platform'],
                         "last_watched": item['last_watched']
                         })

        output = {'recordsFiltered': query['filteredCount'],
                  'recordsTotal': query['totalCount'],
                  'data': rows,
                  'draw': query['draw']
                  }

        return output

    # TODO: The getter and setter for this needs to become a config getter/setter for more than just friendlyname
    def set_user_friendly_name(self, user=None, user_id=None, friendly_name=None,
                               do_notify=0, keep_history=1):
        """Update the friendly name, notify and keep-history flags for a user,
        matched by user_id and/or username.
        """
        # Treat a blank friendly name as NULL so the username is shown instead.
        # Guard against None: previously friendly_name.strip() raised
        # AttributeError when the caller left friendly_name at its default.
        if friendly_name is not None and friendly_name.strip() == '':
            friendly_name = None

        new_value_dict = {"friendly_name": friendly_name,
                          "do_notify": do_notify,
                          "keep_history": keep_history}

        if user_id:
            self._upsert_user({"user_id": user_id}, new_value_dict)
        if user:
            self._upsert_user({"username": user}, new_value_dict)

    @staticmethod
    def _upsert_user(control_value_dict, new_value_dict):
        """Upsert one users row, logging (not raising) any database error."""
        monitor_db = database.MonitorDatabase()
        try:
            monitor_db.upsert('users', new_value_dict, control_value_dict)
        except Exception as e:
            logger.debug(u"Uncaught exception %s" % e)
def notify(self, message, event):
    """Send a notification through the Pushalot API.

    Returns True when Pushalot accepts the message, False on failure,
    and None when either the title or the body is empty.
    """
    # Nothing to send without both a title and a body.
    if not message or not event:
        return

    auth_token = plexpy.CONFIG.PUSHALOT_APIKEY

    logger.debug(u"Pushalot event: " + event)
    logger.debug(u"Pushalot message: " + message)
    logger.debug(u"Pushalot api: " + auth_token)

    payload = {'AuthorizationToken': auth_token,
               'Title': event.encode('utf-8'),
               'Body': message.encode("utf-8")}

    conn = HTTPSConnection("pushalot.com")
    conn.request("POST",
                 "/api/sendmessage",
                 headers={'Content-type': "application/x-www-form-urlencoded"},
                 body=urlencode(payload))
    reply = conn.getresponse()
    status = reply.status

    logger.debug(u"Pushalot response status: %r" % status)
    logger.debug(u"Pushalot response headers: %r" % reply.getheaders())
    logger.debug(u"Pushalot response body: %r" % reply.read())

    if status == 200:
        logger.info(u"Pushalot notifications sent.")
        return True
    if status == 410:
        logger.info(u"Pushalot auth failed: %s" % reply.reason)
        return False
    logger.info(u"Pushalot notification failed.")
    return False
def main():
    """
    PlexPy application entry point. Parses arguments, setups encoding and
    initializes the application.

    Order matters throughout: logging, pidfile, data dir, daemonization,
    config, background threads and the web server each depend on the
    previous steps.
    """

    # Fixed paths to PlexPy
    if hasattr(sys, 'frozen'):
        # Frozen (e.g. py2exe) builds report the executable, not __file__.
        plexpy.FULL_PATH = os.path.abspath(sys.executable)
    else:
        plexpy.FULL_PATH = os.path.abspath(__file__)

    plexpy.PROG_DIR = os.path.dirname(plexpy.FULL_PATH)
    plexpy.ARGS = sys.argv[1:]

    # From sickbeard
    plexpy.SYS_PLATFORM = sys.platform
    plexpy.SYS_ENCODING = None

    try:
        locale.setlocale(locale.LC_ALL, "")
        plexpy.SYS_ENCODING = locale.getpreferredencoding()
    except (locale.Error, IOError):
        pass

    # for OSes that are poorly configured I'll just force UTF-8
    if not plexpy.SYS_ENCODING or plexpy.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        plexpy.SYS_ENCODING = 'UTF-8'

    # Set up and gather command line arguments
    parser = argparse.ArgumentParser(
        description='A Python based monitoring and tracking tool for Plex Media Server.')

    parser.add_argument(
        '-v', '--verbose', action='store_true', help='Increase console logging verbosity')
    parser.add_argument(
        '-q', '--quiet', action='store_true', help='Turn off console logging')
    parser.add_argument(
        '-d', '--daemon', action='store_true', help='Run as a daemon')
    parser.add_argument(
        '-p', '--port', type=int, help='Force PlexPy to run on a specified port')
    parser.add_argument(
        '--dev', action='store_true', help='Start PlexPy in the development environment')
    parser.add_argument(
        '--datadir', help='Specify a directory where to store your data files')
    parser.add_argument(
        '--config', help='Specify a config file to use')
    parser.add_argument(
        '--nolaunch', action='store_true', help='Prevent browser from launching on startup')
    parser.add_argument(
        '--pidfile', help='Create a pid file (only relevant when running as a daemon)')
    parser.add_argument(
        '--nofork', action='store_true', help='Start PlexPy as a service, do not fork when restarting')

    args = parser.parse_args()

    if args.verbose:
        plexpy.VERBOSE = True
    if args.quiet:
        plexpy.QUIET = True

    # Do an intial setup of the logger.
    logger.initLogger(console=not plexpy.QUIET, log_dir=False,
                      verbose=plexpy.VERBOSE)

    if args.dev:
        plexpy.DEV = True
        logger.debug(u"PlexPy is running in the dev environment.")

    if args.daemon:
        if sys.platform == 'win32':
            sys.stderr.write(
                "Daemonizing not supported under Windows, starting normally\n")
        else:
            plexpy.DAEMON = True
            plexpy.QUIET = True

    if args.nofork:
        plexpy.NOFORK = True
        logger.info("PlexPy is running as a service, it will not fork when restarted.")

    if args.pidfile:
        plexpy.PIDFILE = str(args.pidfile)

        # If the pidfile already exists, plexpy may still be running, so
        # exit
        if os.path.exists(plexpy.PIDFILE):
            try:
                with open(plexpy.PIDFILE, 'r') as fp:
                    pid = int(fp.read())
                # Signal 0 only probes whether the PID exists; raises OSError
                # when it does not.
                os.kill(pid, 0)
            except IOError as e:
                raise SystemExit("Unable to read PID file: %s", e)
            except OSError:
                logger.warn("PID file '%s' already exists, but PID %d is " \
                            "not running. Ignoring PID file." %
                            (plexpy.PIDFILE, pid))
            else:
                # The pidfile exists and points to a live PID. plexpy may
                # still be running, so exit.
                raise SystemExit("PID file '%s' already exists. Exiting." %
                                 plexpy.PIDFILE)

        # The pidfile is only useful in daemon mode, make sure we can write the
        # file properly
        if plexpy.DAEMON:
            plexpy.CREATEPID = True
            try:
                with open(plexpy.PIDFILE, 'w') as fp:
                    fp.write("pid\n")
            except IOError as e:
                raise SystemExit("Unable to write PID file: %s", e)
        else:
            logger.warn("Not running in daemon mode. PID file creation " \
                        "disabled.")

    # Determine which data directory and config file to use
    if args.datadir:
        plexpy.DATA_DIR = args.datadir
    else:
        plexpy.DATA_DIR = plexpy.PROG_DIR

    if args.config:
        config_file = args.config
    else:
        config_file = os.path.join(plexpy.DATA_DIR, config.FILENAME)

    # Try to create the DATA_DIR if it doesn't exist
    if not os.path.exists(plexpy.DATA_DIR):
        try:
            os.makedirs(plexpy.DATA_DIR)
        except OSError:
            raise SystemExit(
                'Could not create data directory: ' + plexpy.DATA_DIR + '. Exiting....')

    # Make sure the DATA_DIR is writeable
    if not os.access(plexpy.DATA_DIR, os.W_OK):
        raise SystemExit(
            'Cannot write to the data directory: ' + plexpy.DATA_DIR + '. Exiting...')

    # Put the database in the DATA_DIR
    plexpy.DB_FILE = os.path.join(plexpy.DATA_DIR, database.FILENAME)

    if plexpy.DAEMON:
        plexpy.daemonize()

    # Read config and start logging
    plexpy.initialize(config_file)

    # Start the background threads
    plexpy.start()

    # Open connection for websocket
    if plexpy.CONFIG.MONITORING_USE_WEBSOCKET:
        try:
            web_socket.start_thread()
        except:
            logger.warn(u"Websocket :: Unable to open connection.")
            # Fallback to polling
            plexpy.POLLING_FAILOVER = True
            plexpy.initialize_scheduler()

    # Force the http port if neccessary
    if args.port:
        http_port = args.port
        logger.info('Using forced web server port: %i', http_port)
    else:
        http_port = int(plexpy.CONFIG.HTTP_PORT)

    # Check if pyOpenSSL is installed. It is required for certificate generation
    # and for CherryPy.
    if plexpy.CONFIG.ENABLE_HTTPS:
        try:
            import OpenSSL
        except ImportError:
            logger.warn("The pyOpenSSL module is missing. Install this " \
                        "module to enable HTTPS. HTTPS will be disabled.")
            plexpy.CONFIG.ENABLE_HTTPS = False

    # Try to start the server. Will exit here is address is already in use.
    web_config = {
        'http_port': http_port,
        'http_host': plexpy.CONFIG.HTTP_HOST,
        'http_root': plexpy.CONFIG.HTTP_ROOT,
        'http_environment': plexpy.CONFIG.HTTP_ENVIRONMENT,
        'http_proxy': plexpy.CONFIG.HTTP_PROXY,
        'enable_https': plexpy.CONFIG.ENABLE_HTTPS,
        'https_cert': plexpy.CONFIG.HTTPS_CERT,
        'https_key': plexpy.CONFIG.HTTPS_KEY,
        'http_username': plexpy.CONFIG.HTTP_USERNAME,
        'http_password': plexpy.CONFIG.HTTP_PASSWORD,
        'http_basic_auth': plexpy.CONFIG.HTTP_BASIC_AUTH
    }
    webstart.initialize(web_config)

    # Open webbrowser
    if plexpy.CONFIG.LAUNCH_BROWSER and not args.nolaunch and not plexpy.DEV:
        plexpy.launch_browser(plexpy.CONFIG.HTTP_HOST, http_port,
                              plexpy.CONFIG.HTTP_ROOT)

    # Wait endlessy for a signal to happen
    while True:
        if not plexpy.SIGNAL:
            try:
                time.sleep(1)
            except KeyboardInterrupt:
                plexpy.SIGNAL = 'shutdown'
        else:
            logger.info('Received signal: %s', plexpy.SIGNAL)

            if plexpy.SIGNAL == 'shutdown':
                plexpy.shutdown()
            elif plexpy.SIGNAL == 'restart':
                plexpy.shutdown(restart=True)
            else:
                # Any other signal value triggers a restart with update.
                plexpy.shutdown(restart=True, update=True)

            plexpy.SIGNAL = None
def request_response(url, method="get", auto_raise=True,
                     whitelist_status_code=None, lock=fake_lock, **kwargs):
    """
    Convenient wrapper for `requests.get', which will capture the exceptions
    and log them. On success, the Response object is returned. In case of a
    exception, None is returned.

    Additionally, there is support for rate limiting. To use this feature,
    supply a tuple of (lock, request_limit). The lock is used to make sure no
    other request with the same lock is executed. The request limit is the
    minimal time between two requests (and so 1/request_limit is the number
    of requests per seconds).
    """

    # Convert whitelist_status_code to a list if needed
    # (isinstance replaces the non-idiomatic type() comparison).
    if whitelist_status_code and not isinstance(whitelist_status_code, list):
        whitelist_status_code = [whitelist_status_code]

    # Disable verification of SSL certificates if requested. Note: this could
    # pose a security issue!
    kwargs["verify"] = bool(plexpy.CONFIG.VERIFY_SSL_CERT)

    # Map method to the request.XXX method. This is a simple hack, but it
    # allows requests to apply more magic per method. See lib/requests/api.py.
    request_method = getattr(requests, method.lower())

    try:
        # Request URL and wait for response
        with lock:
            logger.debug(
                "Requesting URL via %s method: %s", method.upper(), url)
            response = request_method(url, **kwargs)

        # If status code != OK, then raise exception, except if the status code
        # is white listed.
        if whitelist_status_code and auto_raise:
            if response.status_code not in whitelist_status_code:
                try:
                    response.raise_for_status()
                except Exception:
                    logger.debug(
                        "Response status code %d is not white "
                        "listed, raised exception", response.status_code)
                    raise
        elif auto_raise:
            response.raise_for_status()

        return response
    except requests.exceptions.SSLError as e:
        if kwargs["verify"]:
            # Fixed missing space between "validity" and "of" in the
            # concatenated message.
            logger.error(
                "Unable to connect to remote host because of a SSL error. "
                "It is likely that your system cannot verify the validity "
                "of the certificate. The remote certificate is either "
                "self-signed, or the remote server uses SNI. See the wiki for "
                "more information on this topic.")
        else:
            logger.error(
                "SSL error raised during connection, with certificate "
                "verification turned off: %s", e)
    except requests.ConnectionError:
        logger.error(
            "Unable to connect to remote host. Check if the remote "
            "host is up and running.")
    except requests.Timeout:
        logger.error(
            "Request timed out. The remote host did not respond timely.")
    except requests.HTTPError as e:
        if e.response is not None:
            if e.response.status_code >= 500:
                cause = "remote server error"
            elif e.response.status_code >= 400:
                cause = "local client error"
            else:
                # I don't think we will end up here, but for completeness
                cause = "unknown"

            # Typo fix: "raise" -> "raised".
            logger.error(
                "Request raised HTTP error with status code %d (%s).",
                e.response.status_code, cause)

            # Debug response
            if plexpy.VERBOSE:
                server_message(e.response)
        else:
            logger.error("Request raised HTTP error.")
    except requests.RequestException as e:
        logger.error("Request raised exception: %s", e)
def find_session_ip(self, rating_key=None, machine_id=None):
    """Search the Plex log tail for the IP address of a stream.

    First looks for lines carrying both the machine identifier and the
    rating key, then retries after 5 seconds against timeline requests.
    Returns the IPv4 address string, or None when nothing usable is found.
    """
    ipv4_pattern = re.compile(r'[0-9]+(?:\.[0-9]+){3}')
    key_marker = 'ratingKey=' + rating_key
    alt_key_marker = 'metadata%2F' + rating_key
    session_marker = 'session=' + machine_id

    def _public_ip(log_line):
        # The logged IP will always be the first match and we don't want localhost entries
        found = ipv4_pattern.findall(log_line)
        if found and found[0] != '127.0.0.1':
            return found[0]
        return None

    logger.debug(u"PlexPy Monitor :: Requesting log lines...")
    tail = log_reader.get_log_tail(window=5000, parsed=False)

    # First pass: a line with both machine id and rating key.
    # This is usually when there is a transcode session.
    for line in reversed(tail):
        if session_marker in line and (key_marker in line or alt_key_marker in line):
            # Currently only checking for ipv4 addresses
            ip = _public_ip(line)
            if ip:
                logger.debug(u"PlexPy Monitor :: Matched IP address (%s) for stream ratingKey %s "
                             u"and machineIdentifier %s." % (ip, rating_key, machine_id))
                return ip

    logger.debug(u"PlexPy Monitor :: Unable to find IP address on first pass. "
                 u"Attempting fallback check in 5 seconds...")

    # Wait for the log to catch up and read in new lines
    time.sleep(5)

    logger.debug(u"PlexPy Monitor :: Requesting log lines...")
    tail = log_reader.get_log_tail(window=5000, parsed=False)

    # Fallback pass: timeline GET requests. This method can return the wrong
    # IP address if more than one user starts watching the same media item
    # around the same time.
    for line in reversed(tail):
        if 'GET /:/timeline' in line and (key_marker in line or alt_key_marker in line):
            ip = _public_ip(line)
            if ip:
                logger.debug(u"PlexPy Monitor :: Matched IP address (%s) for stream ratingKey %s."
                             % (ip, rating_key))
                return ip

    logger.debug(u"PlexPy Monitor :: Unable to find IP address on fallback search. Not logging IP address.")

    return None
def generate_uuid():
    """Generate a random UUID and return it as a 32-character hex string."""
    logger.debug(u"Generating UUID...")
    fresh = uuid.uuid4()
    return fresh.hex
def plex_user_login(username=None, password=None, token=None, headers=None):
    """Authenticate a user against Plex.tv and this server's database.

    Accepts either username+password or an existing Plex.tv token.
    Returns a tuple of (user_details, access_level) on success — where
    access_level is 'admin' or a guest level string — or None when the
    login fails or the user is not allowed access.
    """
    user_token = None
    user_id = None

    # Try to login to Plex.tv to check if the user has a vaild account
    if username and password:
        plex_tv = PlexTV(username=username, password=password, headers=headers)
        plex_user = plex_tv.get_token()
        if plex_user:
            user_token = plex_user['auth_token']
            user_id = plex_user['user_id']
    elif token:
        plex_tv = PlexTV(token=token, headers=headers)
        plex_user = plex_tv.get_plex_account_details()
        if plex_user:
            user_token = token
            user_id = plex_user['user_id']
    else:
        # Neither credentials nor a token supplied.
        return None

    if user_token and user_id:
        # Try to retrieve the user from the database.
        # Also make sure guest access is enabled for the user and the user is not deleted.
        user_data = Users()
        user_details = user_data.get_details(user_id=user_id)
        if int(user_id) != int(user_details['user_id']):
            # The user is not in the database.
            return None
        elif user_details['is_admin']:
            # Plex admin login
            if user_details['is_plextv']:
                account = plexpy.PLEXTV_ACCOUNTS.get_account(
                    user_id=user_details['user_id'])
                if not account.is_validated:
                    # Revalidate the admin's Plex.tv account and restart
                    # its server connections.
                    account.reinit()
                    account.is_validated = True
                    account.refresh_servers()
                    account.start_servers()
            return user_details, 'admin'
        elif not user_details['allow_guest'] or user_details['deleted_user']:
            # Guest access is disabled or the user is deleted.
            return None

        # Stop here if guest access is not enabled
        if not plexpy.CONFIG.ALLOW_GUEST_ACCESS:
            return None

        # The user is in the database, and guest access is enabled, so try to retrieve a server token.
        # If a server token is returned, then the user is a valid friend of the server.
        plex_tv = PlexTV(token=user_token, headers=headers)
        server_tokens = plex_tv.get_server_token()
        if server_tokens:
            # Register the new user / update the access tokens.
            monitor_db = MonitorDatabase()
            try:
                logger.debug(
                    u"Tautulli WebAuth :: Registering tokens for user '%s' in the database."
                    % user_details['username'])
                for server_id, server_token in server_tokens.items():
                    result = monitor_db.action(
                        'UPDATE user_shared_libraries '
                        ' SET server_token = ? '
                        ' WHERE id = (SELECT id FROM users WHERE user_id = ?) '
                        ' AND server_id = ?',
                        [server_token, user_details['user_id'], server_id])

                if server_tokens:
                    # Refresh the users list to make sure we have all the correct permissions.
                    #refresh_users()
                    threading.Thread(target=refresh_users).start()
                    # Successful login
                    if user_details['allow_guest'] in GUEST_ACCESS_LEVELS:
                        guest_level = GUEST_ACCESS_LEVELS[
                            user_details['allow_guest']].lower()
                    else:
                        guest_level = None
                    return user_details, guest_level
                else:
                    logger.warn(
                        u"Tautulli WebAuth :: Unable to register user '%s' in database."
                        % user_details['username'])
                    return None
            except Exception as e:
                logger.warn(
                    u"Tautulli WebAuth :: Unable to register user '%s' in database: %s."
                    % (user_details['username'], e))
                return None
        else:
            logger.warn(
                u"Tautulli WebAuth :: Unable to retrieve Plex.tv server token for user '%s'."
                % user_details['username'])
            return None

    elif username:
        logger.warn(
            u"Tautulli WebAuth :: Unable to retrieve Plex.tv user token for user '%s'."
            % username)
        return None

    elif token:
        logger.warn(
            u"Tautulli WebAuth :: Unable to retrieve Plex.tv user token for Plex OAuth."
        )
        return None
def main():
    """
    Tautulli application entry point. Parses arguments, setups encoding and
    initializes the application.

    Order matters throughout: logging, timezone detection, pidfile, data
    dir, daemonization, config, background threads and the web server each
    depend on the previous steps.
    """

    # Fixed paths to Tautulli
    if hasattr(sys, 'frozen'):
        # Frozen builds report the executable, not __file__.
        plexpy.FULL_PATH = os.path.abspath(sys.executable)
    else:
        plexpy.FULL_PATH = os.path.abspath(__file__)

    plexpy.PROG_DIR = os.path.dirname(plexpy.FULL_PATH)
    plexpy.ARGS = sys.argv[1:]

    # From sickbeard
    plexpy.SYS_PLATFORM = sys.platform
    plexpy.SYS_ENCODING = None

    try:
        locale.setlocale(locale.LC_ALL, "")
        plexpy.SYS_LANGUAGE, plexpy.SYS_ENCODING = locale.getdefaultlocale()
    except (locale.Error, IOError):
        pass

    # for OSes that are poorly configured I'll just force UTF-8
    if not plexpy.SYS_ENCODING or plexpy.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        plexpy.SYS_ENCODING = 'UTF-8'

    # Set up and gather command line arguments
    parser = argparse.ArgumentParser(
        description=
        'A Python based monitoring and tracking tool for Plex Media Server.')

    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Increase console logging verbosity')
    parser.add_argument('-q', '--quiet', action='store_true',
                        help='Turn off console logging')
    parser.add_argument('-d', '--daemon', action='store_true',
                        help='Run as a daemon')
    parser.add_argument('-p', '--port', type=int,
                        help='Force Tautulli to run on a specified port')
    parser.add_argument('--dev', action='store_true',
                        help='Start Tautulli in the development environment')
    parser.add_argument(
        '--datadir', help='Specify a directory where to store your data files')
    parser.add_argument('--config', help='Specify a config file to use')
    parser.add_argument('--nolaunch', action='store_true',
                        help='Prevent browser from launching on startup')
    parser.add_argument(
        '--pidfile', help='Create a pid file (only relevant when running as a daemon)')
    parser.add_argument(
        '--nofork', action='store_true',
        help='Start Tautulli as a service, do not fork when restarting')

    args = parser.parse_args()

    if args.verbose:
        plexpy.VERBOSE = True
    if args.quiet:
        plexpy.QUIET = True

    # Do an intial setup of the logger.
    logger.initLogger(console=not plexpy.QUIET, log_dir=False,
                      verbose=plexpy.VERBOSE)

    try:
        plexpy.SYS_TIMEZONE = tzlocal.get_localzone()
    except (pytz.UnknownTimeZoneError, LookupError, ValueError) as e:
        # Fall back to UTC when the host timezone cannot be determined.
        logger.error("Could not determine system timezone: %s" % e)
        plexpy.SYS_TIMEZONE = pytz.UTC

    plexpy.SYS_UTC_OFFSET = datetime.datetime.now(
        plexpy.SYS_TIMEZONE).strftime('%z')

    if os.getenv('TAUTULLI_DOCKER', False) == 'True':
        plexpy.DOCKER = True

    if args.dev:
        plexpy.DEV = True
        logger.debug(u"Tautulli is running in the dev environment.")

    if args.daemon:
        if sys.platform == 'win32':
            sys.stderr.write(
                "Daemonizing not supported under Windows, starting normally\n")
        else:
            plexpy.DAEMON = True
            plexpy.QUIET = True

    if args.nofork:
        plexpy.NOFORK = True
        logger.info(
            "Tautulli is running as a service, it will not fork when restarted."
        )

    if args.pidfile:
        plexpy.PIDFILE = str(args.pidfile)

        # If the pidfile already exists, plexpy may still be running, so
        # exit
        if os.path.exists(plexpy.PIDFILE):
            try:
                with open(plexpy.PIDFILE, 'r') as fp:
                    pid = int(fp.read())
                # Signal 0 only probes whether the PID exists; raises OSError
                # when it does not.
                os.kill(pid, 0)
            except IOError as e:
                raise SystemExit("Unable to read PID file: %s", e)
            except OSError:
                logger.warn("PID file '%s' already exists, but PID %d is " \
                            "not running. Ignoring PID file." %
                            (plexpy.PIDFILE, pid))
            else:
                # The pidfile exists and points to a live PID. plexpy may
                # still be running, so exit.
                raise SystemExit("PID file '%s' already exists. Exiting." %
                                 plexpy.PIDFILE)

        # The pidfile is only useful in daemon mode, make sure we can write the
        # file properly
        if plexpy.DAEMON:
            plexpy.CREATEPID = True
            try:
                with open(plexpy.PIDFILE, 'w') as fp:
                    fp.write("pid\n")
            except IOError as e:
                raise SystemExit("Unable to write PID file: %s", e)
        else:
            logger.warn("Not running in daemon mode. PID file creation " \
                        "disabled.")

    # Determine which data directory and config file to use
    if args.datadir:
        plexpy.DATA_DIR = args.datadir
    else:
        plexpy.DATA_DIR = plexpy.PROG_DIR

    if args.config:
        config_file = args.config
    else:
        config_file = os.path.join(plexpy.DATA_DIR, config.FILENAME)

    # Try to create the DATA_DIR if it doesn't exist
    if not os.path.exists(plexpy.DATA_DIR):
        try:
            os.makedirs(plexpy.DATA_DIR)
        except OSError:
            raise SystemExit('Could not create data directory: ' +
                             plexpy.DATA_DIR + '. Exiting....')

    # Make sure the DATA_DIR is writeable
    if not os.access(plexpy.DATA_DIR, os.W_OK):
        raise SystemExit('Cannot write to the data directory: ' +
                         plexpy.DATA_DIR + '. Exiting...')

    # Put the database in the DATA_DIR
    plexpy.DB_FILE = os.path.join(plexpy.DATA_DIR, database.FILENAME)

    # Move 'plexpy.db' to 'tautulli.db' (legacy migration from PlexPy).
    if os.path.isfile(os.path.join(plexpy.DATA_DIR, 'plexpy.db')) and \
            not os.path.isfile(os.path.join(plexpy.DATA_DIR, plexpy.DB_FILE)):
        try:
            os.rename(os.path.join(plexpy.DATA_DIR, 'plexpy.db'), plexpy.DB_FILE)
        except OSError as e:
            raise SystemExit("Unable to rename plexpy.db to tautulli.db: %s",
                             e)

    if plexpy.DAEMON:
        plexpy.daemonize()

    # Read config and start logging
    plexpy.initialize(config_file)

    # Start the background threads
    plexpy.start()

    # Force the http port if neccessary
    if args.port:
        plexpy.HTTP_PORT = args.port
        logger.info('Using forced web server port: %i', plexpy.HTTP_PORT)
    else:
        plexpy.HTTP_PORT = int(plexpy.CONFIG.HTTP_PORT)

    # Check if pyOpenSSL is installed. It is required for certificate generation
    # and for CherryPy.
    if plexpy.CONFIG.ENABLE_HTTPS:
        try:
            import OpenSSL
        except ImportError:
            logger.warn("The pyOpenSSL module is missing. Install this " \
                        "module to enable HTTPS. HTTPS will be disabled.")
            plexpy.CONFIG.ENABLE_HTTPS = False

    # Try to start the server. Will exit here is address is already in use.
    webstart.start()

    # Windows system tray icon
    if os.name == 'nt' and plexpy.CONFIG.WIN_SYS_TRAY:
        plexpy.win_system_tray()

    logger.info("Tautulli is ready!")

    # Open webbrowser
    if plexpy.CONFIG.LAUNCH_BROWSER and not args.nolaunch and not plexpy.DEV:
        plexpy.launch_browser(plexpy.CONFIG.HTTP_HOST, plexpy.HTTP_PORT,
                              plexpy.HTTP_ROOT)

    # Wait endlessy for a signal to happen
    while True:
        if not plexpy.SIGNAL:
            try:
                time.sleep(1)
            except KeyboardInterrupt:
                plexpy.SIGNAL = 'shutdown'
        else:
            logger.info('Received signal: %s', plexpy.SIGNAL)

            if plexpy.SIGNAL == 'shutdown':
                plexpy.shutdown()
            elif plexpy.SIGNAL == 'restart':
                plexpy.shutdown(restart=True)
            elif plexpy.SIGNAL == 'checkout':
                plexpy.shutdown(restart=True, checkout=True)
            else:
                # Any other signal value triggers a restart with update.
                plexpy.shutdown(restart=True, update=True)

            plexpy.SIGNAL = None
def on_logout(self, username, user_group):
    """Called on logout"""
    group_label = user_group.capitalize()
    logger.debug(
        u"Tautulli WebAuth :: %s user '%s' logged out of Tautulli." %
        (group_label, username))
def notify(stream_data=None, notify_action=None):
    """Dispatch a playback notification for an active stream.

    Skips users with notifications disabled, then, depending on the stream's
    media type and the corresponding *_NOTIFY_ENABLE config flag, walks every
    available notification agent and fires the ones whose trigger matches
    ``notify_action`` ('play', 'stop', 'pause', 'resume', 'buffer', 'watched').
    Each sent notification is recorded via set_notify_state().

    :param stream_data: dict describing the session (keys used here include
        'user', 'media_type', 'view_offset', 'duration') — presumably a row
        from the sessions table; confirm against callers.
    :param notify_action: the trigger name for this event.
    """
    from plexpy import users

    if stream_data and notify_action:
        # Check if notifications enabled for user
        user_data = users.Users()
        user_details = user_data.get_user_friendly_name(user=stream_data['user'])

        if not user_details['do_notify']:
            return

        if (stream_data['media_type'] == 'movie' and plexpy.CONFIG.MOVIE_NOTIFY_ENABLE) \
                or (stream_data['media_type'] == 'episode' and plexpy.CONFIG.TV_NOTIFY_ENABLE):
            progress_percent = helpers.get_percent(stream_data['view_offset'], stream_data['duration'])

            for agent in notifiers.available_notification_agents():
                if agent['on_play'] and notify_action == 'play':
                    # Build and send notification
                    notify_strings = build_notify_text(session=stream_data, state=notify_action)
                    notifiers.send_notification(config_id=agent['id'],
                                                subject=notify_strings[0],
                                                body=notify_strings[1])
                    # Set the notification state in the db
                    set_notify_state(session=stream_data, state=notify_action, agent_info=agent)

                elif agent['on_stop'] and notify_action == 'stop' \
                        and (plexpy.CONFIG.NOTIFY_CONSECUTIVE or progress_percent < plexpy.CONFIG.NOTIFY_WATCHED_PERCENT):
                    # Only notify on stop when consecutive notifications are allowed or
                    # the stream ended before the "watched" threshold.
                    # Build and send notification
                    notify_strings = build_notify_text(session=stream_data, state=notify_action)
                    notifiers.send_notification(config_id=agent['id'],
                                                subject=notify_strings[0],
                                                body=notify_strings[1])
                    set_notify_state(session=stream_data, state=notify_action, agent_info=agent)

                elif agent['on_pause'] and notify_action == 'pause' \
                        and (plexpy.CONFIG.NOTIFY_CONSECUTIVE or progress_percent < 99):
                    # Build and send notification
                    notify_strings = build_notify_text(session=stream_data, state=notify_action)
                    notifiers.send_notification(config_id=agent['id'],
                                                subject=notify_strings[0],
                                                body=notify_strings[1])
                    set_notify_state(session=stream_data, state=notify_action, agent_info=agent)

                elif agent['on_resume'] and notify_action == 'resume' \
                        and (plexpy.CONFIG.NOTIFY_CONSECUTIVE or progress_percent < 99):
                    # Build and send notification
                    notify_strings = build_notify_text(session=stream_data, state=notify_action)
                    notifiers.send_notification(config_id=agent['id'],
                                                subject=notify_strings[0],
                                                body=notify_strings[1])
                    set_notify_state(session=stream_data, state=notify_action, agent_info=agent)

                elif agent['on_buffer'] and notify_action == 'buffer':
                    # Build and send notification
                    notify_strings = build_notify_text(session=stream_data, state=notify_action)
                    notifiers.send_notification(config_id=agent['id'],
                                                subject=notify_strings[0],
                                                body=notify_strings[1])
                    set_notify_state(session=stream_data, state=notify_action, agent_info=agent)

                elif agent['on_watched'] and notify_action == 'watched':
                    # Get the current states for notifications from our db
                    notify_states = get_notify_state(session=stream_data)

                    # If there is nothing in the notify_log for our agent id but it is enabled we should notify
                    if not any(d['agent_id'] == agent['id'] for d in notify_states):
                        # Build and send notification
                        notify_strings = build_notify_text(session=stream_data, state=notify_action)
                        notifiers.send_notification(config_id=agent['id'],
                                                    subject=notify_strings[0],
                                                    body=notify_strings[1])
                        # Set the notification state in the db
                        set_notify_state(session=stream_data, state=notify_action, agent_info=agent)
                    else:
                        # Check in our notify log if the notification has already been sent
                        for notify_state in notify_states:
                            if not notify_state['on_watched'] and (notify_state['agent_id'] == agent['id']):
                                # Build and send notification
                                notify_strings = build_notify_text(session=stream_data, state=notify_action)
                                notifiers.send_notification(config_id=agent['id'],
                                                            subject=notify_strings[0],
                                                            body=notify_strings[1])
                                # Set the notification state in the db
                                set_notify_state(session=stream_data, state=notify_action, agent_info=agent)

        elif (stream_data['media_type'] == 'track' and plexpy.CONFIG.MUSIC_NOTIFY_ENABLE):
            # Music tracks: same agent dispatch, but with no progress-percent
            # gating and no 'watched' trigger.
            for agent in notifiers.available_notification_agents():
                if agent['on_play'] and notify_action == 'play':
                    # Build and send notification
                    notify_strings = build_notify_text(session=stream_data, state=notify_action)
                    notifiers.send_notification(config_id=agent['id'],
                                                subject=notify_strings[0],
                                                body=notify_strings[1])
                    # Set the notification state in the db
                    set_notify_state(session=stream_data, state=notify_action, agent_info=agent)

                elif agent['on_stop'] and notify_action == 'stop':
                    # Build and send notification
                    notify_strings = build_notify_text(session=stream_data, state=notify_action)
                    notifiers.send_notification(config_id=agent['id'],
                                                subject=notify_strings[0],
                                                body=notify_strings[1])
                    # Set the notification state in the db
                    set_notify_state(session=stream_data, state=notify_action, agent_info=agent)

                elif agent['on_pause'] and notify_action == 'pause':
                    # Build and send notification
                    notify_strings = build_notify_text(session=stream_data, state=notify_action)
                    notifiers.send_notification(config_id=agent['id'],
                                                subject=notify_strings[0],
                                                body=notify_strings[1])
                    # Set the notification state in the db
                    set_notify_state(session=stream_data, state=notify_action, agent_info=agent)

                elif agent['on_resume'] and notify_action == 'resume':
                    # Build and send notification
                    notify_strings = build_notify_text(session=stream_data, state=notify_action)
                    notifiers.send_notification(config_id=agent['id'],
                                                subject=notify_strings[0],
                                                body=notify_strings[1])
                    # Set the notification state in the db
                    set_notify_state(session=stream_data, state=notify_action, agent_info=agent)

                elif agent['on_buffer'] and notify_action == 'buffer':
                    # Build and send notification
                    notify_strings = build_notify_text(session=stream_data, state=notify_action)
                    notifiers.send_notification(config_id=agent['id'],
                                                subject=notify_strings[0],
                                                body=notify_strings[1])
                    # Set the notification state in the db
                    set_notify_state(session=stream_data, state=notify_action, agent_info=agent)

        elif stream_data['media_type'] == 'clip':
            # Clips (trailers, extras) deliberately generate no notifications.
            pass
        else:
            #logger.debug(u"PlexPy Notifier :: Notify called with unsupported media type.")
            pass
    else:
        logger.debug(u"PlexPy Notifier :: Notify called but incomplete data received.")
def dbcheck():
    """Create and migrate the PlexPy/Tautulli SQLite schema.

    Creates all tables with ``CREATE TABLE IF NOT EXISTS`` and then applies
    incremental upgrades: each upgrade probes for a column with a SELECT and,
    on ``sqlite3.OperationalError`` (column missing), issues the matching
    ``ALTER TABLE ... ADD COLUMN`` statements. Also seeds the default
    unauthenticated "Local" user (user_id 0) if it is absent.

    Fix: the seed-user existence check previously compared against the
    garbled literal ``"******"`` instead of ``"Local"``, so the guard never
    matched and the INSERT was re-attempted on every start against the
    UNIQUE ``username`` column.

    Uses the module-level ``DB_FILE`` path. Commits and closes on completion.
    """
    conn_db = sqlite3.connect(DB_FILE)
    c_db = conn_db.cursor()

    # sessions table :: This is a temp table that logs currently active sessions
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS sessions (id INTEGER PRIMARY KEY AUTOINCREMENT, '
        'session_key INTEGER, rating_key INTEGER, media_type TEXT, started INTEGER, '
        'paused_counter INTEGER DEFAULT 0, state TEXT, user_id INTEGER, user TEXT, friendly_name TEXT, '
        'ip_address TEXT, machine_id TEXT, player TEXT, platform TEXT, title TEXT, parent_title TEXT, '
        'grandparent_title TEXT, parent_rating_key INTEGER, grandparent_rating_key INTEGER, '
        'view_offset INTEGER DEFAULT 0, duration INTEGER, video_decision TEXT, audio_decision TEXT, '
        'width INTEGER, height INTEGER, container TEXT, video_codec TEXT, audio_codec TEXT, '
        'bitrate INTEGER, video_resolution TEXT, video_framerate TEXT, aspect_ratio TEXT, '
        'audio_channels INTEGER, transcode_protocol TEXT, transcode_container TEXT, '
        'transcode_video_codec TEXT, transcode_audio_codec TEXT, transcode_audio_channels INTEGER,'
        'transcode_width INTEGER, transcode_height INTEGER, buffer_count INTEGER DEFAULT 0, '
        'buffer_last_triggered INTEGER, last_paused INTEGER)')

    # session_history table :: This is a history table which logs essential stream details
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS session_history (id INTEGER PRIMARY KEY AUTOINCREMENT, reference_id INTEGER, '
        'started INTEGER, stopped INTEGER, rating_key INTEGER, user_id INTEGER, user TEXT, '
        'ip_address TEXT, paused_counter INTEGER DEFAULT 0, player TEXT, platform TEXT, machine_id TEXT, '
        'parent_rating_key INTEGER, grandparent_rating_key INTEGER, media_type TEXT, view_offset INTEGER DEFAULT 0)')

    # session_history_media_info table :: This is a table which logs each session's media info
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS session_history_media_info (id INTEGER PRIMARY KEY, '
        'rating_key INTEGER, video_decision TEXT, audio_decision TEXT, duration INTEGER DEFAULT 0, width INTEGER, '
        'height INTEGER, container TEXT, video_codec TEXT, audio_codec TEXT, bitrate INTEGER, video_resolution TEXT, '
        'video_framerate TEXT, aspect_ratio TEXT, audio_channels INTEGER, transcode_protocol TEXT, '
        'transcode_container TEXT, transcode_video_codec TEXT, transcode_audio_codec TEXT, '
        'transcode_audio_channels INTEGER, transcode_width INTEGER, transcode_height INTEGER)')

    # session_history_metadata table :: This is a table which logs each session's media metadata
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS session_history_metadata (id INTEGER PRIMARY KEY, '
        'rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, '
        'title TEXT, parent_title TEXT, grandparent_title TEXT, full_title TEXT, media_index INTEGER, '
        'parent_media_index INTEGER, thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, art TEXT, media_type TEXT, '
        'year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, last_viewed_at INTEGER, '
        'content_rating TEXT, summary TEXT, tagline TEXT, rating TEXT, duration INTEGER DEFAULT 0, guid TEXT, '
        'directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT)')

    # users table :: This table keeps record of the friends list
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY AUTOINCREMENT, '
        'user_id INTEGER DEFAULT NULL UNIQUE, username TEXT NOT NULL UNIQUE, '
        'friendly_name TEXT, thumb TEXT, email TEXT, custom_avatar_url TEXT, is_home_user INTEGER DEFAULT NULL, '
        'is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL, do_notify INTEGER DEFAULT 1, '
        'keep_history INTEGER DEFAULT 1, deleted_user INTEGER DEFAULT 0)')

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT started from sessions')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table sessions.")
        c_db.execute('ALTER TABLE sessions ADD COLUMN started INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN paused_counter INTEGER DEFAULT 0')
        c_db.execute('ALTER TABLE sessions ADD COLUMN state TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN user TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN machine_id TEXT')

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT title from sessions')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table sessions.")
        c_db.execute('ALTER TABLE sessions ADD COLUMN title TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN parent_title TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN grandparent_title TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN friendly_name TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN player TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN user_id INTEGER')

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT ip_address from sessions')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table sessions.")
        c_db.execute('ALTER TABLE sessions ADD COLUMN ip_address TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN platform TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN parent_rating_key INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN grandparent_rating_key INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN view_offset INTEGER DEFAULT 0')
        c_db.execute('ALTER TABLE sessions ADD COLUMN duration INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN video_decision TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN audio_decision TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN width INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN height INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN container TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN video_codec TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN audio_codec TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN bitrate INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN video_resolution TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN video_framerate TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN aspect_ratio TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN audio_channels INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_protocol TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_container TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_video_codec TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_audio_codec TEXT')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_audio_channels INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_width INTEGER')
        c_db.execute('ALTER TABLE sessions ADD COLUMN transcode_height INTEGER')

    # Upgrade session_history_metadata table from earlier versions
    try:
        c_db.execute('SELECT full_title from session_history_metadata')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table session_history_metadata.")
        c_db.execute('ALTER TABLE session_history_metadata ADD COLUMN full_title TEXT')

    # Upgrade session_history_metadata table from earlier versions
    try:
        c_db.execute('SELECT tagline from session_history_metadata')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table session_history_metadata.")
        c_db.execute('ALTER TABLE session_history_metadata ADD COLUMN tagline TEXT')

    # notify_log table :: This is a table which logs notifications sent
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS notify_log (id INTEGER PRIMARY KEY AUTOINCREMENT, '
        'session_key INTEGER, rating_key INTEGER, user_id INTEGER, user TEXT, '
        'agent_id INTEGER, agent_name TEXT, on_play INTEGER, on_stop INTEGER, on_watched INTEGER, '
        'on_pause INTEGER, on_resume INTEGER, on_buffer INTEGER, on_created INTEGER)')

    # Upgrade users table from earlier versions
    try:
        c_db.execute('SELECT do_notify from users')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table users.")
        c_db.execute('ALTER TABLE users ADD COLUMN do_notify INTEGER DEFAULT 1')

    # Upgrade users table from earlier versions
    try:
        c_db.execute('SELECT keep_history from users')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table users.")
        c_db.execute('ALTER TABLE users ADD COLUMN keep_history INTEGER DEFAULT 1')

    # Upgrade notify_log table from earlier versions
    try:
        c_db.execute('SELECT on_pause from notify_log')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table notify_log.")
        c_db.execute('ALTER TABLE notify_log ADD COLUMN on_pause INTEGER')
        c_db.execute('ALTER TABLE notify_log ADD COLUMN on_resume INTEGER')
        c_db.execute('ALTER TABLE notify_log ADD COLUMN on_buffer INTEGER')

    # Upgrade notify_log table from earlier versions
    try:
        c_db.execute('SELECT on_created from notify_log')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table notify_log.")
        c_db.execute('ALTER TABLE notify_log ADD COLUMN on_created INTEGER')

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT buffer_count from sessions')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table sessions.")
        c_db.execute('ALTER TABLE sessions ADD COLUMN buffer_count INTEGER DEFAULT 0')
        c_db.execute('ALTER TABLE sessions ADD COLUMN buffer_last_triggered INTEGER')

    # Upgrade users table from earlier versions
    try:
        c_db.execute('SELECT custom_avatar_url from users')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table users.")
        c_db.execute('ALTER TABLE users ADD COLUMN custom_avatar_url TEXT')

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT last_paused from sessions')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table sessions.")
        c_db.execute('ALTER TABLE sessions ADD COLUMN last_paused INTEGER')

    # Add "Local" user to database as default unauthenticated user.
    # Fixed: the existence check must look up "Local" (the username inserted
    # below), otherwise the guard never matches and the INSERT is retried on
    # every startup against the UNIQUE username column.
    result = c_db.execute('SELECT id FROM users WHERE username = "Local"')
    if not result.fetchone():
        logger.debug(u'User "Local" does not exist. Adding user.')
        c_db.execute('INSERT INTO users (user_id, username) VALUES (0, "Local")')

    # Upgrade session_history table from earlier versions
    try:
        c_db.execute('SELECT reference_id from session_history')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table session_history.")
        c_db.execute('ALTER TABLE session_history ADD COLUMN reference_id INTEGER DEFAULT 0')
        # Set reference_id to the first row where (user_id = previous row, rating_key != previous row) and user_id = user_id
        c_db.execute(
            'UPDATE session_history ' \
            'SET reference_id = (SELECT (CASE \
             WHEN (SELECT MIN(id) FROM session_history WHERE id > ( \
                 SELECT MAX(id) FROM session_history \
                 WHERE (user_id = t1.user_id AND rating_key <> t1.rating_key AND id < t1.id)) AND user_id = t1.user_id) IS NULL \
             THEN (SELECT MIN(id) FROM session_history WHERE (user_id = t1.user_id)) \
             ELSE (SELECT MIN(id) FROM session_history WHERE id > ( \
                 SELECT MAX(id) FROM session_history \
                 WHERE (user_id = t1.user_id AND rating_key <> t1.rating_key AND id < t1.id)) AND user_id = t1.user_id) END) ' \
            'FROM session_history AS t1 ' \
            'WHERE t1.id = session_history.id) ')

    # Upgrade users table from earlier versions
    try:
        c_db.execute('SELECT deleted_user from users')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table users.")
        c_db.execute('ALTER TABLE users ADD COLUMN deleted_user INTEGER DEFAULT 0')

    conn_db.commit()
    c_db.close()
def check_github(auto_update=False, notify=False):
    """Compare the installed version against the latest GitHub commit.

    Fetches the head commit of the configured branch, computes how many
    commits behind the install is via the GitHub compare API, and — when
    behind — resolves the matching release tag into ``plexpy.LATEST_RELEASE``.
    Optionally queues an update notification and/or triggers an automatic
    restart-with-update.

    Fix: validator lambdas used ``type(x) == dict`` / ``type(x) == list``;
    replaced with ``isinstance``, the idiomatic and equivalent check for
    JSON-decoded values.

    :param auto_update: when True and commits are behind, restart and update.
    :param notify: when True and commits are behind, queue an
        'on_plexpyupdate' notification.
    :return: the latest known version sha (or the current version if the
        lookup fails).
    """
    plexpy.COMMITS_BEHIND = 0

    # Get the latest version available from github
    logger.info('Retrieving latest version information from GitHub')
    url = 'https://api.github.com/repos/%s/%s/commits/%s' % (
        plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_REPO, plexpy.CONFIG.GIT_BRANCH)
    if plexpy.CONFIG.GIT_TOKEN:
        url = url + '?access_token=%s' % plexpy.CONFIG.GIT_TOKEN
    version = request.request_json(url, timeout=20, validator=lambda x: isinstance(x, dict))

    if version is None:
        logger.warn('Could not get the latest version from GitHub. Are you running a local development version?')
        return plexpy.CURRENT_VERSION

    plexpy.LATEST_VERSION = version['sha']
    logger.debug("Latest version is %s", plexpy.LATEST_VERSION)

    # See how many commits behind we are
    if not plexpy.CURRENT_VERSION:
        logger.info('You are running an unknown version of Tautulli. Run the updater to identify your version')
        return plexpy.LATEST_VERSION

    if plexpy.LATEST_VERSION == plexpy.CURRENT_VERSION:
        logger.info('Tautulli is up to date')
        return plexpy.LATEST_VERSION

    logger.info('Comparing currently installed version with latest GitHub version')
    url = 'https://api.github.com/repos/%s/%s/compare/%s...%s' % (
        plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_REPO,
        plexpy.LATEST_VERSION, plexpy.CURRENT_VERSION)
    if plexpy.CONFIG.GIT_TOKEN:
        url = url + '?access_token=%s' % plexpy.CONFIG.GIT_TOKEN
    # 404 is whitelisted: compare fails when the local commit is unknown upstream.
    commits = request.request_json(url, timeout=20, whitelist_status_code=404,
                                   validator=lambda x: isinstance(x, dict))

    if commits is None:
        logger.warn('Could not get commits behind from GitHub.')
        return plexpy.LATEST_VERSION

    try:
        plexpy.COMMITS_BEHIND = int(commits['behind_by'])
        logger.debug("In total, %d commits behind", plexpy.COMMITS_BEHIND)
    except KeyError:
        logger.info('Cannot compare versions. Are you running a local development version?')
        plexpy.COMMITS_BEHIND = 0

    if plexpy.COMMITS_BEHIND > 0:
        logger.info('New version is available. You are %s commits behind' % plexpy.COMMITS_BEHIND)

        url = 'https://api.github.com/repos/%s/%s/releases' % (
            plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_REPO)
        releases = request.request_json(url, timeout=20, whitelist_status_code=404,
                                        validator=lambda x: isinstance(x, list))

        if releases is None or len(releases) == 0:
            logger.warn('Could not get releases from GitHub.')
            return plexpy.LATEST_VERSION

        # Pick the release matching the tracked branch's stability level,
        # falling back to the most recent release.
        if plexpy.CONFIG.GIT_BRANCH == 'master':
            release = next((r for r in releases if not r['prerelease']), releases[0])
        elif plexpy.CONFIG.GIT_BRANCH == 'beta':
            release = next((r for r in releases if not r['tag_name'].endswith('-nightly')), releases[0])
        elif plexpy.CONFIG.GIT_BRANCH == 'nightly':
            release = next((r for r in releases), releases[0])
        else:
            release = releases[0]

        plexpy.LATEST_RELEASE = release['tag_name']

        if notify:
            plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_plexpyupdate',
                                     'plexpy_download_info': release,
                                     'plexpy_update_commit': plexpy.LATEST_VERSION,
                                     'plexpy_update_behind': plexpy.COMMITS_BEHIND})

        if auto_update:
            logger.info('Running automatic update.')
            plexpy.shutdown(restart=True, update=True)

    elif plexpy.COMMITS_BEHIND == 0:
        logger.info('Tautulli is up to date')

    return plexpy.LATEST_VERSION
def check_active_sessions(ws_request=False):
    """Poll the Plex server for activity and reconcile it with the sessions table.

    Under ``monitor_lock``: fetches current activity, detects pause/resume and
    buffering transitions for known sessions, fires threaded notifications for
    matching triggers, accumulates paused time, writes finished sessions to
    history, and tracks server-unreachable pings via the module-global
    ``int_ping_count`` (intdown fires on the 3rd failed ping, intup when the
    server answers again after 3+ failures).

    :param ws_request: when True, skip the interval-based paused_counter
        increment (websocket callers update state on events, not on a timer).
    """

    with monitor_lock:
        pms_connect = pmsconnect.PmsConnect()
        session_list = pms_connect.get_current_activity()
        monitor_db = database.MonitorDatabase()
        monitor_process = activity_processor.ActivityProcessor()
        # logger.debug(u"PlexPy Monitor :: Checking for active streams.")

        global int_ping_count

        if session_list:
            # Server responded: if we had previously accumulated 3+ failed
            # pings, announce recovery before resetting the counter.
            if int_ping_count >= 3:
                logger.info(u"PlexPy Monitor :: The Plex Media Server is back up.")
                # Check if any notification agents have notifications enabled
                if any(d['on_intup'] for d in notifiers.available_notification_agents()):
                    # Fire off notifications
                    threading.Thread(target=notification_handler.notify_timeline,
                                     kwargs=dict(notify_action='intup')).start()
            int_ping_count = 0

            media_container = session_list['sessions']

            # Check our temp table for what we must do with the new streams
            db_streams = monitor_db.select('SELECT * FROM sessions')
            for stream in db_streams:
                if any(d['session_key'] == str(stream['session_key']) and d['rating_key'] == str(stream['rating_key'])
                       for d in media_container):
                    # The user's session is still active
                    for session in media_container:
                        if session['session_key'] == str(stream['session_key']) and \
                                session['rating_key'] == str(stream['rating_key']):
                            # The user is still playing the same media item
                            # Here we can check the play states
                            if session['state'] != stream['state']:
                                if session['state'] == 'paused':
                                    logger.debug(u"PlexPy Monitor :: Session %s has been paused." % stream['session_key'])

                                    # Check if any notification agents have notifications enabled
                                    if any(d['on_pause'] for d in notifiers.available_notification_agents()):
                                        # Push any notifications -
                                        # Push it on it's own thread so we don't hold up our db actions
                                        threading.Thread(target=notification_handler.notify,
                                                         kwargs=dict(stream_data=stream,
                                                                     notify_action='pause')).start()

                                if session['state'] == 'playing' and stream['state'] == 'paused':
                                    logger.debug(u"PlexPy Monitor :: Session %s has been resumed." % stream['session_key'])

                                    # Check if any notification agents have notifications enabled
                                    if any(d['on_resume'] for d in notifiers.available_notification_agents()):
                                        # Push any notifications -
                                        # Push it on it's own thread so we don't hold up our db actions
                                        threading.Thread(target=notification_handler.notify,
                                                         kwargs=dict(stream_data=stream,
                                                                     notify_action='resume')).start()

                            if stream['state'] == 'paused' and not ws_request:
                                # The stream is still paused so we need to increment the paused_counter
                                # Using the set config parameter as the interval, probably not the most accurate but
                                # it will have to do for now. If it's a websocket request don't use this method.
                                paused_counter = int(stream['paused_counter']) + plexpy.CONFIG.MONITORING_INTERVAL
                                monitor_db.action('UPDATE sessions SET paused_counter = ? '
                                                  'WHERE session_key = ? AND rating_key = ?',
                                                  [paused_counter, stream['session_key'], stream['rating_key']])

                            if session['state'] == 'buffering' and plexpy.CONFIG.BUFFER_THRESHOLD > 0:
                                # The stream is buffering so we need to increment the buffer_count
                                # We're going just increment on every monitor ping,
                                # would be difficult to keep track otherwise
                                monitor_db.action('UPDATE sessions SET buffer_count = buffer_count + 1 '
                                                  'WHERE session_key = ? AND rating_key = ?',
                                                  [stream['session_key'], stream['rating_key']])

                                # Check the current buffer count and last buffer to determine if we should notify
                                buffer_values = monitor_db.select('SELECT buffer_count, buffer_last_triggered '
                                                                  'FROM sessions '
                                                                  'WHERE session_key = ? AND rating_key = ?',
                                                                  [stream['session_key'], stream['rating_key']])

                                if buffer_values[0]['buffer_count'] >= plexpy.CONFIG.BUFFER_THRESHOLD:
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    # Our first buffer notification
                                    if buffer_values[0]['buffer_count'] == plexpy.CONFIG.BUFFER_THRESHOLD:
                                        logger.info(u"PlexPy Monitor :: User '%s' has triggered a buffer warning."
                                                    % stream['user'])
                                        # Set the buffer trigger time
                                        monitor_db.action('UPDATE sessions '
                                                          'SET buffer_last_triggered = strftime("%s","now") '
                                                          'WHERE session_key = ? AND rating_key = ?',
                                                          [stream['session_key'], stream['rating_key']])

                                        # Check if any notification agents have notifications enabled
                                        if any(d['on_buffer'] for d in notifiers.available_notification_agents()):
                                            # Push any notifications -
                                            # Push it on it's own thread so we don't hold up our db actions
                                            threading.Thread(target=notification_handler.notify,
                                                             kwargs=dict(stream_data=stream,
                                                                         notify_action='buffer')).start()
                                    else:
                                        # Subsequent buffer notifications after wait time
                                        if int(time.time()) > buffer_values[0]['buffer_last_triggered'] + \
                                                plexpy.CONFIG.BUFFER_WAIT:
                                            logger.info(u"PlexPy Monitor :: User '%s' has triggered multiple buffer warnings."
                                                        % stream['user'])
                                            # Set the buffer trigger time
                                            monitor_db.action('UPDATE sessions '
                                                              'SET buffer_last_triggered = strftime("%s","now") '
                                                              'WHERE session_key = ? AND rating_key = ?',
                                                              [stream['session_key'], stream['rating_key']])

                                            # Check if any notification agents have notifications enabled
                                            if any(d['on_buffer'] for d in notifiers.available_notification_agents()):
                                                # Push any notifications -
                                                # Push it on it's own thread so we don't hold up our db actions
                                                threading.Thread(target=notification_handler.notify,
                                                                 kwargs=dict(stream_data=stream,
                                                                             notify_action='buffer')).start()

                                logger.debug(u"PlexPy Monitor :: Session %s is buffering. Count is now %s. Last triggered %s."
                                             % (stream['session_key'],
                                                buffer_values[0]['buffer_count'],
                                                buffer_values[0]['buffer_last_triggered']))

                            # Check if the user has reached the offset in the media we defined as the "watched" percent
                            # Don't trigger if state is buffer as some clients push the progress to the end when
                            # buffering on start.
                            if session['view_offset'] and session['duration'] and session['state'] != 'buffering':
                                if helpers.get_percent(session['view_offset'],
                                                       session['duration']) > plexpy.CONFIG.NOTIFY_WATCHED_PERCENT:
                                    # Check if any notification agents have notifications enabled
                                    if any(d['on_watched'] for d in notifiers.available_notification_agents()):
                                        # Push any notifications -
                                        # Push it on it's own thread so we don't hold up our db actions
                                        threading.Thread(target=notification_handler.notify,
                                                         kwargs=dict(stream_data=stream,
                                                                     notify_action='watched')).start()

                else:
                    # The user has stopped playing a stream
                    if stream['state'] != 'stopped':
                        logger.debug(u"PlexPy Monitor :: Session %s has stopped." % stream['session_key'])

                        # Set the stream stop time
                        stream['stopped'] = int(time.time())
                        monitor_db.action('UPDATE sessions SET stopped = ?, state = ? '
                                          'WHERE session_key = ? AND rating_key = ?',
                                          [stream['stopped'], 'stopped', stream['session_key'], stream['rating_key']])

                    # Check if the user has reached the offset in the media we defined as the "watched" percent
                    if stream['view_offset'] and stream['duration']:
                        if helpers.get_percent(stream['view_offset'],
                                               stream['duration']) > plexpy.CONFIG.NOTIFY_WATCHED_PERCENT:
                            # Check if any notification agents have notifications enabled
                            if any(d['on_watched'] for d in notifiers.available_notification_agents()):
                                # Push any notifications -
                                # Push it on it's own thread so we don't hold up our db actions
                                threading.Thread(target=notification_handler.notify,
                                                 kwargs=dict(stream_data=stream,
                                                             notify_action='watched')).start()

                    # Check if any notification agents have notifications enabled
                    if any(d['on_stop'] for d in notifiers.available_notification_agents()):
                        # Push any notifications - Push it on it's own thread so we don't hold up our db actions
                        threading.Thread(target=notification_handler.notify,
                                         kwargs=dict(stream_data=stream,
                                                     notify_action='stop')).start()

                    # Write the item history on playback stop
                    success = monitor_process.write_session_history(session=stream)

                    if success:
                        # If session is written to the databaase successfully, remove the session from the session table
                        logger.debug(u"PlexPy Monitor :: Removing sessionKey %s ratingKey %s from session queue"
                                     % (stream['session_key'], stream['rating_key']))
                        monitor_db.action('DELETE FROM sessions WHERE session_key = ? AND rating_key = ?',
                                          [stream['session_key'], stream['rating_key']])
                    else:
                        logger.warn(u"PlexPy Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \
                                    "Will try again on the next pass." % (stream['session_key'], stream['rating_key']))

            # Process the newly received session data
            for session in media_container:
                new_session = monitor_process.write_session(session)

                if new_session:
                    logger.debug(u"PlexPy Monitor :: Session %s has started." % session['session_key'])

        else:
            logger.debug(u"PlexPy Monitor :: Unable to read session list.")

            int_ping_count += 1
            logger.warn(u"PlexPy Monitor :: Unable to get an internal response from the server, ping attempt %s." \
                        % str(int_ping_count))

            # NOTE(review): fires exactly on the 3rd consecutive failure
            # (== 3, not >= 3) so intdown is sent only once per outage.
            if int_ping_count == 3:
                # Check if any notification agents have notifications enabled
                if any(d['on_intdown'] for d in notifiers.available_notification_agents()):
                    # Fire off notifications
                    threading.Thread(target=notification_handler.notify_timeline,
                                     kwargs=dict(notify_action='intdown')).start()
def update():
    """Update the Tautulli installation in place.

    Behavior depends on ``plexpy.INSTALL_TYPE``:
      * 'win'  — updating the Windows .exe is not supported; only logs.
      * 'git'  — verifies the working tree has no local differences (restoring
        requirements.txt if it alone drifted), then runs ``git pull`` and
        inspects its output for 'Already up-to-date.' / 'Aborting' markers.
      * other  — downloads the branch tarball from GitHub, extracts it to a
        temp 'update' directory, moves the files over the install, and writes
        the new version sha to version.txt.

    Finishes by syncing pip requirements and logging completion.

    :return: False when local git differences block the update; True on the
        completion path; None (implicit) on the early error returns.
    """
    if plexpy.INSTALL_TYPE == 'win':
        logger.info('Windows .exe updating not supported yet.')

    elif plexpy.INSTALL_TYPE == 'git':
        output, err = runGit('diff --name-only %s/%s' % (plexpy.CONFIG.GIT_REMOTE, plexpy.CONFIG.GIT_BRANCH))

        if output == '':
            logger.debug("No differences found from the origin")
        elif output == 'requirements.txt':
            # A lone requirements.txt drift is recoverable: restore it from origin.
            logger.warn('Requirements file is out of sync. Restoring to original.')
            output, err = runGit('checkout %s/%s requirements.txt' % (plexpy.CONFIG.GIT_REMOTE, plexpy.CONFIG.GIT_BRANCH))
        else:
            logger.error("Differences Found. Unable to update.")
            return False

        output, err = runGit('pull ' + plexpy.CONFIG.GIT_REMOTE + ' ' + plexpy.CONFIG.GIT_BRANCH)

        if not output:
            logger.error('Unable to download latest version')
            return

        for line in output.split('\n'):
            if 'Already up-to-date.' in line:
                logger.info('No update available, not updating')
                logger.info('Output: ' + output)
            elif line.endswith(('Aborting', 'Aborting.')):
                logger.error('Unable to update from git: ' + line)
                logger.info('Output: ' + output)

    else:
        # Source install without git: fetch and unpack the branch tarball.
        tar_download_url = 'https://github.com/{}/{}/tarball/{}'.format(
            plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_REPO, plexpy.CONFIG.GIT_BRANCH)
        update_dir = os.path.join(plexpy.PROG_DIR, 'update')
        version_path = os.path.join(plexpy.PROG_DIR, 'version.txt')

        logger.info('Downloading update from: ' + tar_download_url)
        data = request.request_content(tar_download_url)

        if not data:
            logger.error("Unable to retrieve new version from '%s', can't update", tar_download_url)
            return

        download_name = plexpy.CONFIG.GIT_BRANCH + '-github'
        tar_download_path = os.path.join(plexpy.PROG_DIR, download_name)

        # Save tar to disk
        with open(tar_download_path, 'wb') as f:
            f.write(data)

        # Extract the tar to update folder
        logger.info('Extracting file: ' + tar_download_path)
        tar = tarfile.open(tar_download_path)
        tar.extractall(update_dir)
        tar.close()

        # Delete the tar.gz
        logger.info('Deleting file: ' + tar_download_path)
        os.remove(tar_download_path)

        # Find update dir name
        # (GitHub tarballs unpack to a single '<user>-<repo>-<sha>' directory;
        # anything else means the archive was malformed.)
        update_dir_contents = [x for x in os.listdir(update_dir)
                               if os.path.isdir(os.path.join(update_dir, x))]
        if len(update_dir_contents) != 1:
            logger.error("Invalid update data, update failed: " + str(update_dir_contents))
            return
        content_dir = os.path.join(update_dir, update_dir_contents[0])

        # walk temp folder and move files to main folder
        for dirname, dirnames, filenames in os.walk(content_dir):
            dirname = dirname[len(content_dir) + 1:]
            for curfile in filenames:
                old_path = os.path.join(content_dir, dirname, curfile)
                new_path = os.path.join(plexpy.PROG_DIR, dirname, curfile)

                if os.path.isfile(new_path):
                    os.remove(new_path)
                os.renames(old_path, new_path)

        # Update version.txt
        try:
            with open(version_path, 'w') as f:
                f.write(str(plexpy.LATEST_VERSION))
        except IOError as e:
            logger.error("Unable to write current version to version.txt, update not complete: %s", e)
            return

    # NOTE(review): placement reconstructed from whitespace-mangled source —
    # pip_sync() and the completion log appear to run for all install types
    # that reach this point; confirm against upstream history.
    output, err = pip_sync()

    logger.info("Tautulli Update Complete")
    return True
def get_datatables_media_info(self, section_id=None, section_type=None, rating_key=None, refresh=False, kwargs=None):
    """Build the datatables JSON payload for the library media info table.

    Combines play counts from the local history database with the library's
    item list (loaded from a per-section/per-rating_key JSON cache when
    available, otherwise fetched from the Plex server), then applies the
    datatables search/sort/pagination parameters from ``kwargs``.

    Args:
        section_id: numeric library section id (validated as digits).
        section_type: library type; defaults to the section's own type.
        rating_key: optional numeric key to scope to one item's children.
        refresh: force a re-fetch from the Plex server, ignoring the cache.
        kwargs: request kwargs; 'json_data' carries the datatables params.

    Returns:
        A dict with recordsFiltered/recordsTotal/data/draw plus file-size
        totals, or ``default_return`` on any validation/query failure.
    """
    from plexpy import pmsconnect
    import json, os

    default_return = {'recordsFiltered': 0,
                      'recordsTotal': 0,
                      'draw': 0,
                      'data': None,
                      'error': 'Unable to execute database query.'}

    if section_id and not str(section_id).isdigit():
        logger.warn(u"PlexPy Libraries :: Datatable media info called by invalid section_id provided.")
        return default_return
    elif rating_key and not str(rating_key).isdigit():
        logger.warn(u"PlexPy Libraries :: Datatable media info called by invalid rating_key provided.")
        return default_return

    # Get the library details
    library_details = self.get_details(section_id=section_id)
    if library_details['section_id'] == None:
        logger.debug(u"PlexPy Libraries :: Library section_id %s not found." % section_id)
        return default_return

    if not section_type:
        section_type = library_details['section_type']

    # Get play counts from the database
    monitor_db = database.MonitorDatabase()

    # Grouped history counts distinct reference_ids; otherwise each row.
    if plexpy.CONFIG.GROUP_HISTORY_TABLES:
        count_by = 'reference_id'
    else:
        count_by = 'id'

    # Aggregate plays at the level matching the section type (e.g. a show's
    # plays are summed over its episodes via grandparent_rating_key).
    if section_type == 'show' or section_type == 'artist':
        group_by = 'grandparent_rating_key'
    elif section_type == 'season' or section_type == 'album':
        group_by = 'parent_rating_key'
    else:
        group_by = 'rating_key'

    try:
        query = 'SELECT MAX(session_history.started) AS last_played, COUNT(DISTINCT session_history.%s) AS play_count, ' \
                'session_history.rating_key, session_history.parent_rating_key, session_history.grandparent_rating_key ' \
                'FROM session_history ' \
                'JOIN session_history_metadata ON session_history.id = session_history_metadata.id ' \
                'WHERE session_history_metadata.section_id = ? ' \
                'GROUP BY session_history.%s ' % (count_by, group_by)
        result = monitor_db.select(query, args=[section_id])
    except Exception as e:
        logger.warn(u"PlexPy Libraries :: Unable to execute database query for get_datatables_media_info2: %s." % e)
        return default_return

    # Map rating_key (as string) -> last played timestamp and play count.
    watched_list = {}
    for item in result:
        watched_list[str(item[group_by])] = {'last_played': item['last_played'],
                                             'play_count': item['play_count']}

    rows = []
    # Import media info cache from json file
    if rating_key:
        try:
            inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR, 'media_info_%s-%s.json' % (section_id, rating_key))
            with open(inFilePath, 'r') as inFile:
                rows = json.load(inFile)
            library_count = len(rows)
        except IOError as e:
            # Missing cache file is expected on first access; fall through
            # to a fresh fetch below.
            #logger.debug(u"PlexPy Libraries :: No JSON file for rating_key %s." % rating_key)
            #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
            pass
    elif section_id:
        try:
            inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR, 'media_info_%s.json' % section_id)
            with open(inFilePath, 'r') as inFile:
                rows = json.load(inFile)
            library_count = len(rows)
        except IOError as e:
            #logger.debug(u"PlexPy Libraries :: No JSON file for library section_id %s." % section_id)
            #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
            pass

    # If no cache was imported, get all library children items
    # Keep any cached file sizes so a refresh doesn't lose them.
    cached_items = {d['rating_key']: d['file_size'] for d in rows}

    if refresh or not rows:
        pms_connect = pmsconnect.PmsConnect()

        if rating_key:
            library_children = pms_connect.get_library_children_details(rating_key=rating_key,
                                                                        get_media_info=True)
        elif section_id:
            library_children = pms_connect.get_library_children_details(section_id=section_id,
                                                                        section_type=section_type,
                                                                        get_media_info=True)

        if library_children:
            library_count = library_children['library_count']
            # NOTE(review): 'childern_list' matches the (misspelled) key
            # produced by pmsconnect -- do not "fix" one side only.
            children_list = library_children['childern_list']
        else:
            logger.warn(u"PlexPy Libraries :: Unable to get a list of library items.")
            return default_return

        new_rows = []
        for item in children_list:
            cached_file_size = cached_items.get(item['rating_key'], None)
            file_size = cached_file_size if cached_file_size else item.get('file_size', '')

            row = {'section_id': library_details['section_id'],
                   'section_type': library_details['section_type'],
                   'added_at': item['added_at'],
                   'media_type': item['media_type'],
                   'rating_key': item['rating_key'],
                   'parent_rating_key': item['parent_rating_key'],
                   'grandparent_rating_key': item['grandparent_rating_key'],
                   'title': item['title'],
                   'year': item['year'],
                   'media_index': item['media_index'],
                   'parent_media_index': item['parent_media_index'],
                   'thumb': item['thumb'],
                   'container': item.get('container', ''),
                   'bitrate': item.get('bitrate', ''),
                   'video_codec': item.get('video_codec', ''),
                   'video_resolution': item.get('video_resolution', ''),
                   'video_framerate': item.get('video_framerate', ''),
                   'audio_codec': item.get('audio_codec', ''),
                   'audio_channels': item.get('audio_channels', ''),
                   'file_size': file_size
                   }
            new_rows.append(row)

        rows = new_rows
        if not rows:
            return default_return

        # Cache the media info to a json file
        if rating_key:
            try:
                outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR, 'media_info_%s-%s.json' % (section_id, rating_key))
                with open(outFilePath, 'w') as outFile:
                    json.dump(rows, outFile)
            except IOError as e:
                logger.debug(u"PlexPy Libraries :: Unable to create cache file for rating_key %s." % rating_key)
        elif section_id:
            try:
                outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR, 'media_info_%s.json' % section_id)
                with open(outFilePath, 'w') as outFile:
                    json.dump(rows, outFile)
            except IOError as e:
                logger.debug(u"PlexPy Libraries :: Unable to create cache file for section_id %s." % section_id)

    # Update the last_played and play_count
    for item in rows:
        watched_item = watched_list.get(item['rating_key'], None)
        if watched_item:
            item['last_played'] = watched_item['last_played']
            item['play_count'] = watched_item['play_count']
        else:
            item['last_played'] = None
            item['play_count'] = None

    results = []

    # Get datatables JSON data
    if kwargs.get('json_data'):
        json_data = helpers.process_json_kwargs(json_kwargs=kwargs.get('json_data'))
        #print json_data

    # Search results
    # NOTE(review): json_data is only bound inside the 'json_data' branch
    # above -- callers appear to always supply it; verify against callers.
    search_value = json_data['search']['value'].lower()
    if search_value:
        searchable_columns = [d['data'] for d in json_data['columns'] if d['searchable']]
        for row in rows:
            for k, v in row.iteritems():
                if k in searchable_columns and search_value in v.lower():
                    results.append(row)
                    break
    else:
        results = rows

    filtered_count = len(results)

    # Sort results
    results = sorted(results, key=lambda k: k['title'])
    sort_order = json_data['order']
    for order in reversed(sort_order):
        sort_key = json_data['columns'][int(order['column'])]['data']
        reverse = True if order['dir'] == 'desc' else False
        if rating_key and sort_key == 'title':
            # Within a single item's children, "title" order means index order.
            results = sorted(results, key=lambda k: helpers.cast_to_int(k['media_index']), reverse=reverse)
        elif sort_key == 'file_size' or sort_key == 'bitrate':
            results = sorted(results, key=lambda k: helpers.cast_to_int(k[sort_key]), reverse=reverse)
        else:
            results = sorted(results, key=lambda k: k[sort_key], reverse=reverse)

    total_file_size = sum([helpers.cast_to_int(d['file_size']) for d in results])

    # Paginate results
    results = results[json_data['start']:(json_data['start'] + json_data['length'])]

    filtered_file_size = sum([helpers.cast_to_int(d['file_size']) for d in results])

    # NOTE(review): 'dict' shadows the builtin; local to this function only.
    dict = {'recordsFiltered': filtered_count,
            'recordsTotal': library_count,
            'data': results,
            'draw': int(json_data['draw']),
            'filtered_file_size': filtered_file_size,
            'total_file_size': total_file_size
            }

    return dict
def check_active_sessions():
    """Poll the Plex server for current activity and reconcile it with the
    local ``sessions`` table.

    For each previously-known stream still present on the server, detects
    pause/resume transitions, accumulates paused time, tracks buffering
    counts against the configured threshold, and fires 'watched'
    notifications past the configured watch percent. Streams no longer on
    the server are removed from the table, notified as stopped, and written
    to history. Notifications run on their own threads so DB work isn't
    blocked. Holds ``monitor_lock`` for the whole pass.
    """
    with monitor_lock:
        pms_connect = pmsconnect.PmsConnect()
        session_list = pms_connect.get_current_activity()
        monitor_db = database.MonitorDatabase()
        monitor_process = MonitorProcessing()
        # logger.debug(u"PlexPy Monitor :: Checking for active streams.")

        if session_list:
            media_container = session_list['sessions']

            # Check our temp table for what we must do with the new streams
            db_streams = monitor_db.select('SELECT started, session_key, rating_key, media_type, title, parent_title, '
                                           'grandparent_title, user_id, user, friendly_name, ip_address, player, '
                                           'platform, machine_id, parent_rating_key, grandparent_rating_key, state, '
                                           'view_offset, duration, video_decision, audio_decision, width, height, '
                                           'container, video_codec, audio_codec, bitrate, video_resolution, '
                                           'video_framerate, aspect_ratio, audio_channels, transcode_protocol, '
                                           'transcode_container, transcode_video_codec, transcode_audio_codec, '
                                           'transcode_audio_channels, transcode_width, transcode_height, paused_counter '
                                           'FROM sessions')
            for stream in db_streams:
                # A stream is matched by (session_key, rating_key) pair.
                if any(d['session_key'] == str(stream['session_key']) and d['rating_key'] == str(stream['rating_key'])
                       for d in media_container):
                    # The user's session is still active
                    for session in media_container:
                        if session['session_key'] == str(stream['session_key']) and \
                                session['rating_key'] == str(stream['rating_key']):
                            # The user is still playing the same media item
                            # Here we can check the play states
                            if session['state'] != stream['state']:
                                if session['state'] == 'paused':
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    threading.Thread(target=notification_handler.notify,
                                                     kwargs=dict(stream_data=stream, notify_action='pause')).start()

                                if session['state'] == 'playing' and stream['state'] == 'paused':
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    threading.Thread(target=notification_handler.notify,
                                                     kwargs=dict(stream_data=stream, notify_action='resume')).start()

                            if stream['state'] == 'paused':
                                # The stream is still paused so we need to increment the paused_counter
                                # Using the set config parameter as the interval, probably not the most accurate but
                                # it will have to do for now.
                                paused_counter = int(stream['paused_counter']) + plexpy.CONFIG.MONITORING_INTERVAL
                                monitor_db.action('UPDATE sessions SET paused_counter = ? '
                                                  'WHERE session_key = ? AND rating_key = ?',
                                                  [paused_counter, stream['session_key'], stream['rating_key']])

                            if session['state'] == 'buffering' and plexpy.CONFIG.BUFFER_THRESHOLD > 0:
                                # The stream is buffering so we need to increment the buffer_count
                                # We're going just increment on every monitor ping,
                                # would be difficult to keep track otherwise
                                monitor_db.action('UPDATE sessions SET buffer_count = buffer_count + 1 '
                                                  'WHERE session_key = ? AND rating_key = ?',
                                                  [stream['session_key'], stream['rating_key']])

                                # Check the current buffer count and last buffer to determine if we should notify
                                buffer_values = monitor_db.select('SELECT buffer_count, buffer_last_triggered '
                                                                  'FROM sessions '
                                                                  'WHERE session_key = ? AND rating_key = ?',
                                                                  [stream['session_key'], stream['rating_key']])

                                if buffer_values[0]['buffer_count'] >= plexpy.CONFIG.BUFFER_THRESHOLD:
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    # Our first buffer notification
                                    if buffer_values[0]['buffer_count'] == plexpy.CONFIG.BUFFER_THRESHOLD:
                                        logger.info(u"PlexPy Monitor :: User '%s' has triggered a buffer warning."
                                                    % stream['user'])
                                        # Set the buffer trigger time
                                        monitor_db.action('UPDATE sessions '
                                                          'SET buffer_last_triggered = strftime("%s","now") '
                                                          'WHERE session_key = ? AND rating_key = ?',
                                                          [stream['session_key'], stream['rating_key']])

                                        threading.Thread(target=notification_handler.notify,
                                                         kwargs=dict(stream_data=stream, notify_action='buffer')).start()
                                    else:
                                        # Subsequent buffer notifications after wait time
                                        if int(time.time()) > buffer_values[0]['buffer_last_triggered'] + \
                                                plexpy.CONFIG.BUFFER_WAIT:
                                            logger.info(u"PlexPy Monitor :: User '%s' has triggered multiple buffer warnings."
                                                        % stream['user'])
                                            # Set the buffer trigger time
                                            monitor_db.action('UPDATE sessions '
                                                              'SET buffer_last_triggered = strftime("%s","now") '
                                                              'WHERE session_key = ? AND rating_key = ?',
                                                              [stream['session_key'], stream['rating_key']])

                                            threading.Thread(target=notification_handler.notify,
                                                             kwargs=dict(stream_data=stream, notify_action='buffer')).start()

                                logger.debug(u"PlexPy Monitor :: Stream buffering. Count is now %s. Last triggered %s."
                                             % (buffer_values[0][0], buffer_values[0][1]))

                            # Check if the user has reached the offset in the media we defined as the "watched" percent
                            # Don't trigger if state is buffer as some clients push the progress to the end when
                            # buffering on start.
                            if session['progress'] and session['duration'] and session['state'] != 'buffering':
                                if helpers.get_percent(session['progress'], session['duration']) > plexpy.CONFIG.NOTIFY_WATCHED_PERCENT:
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    threading.Thread(target=notification_handler.notify,
                                                     kwargs=dict(stream_data=stream, notify_action='watched')).start()

                else:
                    # The user has stopped playing a stream
                    logger.debug(u"PlexPy Monitor :: Removing sessionKey %s ratingKey %s from session queue"
                                 % (stream['session_key'], stream['rating_key']))
                    monitor_db.action('DELETE FROM sessions WHERE session_key = ? AND rating_key = ?',
                                      [stream['session_key'], stream['rating_key']])

                    # Check if the user has reached the offset in the media we defined as the "watched" percent
                    if stream['view_offset'] and stream['duration']:
                        if helpers.get_percent(stream['view_offset'], stream['duration']) > plexpy.CONFIG.NOTIFY_WATCHED_PERCENT:
                            # Push any notifications -
                            # Push it on it's own thread so we don't hold up our db actions
                            threading.Thread(target=notification_handler.notify,
                                             kwargs=dict(stream_data=stream, notify_action='watched')).start()

                    # Push any notifications - Push it on it's own thread so we don't hold up our db actions
                    threading.Thread(target=notification_handler.notify,
                                     kwargs=dict(stream_data=stream, notify_action='stop')).start()

                    # Write the item history on playback stop
                    monitor_process.write_session_history(session=stream)

            # Process the newly received session data
            for session in media_container:
                monitor_process.write_session(session)
        else:
            logger.debug(u"PlexPy Monitor :: Unable to read session list.")
def _getLogs(self, sort='', search='', order='desc', regex='', **kwargs):
    """
    Return the parsed application log as a list of dicts.

    Each entry has the keys 'time', 'thread', 'msg' and 'loglevel'.
    Lines that fail to parse (e.g. tracebacks) are appended to the
    message of the previous entry.

    Args:
        sort:   entry key to sort on ('' for file order).
        search: case-insensitive substring to filter entry values by.
        order:  'desc' (default) reverses the final list.
        regex:  case-insensitive regex filter applied to "keyvalue"
                strings built from each entry.
        kwargs: 'start'/'end' integers slice the parsed list.

    Also stores the result on self.data and returns it.

    Returns [{"response": {"msg": "Hey", "result": "success"}, "data": [{"time": "29-sept.2015", "thread: "MainThread", "msg: "Called x from y", "loglevel": "DEBUG" } ] } ]
    """
    logfile = os.path.join(plexpy.CONFIG.LOG_DIR, 'plexpy.log')
    templog = []
    start = int(kwargs.get('start', 0))
    end = int(kwargs.get('end', 0))

    if regex:
        logger.debug('Filtering log using regex %s' % regex)
        # Bug fix: this used to compile 'u' + regex, forcing every pattern
        # to begin with a literal 'u' and breaking most user filters.
        reg = re.compile(regex, flags=re.I)

    # Use a context manager so the log file handle is always closed
    # (previously open(...).readlines() leaked the handle).
    with open(logfile, 'r') as f:
        for line in f:
            temp_loglevel_and_time = None

            try:
                temp_loglevel_and_time = line.split('- ')
                loglvl = temp_loglevel_and_time[1].split(' :')[0].strip()
                tl_tread = line.split(' :: ')
                if loglvl is None:
                    msg = line.replace('\n', '')
                else:
                    msg = line.split(' : ')[1].replace('\n', '')
                thread = tl_tread[1].split(' : ')[0]
            except IndexError:
                # We assume this is a traceback; glue it onto the previous
                # entry. Guard against a traceback at the very top of the
                # file, which previously raised an unhandled IndexError.
                if templog:
                    templog[-1]['msg'] += line.replace('\n', '')
                continue

            if len(line) > 1 and temp_loglevel_and_time is not None and loglvl in line:
                d = {
                    'time': temp_loglevel_and_time[0],
                    'loglevel': loglvl,
                    'msg': msg.replace('\n', ''),
                    'thread': thread
                }
                templog.append(d)

    if end > 0:
        logger.debug('Slicing the log from %s to %s' % (start, end))
        templog = templog[start:end]

    if sort:
        logger.debug('Sorting log based on %s' % sort)
        templog = sorted(templog, key=lambda k: k[sort])

    if search:
        logger.debug('Searching log values for %s' % search)
        tt = [d for d in templog for k, v in d.items()
              if search.lower() in v.lower()]
        # Only narrow the result when the search actually matched something.
        if len(tt):
            templog = tt

    if regex:
        tt = []
        for l in templog:
            stringdict = ' '.join('{}{}'.format(k, v) for k, v in l.items())
            if reg.search(stringdict):
                tt.append(l)
        if len(tt):
            templog = tt

    if order == 'desc':
        templog = templog[::-1]

    self.data = templog
    return templog
def write_session_history(self, session=None, import_metadata=None, is_import=False, import_ignore_interval=0):
    """Write a finished stream to the history tables.

    Decides whether the session qualifies for logging (media-type logging
    toggles, minimum-duration ignore interval, per-user keep_history flag),
    then inserts into ``session_history``, ``session_history_media_info``
    and ``session_history_metadata``. The two secondary inserts rely on
    SQLite ``last_insert_rowid()``, so the three inserts must run in this
    exact order with no interleaved writes on this connection.

    Args:
        session: dict of the stream row to log.
        import_metadata: metadata dict to use when is_import is True.
        is_import: True when importing from another database; uses the
            session's own 'stopped' time and skips the PMS metadata fetch.
        import_ignore_interval: minimum play duration (secs) for imports.
    """
    from plexpy import users

    user_data = users.Users()
    user_details = user_data.get_user_friendly_name(user=session['user'])

    if session:
        logging_enabled = False

        # Imports carry their own stop time; live sessions stop "now".
        if is_import:
            if str(session['stopped']).isdigit():
                stopped = session['stopped']
            else:
                stopped = int(time.time())
        else:
            stopped = int(time.time())

        if plexpy.CONFIG.VIDEO_LOGGING_ENABLE and \
                (session['media_type'] == 'movie' or session['media_type'] == 'episode'):
            logging_enabled = True
        elif plexpy.CONFIG.MUSIC_LOGGING_ENABLE and \
                session['media_type'] == 'track':
            logging_enabled = True
        else:
            logger.debug(u"PlexPy Monitor :: ratingKey %s not logged. Does not meet logging criteria. "
                         u"Media type is '%s'" % (session['rating_key'], session['media_type']))

        # Too-short plays are discarded (separate thresholds for live
        # sessions vs imports).
        if plexpy.CONFIG.LOGGING_IGNORE_INTERVAL and not is_import:
            if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                    (int(stopped) - session['started'] < int(plexpy.CONFIG.LOGGING_IGNORE_INTERVAL)):
                logging_enabled = False
                logger.debug(u"PlexPy Monitor :: Play duration for ratingKey %s is %s secs which is less than %s "
                             u"seconds, so we're not logging it." %
                             (session['rating_key'], str(int(stopped) - session['started']),
                              plexpy.CONFIG.LOGGING_IGNORE_INTERVAL))
        elif is_import and import_ignore_interval:
            if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                    (int(stopped) - session['started'] < int(import_ignore_interval)):
                logging_enabled = False
                logger.debug(u"PlexPy Monitor :: Play duration for ratingKey %s is %s secs which is less than %s "
                             u"seconds, so we're not logging it." %
                             (session['rating_key'], str(int(stopped) - session['started']),
                              import_ignore_interval))

        if not user_details['keep_history'] and not is_import:
            logging_enabled = False
            logger.debug(u"PlexPy Monitor :: History logging for user '%s' is disabled." % session['user'])

        if logging_enabled:
            # logger.debug(u"PlexPy Monitor :: Attempting to write to session_history table...")
            query = 'INSERT INTO session_history (started, stopped, rating_key, parent_rating_key, ' \
                    'grandparent_rating_key, media_type, user_id, user, ip_address, paused_counter, player, ' \
                    'platform, machine_id, view_offset) VALUES ' \
                    '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
            args = [session['started'], stopped, session['rating_key'], session['parent_rating_key'],
                    session['grandparent_rating_key'], session['media_type'], session['user_id'], session['user'],
                    session['ip_address'], session['paused_counter'], session['player'], session['platform'],
                    session['machine_id'], session['view_offset']]
            # logger.debug(u"PlexPy Monitor :: Writing session_history transaction...")
            self.db.action(query=query, args=args)

            # logger.debug(u"PlexPy Monitor :: Successfully written history item, last id for session_history is %s"
            #              % last_id)

            # Write the session_history_media_info table
            # Uses last_insert_rowid() to link to the row inserted above.
            # logger.debug(u"PlexPy Monitor :: Attempting to write to session_history_media_info table...")
            query = 'INSERT INTO session_history_media_info (id, rating_key, video_decision, audio_decision, ' \
                    'duration, width, height, container, video_codec, audio_codec, bitrate, video_resolution, ' \
                    'video_framerate, aspect_ratio, audio_channels, transcode_protocol, transcode_container, ' \
                    'transcode_video_codec, transcode_audio_codec, transcode_audio_channels, transcode_width, ' \
                    'transcode_height) VALUES ' \
                    '(last_insert_rowid(), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
            args = [session['rating_key'], session['video_decision'], session['audio_decision'],
                    session['duration'], session['width'], session['height'], session['container'],
                    session['video_codec'], session['audio_codec'], session['bitrate'],
                    session['video_resolution'], session['video_framerate'], session['aspect_ratio'],
                    session['audio_channels'], session['transcode_protocol'], session['transcode_container'],
                    session['transcode_video_codec'], session['transcode_audio_codec'],
                    session['transcode_audio_channels'], session['transcode_width'], session['transcode_height']]
            # logger.debug(u"PlexPy Monitor :: Writing session_history_media_info transaction...")
            self.db.action(query=query, args=args)

            # Live sessions fetch fresh metadata from the server; imports
            # use the metadata supplied by the caller.
            if not is_import:
                logger.debug(u"PlexPy Monitor :: Fetching metadata for item ratingKey %s" % session['rating_key'])
                pms_connect = pmsconnect.PmsConnect()
                result = pms_connect.get_metadata_details(rating_key=str(session['rating_key']))
                metadata = result['metadata']
            else:
                metadata = import_metadata

            # Write the session_history_metadata table
            directors = ";".join(metadata['directors'])
            writers = ";".join(metadata['writers'])
            actors = ";".join(metadata['actors'])
            genres = ";".join(metadata['genres'])

            # Build media item title
            if session['media_type'] == 'episode' or session['media_type'] == 'track':
                full_title = '%s - %s' % (metadata['grandparent_title'], metadata['title'])
            elif session['media_type'] == 'movie':
                full_title = metadata['title']
            else:
                full_title = metadata['title']

            # logger.debug(u"PlexPy Monitor :: Attempting to write to session_history_metadata table...")
            query = 'INSERT INTO session_history_metadata (id, rating_key, parent_rating_key, ' \
                    'grandparent_rating_key, title, parent_title, grandparent_title, full_title, media_index, ' \
                    'parent_media_index, thumb, parent_thumb, grandparent_thumb, art, media_type, year, ' \
                    'originally_available_at, added_at, updated_at, last_viewed_at, content_rating, summary, ' \
                    'rating, duration, guid, directors, writers, actors, genres, studio) VALUES ' \
                    '(last_insert_rowid(), ' \
                    '?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
            args = [session['rating_key'], session['parent_rating_key'], session['grandparent_rating_key'],
                    session['title'], session['parent_title'], session['grandparent_title'], full_title,
                    metadata['index'], metadata['parent_index'], metadata['thumb'], metadata['parent_thumb'],
                    metadata['grandparent_thumb'], metadata['art'], session['media_type'], metadata['year'],
                    metadata['originally_available_at'], metadata['added_at'], metadata['updated_at'],
                    metadata['last_viewed_at'], metadata['content_rating'], metadata['summary'],
                    metadata['rating'], metadata['duration'], metadata['guid'], directors, writers,
                    actors, genres, metadata['studio']]
            # logger.debug(u"PlexPy Monitor :: Writing session_history_metadata transaction...")
            self.db.action(query=query, args=args)
def connect(self):
    """Open the Plex notifications websocket and run the receive loop.

    Builds a ws:// or wss:// URI from the server config, authenticates via
    the X-Plex-Token header when available, then processes incoming
    messages until shutdown. Timeouts and closed connections are retried
    up to WEBSOCKET_CONNECTION_ATTEMPTS times; a 401 on connect triggers
    a server shutdown. Calls on_connect()/on_disconnect() hooks around the
    session and sets self.ready once connected.
    """
    from websocket import create_connection

    # Prefer the configured HTTPS URL (wss) when SSL is enabled; otherwise
    # build a plain ws URI from IP and port.
    if self.server.CONFIG.PMS_SSL and self.server.CONFIG.PMS_URL[:5] == 'https':
        uri = self.server.CONFIG.PMS_URL.replace('https://', 'wss://') + '/:/websockets/notifications'
        secure = 'secure '
    else:
        uri = 'ws://%s:%s/:/websockets/notifications' % (
            self.server.CONFIG.PMS_IP,
            self.server.CONFIG.PMS_PORT
        )
        secure = ''

    # Set authentication token (if one is available)
    if self.server.CONFIG.PMS_TOKEN:
        header = ["X-Plex-Token: %s" % self.server.CONFIG.PMS_TOKEN]
    else:
        header = []

    self.ws_shutdown = False
    reconnects = 0

    # Try an open the websocket connection
    logger.info(u"Tautulli WebSocket :: %s: Opening %s websocket." % (self.server.CONFIG.PMS_NAME, secure))
    try:
        if self.server.PLEXTV.is_validated:
            self.WS_CONNECTION = create_connection(uri, header=header, timeout=30)
            logger.info(u"Tautulli WebSocket :: %s: Ready" % self.server.CONFIG.PMS_NAME)
            self.server.WS_CONNECTED = True
    except (websocket.WebSocketException, IOError, Exception) as e:
        logger.error("Tautulli WebSocket :: %s: %s." % (self.server.CONFIG.PMS_NAME, e))
        # 401 means the token is invalid; shut the server down rather
        # than retrying forever.
        if isinstance(e, websocket.WebSocketBadStatusException) and e.status_code == 401:
            self.ws_shutdown = True
            self.server.server_shutdown = True
            self.server.initialize_scheduler()

    if self.server.WS_CONNECTED:
        self.on_connect()

    self.ready.set()

    while self.server.WS_CONNECTED:
        try:
            self.process(*self.receive(self.WS_CONNECTION))

            # successfully received data, reset reconnects counter
            reconnects = 0

        except websocket.WebSocketTimeoutException:
            if self.ws_shutdown:
                break

            logger.warn(u"Tautulli WebSocket :: %s: Connection timed out." % self.server.CONFIG.PMS_NAME)
            if reconnects < plexpy.CONFIG.WEBSOCKET_CONNECTION_ATTEMPTS:
                reconnects += 1
            else:
                self.close()
                break

        except websocket.WebSocketConnectionClosedException:
            if self.ws_shutdown:
                break

            # Only log the closure once per outage.
            if reconnects == 0:
                logger.warn(u"Tautulli WebSocket :: %s: Connection has closed." % self.server.CONFIG.PMS_NAME)

            if not self.server.CONFIG.PMS_IS_CLOUD and reconnects < plexpy.CONFIG.WEBSOCKET_CONNECTION_ATTEMPTS:
                reconnects += 1

                # Sleep 5 between connection attempts
                if reconnects > 1:
                    time.sleep(plexpy.CONFIG.WEBSOCKET_CONNECTION_TIMEOUT)

                logger.warn(u"Tautulli WebSocket :: %s: Reconnection attempt %s." % (self.server.CONFIG.PMS_NAME, str(reconnects)))

                try:
                    self.WS_CONNECTION = create_connection(uri, header=header)
                    logger.info(u"Tautulli WebSocket :: %s: Ready" % self.server.CONFIG.PMS_NAME)
                    self.server.WS_CONNECTED = True
                except (websocket.WebSocketException, IOError, Exception) as e:
                    logger.error("Tautulli WebSocket :: %s: %s." % (self.server.CONFIG.PMS_NAME, e))

            else:
                self.close()
                break

        except (websocket.WebSocketException, Exception) as e:
            if self.ws_shutdown:
                break

            logger.error(u"Tautulli WebSocket :: %s: %s." % (self.server.CONFIG.PMS_NAME, e))
            self.close()
            break

    if not self.server.WS_CONNECTED and not self.ws_shutdown:
        self.on_disconnect()

    logger.debug(u"Tautulli WebSocket :: %s: Leaving thread." % self.server.CONFIG.PMS_NAME)
def main():
    """
    Tautulli application entry point. Parses arguments, setups encoding
    and initializes the application.
    """

    # Fixed paths to Tautulli
    if hasattr(sys, 'frozen') and hasattr(sys, '_MEIPASS'):
        # Running from a PyInstaller bundle.
        plexpy.FROZEN = True
        plexpy.FULL_PATH = os.path.abspath(sys.executable)
        plexpy.PROG_DIR = sys._MEIPASS
    else:
        plexpy.FULL_PATH = os.path.abspath(__file__)
        plexpy.PROG_DIR = os.path.dirname(plexpy.FULL_PATH)

    plexpy.ARGS = sys.argv[1:]

    # From sickbeard
    plexpy.SYS_PLATFORM = sys.platform
    plexpy.SYS_ENCODING = None

    try:
        locale.setlocale(locale.LC_ALL, "")
        plexpy.SYS_LANGUAGE, plexpy.SYS_ENCODING = locale.getdefaultlocale()
    except (locale.Error, IOError):
        pass

    # for OSes that are poorly configured I'll just force UTF-8
    if not plexpy.SYS_ENCODING or plexpy.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        plexpy.SYS_ENCODING = 'UTF-8'

    # Set up and gather command line arguments
    parser = argparse.ArgumentParser(
        description='A Python based monitoring and tracking tool for Plex Media Server.')

    parser.add_argument(
        '-v', '--verbose', action='store_true', help='Increase console logging verbosity')
    parser.add_argument(
        '-q', '--quiet', action='store_true', help='Turn off console logging')
    parser.add_argument(
        '-d', '--daemon', action='store_true', help='Run as a daemon')
    parser.add_argument(
        '-p', '--port', type=int, help='Force Tautulli to run on a specified port')
    parser.add_argument(
        '--dev', action='store_true', help='Start Tautulli in the development environment')
    parser.add_argument(
        '--datadir', help='Specify a directory where to store your data files')
    parser.add_argument(
        '--config', help='Specify a config file to use')
    parser.add_argument(
        '--nolaunch', action='store_true', help='Prevent browser from launching on startup')
    parser.add_argument(
        '--pidfile', help='Create a pid file (only relevant when running as a daemon)')
    parser.add_argument(
        '--nofork', action='store_true', help='Start Tautulli as a service, do not fork when restarting')

    args = parser.parse_args()

    if args.verbose:
        plexpy.VERBOSE = True
    if args.quiet:
        plexpy.QUIET = True

    # Do an intial setup of the logger.
    # Require verbose for pre-initilization to see critical errors
    logger.initLogger(console=not plexpy.QUIET, log_dir=False, verbose=True)

    try:
        plexpy.SYS_TIMEZONE = tzlocal.get_localzone()
    except (pytz.UnknownTimeZoneError, LookupError, ValueError) as e:
        logger.error("Could not determine system timezone: %s" % e)
        plexpy.SYS_TIMEZONE = pytz.UTC

    plexpy.SYS_UTC_OFFSET = datetime.datetime.now(plexpy.SYS_TIMEZONE).strftime('%z')

    # Container/snap detection via environment variables.
    if helpers.bool_true(os.getenv('TAUTULLI_DOCKER', False)):
        plexpy.DOCKER = True
        plexpy.DOCKER_MOUNT = not os.path.isfile('/config/DOCKER')
    if helpers.bool_true(os.getenv('TAUTULLI_SNAP', False)):
        plexpy.SNAP = True

    if args.dev:
        plexpy.DEV = True
        logger.debug("Tautulli is running in the dev environment.")

    if args.daemon:
        if sys.platform == 'win32':
            logger.warn("Daemonizing not supported under Windows, starting normally")
        else:
            plexpy.DAEMON = True
            plexpy.QUIET = True

    if args.nofork:
        plexpy.NOFORK = True
        logger.info("Tautulli is running as a service, it will not fork when restarted.")

    if args.pidfile:
        plexpy.PIDFILE = str(args.pidfile)

        # If the pidfile already exists, plexpy may still be running, so
        # exit
        if os.path.exists(plexpy.PIDFILE):
            try:
                with open(plexpy.PIDFILE, 'r') as fp:
                    pid = int(fp.read())
            except IOError as e:
                raise SystemExit("Unable to read PID file: %s", e)

            try:
                # Signal 0 only checks whether the PID is alive.
                os.kill(pid, 0)
            except OSError:
                logger.warn("PID file '%s' already exists, but PID %d is "
                            "not running. Ignoring PID file." %
                            (plexpy.PIDFILE, pid))
            else:
                # The pidfile exists and points to a live PID. plexpy may
                # still be running, so exit.
                raise SystemExit("PID file '%s' already exists. Exiting." % plexpy.PIDFILE)

        # The pidfile is only useful in daemon mode, make sure we can write the
        # file properly
        if plexpy.DAEMON:
            plexpy.CREATEPID = True

            try:
                with open(plexpy.PIDFILE, 'w') as fp:
                    fp.write("pid\n")
            except IOError as e:
                raise SystemExit("Unable to write PID file: %s", e)
        else:
            logger.warn("Not running in daemon mode. PID file creation " \
                        "disabled.")

    # Determine which data directory and config file to use
    if args.datadir:
        plexpy.DATA_DIR = args.datadir
    elif plexpy.FROZEN:
        plexpy.DATA_DIR = appdirs.user_data_dir("Tautulli", False)
    else:
        plexpy.DATA_DIR = plexpy.PROG_DIR

    # Migrate Snap data dir
    if plexpy.SNAP:
        snap_common = os.environ['SNAP_COMMON']
        old_data_dir = os.path.join(snap_common, 'Tautulli')
        if os.path.exists(old_data_dir) and os.listdir(old_data_dir):
            plexpy.SNAP_MIGRATE = True
            logger.info("Migrating Snap user data.")
            shutil.move(old_data_dir, plexpy.DATA_DIR)

    if args.config:
        config_file = args.config
    else:
        config_file = os.path.join(plexpy.DATA_DIR, config.FILENAME)

    # Try to create the DATA_DIR if it doesn't exist
    if not os.path.exists(plexpy.DATA_DIR):
        try:
            os.makedirs(plexpy.DATA_DIR)
        except OSError:
            raise SystemExit('Could not create data directory: ' + plexpy.DATA_DIR + '. Exiting....')

    # Make sure the DATA_DIR is writeable
    test_file = os.path.join(plexpy.DATA_DIR, '.TEST')
    try:
        with open(test_file, 'w'):
            pass
    except IOError:
        raise SystemExit('Cannot write to the data directory: ' + plexpy.DATA_DIR + '. Exiting...')
    finally:
        try:
            os.remove(test_file)
        except OSError:
            pass

    # Put the database in the DATA_DIR
    plexpy.DB_FILE = os.path.join(plexpy.DATA_DIR, database.FILENAME)

    # Move 'plexpy.db' to 'tautulli.db'
    if os.path.isfile(os.path.join(plexpy.DATA_DIR, 'plexpy.db')) and \
            not os.path.isfile(os.path.join(plexpy.DATA_DIR, plexpy.DB_FILE)):
        try:
            os.rename(os.path.join(plexpy.DATA_DIR, 'plexpy.db'), plexpy.DB_FILE)
        except OSError as e:
            raise SystemExit("Unable to rename plexpy.db to tautulli.db: %s", e)

    if plexpy.DAEMON:
        plexpy.daemonize()

    # Read config and start logging
    plexpy.initialize(config_file)

    # Start the background threads
    plexpy.start()

    # Force the http port if neccessary
    if args.port:
        plexpy.HTTP_PORT = args.port
        logger.info('Using forced web server port: %i', plexpy.HTTP_PORT)
    else:
        plexpy.HTTP_PORT = int(plexpy.CONFIG.HTTP_PORT)

    # Check if pyOpenSSL is installed. It is required for certificate generation
    # and for CherryPy.
    if plexpy.CONFIG.ENABLE_HTTPS:
        try:
            import OpenSSL
        except ImportError:
            logger.warn("The pyOpenSSL module is missing. Install this "
                        "module to enable HTTPS. HTTPS will be disabled.")
            plexpy.CONFIG.ENABLE_HTTPS = False

    # Try to start the server. Will exit here is address is already in use.
    webstart.start()

    if common.PLATFORM == 'Windows':
        if plexpy.CONFIG.SYS_TRAY_ICON:
            plexpy.WIN_SYS_TRAY_ICON = windows.WindowsSystemTray()
            plexpy.WIN_SYS_TRAY_ICON.start()
        windows.set_startup()
    elif common.PLATFORM == 'Darwin':
        macos.set_startup()

    # Open webbrowser
    if plexpy.CONFIG.LAUNCH_BROWSER and not args.nolaunch and not plexpy.DEV:
        plexpy.launch_browser(plexpy.CONFIG.HTTP_HOST, plexpy.HTTP_PORT,
                              plexpy.HTTP_ROOT)

    if common.PLATFORM == 'Darwin' and plexpy.CONFIG.SYS_TRAY_ICON:
        if not macos.HAS_PYOBJC:
            logger.warn("The pyobjc module is missing. Install this "
                        "module to enable the MacOS menu bar icon.")
            plexpy.CONFIG.SYS_TRAY_ICON = False

        if plexpy.CONFIG.SYS_TRAY_ICON:
            # MacOS menu bar icon must be run on the main thread and is blocking
            # Start the rest of Tautulli on a new thread
            thread = threading.Thread(target=wait)
            thread.daemon = True
            thread.start()

            plexpy.MAC_SYS_TRAY_ICON = macos.MacOSSystemTray()
            plexpy.MAC_SYS_TRAY_ICON.start()
        else:
            wait()
    else:
        wait()