def login(self, username, password):
    """Authenticate the given credentials; on success, set session cookies
    and render the main page, otherwise re-render the login page."""
    user = users.Users().getUser(username, password)
    if not user:
        return self.show_loginpage('Not a valid username/password pair')
    # Persist identity for subsequent requests.
    Cookie('username').set(user.username)
    Cookie('team').set(user.team.teamName)
    return self.show_mainpage(user)
def build_database_connection(app):
    """Construct the users.Users database connection from the app config."""
    cfg = app.config
    return users.Users(host=cfg['MYSQL_HOST'],
                       username=cfg['MYSQL_USERNAME'],
                       password=cfg['MYSQL_PASSWORD'],
                       database=cfg['MYSQL_DATABASE'],
                       year=cfg['YEAR'])
def filter(self, record):
    """Redact known usernames from a log record's message and args.

    Usernames are processed longest-first so that overlapping names are
    replaced correctly. Always returns True: records are scrubbed, never
    dropped.
    """
    if not plexpy.CONFIG.LOG_BLACKLIST_USERNAMES:
        return True
    if not plexpy._INITIALIZED:
        return True
    items = sorted(users.Users().get_users(),
                   key=lambda x: len(x['username']), reverse=True)
    for item in items:
        username = item['username']
        # Built-in accounts are not sensitive; skip them.
        if username.lower() in ('local', 'guest'):
            continue
        try:
            record.msg = self.replace(record.msg, username)
            args = []
            for arg in record.args:
                if isinstance(arg, str):
                    arg = self.replace(arg, username)
                args.append(arg)
            record.args = tuple(args)
        except Exception:
            # Best-effort redaction: a scrub failure must never break logging.
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are no longer swallowed.
            pass
    return True
def __init__(self, username=None, password=None, token=None):
    """Build a PlexTV client.

    Credentials may be given explicitly (username/password or token);
    otherwise the token is resolved from the current session user's
    server token, falling back to the admin token.
    """
    self.username = username
    self.password = password
    self.token = token
    self.urls = 'https://plex.tv'
    self.timeout = plexpy.CONFIG.PMS_TIMEOUT
    self.ssl_verify = plexpy.CONFIG.VERIFY_SSL_CERT

    if self.username is None and self.password is None:
        if not self.token:
            # Check if we should use the admin token, or the guest server token
            session_user_id = session.get_session_user_id()
            if session_user_id:
                user_tokens = users.Users().get_tokens(user_id=session_user_id)
                self.token = user_tokens['server_token']
            else:
                self.token = plexpy.CONFIG.PMS_TOKEN

        if not self.token:
            logger.error(
                u"Tautulli PlexTV :: PlexTV called, but no token provided.")
            return

    self.request_handler = http_handler.HTTPHandler(urls=self.urls,
                                                    token=self.token,
                                                    timeout=self.timeout,
                                                    ssl_verify=self.ssl_verify)
def filter(self, record):
    """Redact usernames and friendly names from a log record in place.

    Always returns True: the record is scrubbed, never dropped.
    """
    if not plexpy.CONFIG.LOG_BLACKLIST_USERNAMES:
        return True
    if not plexpy._INITIALIZED:
        return True
    for item in users.Users().get_users() or []:
        username = item['username']
        friendly_name = item['friendly_name']
        # The built-in local account is not sensitive.
        if username == 'Local':
            continue
        try:
            record.msg = self.replace(record.msg, username)
            record.msg = self.replace(record.msg, friendly_name)
            args = []
            for arg in record.args:
                if isinstance(arg, str):
                    arg = self.replace(arg, username)
                    arg = self.replace(arg, friendly_name)
                args.append(arg)
            record.args = tuple(args)
        except Exception:
            # Best-effort redaction; narrowed from a bare `except:` so that
            # SystemExit/KeyboardInterrupt are no longer swallowed.
            pass
    return True
def init_connection(self):
    """Initialize variables that are local to a connection, (needed if the client automatically reconnect)"""
    # Non-blocking acquire: on a reconnect the lock may already be held.
    self.ready_lock.acquire(
        False)  # reacquire the ready-lock in case of reconnection

    self.connected = PYMUMBLE_CONN_STATE_NOT_CONNECTED
    self.control_socket = None
    self.media_socket = None  # Not implemented - for UDP media

    self.bandwidth = PYMUMBLE_BANDWIDTH  # reset the outgoing bandwidth to it's default before connectiong
    self.server_max_bandwidth = None
    self.udp_active = False

    # Per-connection state containers; all receive `self` so they can reach
    # back into this connection (and its callbacks) when events arrive.
    self.users = users.Users(
        self, self.callbacks
    )  # contain the server's connected users informations
    self.channels = channels.Channels(
        self, self.callbacks)  # contain the server's channels informations
    self.blobs = blobs.Blobs(self)  # manage the blob objects
    self.sound_output = soundoutput.SoundOutput(
        self, PYMUMBLE_AUDIO_PER_PACKET,
        self.bandwidth)  # manage the outgoing sounds
    self.commands = commands.Commands(
    )  # manage commands sent between the main and the mumble threads

    # NOTE(review): a str buffer suggests Python 2-era code; under Python 3
    # a socket read path would need b"" — confirm against the receive loop.
    self.receive_buffer = ""  # initialize the control connection input buffer
def refresh_users():
    """Refresh the local users table from the plex.tv full users list.

    Returns True when the remote list was fetched and upserted, False when
    the fetch failed. Shared library info is resolved per-user via each
    user's server token.
    """
    logger.info(u"PlexPy PlexTV :: Requesting users list refresh...")
    result = PlexTV().get_full_users_list()
    monitor_db = database.MonitorDatabase()
    user_data = users.Users()

    if result:
        for item in result:
            shared_libraries = ''
            user_tokens = user_data.get_tokens(user_id=item['user_id'])
            if user_tokens and user_tokens['server_token']:
                # Query the PMS as this user to see which sections are shared.
                pms_connect = pmsconnect.PmsConnect(
                    token=user_tokens['server_token'])
                library_details = pms_connect.get_server_children()

                if library_details:
                    shared_libraries = ';'.join(
                        d['section_id']
                        for d in library_details['libraries_list'])
                else:
                    shared_libraries = ''

            control_value_dict = {"user_id": item['user_id']}
            new_value_dict = {
                "username": item['username'],
                "thumb": item['thumb'],
                "email": item['email'],
                "is_home_user": item['is_home_user'],
                "is_allow_sync": item['is_allow_sync'],
                "is_restricted": item['is_restricted'],
                "shared_libraries": shared_libraries,
                "filter_all": item['filter_all'],
                "filter_movies": item['filter_movies'],
                "filter_tv": item['filter_tv'],
                "filter_music": item['filter_music'],
                "filter_photos": item['filter_photos']
            }

            # Check if we've set a custom avatar if so don't overwrite it.
            if item['user_id']:
                avatar_urls = monitor_db.select(
                    'SELECT thumb, custom_avatar_url '
                    'FROM users WHERE user_id = ?', [item['user_id']])
                if avatar_urls:
                    # Only sync the avatar when no custom one was set (or the
                    # custom one still mirrors the plex.tv thumb).
                    if not avatar_urls[0]['custom_avatar_url'] or \
                            avatar_urls[0]['custom_avatar_url'] == avatar_urls[0]['thumb']:
                        new_value_dict['custom_avatar_url'] = item['thumb']
                else:
                    # New user row: seed the custom avatar with the thumb.
                    new_value_dict['custom_avatar_url'] = item['thumb']

            monitor_db.upsert('users', new_value_dict, control_value_dict)

        logger.info(u"PlexPy PlexTV :: Users list refreshed.")
        return True
    else:
        logger.warn(u"PlexPy PlexTV :: Unable to refresh users list.")
        return False
def tolby():
    """Return a fixed sample user record for Tolby Bryant."""
    return users.Users('Cool_kid187', '*****@*****.**', 'Tolby', 'Bryant')
def test_search_user(self):
    """search_user returns the previously added record, field for field."""
    fields = ('ldconejo', '*****@*****.**', 'Luis', 'Conejo')
    expected = users.Users(*fields)
    main.add_user(*fields, self.user_collection)
    result = main.search_user('ldconejo', self.user_collection)
    for attr in ('user_id', 'email', 'user_name', 'user_last_name'):
        self.assertEqual(getattr(result, attr), getattr(expected, attr))
def get_session_library_filters():
    """Return the library filter dict for the current logged-in session,
    e.g. {'content_rating': ('PG', 'R'), 'labels': ('label1', 'label2')}."""
    return users.Users().get_filters(user_id=get_session_user_id())
def setUp(self):
    # Build the shared Postgres fixtures first.
    TestPostgres.setUp(self)
    # Torrent store under test, pointed at a dummy tracker URL.
    self.torrents = torrents.TorrentStore('http://tracker/')
    self.torrents.setConnectionPool(self.getConnectionPool())
    # Users store with an empty salt, used only to mint a valid uid.
    u = users.Users('')
    u.setConnectionPool(self.getConnectionPool())
    # addUser returns a (path, uid) pair; 64*'\0' stands in for a password hash.
    user = u.addUser('testuser', 64 * '\0')
    _, self.validuid = user
def save_logs(conn, cacheid, logstr, user_token):
    """ Save logs to the database

    Parses the JSON log listing in *logstr*, fetches up to 5 pages of logs,
    and persists each log entry plus its author and attached images.
    """
    cacheid = cacheid.upper()
    json_object = json.loads(logstr)
    page_info = json_object['pageInfo']
    size = page_info['size']
    total_rows = page_info['totalRows']
    pages = math.ceil(total_rows / size)
    # Cap at 5 pages to bound the number of remote fetches.
    if pages > 5:
        pages = 5
    json_array = json_object['data']
    for i in range(1, pages + 1):
        # Page 1 is already embedded in logstr; later pages are fetched.
        if i > 1:
            json_array = get_more_logs(i, size, user_token)
        if json_array is None:
            return
        for log in json_array:
            # Persist the log entry itself.
            l_b = logbook.LogBook()
            l_b.cacheid = cacheid
            l_b.logid = log['LogID']
            l_b.accountid = log['AccountID']
            l_b.logtype = log['LogType']
            l_b.logimage = log['LogTypeImage']
            # Rewrites embedded image links through the local cache.
            l_b.logtext = htmlcode.cache_images(log['LogText'], SESSION)
            l_b.created = clean_up(log['Created'])
            l_b.visited = clean_up(log['Visited'])
            save_log(conn, l_b)
            # Persist (upsert) the log author.
            user = users.Users()
            user.accountid = log['AccountID']
            user.username = log['UserName']
            user.accountguid = log['AccountGuid']
            user.avatarimage = log['AvatarImage']
            user.findcount = log['GeocacheFindCount']
            user.hidecount = log['GeocacheHideCount']
            save_user(conn, user)
            # Persist any images attached to the log.
            for img in log['Images']:
                image = images.Images()
                image.cacheid = cacheid
                image.accountid = log['AccountID']
                image.imageid = img['ImageID']
                image.logid = log['LogID']
                image.filename = img['FileName']
                image.created = clean_up(img['Created'])
                image.name = img['Name']
                image.descr = img['Descr']
                save_image(conn, image)
def setUp(self):
    # Shared Postgres fixtures.
    TestPostgres.setUp(self)
    self.users = users.Users('')
    self.users.setConnectionPool(self.getConnectionPool())
    # NOTE(review): this literal was redacted ('******') in this copy of the
    # source; restore the real test username from version control.
    self.validusername = '******'
    # addUser returns (userpath, uid); '\xFF' * 64 stands in for a password hash.
    userpath, self.validuid = self.users.addUser(self.validusername,
                                                 '\xFF' * 64)
    # types.StringType exists only in Python 2 — this suite predates Python 3.
    self.assertIsInstance(userpath, types.StringType)
    return
def eve():
    """Send tomorrow's schedule to every subscribed user (best effort).

    Delivery failures are ignored so one unreachable user does not stop
    the broadcast.
    """
    for u in users.Users().subbed():
        gname, gid = u.UserGroup
        print(f"Сообщение для: {gname} {gid}")
        # Fetch once instead of twice (the original called getSchedule for
        # both the check and the message body).
        schedule = u.getSchedule(u.tomorrow())
        if schedule is not None:  # `is not None`, not `!= None`
            try:
                u.send(f"Ваше расписание на завтра({gname}):\n{schedule}")
            except Exception:
                # Narrowed from a bare `except:`; delivery is best-effort.
                pass
def _initWidgets(self):
    """ Init dialog widgets """
    super(ProjectSettings, self)._initWidgets()
    self.setWindowTitle("%s | %s" % (self.log.title, self._fdn.__user__))
    #--- Project ---#
    self.wg_watchers = users.Users(self, settingsMode='project')
    #--- Refresh ---#
    for wg in (self.wg_watchers,):
        wg.setVisible(False)
        self.vl_settingsWidget.addWidget(wg)
def _initWidgets(self):
    """ Init dialog widgets """
    super(ToolSettings, self)._initWidgets()
    title = "%s | %s" % (self.log.title, self._fdn.__user__)
    self.setWindowTitle(title)
    #--- UserGroups ---#
    self.wg_groups = userGroups.Groups(self)
    self.wg_users = users.Users(self, settingsMode='tool')
    #--- Refresh ---#
    settings_widgets = (self.wg_groups, self.wg_users)
    for wg in settings_widgets:
        wg.setVisible(False)
        self.vl_settingsWidget.addWidget(wg)
def get_session_user_token():
    """ Returns the user's server_token for the current logged in session """
    _session = get_session_info()
    # Guests use their own server token; everyone else gets the admin token.
    if _session['user_group'] == 'guest' and _session['user_id']:
        tokens = users.Users().get_tokens(_session['user_id'])
        return tokens['server_token']
    return plexpy.CONFIG.PMS_TOKEN
def handleSshPw(ks):
    """Apply kickstart sshpw entries: set the password on existing accounts,
    create the account otherwise."""
    import users
    u = users.Users()
    for ud in ks.sshpw.dataList():
        if u.checkUserExists(ud.username, root="/"):
            u.setUserPassword(username=ud.username,
                              password=ud.password,
                              isCrypted=ud.isCrypted,
                              lock=ud.lock)
        else:
            u.createUser(name=ud.username,
                         password=ud.password,
                         isCrypted=ud.isCrypted,
                         lock=ud.lock,
                         root="/",
                         mkmailspool=False)
    del u
def _getUserips(self, user_id=None, user=None, **kwargs):
    """Fetch unique IP history for a user, selected by id (preferred)
    or by name; stores the result on self.data or an error on self.msg."""
    if user_id:
        custom_where = [['user_id', user_id]]
    elif user:
        custom_where = [['user', user]]
    else:
        custom_where = []

    history = users.Users().get_user_unique_ips(kwargs=kwargs,
                                                custom_where=custom_where)
    if not history:
        self.msg = 'Failed to find users ips'
        return None
    self.data = history
    return history
def __init__(self, username=None, password=None, token=None):
    """plex.tv client: uses the given token when provided, otherwise the
    session user's server token, falling back to the admin token."""
    self.protocol = 'HTTPS'
    self.username = username
    self.password = password
    self.ssl_verify = plexpy.CONFIG.VERIFY_SSL_CERT

    if token:
        self.token = token
    else:
        # Check if we should use the admin token, or the guest server token
        session_user_id = session.get_session_user_id()
        if session_user_id:
            tokens = users.Users().get_tokens(user_id=session_user_id)
            self.token = tokens['server_token']
        else:
            self.token = plexpy.CONFIG.PMS_TOKEN

    self.request_handler = http_handler.HTTPHandler(host='plex.tv',
                                                    port=443,
                                                    token=self.token,
                                                    ssl_verify=self.ssl_verify)
def get_user(conn, accountid):
    """ Get user from the database

    Returns a populated users.Users, or None when the account is missing
    or has an empty accountid column.
    """
    cursor = conn.cursor()
    cursor.execute("SELECT * from users where accountid = ?", (accountid, ))
    row = cursor.fetchone()
    cursor.close()

    if row is None or row[0] == "":
        return None

    user = users.Users()
    user.accountid = row[0]
    user.username = row[1]
    user.accountguid = row[2]
    user.avatarimage = row[3]
    user.findcount = row[4]
    user.hidecount = row[5]
    return user
def main():
    """Build the demo org hierarchy, populate users, and print a sample request."""
    all_groups = groups.Groups()
    # (group name, parent group) pairs, top of the hierarchy first.
    hierarchy = [('CEO', None),
                 ('Senior Managers', 'CEO'),
                 ('Midlevel Managers', 'Senior Managers'),
                 ('Junior Managers', 'Midlevel Managers'),
                 ('Engineers', 'Junior Managers')]
    for group_name, parent in hierarchy:
        all_groups.add(group_name, parent)

    usrs = users.Users()
    # Every demo user shares the same (redacted) contact details.
    roster = [('Anna', 'Engineers'),
              ('Bob', 'Engineers'),
              ('Clarise', 'Junior Managers'),
              ('Desmond', 'Midlevel Managers'),
              ('Elis', 'Senior Managers'),
              ('Frank', 'Senior Managers'),
              ('Gomez', 'CEO')]
    for person, group_name in roster:
        usrs.add_user(person, '*****@*****.**', '9055555555',
                      all_groups.get(group_name))

    author = usrs.get_users()[0]
    r = Request(enums.TestType.SEVERE, usrs, author,
                "Barrel Roll", "Do a barrel roll!")
    r.print_request()
def get_synced_items(self,
                     machine_id=None,
                     client_id_filter=None,
                     user_id_filter=None,
                     rating_key_filter=None,
                     sync_id_filter=None):
    """Walk the plex.tv SyncList XML and return a list of synced-item dicts.

    All filters are optional; rating_key/user_id filters accept a scalar or
    a list (normalized to lists of strings below). Returns {} when the XML
    cannot be parsed, otherwise a session-filtered list of dicts.
    """
    if not machine_id:
        machine_id = plexpy.CONFIG.PMS_IDENTIFIER

    # Normalize scalar filters into lists of strings for uniform "in" tests.
    if isinstance(rating_key_filter, list):
        rating_key_filter = [str(k) for k in rating_key_filter]
    elif rating_key_filter:
        rating_key_filter = [str(rating_key_filter)]

    if isinstance(user_id_filter, list):
        user_id_filter = [str(k) for k in user_id_filter]
    elif user_id_filter:
        user_id_filter = [str(user_id_filter)]

    sync_list = self.get_plextv_sync_lists(machine_id, output_format='xml')
    user_data = users.Users()

    synced_items = []

    try:
        xml_head = sync_list.getElementsByTagName('SyncList')
    except Exception as e:
        logger.warn(
            u"Tautulli PlexTV :: Unable to parse XML for get_synced_items: %s."
            % e)
        return {}

    for a in xml_head:
        client_id = helpers.get_xml_attr(a, 'clientIdentifier')

        # Filter by client_id
        if client_id_filter and str(client_id_filter) != client_id:
            continue

        sync_list_id = helpers.get_xml_attr(a, 'id')
        sync_device = a.getElementsByTagName('Device')

        for device in sync_device:
            device_user_id = helpers.get_xml_attr(device, 'userID')
            try:
                device_username = user_data.get_details(
                    user_id=device_user_id)['username']
                device_friendly_name = user_data.get_details(
                    user_id=device_user_id)['friendly_name']
            except:
                # Unknown user id: fall back to empty identity fields.
                device_username = ''
                device_friendly_name = ''

            device_name = helpers.get_xml_attr(device, 'name')
            device_product = helpers.get_xml_attr(device, 'product')
            device_product_version = helpers.get_xml_attr(
                device, 'productVersion')
            device_platform = helpers.get_xml_attr(device, 'platform')
            device_platform_version = helpers.get_xml_attr(
                device, 'platformVersion')
            device_type = helpers.get_xml_attr(device, 'device')
            device_model = helpers.get_xml_attr(device, 'model')
            device_last_seen = helpers.get_xml_attr(device, 'lastSeenAt')

            # Filter by user_id
            if user_id_filter and device_user_id not in user_id_filter:
                continue

            for synced in a.getElementsByTagName('SyncItems'):
                sync_item = synced.getElementsByTagName('SyncItem')

                for item in sync_item:
                    # The URI looks like ...%2Fmetadata%2F<rating_key>; split
                    # on the encoded slash and take the token after 'metadata'.
                    for location in item.getElementsByTagName('Location'):
                        clean_uri = helpers.get_xml_attr(
                            location, 'uri').split('%2F')

                    rating_key = next(
                        (clean_uri[(idx + 1) % len(clean_uri)]
                         for idx, item in enumerate(clean_uri)
                         if item == 'metadata'), None)

                    # Filter by rating_key
                    if rating_key_filter and rating_key not in rating_key_filter:
                        continue

                    sync_id = helpers.get_xml_attr(item, 'id')

                    # Filter by sync_id
                    if sync_id_filter and str(sync_id_filter) != sync_id:
                        continue

                    sync_version = helpers.get_xml_attr(item, 'version')
                    sync_root_title = helpers.get_xml_attr(item, 'rootTitle')
                    sync_title = helpers.get_xml_attr(item, 'title')
                    sync_metadata_type = helpers.get_xml_attr(
                        item, 'metadataType')
                    sync_content_type = helpers.get_xml_attr(
                        item, 'contentType')

                    for status in item.getElementsByTagName('Status'):
                        status_failure_code = helpers.get_xml_attr(
                            status, 'failureCode')
                        status_failure = helpers.get_xml_attr(
                            status, 'failure')
                        status_state = helpers.get_xml_attr(status, 'state')
                        status_item_count = helpers.get_xml_attr(
                            status, 'itemsCount')
                        status_item_complete_count = helpers.get_xml_attr(
                            status, 'itemsCompleteCount')
                        status_item_downloaded_count = helpers.get_xml_attr(
                            status, 'itemsDownloadedCount')
                        status_item_ready_count = helpers.get_xml_attr(
                            status, 'itemsReadyCount')
                        status_item_successful_count = helpers.get_xml_attr(
                            status, 'itemsSuccessfulCount')
                        status_total_size = helpers.get_xml_attr(
                            status, 'totalSize')
                        status_item_download_percent_complete = helpers.get_percent(
                            status_item_downloaded_count, status_item_count)

                    for settings in item.getElementsByTagName('MediaSettings'):
                        settings_video_bitrate = helpers.get_xml_attr(
                            settings, 'maxVideoBitrate')
                        settings_video_quality = helpers.get_xml_attr(
                            settings, 'videoQuality')
                        settings_video_resolution = helpers.get_xml_attr(
                            settings, 'videoResolution')
                        settings_audio_boost = helpers.get_xml_attr(
                            settings, 'audioBoost')
                        settings_audio_bitrate = helpers.get_xml_attr(
                            settings, 'musicBitrate')
                        settings_photo_quality = helpers.get_xml_attr(
                            settings, 'photoQuality')
                        settings_photo_resolution = helpers.get_xml_attr(
                            settings, 'photoResolution')

                    sync_details = {
                        "device_name": helpers.sanitize(device_name),
                        "platform": helpers.sanitize(device_platform),
                        "user_id": device_user_id,
                        "user": helpers.sanitize(device_friendly_name),
                        "username": helpers.sanitize(device_username),
                        "root_title": helpers.sanitize(sync_root_title),
                        "sync_title": helpers.sanitize(sync_title),
                        "metadata_type": sync_metadata_type,
                        "content_type": sync_content_type,
                        "rating_key": rating_key,
                        "state": status_state,
                        "item_count": status_item_count,
                        "item_complete_count": status_item_complete_count,
                        "item_downloaded_count": status_item_downloaded_count,
                        "item_downloaded_percent_complete":
                        status_item_download_percent_complete,
                        "video_bitrate": settings_video_bitrate,
                        "audio_bitrate": settings_audio_bitrate,
                        "photo_quality": settings_photo_quality,
                        "video_quality": settings_video_quality,
                        "total_size": status_total_size,
                        "failure": status_failure,
                        "client_id": client_id,
                        "sync_id": sync_id
                    }

                    synced_items.append(sync_details)

    return session.filter_session_info(synced_items, filter_key='user_id')
def dave():
    """Return a fixed sample user record for David Yuen."""
    return users.Users('dave03', '*****@*****.**', 'David', 'Yuen')
def eve():
    """Return a fixed sample user record for Eve Miles."""
    return users.Users('evmiles97', '*****@*****.**', 'Eve', 'Miles')
class InstallData:
    """Central mutable state for an anaconda installation run.

    NOTE(review): Python 2 code (`dict.has_key`, `except RuntimeError, msg`);
    do not run under Python 3 without porting.
    """

    def reset(self):
        # Reset everything except:
        #
        # - The install language
        # - The keyboard

        self.instClass = None
        self.network = network.Network()
        self.firewall = firewall.Firewall()
        self.security = security.Security()
        self.timezone = timezone.Timezone()
        self.timezone.setTimezoneInfo(
            self.instLanguage.getDefaultTimeZone(self.anaconda.rootPath))
        self.users = None
        self.rootPassword = {"isCrypted": False, "password": "", "lock": False}
        self.auth = "--enableshadow --passalgo=sha512"
        self.desktop = desktop.Desktop()
        self.upgrade = None
        if flags.cmdline.has_key("preupgrade"):
            self.upgrade = True
        self.storage = storage.Storage(self.anaconda)

        self.bootloader = booty.getBootloader(self)
        self.upgradeRoot = None
        self.rootParts = None
        self.upgradeSwapInfo = None

        self.escrowCertificates = {}

        if self.anaconda.isKickstart:
            self.firstboot = FIRSTBOOT_SKIP
        else:
            self.firstboot = FIRSTBOOT_DEFAULT

        # XXX I still expect this to die when kickstart is the data store.
        self.ksdata = None

    def setInstallProgressClass(self, c):
        self.instProgress = c

    def setDisplayMode(self, display_mode):
        self.displayMode = display_mode

    # expects a Keyboard object
    def setKeyboard(self, keyboard):
        self.keyboard = keyboard

    # expects 0/1
    def setHeadless(self, isHeadless):
        self.isHeadless = isHeadless

    def setKsdata(self, ksdata):
        self.ksdata = ksdata

    # if upgrade is None, it really means False. we use None to help the
    # installer ui figure out if it's the first time the user has entered
    # the examine_gui screen. --dcantrell
    def getUpgrade(self):
        if self.upgrade == None:
            return False
        else:
            return self.upgrade

    def setUpgrade(self, bool):
        self.upgrade = bool

    # Reads the auth string and returns a string indicating our desired
    # password encoding algorithm.
    def getPassAlgo(self):
        if self.auth.find("--enablemd5") != -1 or \
           self.auth.find("--passalgo=md5") != -1:
            return 'md5'
        elif self.auth.find("--passalgo=sha256") != -1:
            return 'sha256'
        elif self.auth.find("--passalgo=sha512") != -1:
            return 'sha512'
        else:
            return None

    def _addFingerprint(self):
        # Non-zero when the fprintd-pam package is installed in the target
        # root, in which case authconfig gets --enablefingerprint.
        import rpm
        iutil.resetRpmDb(self.anaconda.rootPath)
        ts = rpm.TransactionSet(self.anaconda.rootPath)
        return ts.dbMatch('provides', 'fprintd-pam').count()

    def write(self):
        # Flush all collected configuration into the target root filesystem.
        self.instLanguage.write(self.anaconda.rootPath)

        self.anaconda.writeXdriver(self.anaconda.rootPath)

        if not self.isHeadless:
            self.keyboard.write(self.anaconda.rootPath)

        self.timezone.write(self.anaconda.rootPath)

        args = ["--update", "--nostart"] + shlex.split(self.auth)

        if self._addFingerprint():
            args += ["--enablefingerprint"]

        try:
            iutil.execWithRedirect("/usr/sbin/authconfig", args,
                                   stdout="/dev/tty5", stderr="/dev/tty5",
                                   root=self.anaconda.rootPath)
        except RuntimeError, msg:
            log.error("Error running %s: %s", args, msg)

        self.network.write()
        self.network.copyConfigToPath(instPath=self.anaconda.rootPath)
        self.network.disableNMForStorageDevices(
            self.anaconda, instPath=self.anaconda.rootPath)
        self.network.autostartFCoEDevices(self.anaconda,
                                          instPath=self.anaconda.rootPath)
        self.firewall.write(self.anaconda.rootPath)
        self.security.write(self.anaconda.rootPath)
        self.desktop.write(self.anaconda.rootPath)

        self.users = users.Users()

        # make sure crypt_style in libuser.conf matches the salt we're using
        users.createLuserConf(self.anaconda.rootPath,
                              algoname=self.getPassAlgo())

        # User should already exist, just without a password.
        self.users.setRootPassword(self.rootPassword["password"],
                                   self.rootPassword["isCrypted"],
                                   self.rootPassword["lock"],
                                   algo=self.getPassAlgo())

        services = list(self.storage.services)

        if self.network.hasActiveIPoIBDevice():
            services.append("rdma")

        if self.anaconda.isKickstart:
            services.extend(self.ksdata.services.enabled)

            for svc in self.ksdata.services.disabled:
                iutil.execWithRedirect("/sbin/chkconfig", [svc, "off"],
                                       stdout="/dev/tty5",
                                       stderr="/dev/tty5",
                                       root=self.anaconda.rootPath)

            for gd in self.ksdata.group.groupList:
                if not self.users.createGroup(
                        name=gd.name, gid=gd.gid,
                        root=self.anaconda.rootPath):
                    log.error("Group %s already exists, not creating." %
                              gd.name)

            for ud in self.ksdata.user.userList:
                if not self.users.createUser(name=ud.name,
                                             password=ud.password,
                                             isCrypted=ud.isCrypted,
                                             groups=ud.groups,
                                             homedir=ud.homedir,
                                             shell=ud.shell,
                                             uid=ud.uid,
                                             algo=self.getPassAlgo(),
                                             lock=ud.lock,
                                             root=self.anaconda.rootPath,
                                             gecos=ud.gecos):
                    log.error("User %s already exists, not creating." %
                              ud.name)

        for svc in services:
            iutil.execWithRedirect("/sbin/chkconfig", [svc, "on"],
                                   stdout="/dev/tty5",
                                   stderr="/dev/tty5",
                                   root=self.anaconda.rootPath)
import vanilla
import psycopg2
import sys
import json
import hashlib
import base64
import users
import getpass

# Command-line bootstrap: reads a JSON config, connects to Postgres, and
# grants roles to a user.
# NOTE(review): the credential-prompt section below was redacted in this copy
# ('******' placeholders) and is NOT valid Python as it stands — the original
# presumably prompted for a username and password (getpass) and resolved the
# uid before granting roles. Restore from version control before running.
if __name__ == "__main__":
    with open(sys.argv[1], 'r') as fin:
        conf = json.load(fin)
    connPool = vanilla.buildConnectionPool(psycopg2,
                                           **conf['webapi']['postgresql'])
    u = users.Users(conf['salt'])
    u.setConnectionPool(connPool)
    sys.stdout.write('Username:'******'Password:'******'createInvite','changeRolesOfUser'],uid)
    sys.exit(0)
def write_session_history(self, session=None, import_metadata=None,
                          is_import=False, import_ignore_interval=0):
    """Write one finished playback session to the history tables.

    Writes session_history plus the matching session_history_media_info and
    session_history_metadata rows, and links grouped (resumed) sessions via
    reference_id. Returns session['id'] on success/skip, False when user,
    library, or metadata lookups fail. *is_import* switches between live
    sessions and rows imported from another database.
    """
    section_id = session[
        'section_id'] if not is_import else import_metadata['section_id']

    if not is_import:
        user_data = users.Users()
        user_details = user_data.get_details(user_id=session['user_id'])
        library_data = libraries.Libraries()
        library_details = library_data.get_details(section_id=section_id)
        # Return false if failed to retrieve user or library details
        if not user_details or not library_details:
            return False

    if session:
        logging_enabled = False

        # Reload json from raw stream info
        if session.get('raw_stream_info'):
            raw_stream_info = json.loads(session['raw_stream_info'])
            # Don't overwrite id, session_key, stopped
            raw_stream_info.pop('id', None)
            raw_stream_info.pop('session_key', None)
            raw_stream_info.pop('stopped', None)
            session.update(raw_stream_info)

        # defaultdict(str): missing keys read as '' instead of raising.
        session = defaultdict(str, session)

        if is_import:
            if str(session['stopped']).isdigit():
                stopped = int(session['stopped'])
            else:
                stopped = int(time.time())
        elif session['stopped']:
            stopped = int(session['stopped'])
        else:
            stopped = int(time.time())
            self.set_session_state(session_key=session['session_key'],
                                   state='stopped',
                                   stopped=stopped)

        if str(session['rating_key']).isdigit(
        ) and session['media_type'] in ('movie', 'episode', 'track'):
            logging_enabled = True
        else:
            logger.debug(
                u"Tautulli ActivityProcessor :: ratingKey %s not logged. Does not meet logging criteria. "
                u"Media type is '%s'" %
                (session['rating_key'], session['media_type']))
            return session['id']

        # Actual watch time excludes time spent paused.
        if str(session['paused_counter']).isdigit():
            real_play_time = stopped - session['started'] - int(
                session['paused_counter'])
        else:
            real_play_time = stopped - session['started']

        if not is_import and plexpy.CONFIG.LOGGING_IGNORE_INTERVAL:
            if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                    (real_play_time < int(plexpy.CONFIG.LOGGING_IGNORE_INTERVAL)):
                logging_enabled = False
                logger.debug(
                    u"Tautulli ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
                    u"seconds, so we're not logging it." %
                    (session['rating_key'], str(real_play_time),
                     plexpy.CONFIG.LOGGING_IGNORE_INTERVAL))
        if not is_import and session['media_type'] == 'track':
            # Short plays of longer tracks are treated as skips.
            if real_play_time < 15 and session['duration'] >= 30:
                logging_enabled = False
                logger.debug(
                    u"Tautulli ActivityProcessor :: Play duration for ratingKey %s is %s secs, "
                    u"looks like it was skipped so we're not logging it" %
                    (session['rating_key'], str(real_play_time)))
        elif is_import and import_ignore_interval:
            if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                    (real_play_time < int(import_ignore_interval)):
                logging_enabled = False
                logger.debug(
                    u"Tautulli ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
                    u"seconds, so we're not logging it." %
                    (session['rating_key'], str(real_play_time),
                     import_ignore_interval))

        if not is_import and not user_details['keep_history']:
            logging_enabled = False
            logger.debug(
                u"Tautulli ActivityProcessor :: History logging for user '%s' is disabled."
                % user_details['username'])
        elif not is_import and not library_details['keep_history']:
            logging_enabled = False
            logger.debug(
                u"Tautulli ActivityProcessor :: History logging for library '%s' is disabled."
                % library_details['section_name'])

        if logging_enabled:

            # Fetch metadata first so we can return false if it fails
            if not is_import:
                logger.debug(
                    u"Tautulli ActivityProcessor :: Fetching metadata for item ratingKey %s"
                    % session['rating_key'])
                pms_connect = pmsconnect.PmsConnect()
                metadata = pms_connect.get_metadata_details(
                    rating_key=str(session['rating_key']))
                if not metadata:
                    return False
                else:
                    media_info = {}
                    if 'media_info' in metadata and len(
                            metadata['media_info']) > 0:
                        media_info = metadata['media_info'][0]
            else:
                metadata = import_metadata
                ## TODO: Fix media info from imports. Temporary media info from import session.
                media_info = session

            # logger.debug(u"Tautulli ActivityProcessor :: Attempting to write sessionKey %s to session_history table..."
            #              % session['session_key'])
            keys = {'id': None}
            values = {
                'started': session['started'],
                'stopped': stopped,
                'rating_key': session['rating_key'],
                'parent_rating_key': session['parent_rating_key'],
                'grandparent_rating_key': session['grandparent_rating_key'],
                'media_type': session['media_type'],
                'user_id': session['user_id'],
                'user': session['user'],
                'ip_address': session['ip_address'],
                'paused_counter': session['paused_counter'],
                'player': session['player'],
                'product': session['product'],
                'product_version': session['product_version'],
                'platform': session['platform'],
                'platform_version': session['platform_version'],
                'profile': session['profile'],
                'machine_id': session['machine_id'],
                'bandwidth': session['bandwidth'],
                'location': session['location'],
                'quality_profile': session['quality_profile'],
                'view_offset': session['view_offset']
            }

            # logger.debug(u"Tautulli ActivityProcessor :: Writing sessionKey %s session_history transaction..."
            #              % session['session_key'])
            self.db.upsert(table_name='session_history',
                           key_dict=keys,
                           value_dict=values)

            # Check if we should group the session, select the last two rows from the user
            query = 'SELECT id, rating_key, view_offset, user_id, reference_id FROM session_history \
                     WHERE user_id = ? ORDER BY id DESC LIMIT 2 '

            args = [session['user_id']]

            result = self.db.select(query=query, args=args)

            new_session = prev_session = None
            # Get the last insert row id
            last_id = self.db.last_insert_id()

            if len(result) > 1:
                new_session = {
                    'id': result[0]['id'],
                    'rating_key': result[0]['rating_key'],
                    'view_offset': result[0]['view_offset'],
                    'user_id': result[0]['user_id'],
                    'reference_id': result[0]['reference_id']
                }

                prev_session = {
                    'id': result[1]['id'],
                    'rating_key': result[1]['rating_key'],
                    'view_offset': result[1]['view_offset'],
                    'user_id': result[1]['user_id'],
                    'reference_id': result[1]['reference_id']
                }

            query = 'UPDATE session_history SET reference_id = ? WHERE id = ? '
            # If rating_key is the same in the previous session, then set the reference_id to the previous row, else set the reference_id to the new id
            if prev_session is None and new_session is None:
                args = [last_id, last_id]
            elif prev_session['rating_key'] == new_session[
                    'rating_key'] and prev_session[
                        'view_offset'] <= new_session['view_offset']:
                args = [prev_session['reference_id'], new_session['id']]
            else:
                args = [new_session['id'], new_session['id']]

            self.db.action(query=query, args=args)

            # logger.debug(u"Tautulli ActivityProcessor :: Successfully written history item, last id for session_history is %s"
            #              % last_id)

            # Write the session_history_media_info table

            # logger.debug(u"Tautulli ActivityProcessor :: Attempting to write to sessionKey %s session_history_media_info table..."
            #              % session['session_key'])
            keys = {'id': last_id}
            values = {
                'rating_key': session['rating_key'],
                'video_decision': session['video_decision'],
                'audio_decision': session['audio_decision'],
                'transcode_decision': session['transcode_decision'],
                'duration': session['duration'],
                'container': session['container'],
                'bitrate': session['bitrate'],
                'width': session['width'],
                'height': session['height'],
                'video_bit_depth': session['video_bit_depth'],
                'video_bitrate': session['video_bitrate'],
                'video_codec': session['video_codec'],
                'video_codec_level': session['video_codec_level'],
                'video_width': session['video_width'],
                'video_height': session['video_height'],
                'video_resolution': session['video_resolution'],
                'video_framerate': session['video_framerate'],
                'aspect_ratio': session['aspect_ratio'],
                'audio_codec': session['audio_codec'],
                'audio_bitrate': session['audio_bitrate'],
                'audio_channels': session['audio_channels'],
                'subtitle_codec': session['subtitle_codec'],
                'transcode_protocol': session['transcode_protocol'],
                'transcode_container': session['transcode_container'],
                'transcode_video_codec': session['transcode_video_codec'],
                'transcode_audio_codec': session['transcode_audio_codec'],
                'transcode_audio_channels': session['transcode_audio_channels'],
                'transcode_width': session['transcode_width'],
                'transcode_height': session['transcode_height'],
                'transcode_hw_requested': session['transcode_hw_requested'],
                'transcode_hw_full_pipeline': session['transcode_hw_full_pipeline'],
                'transcode_hw_decoding': session['transcode_hw_decoding'],
                'transcode_hw_decode': session['transcode_hw_decode'],
                'transcode_hw_decode_title': session['transcode_hw_decode_title'],
                'transcode_hw_encoding': session['transcode_hw_encoding'],
                'transcode_hw_encode': session['transcode_hw_encode'],
                'transcode_hw_encode_title': session['transcode_hw_encode_title'],
                'stream_container': session['stream_container'],
                'stream_container_decision': session['stream_container_decision'],
                'stream_bitrate': session['stream_bitrate'],
                'stream_video_decision': session['stream_video_decision'],
                'stream_video_bitrate': session['stream_video_bitrate'],
                'stream_video_codec': session['stream_video_codec'],
                'stream_video_codec_level': session['stream_video_codec_level'],
                'stream_video_bit_depth': session['stream_video_bit_depth'],
                'stream_video_height': session['stream_video_height'],
                'stream_video_width': session['stream_video_width'],
                'stream_video_resolution': session['stream_video_resolution'],
                'stream_video_framerate': session['stream_video_framerate'],
                'stream_audio_decision': session['stream_audio_decision'],
                'stream_audio_codec': session['stream_audio_codec'],
                'stream_audio_bitrate': session['stream_audio_bitrate'],
                'stream_audio_channels': session['stream_audio_channels'],
                'stream_subtitle_decision': session['stream_subtitle_decision'],
                'stream_subtitle_codec': session['stream_subtitle_codec'],
                'stream_subtitle_container': session['stream_subtitle_container'],
                'stream_subtitle_forced': session['stream_subtitle_forced'],
                'subtitles': session['subtitles'],
                'synced_version': session['synced_version'],
                'synced_version_profile': session['synced_version_profile'],
                'optimized_version': session['optimized_version'],
                'optimized_version_profile': session['optimized_version_profile'],
                'optimized_version_title': session['optimized_version_title']
            }

            # logger.debug(u"Tautulli ActivityProcessor :: Writing sessionKey %s session_history_media_info transaction..."
            #              % session['session_key'])
            self.db.upsert(table_name='session_history_media_info',
                           key_dict=keys,
                           value_dict=values)

            # Write the session_history_metadata table
            # Multi-valued metadata fields are stored semicolon-delimited.
            directors = ";".join(metadata['directors'])
            writers = ";".join(metadata['writers'])
            actors = ";".join(metadata['actors'])
            genres = ";".join(metadata['genres'])
            labels = ";".join(metadata['labels'])

            # logger.debug(u"Tautulli ActivityProcessor :: Attempting to write to sessionKey %s session_history_metadata table..."
            #              % session['session_key'])
            keys = {'id': last_id}
            values = {
                'rating_key': session['rating_key'],
                'parent_rating_key': session['parent_rating_key'],
                'grandparent_rating_key': session['grandparent_rating_key'],
                'title': session['title'],
                'parent_title': session['parent_title'],
                'grandparent_title': session['grandparent_title'],
                'full_title': session['full_title'],
                'media_index': metadata['media_index'],
                'parent_media_index': metadata['parent_media_index'],
                'section_id': metadata['section_id'],
                'thumb': metadata['thumb'],
                'parent_thumb': metadata['parent_thumb'],
                'grandparent_thumb': metadata['grandparent_thumb'],
                'art': metadata['art'],
                'media_type': session['media_type'],
                'year': metadata['year'],
                'originally_available_at': metadata['originally_available_at'],
                'added_at': metadata['added_at'],
                'updated_at': metadata['updated_at'],
                'last_viewed_at': metadata['last_viewed_at'],
                'content_rating': metadata['content_rating'],
                'summary': metadata['summary'],
                'tagline': metadata['tagline'],
                'rating': metadata['rating'],
                'duration': metadata['duration'],
                'guid': metadata['guid'],
                'directors': directors,
                'writers': writers,
                'actors': actors,
                'genres': genres,
                'studio': metadata['studio'],
                'labels': labels
            }

            # logger.debug(u"Tautulli ActivityProcessor :: Writing sessionKey %s session_history_metadata transaction..."
            #              % session['session_key'])
            self.db.upsert(table_name='session_history_metadata',
                           key_dict=keys,
                           value_dict=values)

        # Return the session row id when the session is successfully written to the database
        return session['id']
def import_from_plexivity(database_file=None, table_name=None, import_ignore_interval=0):
    """Import watch history from a Plexivity SQLite database into Tautulli.

    Parameters:
        database_file: path to the Plexivity SQLite database file.
        table_name: name of the Plexivity history table to read from.
        import_ignore_interval: minimum play duration in seconds; movie and
            episode records shorter than this are skipped by the processor.

    Returns:
        None on failure (invalid file, missing fields, or user refresh
        failure); otherwise falls through after writing all importable rows.
    """
    try:
        connection = sqlite3.connect(database_file, timeout=20)
        connection.row_factory = sqlite3.Row
    except sqlite3.OperationalError:
        logger.error("Tautulli Importer :: Invalid filename.")
        return None
    except ValueError:
        logger.error("Tautulli Importer :: Invalid filename.")
        return None

    try:
        # Probe for the required 'xml' column. NOTE: table names cannot be
        # bound as SQL parameters, so table_name is interpolated directly --
        # it must come from a trusted source (the import settings UI).
        connection.execute('SELECT xml from %s' % table_name)
    except sqlite3.OperationalError:
        logger.error("Tautulli Importer :: Database specified does not contain the required fields.")
        connection.close()
        return None

    logger.debug("Tautulli Importer :: Plexivity data import in progress...")
    database.set_is_importing(True)

    ap = activity_processor.ActivityProcessor()
    user_data = users.Users()

    # Get the latest friends list so we can pull user id's
    try:
        users.refresh_users()
    except Exception:
        logger.debug("Tautulli Importer :: Unable to refresh the users list. Aborting import.")
        # Reset the flag so the UI doesn't stay stuck on "import in progress",
        # and release the database handle before bailing out.
        database.set_is_importing(False)
        connection.close()
        return None

    query = 'SELECT id AS id, ' \
            'time AS started, ' \
            'stopped, ' \
            'null AS user_id, ' \
            'user, ' \
            'ip_address, ' \
            'paused_counter, ' \
            'platform AS player, ' \
            'null AS platform, ' \
            'null as machine_id, ' \
            'null AS media_type, ' \
            'null AS view_offset, ' \
            'xml, ' \
            'rating as content_rating,' \
            'summary,' \
            'title AS full_title,' \
            '(case when orig_title_ep = "n/a" then orig_title else ' \
            'orig_title_ep end) as title,' \
            '(case when orig_title_ep != "n/a" then orig_title else ' \
            'null end) as grandparent_title ' \
            'FROM ' + table_name + ' ORDER BY id'

    result = connection.execute(query)

    for row in result:
        # Extract the xml from the Plexivity db xml field.
        extracted_xml = extract_plexivity_xml(row['xml'])

        # If we get back None from our xml extractor skip over the record and log error.
        if not extracted_xml:
            logger.error("Tautulli Importer :: Skipping record with id %s due to malformed xml."
                         % str(row['id']))
            continue

        # If the user_id no longer exists in the friends list, pull it from
        # the xml. (Single lookup; a falsy result falls back to the xml value.)
        user_id = user_data.get_user_id(user=row['user']) or extracted_xml['user_id']

        session_history = {'started': arrow.get(row['started']).timestamp(),
                           'stopped': arrow.get(row['stopped']).timestamp(),
                           'rating_key': extracted_xml['rating_key'],
                           'title': row['title'],
                           'parent_title': extracted_xml['parent_title'],
                           'grandparent_title': row['grandparent_title'],
                           'original_title': extracted_xml['original_title'],
                           'full_title': row['full_title'],
                           'user_id': user_id,
                           'user': row['user'],
                           'ip_address': row['ip_address'] if row['ip_address'] else extracted_xml['ip_address'],
                           'paused_counter': row['paused_counter'],
                           'player': row['player'],
                           'platform': extracted_xml['platform'],
                           'machine_id': extracted_xml['machine_id'],
                           'parent_rating_key': extracted_xml['parent_rating_key'],
                           'grandparent_rating_key': extracted_xml['grandparent_rating_key'],
                           'media_type': extracted_xml['media_type'],
                           'view_offset': extracted_xml['view_offset'],
                           'section_id': extracted_xml['section_id'],
                           'video_decision': extracted_xml['video_decision'],
                           'audio_decision': extracted_xml['audio_decision'],
                           'transcode_decision': extracted_xml['transcode_decision'],
                           'duration': extracted_xml['duration'],
                           'width': extracted_xml['width'],
                           'height': extracted_xml['height'],
                           'container': extracted_xml['container'],
                           'video_codec': extracted_xml['video_codec'],
                           'audio_codec': extracted_xml['audio_codec'],
                           'bitrate': extracted_xml['bitrate'],
                           'video_resolution': extracted_xml['video_resolution'],
                           'video_framerate': extracted_xml['video_framerate'],
                           'aspect_ratio': extracted_xml['aspect_ratio'],
                           'audio_channels': extracted_xml['audio_channels'],
                           'transcode_protocol': extracted_xml['transcode_protocol'],
                           'transcode_container': extracted_xml['transcode_container'],
                           'transcode_video_codec': extracted_xml['transcode_video_codec'],
                           'transcode_audio_codec': extracted_xml['transcode_audio_codec'],
                           'transcode_audio_channels': extracted_xml['transcode_audio_channels'],
                           'transcode_width': extracted_xml['transcode_width'],
                           'transcode_height': extracted_xml['transcode_height']
                           }

        session_history_metadata = {'rating_key': extracted_xml['rating_key'],
                                    'parent_rating_key': extracted_xml['parent_rating_key'],
                                    'grandparent_rating_key': extracted_xml['grandparent_rating_key'],
                                    'title': row['title'],
                                    'parent_title': extracted_xml['parent_title'],
                                    'grandparent_title': row['grandparent_title'],
                                    'original_title': extracted_xml['original_title'],
                                    'media_index': extracted_xml['media_index'],
                                    'parent_media_index': extracted_xml['parent_media_index'],
                                    'thumb': extracted_xml['thumb'],
                                    'parent_thumb': extracted_xml['parent_thumb'],
                                    'grandparent_thumb': extracted_xml['grandparent_thumb'],
                                    'art': extracted_xml['art'],
                                    'media_type': extracted_xml['media_type'],
                                    'year': extracted_xml['year'],
                                    'originally_available_at': extracted_xml['originally_available_at'],
                                    'added_at': extracted_xml['added_at'],
                                    'updated_at': extracted_xml['updated_at'],
                                    'last_viewed_at': extracted_xml['last_viewed_at'],
                                    'content_rating': row['content_rating'],
                                    'summary': row['summary'],
                                    'tagline': extracted_xml['tagline'],
                                    'rating': extracted_xml['rating'],
                                    'duration': extracted_xml['duration'],
                                    'guid': extracted_xml['guid'],
                                    'directors': extracted_xml['directors'],
                                    'writers': extracted_xml['writers'],
                                    'actors': extracted_xml['actors'],
                                    'genres': extracted_xml['genres'],
                                    'studio': extracted_xml['studio'],
                                    'labels': extracted_xml['labels'],
                                    'full_title': row['full_title'],
                                    'width': extracted_xml['width'],
                                    'height': extracted_xml['height'],
                                    'container': extracted_xml['container'],
                                    'video_codec': extracted_xml['video_codec'],
                                    'audio_codec': extracted_xml['audio_codec'],
                                    'bitrate': extracted_xml['bitrate'],
                                    'video_resolution': extracted_xml['video_resolution'],
                                    'video_framerate': extracted_xml['video_framerate'],
                                    'aspect_ratio': extracted_xml['aspect_ratio'],
                                    'audio_channels': extracted_xml['audio_channels']
                                    }

        # On older versions of PMS, "clip" items were still classified as "movie"
        # and had bad ratingKey values. Just make sure that the ratingKey is
        # indeed an integer before writing the row.
        if session_history_metadata['rating_key'].isdigit():
            ap.write_session_history(session=session_history,
                                     import_metadata=session_history_metadata,
                                     is_import=True,
                                     import_ignore_interval=import_ignore_interval)
        else:
            logger.debug("Tautulli Importer :: Item has bad rating_key: %s"
                         % session_history_metadata['rating_key'])

    # Release the source database handle now that the import is finished.
    connection.close()

    import_users()

    logger.debug("Tautulli Importer :: Plexivity data import complete.")
    database.set_is_importing(False)
def write_session_history(self, session=None, import_metadata=None, is_import=False, import_ignore_interval=0):
    """Write a stopped session to the session_history tables.

    Parameters:
        session: dict of session fields (live session or import row).
        import_metadata: pre-built metadata dict, used only when is_import.
        is_import: True when called from a database importer; skips user /
            library lookups and the live-session state update.
        import_ignore_interval: minimum play duration (secs) for imported
            movie/episode records.

    Returns:
        True when the session was written, False when user/library/metadata
        lookups failed, None when the session was not eligible for logging.
    """
    section_id = session['section_id'] if not is_import else import_metadata['section_id']

    if not is_import:
        user_data = users.Users()
        user_details = user_data.get_details(user_id=session['user_id'])
        library_data = libraries.Libraries()
        library_details = library_data.get_details(section_id=section_id)
        # Return false if failed to retrieve user or library details
        if not user_details or not library_details:
            return False

    if session:
        logging_enabled = False

        # Determine the stop time: imports trust (or default) the row value;
        # live sessions without a stop time are stamped "now" and flushed to
        # the active-session table.
        if is_import:
            if str(session['stopped']).isdigit():
                stopped = int(session['stopped'])
            else:
                stopped = int(time.time())
        elif session['stopped']:
            stopped = int(session['stopped'])
        else:
            stopped = int(time.time())
            self.set_session_state(session_key=session['session_key'],
                                   state='stopped',
                                   stopped=stopped)

        # A session is loggable only if its media type's logging toggle is on
        # and it has a numeric ratingKey.
        if plexpy.CONFIG.MOVIE_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                session['media_type'] == 'movie':
            logging_enabled = True
        elif plexpy.CONFIG.TV_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                session['media_type'] == 'episode':
            logging_enabled = True
        elif plexpy.CONFIG.MUSIC_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                session['media_type'] == 'track':
            logging_enabled = True
        else:
            logger.debug(u"PlexPy ActivityProcessor :: ratingKey %s not logged. Does not meet logging criteria. "
                         u"Media type is '%s'" % (session['rating_key'], session['media_type']))

        # Actual watch time excludes any paused time.
        if str(session['paused_counter']).isdigit():
            real_play_time = stopped - session['started'] - int(session['paused_counter'])
        else:
            real_play_time = stopped - session['started']

        if not is_import and plexpy.CONFIG.LOGGING_IGNORE_INTERVAL:
            if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                    (real_play_time < int(plexpy.CONFIG.LOGGING_IGNORE_INTERVAL)):
                logging_enabled = False
                logger.debug(u"PlexPy ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
                             u"seconds, so we're not logging it." %
                             (session['rating_key'], str(real_play_time),
                              plexpy.CONFIG.LOGGING_IGNORE_INTERVAL))
        if not is_import and session['media_type'] == 'track':
            # Short plays of longer tracks look like skips.
            if real_play_time < 15 and session['duration'] >= 30:
                logging_enabled = False
                logger.debug(u"PlexPy ActivityProcessor :: Play duration for ratingKey %s is %s secs, "
                             u"looks like it was skipped so we're not logging it" %
                             (session['rating_key'], str(real_play_time)))
        elif is_import and import_ignore_interval:
            if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                    (real_play_time < int(import_ignore_interval)):
                logging_enabled = False
                logger.debug(u"PlexPy ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
                             u"seconds, so we're not logging it." %
                             (session['rating_key'], str(real_play_time),
                              import_ignore_interval))

        if not is_import and not user_details['keep_history']:
            logging_enabled = False
            logger.debug(u"PlexPy ActivityProcessor :: History logging for user '%s' is disabled." %
                         user_details['username'])
        elif not is_import and not library_details['keep_history']:
            logging_enabled = False
            logger.debug(u"PlexPy ActivityProcessor :: History logging for library '%s' is disabled." %
                         library_details['section_name'])

        if logging_enabled:
            # Fetch metadata first so we can return false if it fails
            if not is_import:
                logger.debug(u"PlexPy ActivityProcessor :: Fetching metadata for item ratingKey %s" %
                             session['rating_key'])
                pms_connect = pmsconnect.PmsConnect()
                result = pms_connect.get_metadata_details(rating_key=str(session['rating_key']))
                if result:
                    metadata = result['metadata']
                else:
                    return False
            else:
                metadata = import_metadata

            # Write the base session_history row.
            query = 'INSERT INTO session_history (started, stopped, rating_key, parent_rating_key, ' \
                    'grandparent_rating_key, media_type, user_id, user, ip_address, paused_counter, player, ' \
                    'platform, machine_id, view_offset) VALUES ' \
                    '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
            args = [session['started'], stopped, session['rating_key'], session['parent_rating_key'],
                    session['grandparent_rating_key'], session['media_type'], session['user_id'],
                    session['user'], session['ip_address'], session['paused_counter'],
                    session['player'], session['platform'], session['machine_id'],
                    session['view_offset']]
            self.db.action(query=query, args=args)

            # Check if we should group the session, select the last two rows from the user
            query = 'SELECT id, rating_key, view_offset, user_id, reference_id FROM session_history ' \
                    'WHERE user_id = ? ORDER BY id DESC LIMIT 2 '
            args = [session['user_id']]
            result = self.db.select(query=query, args=args)

            new_session = prev_session = last_id = None
            if len(result) > 1:
                new_session = {'id': result[0]['id'],
                               'rating_key': result[0]['rating_key'],
                               'view_offset': result[0]['view_offset'],
                               'user_id': result[0]['user_id'],
                               'reference_id': result[0]['reference_id']}
                prev_session = {'id': result[1]['id'],
                                'rating_key': result[1]['rating_key'],
                                'view_offset': result[1]['view_offset'],
                                'user_id': result[1]['user_id'],
                                'reference_id': result[1]['reference_id']}
            else:
                # Get the last insert row id
                result = self.db.select(query='SELECT last_insert_rowid() AS last_id')
                last_id = result[0]['last_id'] if result else None

            query = 'UPDATE session_history SET reference_id = ? WHERE id = ? '
            # If rating_key is the same in the previous session and playback has
            # not been rewound, group with the previous row's reference_id;
            # otherwise the new row starts its own reference group.
            # (FIX: identity comparison with `is None` instead of `==`.)
            if new_session is None and prev_session is None:
                args = [last_id, last_id]
            elif prev_session['rating_key'] == new_session['rating_key'] and \
                    prev_session['view_offset'] <= new_session['view_offset']:
                args = [prev_session['reference_id'], new_session['id']]
            else:
                args = [new_session['id'], new_session['id']]
            self.db.action(query=query, args=args)

            # Write the session_history_media_info table

            # Generate a combined transcode decision value
            if session['video_decision'] == 'transcode' or session['audio_decision'] == 'transcode':
                transcode_decision = 'transcode'
            elif session['video_decision'] == 'copy' or session['audio_decision'] == 'copy':
                transcode_decision = 'copy'
            else:
                transcode_decision = 'direct play'

            query = 'INSERT INTO session_history_media_info (id, rating_key, video_decision, audio_decision, ' \
                    'duration, width, height, container, video_codec, audio_codec, bitrate, video_resolution, ' \
                    'video_framerate, aspect_ratio, audio_channels, transcode_protocol, transcode_container, ' \
                    'transcode_video_codec, transcode_audio_codec, transcode_audio_channels, transcode_width, ' \
                    'transcode_height, transcode_decision) VALUES ' \
                    '(last_insert_rowid(), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
            args = [session['rating_key'], session['video_decision'], session['audio_decision'],
                    session['duration'], session['width'], session['height'], session['container'],
                    session['video_codec'], session['audio_codec'], session['bitrate'],
                    session['video_resolution'], session['video_framerate'], session['aspect_ratio'],
                    session['audio_channels'], session['transcode_protocol'],
                    session['transcode_container'], session['transcode_video_codec'],
                    session['transcode_audio_codec'], session['transcode_audio_channels'],
                    session['transcode_width'], session['transcode_height'], transcode_decision]
            self.db.action(query=query, args=args)

            # Write the session_history_metadata table
            directors = ";".join(metadata['directors'])
            writers = ";".join(metadata['writers'])
            actors = ";".join(metadata['actors'])
            genres = ";".join(metadata['genres'])
            labels = ";".join(metadata['labels'])

            # Build media item title. (The original 'movie' branch was
            # identical to the fallback, so it is folded into the else.)
            if session['media_type'] == 'episode' or session['media_type'] == 'track':
                full_title = '%s - %s' % (metadata['grandparent_title'], metadata['title'])
            else:
                full_title = metadata['title']

            query = 'INSERT INTO session_history_metadata (id, rating_key, parent_rating_key, ' \
                    'grandparent_rating_key, title, parent_title, grandparent_title, full_title, media_index, ' \
                    'parent_media_index, section_id, thumb, parent_thumb, grandparent_thumb, art, media_type, ' \
                    'year, originally_available_at, added_at, updated_at, last_viewed_at, content_rating, ' \
                    'summary, tagline, rating, duration, guid, directors, writers, actors, genres, studio, labels) ' \
                    'VALUES (last_insert_rowid(), ' \
                    '?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
            args = [session['rating_key'], session['parent_rating_key'],
                    session['grandparent_rating_key'], session['title'], session['parent_title'],
                    session['grandparent_title'], full_title, metadata['media_index'],
                    metadata['parent_media_index'], metadata['section_id'], metadata['thumb'],
                    metadata['parent_thumb'], metadata['grandparent_thumb'], metadata['art'],
                    session['media_type'], metadata['year'], metadata['originally_available_at'],
                    metadata['added_at'], metadata['updated_at'], metadata['last_viewed_at'],
                    metadata['content_rating'], metadata['summary'], metadata['tagline'],
                    metadata['rating'], metadata['duration'], metadata['guid'],
                    directors, writers, actors, genres, metadata['studio'], labels]
            self.db.action(query=query, args=args)

            # Return true when the session is successfully written to the database
            return True