Example 1
def uninstall_geoip_db():
    logger.debug(u"Tautulli Helpers :: Uninstalling the GeoLite2 database...")
    try:
        os.remove(plexpy.CONFIG.GEOIP_DB)
    except Exception as e:
        logger.error(
            u"Tautulli Helpers :: Failed to uninstall the GeoLite2 database: %s"
            % e)
        return False

    plexpy.CONFIG.__setattr__('GEOIP_DB_INSTALLED', 0)
    plexpy.CONFIG.write()

    logger.debug(
        u"Tautulli Helpers :: GeoLite2 database uninstalled successfully.")

    plexpy.schedule_job(update_geoip_db,
                        'Update GeoLite2 database',
                        hours=0,
                        minutes=0,
                        seconds=0)

    return True
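
A note on the pattern above: these examples call plexpy.schedule_job(...) with hours=0, minutes=0, seconds=0 whenever a job should stop running (the surrounding log lines talk about disabling monitoring or removing a schedule), which suggests the helper treats an all-zero interval as "remove this job". Tautulli runs its background tasks on APScheduler, so a minimal, illustrative sketch of such a helper could look like the code below. This is an assumption for illustration only, not Tautulli's actual schedule_job implementation.

# Illustrative sketch: mimics the observed "all-zero interval removes the job"
# convention on top of APScheduler. Not Tautulli's real helper.
from apscheduler.schedulers.background import BackgroundScheduler

SCHED = BackgroundScheduler()


def schedule_job(func, job_id, hours=0, minutes=0, seconds=0):
    """Add, reschedule, or remove an interval job on the shared scheduler."""
    job = SCHED.get_job(job_id)
    if hours == minutes == seconds == 0:
        # A zero interval means the job should not be running at all.
        if job:
            SCHED.remove_job(job_id)
        return
    if job:
        SCHED.reschedule_job(job_id, trigger='interval',
                             hours=hours, minutes=minutes, seconds=seconds)
    else:
        SCHED.add_job(func, trigger='interval', id=job_id,
                      hours=hours, minutes=minutes, seconds=seconds)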
Example 2
def import_from_plexwatch(database=None,
                          table_name=None,
                          import_ignore_interval=0):

    try:
        connection = sqlite3.connect(database, timeout=20)
        connection.row_factory = sqlite3.Row
    except sqlite3.OperationalError:
        logger.error(u"PlexPy Importer :: Invalid filename.")
        return None
    except ValueError:
        logger.error(u"PlexPy Importer :: Invalid filename.")
        return None

    try:
        connection.execute('SELECT ratingKey from %s' % table_name)
    except sqlite3.OperationalError:
        logger.error(
            u"PlexPy Importer :: Database specified does not contain the required fields."
        )
        return None

    logger.debug(u"PlexPy Importer :: PlexWatch data import in progress...")

    logger.debug(
        u"PlexPy Importer :: Disabling monitoring while import in progress.")
    plexpy.schedule_job(activity_pinger.check_active_sessions,
                        'Check for active sessions',
                        hours=0,
                        minutes=0,
                        seconds=0)
    plexpy.schedule_job(activity_pinger.check_recently_added,
                        'Check for recently added items',
                        hours=0,
                        minutes=0,
                        seconds=0)
    plexpy.schedule_job(activity_pinger.check_server_response,
                        'Check for server response',
                        hours=0,
                        minutes=0,
                        seconds=0)

    ap = activity_processor.ActivityProcessor()
    user_data = users.Users()

    # Get the latest friends list so we can pull user IDs
    try:
        plextv.refresh_users()
    except:
        logger.debug(
            u"PlexPy Importer :: Unable to refresh the users list. Aborting import."
        )
        return None

    query = 'SELECT time AS started, ' \
            'stopped, ' \
            'cast(ratingKey as text) AS rating_key, ' \
            'null AS user_id, ' \
            'user, ' \
            'ip_address, ' \
            'paused_counter, ' \
            'platform AS player, ' \
            'null AS platform, ' \
            'null as machine_id, ' \
            'parentRatingKey as parent_rating_key, ' \
            'grandparentRatingKey as grandparent_rating_key, ' \
            'null AS media_type, ' \
            'null AS view_offset, ' \
            'xml, ' \
            'rating as content_rating,' \
            'summary,' \
            'title AS full_title,' \
            '(case when orig_title_ep = "" then orig_title else ' \
            'orig_title_ep end) as title,' \
            '(case when orig_title_ep != "" then orig_title else ' \
            'null end) as grandparent_title ' \
            'FROM ' + table_name + ' ORDER BY id'

    result = connection.execute(query)

    for row in result:
        # Extract the xml from the Plexwatch db xml field.
        extracted_xml = extract_plexwatch_xml(row['xml'])

        # If we get back None from our xml extractor skip over the record and log error.
        if not extracted_xml:
            logger.error(
                u"PlexPy Importer :: Skipping record with ratingKey %s due to malformed xml."
                % str(row['rating_key']))
            continue

        # Skip line if we don't have a ratingKey to work with
        if not row['rating_key']:
            logger.error(
                u"PlexPy Importer :: Skipping record due to null ratingRey.")
            continue

        # If the user_id no longer exists in the friends list, pull it from the xml.
        if user_data.get_user_id(user=row['user']):
            user_id = user_data.get_user_id(user=row['user'])
        else:
            user_id = extracted_xml['user_id']

        session_history = {
            'started': row['started'],
            'stopped': row['stopped'],
            'rating_key': row['rating_key'],
            'title': row['title'],
            'parent_title': extracted_xml['parent_title'],
            'grandparent_title': row['grandparent_title'],
            'user_id': user_id,
            'user': row['user'],
            'ip_address': row['ip_address'],
            'paused_counter': row['paused_counter'],
            'player': row['player'],
            'platform': extracted_xml['platform'],
            'machine_id': extracted_xml['machine_id'],
            'parent_rating_key': row['parent_rating_key'],
            'grandparent_rating_key': row['grandparent_rating_key'],
            'media_type': extracted_xml['media_type'],
            'view_offset': extracted_xml['view_offset'],
            'video_decision': extracted_xml['video_decision'],
            'audio_decision': extracted_xml['audio_decision'],
            'duration': extracted_xml['duration'],
            'width': extracted_xml['width'],
            'height': extracted_xml['height'],
            'container': extracted_xml['container'],
            'video_codec': extracted_xml['video_codec'],
            'audio_codec': extracted_xml['audio_codec'],
            'bitrate': extracted_xml['bitrate'],
            'video_resolution': extracted_xml['video_resolution'],
            'video_framerate': extracted_xml['video_framerate'],
            'aspect_ratio': extracted_xml['aspect_ratio'],
            'audio_channels': extracted_xml['audio_channels'],
            'transcode_protocol': extracted_xml['transcode_protocol'],
            'transcode_container': extracted_xml['transcode_container'],
            'transcode_video_codec': extracted_xml['transcode_video_codec'],
            'transcode_audio_codec': extracted_xml['transcode_audio_codec'],
            'transcode_audio_channels': extracted_xml['transcode_audio_channels'],
            'transcode_width': extracted_xml['transcode_width'],
            'transcode_height': extracted_xml['transcode_height']
        }

        session_history_metadata = {
            'rating_key': helpers.latinToAscii(row['rating_key']),
            'parent_rating_key': row['parent_rating_key'],
            'grandparent_rating_key': row['grandparent_rating_key'],
            'title': row['title'],
            'parent_title': extracted_xml['parent_title'],
            'grandparent_title': row['grandparent_title'],
            'media_index': extracted_xml['media_index'],
            'parent_media_index': extracted_xml['parent_media_index'],
            'thumb': extracted_xml['thumb'],
            'parent_thumb': extracted_xml['parent_thumb'],
            'grandparent_thumb': extracted_xml['grandparent_thumb'],
            'art': extracted_xml['art'],
            'media_type': extracted_xml['media_type'],
            'year': extracted_xml['year'],
            'originally_available_at': extracted_xml['originally_available_at'],
            'added_at': extracted_xml['added_at'],
            'updated_at': extracted_xml['updated_at'],
            'last_viewed_at': extracted_xml['last_viewed_at'],
            'content_rating': row['content_rating'],
            'summary': row['summary'],
            'tagline': extracted_xml['tagline'],
            'rating': extracted_xml['rating'],
            'duration': extracted_xml['duration'],
            'guid': extracted_xml['guid'],
            'section_id': extracted_xml['section_id'],
            'directors': extracted_xml['directors'],
            'writers': extracted_xml['writers'],
            'actors': extracted_xml['actors'],
            'genres': extracted_xml['genres'],
            'studio': extracted_xml['studio'],
            'full_title': row['full_title']
        }

        # On older versions of PMS, "clip" items were still classified as "movie" and had bad ratingKey values
        # Just make sure that the ratingKey is indeed an integer
        if session_history_metadata['rating_key'].isdigit():
            ap.write_session_history(
                session=session_history,
                import_metadata=session_history_metadata,
                is_import=True,
                import_ignore_interval=import_ignore_interval)
        else:
            logger.debug(u"PlexPy Importer :: Item has bad rating_key: %s" %
                         session_history_metadata['rating_key'])

    logger.debug(u"PlexPy Importer :: PlexWatch data import complete.")
    import_users()

    logger.debug(u"PlexPy Importer :: Re-enabling monitoring.")
    plexpy.initialize_scheduler()
Example 3
def import_from_plexwatch(database=None, table_name=None, import_ignore_interval=0):

    try:
        connection = sqlite3.connect(database, timeout=20)
        connection.row_factory = sqlite3.Row
    except sqlite3.OperationalError:
        logger.error(u"PlexPy Importer :: Invalid filename.")
        return None
    except ValueError:
        logger.error(u"PlexPy Importer :: Invalid filename.")
        return None

    try:
        connection.execute('SELECT ratingKey from %s' % table_name)
    except sqlite3.OperationalError:
        logger.error(u"PlexPy Importer :: Database specified does not contain the required fields.")
        return None

    logger.debug(u"PlexPy Importer :: PlexWatch data import in progress...")

    logger.debug(u"PlexPy Importer :: Disabling monitoring while import in progress.")
    plexpy.schedule_job(activity_pinger.check_active_sessions, 'Check for active sessions',
                        hours=0, minutes=0, seconds=0)
    plexpy.schedule_job(activity_pinger.check_recently_added, 'Check for recently added items',
                        hours=0, minutes=0, seconds=0)
    plexpy.schedule_job(activity_pinger.check_server_response, 'Check for Plex remote access',
                        hours=0, minutes=0, seconds=0)

    ap = activity_processor.ActivityProcessor()
    user_data = users.Users()

    # Get the latest friends list so we can pull user IDs
    try:
        plextv.refresh_users()
    except:
        logger.debug(u"PlexPy Importer :: Unable to refresh the users list. Aborting import.")
        return None

    query = 'SELECT time AS started, ' \
            'stopped, ' \
            'cast(ratingKey as text) AS rating_key, ' \
            'null AS user_id, ' \
            'user, ' \
            'ip_address, ' \
            'paused_counter, ' \
            'platform AS player, ' \
            'null AS platform, ' \
            'null as machine_id, ' \
            'parentRatingKey as parent_rating_key, ' \
            'grandparentRatingKey as grandparent_rating_key, ' \
            'null AS media_type, ' \
            'null AS view_offset, ' \
            'xml, ' \
            'rating as content_rating,' \
            'summary,' \
            'title AS full_title,' \
            '(case when orig_title_ep = "" then orig_title else ' \
            'orig_title_ep end) as title,' \
            '(case when orig_title_ep != "" then orig_title else ' \
            'null end) as grandparent_title ' \
            'FROM ' + table_name + ' ORDER BY id'

    result = connection.execute(query)

    for row in result:
        # Extract the xml from the Plexwatch db xml field.
        extracted_xml = extract_plexwatch_xml(row['xml'])

        # If we get back None from our xml extractor skip over the record and log error.
        if not extracted_xml:
            logger.error(u"PlexPy Importer :: Skipping record with ratingKey %s due to malformed xml."
                         % str(row['rating_key']))
            continue

        # Skip line if we don't have a ratingKey to work with
        if not row['rating_key']:
            logger.error(u"PlexPy Importer :: Skipping record due to null ratingKey.")
            continue

        # If the user_id no longer exists in the friends list, pull it from the xml.
        if user_data.get_user_id(user=row['user']):
            user_id = user_data.get_user_id(user=row['user'])
        else:
            user_id = extracted_xml['user_id']

        session_history = {'started': row['started'],
                           'stopped': row['stopped'],
                           'rating_key': row['rating_key'],
                           'title': row['title'],
                           'parent_title': extracted_xml['parent_title'],
                           'grandparent_title': row['grandparent_title'],
                           'user_id': user_id,
                           'user': row['user'],
                           'ip_address': row['ip_address'],
                           'paused_counter': row['paused_counter'],
                           'player': row['player'],
                           'platform': extracted_xml['platform'],
                           'machine_id': extracted_xml['machine_id'],
                           'parent_rating_key': row['parent_rating_key'],
                           'grandparent_rating_key': row['grandparent_rating_key'],
                           'media_type': extracted_xml['media_type'],
                           'view_offset': extracted_xml['view_offset'],
                           'video_decision': extracted_xml['video_decision'],
                           'audio_decision': extracted_xml['audio_decision'],
                           'duration': extracted_xml['duration'],
                           'width': extracted_xml['width'],
                           'height': extracted_xml['height'],
                           'container': extracted_xml['container'],
                           'video_codec': extracted_xml['video_codec'],
                           'audio_codec': extracted_xml['audio_codec'],
                           'bitrate': extracted_xml['bitrate'],
                           'video_resolution': extracted_xml['video_resolution'],
                           'video_framerate': extracted_xml['video_framerate'],
                           'aspect_ratio': extracted_xml['aspect_ratio'],
                           'audio_channels': extracted_xml['audio_channels'],
                           'transcode_protocol': extracted_xml['transcode_protocol'],
                           'transcode_container': extracted_xml['transcode_container'],
                           'transcode_video_codec': extracted_xml['transcode_video_codec'],
                           'transcode_audio_codec': extracted_xml['transcode_audio_codec'],
                           'transcode_audio_channels': extracted_xml['transcode_audio_channels'],
                           'transcode_width': extracted_xml['transcode_width'],
                           'transcode_height': extracted_xml['transcode_height']
                           }

        session_history_metadata = {'rating_key': helpers.latinToAscii(row['rating_key']),
                                    'parent_rating_key': row['parent_rating_key'],
                                    'grandparent_rating_key': row['grandparent_rating_key'],
                                    'title': row['title'],
                                    'parent_title': extracted_xml['parent_title'],
                                    'grandparent_title': row['grandparent_title'],
                                    'media_index': extracted_xml['media_index'],
                                    'parent_media_index': extracted_xml['parent_media_index'],
                                    'thumb': extracted_xml['thumb'],
                                    'parent_thumb': extracted_xml['parent_thumb'],
                                    'grandparent_thumb': extracted_xml['grandparent_thumb'],
                                    'art': extracted_xml['art'],
                                    'media_type': extracted_xml['media_type'],
                                    'year': extracted_xml['year'],
                                    'originally_available_at': extracted_xml['originally_available_at'],
                                    'added_at': extracted_xml['added_at'],
                                    'updated_at': extracted_xml['updated_at'],
                                    'last_viewed_at': extracted_xml['last_viewed_at'],
                                    'content_rating': row['content_rating'],
                                    'summary': row['summary'],
                                    'tagline': extracted_xml['tagline'],
                                    'rating': extracted_xml['rating'],
                                    'duration': extracted_xml['duration'],
                                    'guid': extracted_xml['guid'],
                                    'section_id': extracted_xml['section_id'],
                                    'directors': extracted_xml['directors'],
                                    'writers': extracted_xml['writers'],
                                    'actors': extracted_xml['actors'],
                                    'genres': extracted_xml['genres'],
                                    'studio': extracted_xml['studio'],
                                    'full_title': row['full_title']
                                    }

        # On older versions of PMS, "clip" items were still classified as "movie" and had bad ratingKey values
        # Just make sure that the ratingKey is indeed an integer
        if session_history_metadata['rating_key'].isdigit():
            ap.write_session_history(session=session_history,
                                     import_metadata=session_history_metadata,
                                     is_import=True,
                                     import_ignore_interval=import_ignore_interval)
        else:
            logger.debug(u"PlexPy Importer :: Item has bad rating_key: %s" % session_history_metadata['rating_key'])

    logger.debug(u"PlexPy Importer :: PlexWatch data import complete.")
    import_users()

    logger.debug(u"PlexPy Importer :: Re-enabling monitoring.")
    plexpy.initialize_scheduler()
Example 4
def import_from_plexwatch(database=None, table_name=None, import_ignore_interval=0):

    try:
        connection = sqlite3.connect(database, timeout=20)
    except sqlite3.OperationalError:
        logger.error('PlexPy Importer :: Invalid filename.')
        return None
    except ValueError:
        logger.error('PlexPy Importer :: Invalid filename.')
        return None

    try:
        connection.execute('SELECT ratingKey from %s' % table_name)
    except sqlite3.OperationalError:
        logger.error('PlexPy Importer :: Database specified does not contain the required fields.')
        return None

    logger.debug(u"PlexPy Importer :: PlexWatch data import in progress...")

    logger.debug(u"PlexPy Importer :: Disabling monitoring while import in progress.")
    plexpy.schedule_job(monitor.check_active_sessions, 'Check for active sessions', hours=0, minutes=0, seconds=0)

    monitor_processing = monitor.MonitorProcessing()
    data_factory = datafactory.DataFactory()

    # Get the latest friends list so we can pull user IDs
    try:
        plextv.refresh_users()
    except:
        logger.debug(u"PlexPy Importer :: Unable to refresh the users list. Aborting import.")
        return None

    query = 'SELECT time AS started, ' \
            'stopped, ' \
            'ratingKey AS rating_key, ' \
            'null AS user_id, ' \
            'user, ' \
            'ip_address, ' \
            'paused_counter, ' \
            'platform AS player, ' \
            'null AS platform, ' \
            'null as machine_id, ' \
            'parentRatingKey as parent_rating_key, ' \
            'grandparentRatingKey as grandparent_rating_key, ' \
            'null AS media_type, ' \
            'null AS view_offset, ' \
            'xml, ' \
            'rating as content_rating,' \
            'summary,' \
            'title AS full_title,' \
            'orig_title AS title, ' \
            'orig_title_ep AS grandparent_title ' \
            'FROM ' + table_name + ' ORDER BY id'

    result = connection.execute(query)

    for row in result:
        # Extract the xml from the Plexwatch db xml field.
        extracted_xml = extract_plexwatch_xml(row[14])

        # If the user_id no longer exists in the friends list, pull it from the xml.
        if data_factory.get_user_id(user=row[4]):
            user_id = data_factory.get_user_id(user=row[4])
        else:
            user_id = extracted_xml['user_id']

        session_history = {'started': row[0],
                           'stopped': row[1],
                           'rating_key': row[2],
                           'title': extracted_xml['title'],
                           'parent_title': extracted_xml['parent_title'],
                           'grandparent_title': extracted_xml['grandparent_title'],
                           'user_id': user_id,
                           'user': row[4],
                           'ip_address': row[5],
                           'paused_counter': row[6],
                           'player': row[7],
                           'platform': extracted_xml['platform'],
                           'machine_id': extracted_xml['machine_id'],
                           'parent_rating_key': row[10],
                           'grandparent_rating_key': row[11],
                           'media_type': extracted_xml['media_type'],
                           'view_offset': extracted_xml['view_offset'],
                           'video_decision': extracted_xml['video_decision'],
                           'audio_decision': extracted_xml['audio_decision'],
                           'duration': extracted_xml['duration'],
                           'width': extracted_xml['width'],
                           'height': extracted_xml['height'],
                           'container': extracted_xml['container'],
                           'video_codec': extracted_xml['video_codec'],
                           'audio_codec': extracted_xml['audio_codec'],
                           'bitrate': extracted_xml['bitrate'],
                           'video_resolution': extracted_xml['video_resolution'],
                           'video_framerate': extracted_xml['video_framerate'],
                           'aspect_ratio': extracted_xml['aspect_ratio'],
                           'audio_channels': extracted_xml['audio_channels'],
                           'transcode_protocol': extracted_xml['transcode_protocol'],
                           'transcode_container': extracted_xml['transcode_container'],
                           'transcode_video_codec': extracted_xml['transcode_video_codec'],
                           'transcode_audio_codec': extracted_xml['transcode_audio_codec'],
                           'transcode_audio_channels': extracted_xml['transcode_audio_channels'],
                           'transcode_width': extracted_xml['transcode_width'],
                           'transcode_height': extracted_xml['transcode_height']
                           }

        session_history_metadata = {'rating_key': row[2],
                                    'parent_rating_key': row[10],
                                    'grandparent_rating_key': row[11],
                                    'title': extracted_xml['title'],
                                    'parent_title': extracted_xml['parent_title'],
                                    'grandparent_title': extracted_xml['grandparent_title'],
                                    'index': extracted_xml['media_index'],
                                    'parent_index': extracted_xml['parent_media_index'],
                                    'thumb': extracted_xml['thumb'],
                                    'parent_thumb': extracted_xml['parent_thumb'],
                                    'grandparent_thumb': extracted_xml['grandparent_thumb'],
                                    'art': extracted_xml['art'],
                                    'media_type': extracted_xml['media_type'],
                                    'year': extracted_xml['year'],
                                    'originally_available_at': extracted_xml['originally_available_at'],
                                    'added_at': extracted_xml['added_at'],
                                    'updated_at': extracted_xml['updated_at'],
                                    'last_viewed_at': extracted_xml['last_viewed_at'],
                                    'content_rating': row[15],
                                    'summary': row[16],
                                    'rating': extracted_xml['rating'],
                                    'duration': extracted_xml['duration'],
                                    'guid': extracted_xml['guid'],
                                    'directors': extracted_xml['directors'],
                                    'writers': extracted_xml['writers'],
                                    'actors': extracted_xml['actors'],
                                    'genres': extracted_xml['genres'],
                                    'studio': extracted_xml['studio'],
                                    'full_title': row[17]
                                    }

        # On older versions of PMS, "clip" items were still classified as "movie" and had bad ratingKey values
        # Just make sure that the ratingKey is indeed an integer
        if str(row[2]).isdigit():
            monitor_processing.write_session_history(session=session_history,
                                                     import_metadata=session_history_metadata,
                                                     is_import=True,
                                                     import_ignore_interval=import_ignore_interval)
        else:
            logger.debug(u"PlexPy Importer :: Item has bad rating_key: %s" % str(row[2]))

    logger.debug(u"PlexPy Importer :: PlexWatch data import complete.")

    logger.debug(u"PlexPy Importer :: Re-enabling monitoring.")
    plexpy.initialize_scheduler()
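
Unlike the other variants, the example above never sets connection.row_factory = sqlite3.Row, so it has to read columns by position (row[2], row[4], row[14], ...), which silently breaks if the SELECT list changes; the other examples on this page read columns by name instead. A small self-contained sketch of the difference, using only the standard library and a throwaway in-memory table:

# Standalone illustration of positional vs. name-based row access in sqlite3.
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE processed (ratingKey INTEGER, user TEXT)')
conn.execute("INSERT INTO processed VALUES (12345, 'alice')")

# Positional access, as in the example above: tied to the column order.
row = conn.execute('SELECT ratingKey, user FROM processed').fetchone()
print(row[0], row[1])

# Name-based access via sqlite3.Row, as in the other examples on this page.
conn.row_factory = sqlite3.Row
row = conn.execute('SELECT ratingKey, user FROM processed').fetchone()
print(row['ratingKey'], row['user'])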
Example 5
def install_geoip_db(update=False):
    if not plexpy.CONFIG.MAXMIND_LICENSE_KEY:
        logger.error(
            u"Tautulli Helpers :: Failed to download GeoLite2 database file from MaxMind: Missing MaxMindLicense Key"
        )
        return False

    maxmind_db = 'GeoLite2-City'
    maxmind_url = 'https://download.maxmind.com/app/geoip_download?edition_id={db}&suffix={{suffix}}&license_key={key}'.format(
        db=maxmind_db, key=plexpy.CONFIG.MAXMIND_LICENSE_KEY)
    geolite2_db_url = maxmind_url.format(suffix='tar.gz')
    geolite2_md5_url = maxmind_url.format(suffix='tar.gz.md5')
    geolite2_gz = maxmind_db + '.tar.gz'
    geolite2_md5 = geolite2_gz + '.md5'
    geolite2_db = maxmind_db + '.mmdb'
    geolite2_db_path = plexpy.CONFIG.GEOIP_DB or os.path.join(
        plexpy.DATA_DIR, geolite2_db)

    # Check path ends with .mmdb
    if os.path.splitext(geolite2_db_path)[1] != os.path.splitext(
            geolite2_db)[1]:
        geolite2_db_path = os.path.join(geolite2_db_path, geolite2_db)

    temp_gz = os.path.join(plexpy.CONFIG.CACHE_DIR, geolite2_gz)
    temp_md5 = os.path.join(plexpy.CONFIG.CACHE_DIR, geolite2_md5)

    # Retrieve the GeoLite2 gzip file
    logger.debug(
        u"Tautulli Helpers :: Downloading GeoLite2 gzip file from MaxMind...")
    try:
        maxmind = urllib3.PoolManager(cert_reqs='CERT_REQUIRED',
                                      ca_certs=certifi.where())
        with maxmind.request('GET', geolite2_db_url,
                             preload_content=False) as r_db, open(
                                 temp_gz, 'wb') as f_db:
            shutil.copyfileobj(r_db, f_db)
        with maxmind.request('GET', geolite2_md5_url,
                             preload_content=False) as r_md5, open(
                                 temp_md5, 'wb') as f_md5:
            shutil.copyfileobj(r_md5, f_md5)
    except Exception as e:
        logger.error(
            u"Tautulli Helpers :: Failed to download GeoLite2 gzip file from MaxMind: %s"
            % e)
        return False

    # Check MD5 hash for GeoLite2 tar.gz file
    logger.debug(
        u"Tautulli Helpers :: Checking MD5 checksum for GeoLite2 gzip file...")
    try:
        hash_md5 = hashlib.md5()
        with open(temp_gz, 'rb') as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_md5.update(chunk)
        md5_hash = hash_md5.hexdigest()

        with open(temp_md5, 'r') as f:
            md5_checksum = f.read()

        if md5_hash != md5_checksum:
            logger.error(
                u"Tautulli Helpers :: MD5 checksum doesn't match for GeoLite2 database. "
                "Checksum: %s, file hash: %s" % (md5_checksum, md5_hash))
            return False
    except Exception as e:
        logger.error(
            u"Tautulli Helpers :: Failed to generate MD5 checksum for GeoLite2 gzip file: %s"
            % e)
        return False

    # Extract the GeoLite2 database file
    logger.debug(u"Tautulli Helpers :: Extracting GeoLite2 database...")
    try:
        mmdb = None
        with tarfile.open(temp_gz, 'r:gz') as tar:
            for member in tar.getmembers():
                if geolite2_db in member.name:
                    member.name = os.path.basename(member.name)
                    tar.extractall(path=os.path.dirname(geolite2_db_path),
                                   members=[member])
                    mmdb = True
                    break
        if not mmdb:
            raise Exception("{} not found in gzip file.".format(geolite2_db))
    except Exception as e:
        logger.error(
            u"Tautulli Helpers :: Failed to extract the GeoLite2 database: %s"
            % e)
        return False

    # Delete temporary GeoLite2 gzip and md5 files
    logger.debug(
        u"Tautulli Helpers :: Deleting temporary GeoLite2 gzip file...")
    try:
        os.remove(temp_gz)
        os.remove(temp_md5)
    except Exception as e:
        logger.warn(
            u"Tautulli Helpers :: Failed to remove temporary GeoLite2 gzip file: %s"
            % e)

    plexpy.CONFIG.__setattr__('GEOIP_DB', geolite2_db_path)
    plexpy.CONFIG.__setattr__('GEOIP_DB_INSTALLED', int(time.time()))
    plexpy.CONFIG.write()

    logger.debug(
        u"Tautulli Helpers :: GeoLite2 database installed successfully.")

    if not update:
        plexpy.schedule_job(update_geoip_db,
                            'Update GeoLite2 database',
                            hours=12,
                            minutes=0,
                            seconds=0)

    return plexpy.CONFIG.GEOIP_DB_INSTALLED
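
Once install_geoip_db() has extracted the .mmdb file and stored its path in plexpy.CONFIG.GEOIP_DB, the database is typically queried with the geoip2 package. The helper below is a hypothetical, minimal sketch of such a lookup, not part of the example above:

# Illustrative only: assumes the geoip2 package and a GeoLite2-City.mmdb path
# like the one written by install_geoip_db(). lookup_ip() is a made-up helper.
import geoip2.database
import geoip2.errors


def lookup_ip(geoip_db_path, ip_address):
    """Return a few geolocation fields for an IP address, or None if unknown."""
    with geoip2.database.Reader(geoip_db_path) as reader:
        try:
            response = reader.city(ip_address)
        except geoip2.errors.AddressNotFoundError:
            return None
        return {
            'country': response.country.name,
            'city': response.city.name,
            'latitude': response.location.latitude,
            'longitude': response.location.longitude,
        }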
Example 6
    def initialize_scheduler(self):
        """
        Start the scheduled background tasks. Re-schedule if interval settings changed.
        """
        if self.server_shutdown or not self.CONFIG.PMS_IS_ENABLED:
            for job in self.SCHED.get_jobs():
                plexpy.schedule_job(self.SCHED,
                                    None,
                                    job.id,
                                    hours=0,
                                    minutes=0,
                                    seconds=0)
        else:
            self.SCHED_LIST = []

            checker_jobname = '%s: Check Server Connection' % self.CONFIG.PMS_NAME
            self.SCHED_LIST.append({
                'name': checker_jobname,
                'time': {
                    'hours': 0,
                    'minutes': 0,
                    'seconds': 30
                },
                'func': activity_pinger.connect_server,
                'args': [self],
            })

            rclone_jobname = '%s: Check Rclone Mount Status' % self.CONFIG.PMS_NAME
            if plexpy.CONFIG.MONITOR_RCLONE:
                rclone_time = (60 if self.CONFIG.MONITOR_RCLONE_MOUNT else 0)
                self.SCHED_LIST.append({
                    'name': rclone_jobname,
                    'time': {
                        'hours': 0,
                        'minutes': 0,
                        'seconds': rclone_time
                    },
                    'func': activity_pinger.check_rclone_status,
                    'args': [self],
                })
            elif self.SCHED.get_job(rclone_jobname):
                plexpy.schedule_job(self.SCHED,
                                    None,
                                    rclone_jobname,
                                    hours=0,
                                    minutes=0,
                                    seconds=0)

            # Start the Plex and rclone checkers if the server is supposed to be connected.
            plexpy.schedule_joblist(lock=self.SCHED_LOCK,
                                    scheduler=self.SCHED,
                                    jobList=self.SCHED_LIST)

            library_hours = self.CONFIG.REFRESH_LIBRARIES_INTERVAL if 1 <= self.CONFIG.REFRESH_LIBRARIES_INTERVAL <= 24 else 12
            self.SCHED_LIST.append({
                'name': '%s: Check Plex remote access' % self.CONFIG.PMS_NAME,
                'time': {
                    'hours': 0,
                    'minutes': 0,
                    'seconds': 60 * bool(self.CONFIG.MONITOR_REMOTE_ACCESS)
                },
                'func': activity_pinger.check_server_access,
                'args': [self],
            })

            self.SCHED_LIST.append({
                'name': '%s: Check for Plex updates' % self.CONFIG.PMS_NAME,
                'time': {
                    'hours': 0,
                    'minutes': 15 * bool(self.CONFIG.MONITOR_PMS_UPDATES),
                    'seconds': 0
                },
                'func': activity_pinger.check_server_updates,
                'args': [self],
            })

            self.SCHED_LIST.append({
                'name': '%s: Refresh Libraries List' % self.CONFIG.PMS_NAME,
                'time': {
                    'hours': library_hours,
                    'minutes': 0,
                    'seconds': 0
                },
                'func': self.refresh_libraries,
                'args': [],
            })

            self.SCHED_LIST.append({
                'name': '%s: Websocket ping' % self.CONFIG.PMS_NAME,
                'time': {
                    'hours': 0,
                    'minutes': 0,
                    'seconds': 10 * bool(plexpy.CONFIG.WEBSOCKET_MONITOR_PING_PONG)
                },
                'func': self.WS.send_ping,
                'args': [],
            })

            if self.WS_CONNECTED:
                plexpy.schedule_joblist(lock=self.SCHED_LOCK,
                                        scheduler=self.SCHED,
                                        jobList=self.SCHED_LIST)
            else:
                # Cancel all jobs except the PMS connection and rclone status checkers.
                for job in self.SCHED.get_jobs():
                    if job.id not in [checker_jobname, rclone_jobname]:
                        plexpy.schedule_job(self.SCHED,
                                            None,
                                            job.id,
                                            hours=0,
                                            minutes=0,
                                            seconds=0)
Example 7
def import_from_plexwatch(database=None, table_name=None, import_ignore_interval=0):

    try:
        connection = sqlite3.connect(database, timeout=20)
        connection.row_factory = sqlite3.Row
    except sqlite3.OperationalError:
        logger.error("PlexPy Importer :: Invalid filename.")
        return None
    except ValueError:
        logger.error("PlexPy Importer :: Invalid filename.")
        return None

    try:
        connection.execute("SELECT ratingKey from %s" % table_name)
    except sqlite3.OperationalError:
        logger.error("PlexPy Importer :: Database specified does not contain the required fields.")
        return None

    logger.debug(u"PlexPy Importer :: PlexWatch data import in progress...")

    logger.debug(u"PlexPy Importer :: Disabling monitoring while import in progress.")
    plexpy.schedule_job(
        activity_pinger.check_active_sessions, "Check for active sessions", hours=0, minutes=0, seconds=0
    )

    ap = activity_processor.ActivityProcessor()
    user_data = users.Users()

    # Get the latest friends list so we can pull user IDs
    try:
        plextv.refresh_users()
    except:
        logger.debug(u"PlexPy Importer :: Unable to refresh the users list. Aborting import.")
        return None

    query = (
        "SELECT time AS started, "
        "stopped, "
        "cast(ratingKey as text) AS rating_key, "
        "null AS user_id, "
        "user, "
        "ip_address, "
        "paused_counter, "
        "platform AS player, "
        "null AS platform, "
        "null as machine_id, "
        "parentRatingKey as parent_rating_key, "
        "grandparentRatingKey as grandparent_rating_key, "
        "null AS media_type, "
        "null AS view_offset, "
        "xml, "
        "rating as content_rating,"
        "summary,"
        "title AS full_title,"
        '(case when orig_title_ep = "" then orig_title else '
        "orig_title_ep end) as title,"
        '(case when orig_title_ep != "" then orig_title else '
        "null end) as grandparent_title "
        "FROM " + table_name + " ORDER BY id"
    )

    result = connection.execute(query)

    for row in result:
        # Extract the xml from the Plexwatch db xml field.
        extracted_xml = extract_plexwatch_xml(row["xml"])

        # If we get back None from our xml extractor skip over the record and log error.
        if not extracted_xml:
            logger.error(
                u"PlexPy Importer :: Skipping line with ratingKey %s due to malformed xml." % str(row["rating_key"])
            )
            continue

        # If the user_id no longer exists in the friends list, pull it from the xml.
        if user_data.get_user_id(user=row["user"]):
            user_id = user_data.get_user_id(user=row["user"])
        else:
            user_id = extracted_xml["user_id"]

        session_history = {
            "started": row["started"],
            "stopped": row["stopped"],
            "rating_key": row["rating_key"],
            "title": row["title"],
            "parent_title": extracted_xml["parent_title"],
            "grandparent_title": row["grandparent_title"],
            "user_id": user_id,
            "user": row["user"],
            "ip_address": row["ip_address"],
            "paused_counter": row["paused_counter"],
            "player": row["player"],
            "platform": extracted_xml["platform"],
            "machine_id": extracted_xml["machine_id"],
            "parent_rating_key": row["parent_rating_key"],
            "grandparent_rating_key": row["grandparent_rating_key"],
            "media_type": extracted_xml["media_type"],
            "view_offset": extracted_xml["view_offset"],
            "video_decision": extracted_xml["video_decision"],
            "audio_decision": extracted_xml["audio_decision"],
            "duration": extracted_xml["duration"],
            "width": extracted_xml["width"],
            "height": extracted_xml["height"],
            "container": extracted_xml["container"],
            "video_codec": extracted_xml["video_codec"],
            "audio_codec": extracted_xml["audio_codec"],
            "bitrate": extracted_xml["bitrate"],
            "video_resolution": extracted_xml["video_resolution"],
            "video_framerate": extracted_xml["video_framerate"],
            "aspect_ratio": extracted_xml["aspect_ratio"],
            "audio_channels": extracted_xml["audio_channels"],
            "transcode_protocol": extracted_xml["transcode_protocol"],
            "transcode_container": extracted_xml["transcode_container"],
            "transcode_video_codec": extracted_xml["transcode_video_codec"],
            "transcode_audio_codec": extracted_xml["transcode_audio_codec"],
            "transcode_audio_channels": extracted_xml["transcode_audio_channels"],
            "transcode_width": extracted_xml["transcode_width"],
            "transcode_height": extracted_xml["transcode_height"],
        }

        session_history_metadata = {
            "rating_key": helpers.latinToAscii(row["rating_key"]),
            "parent_rating_key": row["parent_rating_key"],
            "grandparent_rating_key": row["grandparent_rating_key"],
            "title": row["title"],
            "parent_title": extracted_xml["parent_title"],
            "grandparent_title": row["grandparent_title"],
            "index": extracted_xml["media_index"],
            "parent_index": extracted_xml["parent_media_index"],
            "thumb": extracted_xml["thumb"],
            "parent_thumb": extracted_xml["parent_thumb"],
            "grandparent_thumb": extracted_xml["grandparent_thumb"],
            "art": extracted_xml["art"],
            "media_type": extracted_xml["media_type"],
            "year": extracted_xml["year"],
            "originally_available_at": extracted_xml["originally_available_at"],
            "added_at": extracted_xml["added_at"],
            "updated_at": extracted_xml["updated_at"],
            "last_viewed_at": extracted_xml["last_viewed_at"],
            "content_rating": row["content_rating"],
            "summary": row["summary"],
            "tagline": extracted_xml["tagline"],
            "rating": extracted_xml["rating"],
            "duration": extracted_xml["duration"],
            "guid": extracted_xml["guid"],
            "directors": extracted_xml["directors"],
            "writers": extracted_xml["writers"],
            "actors": extracted_xml["actors"],
            "genres": extracted_xml["genres"],
            "studio": extracted_xml["studio"],
            "full_title": row["full_title"],
        }

        # On older versions of PMS, "clip" items were still classified as "movie" and had bad ratingKey values
        # Just make sure that the ratingKey is indeed an integer
        if session_history_metadata["rating_key"].isdigit():
            ap.write_session_history(
                session=session_history,
                import_metadata=session_history_metadata,
                is_import=True,
                import_ignore_interval=import_ignore_interval,
            )
        else:
            logger.debug(u"PlexPy Importer :: Item has bad rating_key: %s" % session_history_metadata["rating_key"])

    logger.debug(u"PlexPy Importer :: PlexWatch data import complete.")
    import_users()

    logger.debug(u"PlexPy Importer :: Re-enabling monitoring.")
    plexpy.initialize_scheduler()
Example 8
def update_section_ids():
    from plexpy import pmsconnect, activity_pinger
    import threading

    plexpy.CONFIG.UPDATE_SECTION_IDS = -1

    logger.info(u"PlexPy Libraries :: Updating section_id's in database.")

    logger.debug(u"PlexPy Libraries :: Disabling monitoring while update in progress.")
    plexpy.schedule_job(activity_pinger.check_active_sessions, 'Check for active sessions',
                        hours=0, minutes=0, seconds=0)
    plexpy.schedule_job(activity_pinger.check_recently_added, 'Check for recently added items',
                        hours=0, minutes=0, seconds=0)
    plexpy.schedule_job(activity_pinger.check_server_response, 'Check for server response',
                        hours=0, minutes=0, seconds=0)

    monitor_db = database.MonitorDatabase()

    try:
        query = 'SELECT id, rating_key FROM session_history_metadata WHERE section_id IS NULL'
        result = monitor_db.select(query=query)
    except Exception as e:
        logger.warn(u"PlexPy Libraries :: Unable to execute database query for update_section_ids: %s." % e)

        logger.warn(u"PlexPy Libraries :: Unable to update section_id's in database.")
        plexpy.CONFIG.__setattr__('UPDATE_SECTION_IDS', 1)
        plexpy.CONFIG.write()

        logger.debug(u"PlexPy Libraries :: Re-enabling monitoring.")
        plexpy.initialize_scheduler()
        return None

    # Add thread filter to the logger
    logger.debug(u"PlexPy Libraries :: Disabling logging in the current thread while update in progress.")
    thread_filter = logger.NoThreadFilter(threading.current_thread().name)
    for handler in logger.logger.handlers:
        handler.addFilter(thread_filter)

    pms_connect = pmsconnect.PmsConnect()

    error_keys = set()
    for item in result:
        id = item['id']
        rating_key = item['rating_key']
        metadata = pms_connect.get_metadata_details(rating_key=rating_key)

        if metadata:
            metadata = metadata['metadata']
            section_keys = {'id': id}
            section_values = {'section_id': metadata['section_id']}
            monitor_db.upsert('session_history_metadata', key_dict=section_keys, value_dict=section_values)
        else:
            error_keys.add(rating_key)

    # Remove thread filter from the logger
    for handler in logger.logger.handlers:
        handler.removeFilter(thread_filter)
    logger.debug(u"PlexPy Libraries :: Re-enabling logging in the current thread.")

    if error_keys:
        logger.info(u"PlexPy Libraries :: Updated all section_id's in database except for rating_keys: %s." %
                     ', '.join(str(key) for key in error_keys))
    else:
        logger.info(u"PlexPy Libraries :: Updated all section_id's in database.")

    plexpy.CONFIG.__setattr__('UPDATE_SECTION_IDS', 0)
    plexpy.CONFIG.write()

    logger.debug(u"PlexPy Libraries :: Re-enabling monitoring.")
    plexpy.initialize_scheduler()

    return True
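
logger.NoThreadFilter above is PlexPy's own logging helper. The same effect, silencing log records emitted by one named thread while an import or update runs, can be built on the standard logging module roughly as follows (an illustrative sketch, not PlexPy's implementation):

# Sketch of a thread-name based logging filter, comparable in spirit to the
# NoThreadFilter used above. Purely illustrative.
import logging
import threading


class NoThreadFilter(logging.Filter):
    def __init__(self, thread_name):
        logging.Filter.__init__(self)
        self.thread_name = thread_name

    def filter(self, record):
        # Keep every record except those coming from the named thread.
        return record.threadName != self.thread_name


# Usage: mute the current thread's records on all handlers, then remove the
# filter again once the noisy operation is finished.
thread_filter = NoThreadFilter(threading.current_thread().name)
for handler in logging.getLogger().handlers:
    handler.addFilter(thread_filter)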