def _send(payload):
    if not settings.allow_ga:
        return

    data = payload.copy()
    data.update(_common)
    for k, v in data.items():
        if isinstance(v, unicode):
            data[k] = v.encode('utf8')
    data_str = urlencode(data)

    try:
        common.debug('GA: %s' % json.dumps(data, indent=4))
        req = urlopen(_ga_url, data_str)
        code = req.getcode()
        if code > 399:
            common.error('GA status code %s' % code)
        else:
            common.debug('Successful GA')
    except Exception as e:
        common.error('Failed to request GA: %s' % e)
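
A minimal, self-contained sketch of the encode-then-urlencode step used by _send above; the payload keys and values here are hypothetical and no request is actually sent.

# Sketch: UTF-8 encode text values before building the query string (hypothetical payload)
try:
    from urllib import urlencode  # Python 2
except ImportError:
    from urllib.parse import urlencode  # Python 3

payload = {'t': 'event', 'ec': u'addon', 'ea': u'playback'}
encoded = {k: (v.encode('utf8') if isinstance(v, type(u'')) else v)
           for k, v in payload.items()}
print(urlencode(encoded))  # e.g. t=event&ec=addon&ea=playback
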
Example No. 2
    def remove_item(self, job_data, library_home=None):  # pylint: disable=unused-argument
        """Remove an item from the Kodi library, delete it from disk, remove add-on database references"""
        videoid = job_data['videoid']
        common.debug('Removing {} ({}) from add-on library', videoid, job_data['title'])
        try:
            # Remove the STRM file exported
            exported_file_path = G.py2_decode(xbmc.translatePath(job_data['file_path']))
            common.delete_file_safe(exported_file_path)

            parent_folder = G.py2_decode(xbmc.translatePath(os.path.dirname(exported_file_path)))

            # Remove the NFO file of the related STRM file
            nfo_file = os.path.splitext(exported_file_path)[0] + '.nfo'
            common.delete_file_safe(nfo_file)

            dirs, files = common.list_dir(parent_folder)

            # Remove the tvshow NFO file (only when it is the last file in the folder)
            tvshow_nfo_file = common.join_folders_paths(parent_folder, 'tvshow.nfo')

            # (users have the option of removing even single seasons)
            if xbmcvfs.exists(tvshow_nfo_file) and not dirs and len(files) == 1:
                xbmcvfs.delete(tvshow_nfo_file)
                # Delete parent folder
                xbmcvfs.rmdir(parent_folder)

            # Delete parent folder when empty
            if not dirs and not files:
                xbmcvfs.rmdir(parent_folder)

            # Remove videoid records from add-on database
            remove_videoid_from_db(videoid)
        except common.ItemNotFound:
            common.warn('The videoid {} does not exist in the add-on library database', videoid)
        except Exception as exc:  # pylint: disable=broad-except
            import traceback
            common.error(G.py2_decode(traceback.format_exc(), 'latin-1'))
            ui.show_addon_error_info(exc)
Example No. 3
def extract_session_data(content, validate=False):
    """
    Call all the parsers we need to extract all
    the session relevant data from the HTML page
    """
    common.debug('Extracting session data...')
    react_context = extract_json(content, 'reactContext')
    if validate:
        validate_login(react_context)

    user_data = extract_userdata(react_context)
    if user_data.get('membershipStatus') == 'ANONYMOUS':
        # Possible known causes:
        # - The login password has been changed
        # - The 'Content-Type' specified in the login request does not match the data passed, or is no longer supported
        # - Expired profile cookies (not verified)
        # In these cases it is mandatory to log in again
        raise InvalidMembershipStatusAnonymous
    if user_data.get('membershipStatus') != 'CURRENT_MEMBER':
        # When NEVER_MEMBER it is possible that the account has not been confirmed or renewed
        common.error('Cannot login, the membership status is {}',
                     user_data.get('membershipStatus'))
        raise InvalidMembershipStatusError(user_data.get('membershipStatus'))

    api_data = extract_api_data(react_context)
    # Note: the falcor cache does not exist if membershipStatus is not CURRENT_MEMBER
    # The falcor cache is no longer used to extract profiles data
    # falcor_cache = extract_json(content, 'falcorCache')

    # Save only some info of the current profile from user data
    g.LOCAL_DB.set_value('build_identifier', user_data.get('BUILD_IDENTIFIER'), TABLE_SESSION)
    if not g.LOCAL_DB.get_value('esn', table=TABLE_SESSION):
        g.LOCAL_DB.set_value('esn', generate_esn(user_data), TABLE_SESSION)
    g.LOCAL_DB.set_value('locale_id', user_data.get('preferredLocale').get('id', 'en-US'))
    # Save api urls
    for key, path in list(api_data.items()):
        g.LOCAL_DB.set_value(key, path, TABLE_SESSION)
    return api_data
Example No. 4
 def _login(self, modal_error_message=False):
     """Perform account login"""
     # Get the current ESN value (if it exists) before extracting new session data
     current_esn = g.get_esn()
     try:
         # First we get the authentication url without logging in, required for login API call
         react_context = website.extract_json(self._get('profiles'),
                                              'reactContext')
         auth_url = website.extract_api_data(react_context)['auth_url']
         common.debug('Logging in...')
         login_response = self._post('login',
                                     data=_login_payload(
                                         common.get_credentials(),
                                         auth_url))
         try:
             website.validate_login(login_response)
         except LoginValidateError as exc:
             self.session.cookies.clear()
             common.purge_credentials()
             if modal_error_message:
                 ui.show_ok_dialog(common.get_local_string(30008),
                                   unicode(exc))
                 return False
             raise
         website.extract_session_data(login_response)
     except InvalidMembershipStatusError:
         ui.show_error_info(common.get_local_string(30008),
                            common.get_local_string(30180), False, True)
         return False
     except Exception as exc:
         import traceback
         common.error(traceback.format_exc())
         self.session.cookies.clear()
         raise  # re-raise preserving the original traceback
     common.info('Login successful')
     ui.show_notification(common.get_local_string(30109))
     self.update_session_data(current_esn)
     return True
 def _initialize_connection(self):
     try:
         common.debug('Trying connection to the MySQL database {}', self.database)
         self.conn = mysql.connector.connect(**self.config)
         if self.conn.is_connected():
             db_info = self.conn.get_server_info()
             common.debug('MySQL database connection was successful (MySQL server ver. {})',
                          db_info)
     except mysql.connector.Error as exc:
         if exc.errno == 1049 and not self.is_connection_test:
             # Database does not exist, create a new one
             try:
                 db_create_mysql.create_database(self.config.copy())
                 self._initialize_connection()
                 return
             except mysql.connector.Error as e:
                 common.error('MySQL error {}:', e)
                 raise MySQLConnectionError
         common.error('MySQL error {}:', exc)
         raise MySQLConnectionError
     finally:
         if self.conn and self.conn.is_connected():
             self.conn.close()
Example No. 6
    def run(self):
        """Monitor and process the event queue"""
        common.debug('[Event queue monitor] Thread started')
        monitor = xbmc.Monitor()

        while not monitor.abortRequested() and not self._stop_requested:
            try:
                # Take the first queued item
                event = self.queue_events.get_nowait()
                # Process the request
                continue_queue = self._process_event_request(event)
                if not continue_queue:
                    # Ban future requests from this event id
                    self.banned_events_ids += [event.get_event_id()]
            except queue.Empty:
                pass
            except Exception as exc:  # pylint: disable=broad-except
                common.error('[Event queue monitor] An error has occurred: {}',
                             exc)
                import traceback
                common.error(traceback.format_exc())
                self.clear_queue()
            monitor.waitForAbort(1)
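
A self-contained sketch of the non-blocking queue polling pattern used in run() above; the event strings and ban list are hypothetical stand-ins for the real event objects.

# Sketch: drain a queue with get_nowait() and handle queue.Empty (hypothetical events)
import queue

queue_events = queue.Queue()
banned_events_ids = []
for evt in ('start-1', 'stop-1', 'start-2'):
    queue_events.put(evt)

while True:
    try:
        event = queue_events.get_nowait()
    except queue.Empty:
        break  # nothing left; the real loop waits on the Kodi monitor and polls again
    if event.startswith('stop'):
        # Ban future requests from this event id
        banned_events_ids.append(event)
print(banned_events_ids)  # ['stop-1']
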
Example No. 7
    def _initialize_connection(self):
        try:

            common.debug('Trying connection to the database {}', self.db_filename)
            self.conn = sql.connect(self.db_file_path, check_same_thread=False)
            cur = self.conn.cursor()
            cur.execute(str('SELECT SQLITE_VERSION()'))
            common.debug('Database connection {} was successful (SQLite ver. {})',
                         self.db_filename, cur.fetchone()[0])
            cur.row_factory = lambda cursor, row: row[0]
            cur.execute(str('SELECT name FROM sqlite_master WHERE type=\'table\' '
                            'AND name NOT LIKE \'sqlite_%\''))
            list_tables = cur.fetchall()
            if not list_tables:
                # If no tables exist, create a new database
                self.conn.close()
                db_create_sqlite.create_database(self.db_file_path, self.db_filename)
        except sql.Error as exc:
            common.error('SQLite error {}:', exc.args[0])
            raise SQLiteConnectionError
        finally:
            if self.conn:
                self.conn.close()
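
A minimal sketch of the sqlite_master check performed above, run against an in-memory database; the table created here is hypothetical.

# Sketch: detect an empty SQLite database via sqlite_master (in-memory, hypothetical table)
import sqlite3 as sql

conn = sql.connect(':memory:')
cur = conn.cursor()
cur.execute('SELECT SQLITE_VERSION()')
print('SQLite ver.', cur.fetchone()[0])
cur.row_factory = lambda cursor, row: row[0]
cur.execute("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'")
if not cur.fetchall():
    # No tables yet: this is where the schema would be created
    cur.execute('CREATE TABLE app_config (name TEXT PRIMARY KEY, value TEXT)')
conn.close()
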
 def prefetch_login(self):
     """Check if we have stored credentials.
     If so, do the login before the user requests it"""
     try:
         common.get_credentials()
         if not self.is_logged_in():
             self._login()
         else:
             # A hacky way to fully load the requests module without blocking the service startup
             common.send_signal(signal='startup_requests_module',
                                non_blocking=True)
         self.is_prefetch_login = True
     except requests.exceptions.RequestException as exc:
         # It was not possible to connect to the web service (no connection, network problems, etc.)
         import traceback
         common.error('Login prefetch: request exception {}', exc)
         common.debug(traceback.format_exc())
     except MissingCredentialsError:
         common.info('Login prefetch: No stored credentials are available')
     except (LoginFailedError, LoginValidateError):
         ui.show_notification(common.get_local_string(30009))
     except InvalidMembershipStatusError:
         ui.show_notification(common.get_local_string(30180), time=10000)
 def _process_event_request(self, event):
     """Do the event post request"""
     event.status = Event.STATUS_REQUESTED
     # Request attempts can be made up to a maximum of 3 times per event
     while event.is_attempts_granted():
         common.info('EVENT [{}] - Executing request (attempt {})', event,
                     event.req_attempt)
         params = {
             'reqAttempt': event.req_attempt,
             'reqPriority': 20 if event.event_type == EVENT_START else 0,
             'reqName': 'events/{}'.format(event)
         }
         url = ENDPOINTS['events'] + '?' + urlencode(params).replace(
             '%2F', '/')
         try:
             response = self.chunked_request(url,
                                             event.request_data,
                                             g.get_esn(),
                                             disable_msl_switch=False)
             event.set_response(response)
             break
         except Exception as exc:  # pylint: disable=broad-except
             common.error('EVENT [{}] - The request has failed: {}', event,
                          exc)
     if event.event_type == EVENT_STOP:
         self.clear_queue()
         if event.event_data['allow_request_update_lolomo']:
             if event.event_data['is_in_mylist']:
                 # If video is in my list, invalidate the continueWatching list (update lolomo context data)
                 api.update_lolomo_context('continueWatching')
             api.update_videoid_bookmark(event.get_video_id())
     # The lines below are commented out to let future requests continue to be sent; unstable connections (e.g. Wi-Fi) otherwise cause problems
     # if not event.is_response_success():
     # The event request is unsuccessful then there is some problem,
     # no longer make any future requests from this event id
     #     return False
     return True
Example No. 10
def load(account_hash):
    """Load cookies for a given account and check them for validity"""
    filename = cookie_filename(account_hash)
    common.debug('Loading cookies from {}', filename)
    if not xbmcvfs.exists(xbmc.translatePath(filename)):
        common.debug('Cookies file does not exist')
        raise MissingCookiesError()
    cookie_file = None
    try:
        cookie_file = xbmcvfs.File(filename, 'rb')
        if g.PY_IS_VER2:
            # pickle.loads on py2 wants string
            cookie_jar = pickle.loads(cookie_file.read())
        else:
            cookie_jar = pickle.loads(cookie_file.readBytes())
    except Exception as exc:
        import traceback
        common.error('Failed to load cookies from file: {exc}', exc=exc)
        common.error(traceback.format_exc())
        raise MissingCookiesError()
    finally:
        if cookie_file:
            cookie_file.close()
    # Clear the flwssn cookie if present, as it causes trouble with early expiration
    try:
        cookie_jar.clear(domain='.netflix.com', path='/', name='flwssn')
    except KeyError:
        pass

    debug_output = 'Loaded cookies:\n'
    for cookie in cookie_jar:
        remaining_ttl = ((cookie.expires or 0) - time()) / 60 if cookie.expires else None
        debug_output += '{} (expires {} - remaining TTL {} min)\n'.format(
            cookie.name, cookie.expires, remaining_ttl)
    common.debug(debug_output)
    # if expired(cookie_jar):
    #     raise CookiesExpiredError()
    return cookie_jar
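
A tiny sketch of the remaining-TTL computation used in the debug output above; the expiry timestamp is made up.

# Sketch: remaining cookie TTL in minutes from an epoch expiry timestamp (hypothetical value)
from time import time

expires = time() + 3600  # a cookie expiring one hour from now
remaining_ttl = (expires - time()) / 60 if expires else None
print(round(remaining_ttl))  # ~60
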
 def _login(self, modal_error_message=False):
     """Perform account login"""
     try:
         # First we get the authentication url without logging in, required for login API call
         react_context = website.extract_json(self._get('login'),
                                              'reactContext')
         auth_url = website.extract_api_data(react_context)['auth_url']
         common.debug('Logging in...')
         login_response = self._post('login',
                                     data=_login_payload(
                                         common.get_credentials(),
                                         auth_url))
         try:
             website.extract_session_data(login_response,
                                          validate=True,
                                          update_profiles=True)
             common.info('Login successful')
             ui.show_notification(common.get_local_string(30109))
             cookies.save(self.account_hash, self.session.cookies)
             return True
         except (LoginValidateError,
                 LoginValidateErrorIncorrectPassword) as exc:
             self.session.cookies.clear()
             common.purge_credentials()
             if not modal_error_message:
                 raise
             ui.show_ok_dialog(common.get_local_string(30008), unicode(exc))
     except InvalidMembershipStatusError:
         ui.show_error_info(common.get_local_string(30008),
                            common.get_local_string(30180), False, True)
     except Exception:  # pylint: disable=broad-except
         import traceback
         common.error(g.py2_decode(traceback.format_exc(), 'latin-1'))
         self.session.cookies.clear()
         raise
     return False
Example No. 12
 def _initialize_connection(self):
     # If the database file does not exist, create a new one
     # if not common.file_exists(db_filename, g.DATA_PATH):
     #     db_utils.create_database(self.db_file_path)
     # TODO: temporary; once stabilized it will be possible to implement the database creation code
     # If the database file does not exist, copy a new one
     db_filename = os.path.basename(self.db_file_path)
     db_create_sqlite.check_database_file(db_filename)
     try:
         common.debug(
             'Trying connection to the database {}'.format(db_filename))
         self.conn = sql.connect(self.db_file_path)
         cur = self.conn.cursor()
         cur.execute('SELECT SQLITE_VERSION()')
         common.debug(
             'Database connection {} was successful (SQLite ver. {})'.
             format(db_filename,
                    cur.fetchone()[0]))
     except sql.Error as e:
         common.error("SQLite error {}:".format(e.args[0]))
         raise SQLiteConnectionError
     finally:
         if self.conn:
             self.conn.close()
Example No. 13
 def do_POST(self):
     """Handle cache POST requests"""
     # The arguments of the method to call are stored in the 'Params' header
     params = json.loads(self.headers['Params'])
     # common.debug('Handling Cache HTTP POST IPC call to {} {}', self.path[1:], params.get('identifier'))
     try:
         if 'data' in params:
             # If argument 'data' exists, inject the data
             length = int(self.headers.get('content-length', 0))
             params['data'] = self.rfile.read(length) or None
         result = _call(g.CACHE_MANAGEMENT, self.path[1:], params)
         self.send_response(200)
         self.end_headers()
         if result is not None:
             self.wfile.write(result)
     except InvalidPathError:
         self.send_response(404)
         self.end_headers()
     except Exception as exc:  # pylint: disable=broad-except
         if exc.__class__.__name__ != 'CacheMiss':
             import traceback
             common.error(traceback.format_exc())
         self.send_response(500, exc.__class__.__name__)
         self.end_headers()
Example No. 14
def parse_profiles(data):
    """Parse profile information from Netflix response"""
    profiles_list = jgraph_get_list('profilesList', data)
    try:
        if not profiles_list:
            raise InvalidProfilesError(
                'It has not been possible to obtain the list of profiles.')
        sort_order = 0
        current_guids = []
        for index, profile_data in iteritems(profiles_list):  # pylint: disable=unused-variable
            summary = jgraph_get('summary', profile_data)
            guid = summary['guid']
            current_guids.append(guid)
            common.debug('Parsing profile {}', summary['guid'])
            avatar_url = _get_avatar(profile_data, data, guid)
            is_active = summary.pop('isActive')
            g.LOCAL_DB.set_profile(guid, is_active, sort_order)
            g.SHARED_DB.set_profile(guid, sort_order)
            # Add profile language description translated from locale
            summary['language_desc'] = g.py2_decode(
                xbmc.convertLanguage(summary['language'][:2],
                                     xbmc.ENGLISH_NAME))
            for key, value in iteritems(summary):
                if key in PROFILE_DEBUG_INFO:
                    common.debug('Profile info {}', {key: value})
                if key == 'profileName':  # The profile name is coded as HTML
                    value = parse_html(value)
                g.LOCAL_DB.set_profile_config(key, value, guid)
            g.LOCAL_DB.set_profile_config('avatar', avatar_url, guid)
            sort_order += 1
        _delete_non_existing_profiles(current_guids)
    except Exception:
        import traceback
        common.error(g.py2_decode(traceback.format_exc(), 'latin-1'))
        common.error('Profile list data: {}', profiles_list)
        raise InvalidProfilesError
Example No. 15
    def get_media_url(self, host, media_id):
        url = self.get_url(host, media_id)

        html = common.webread(url)
        if not html:
            raise ResolverError('H265 resolver: no html from ' + url)

        streams = self._extract_streams(html)

        if not streams:
            raise ResolverError('H265 resolver: no streams found in ' + url)

        urls, labels = zip(*streams)

        if len(labels) == 1:
            ind = 0
        else:
            heading = xbmcaddon.Addon().getLocalizedString(33100)
            ind = common.select(heading, labels)
            if ind < 0:
                common.error("H265 resolver: stream selection cancelled")
                return ''

        return urls[ind] + '|User-Agent=' + urllib.quote(xbmcaddon.Addon().getSetting('user_agent'))
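
A short sketch of the zip(*streams) unpacking used in get_media_url above to split (url, label) pairs into parallel tuples; the stream data is made up.

# Sketch: split a list of (url, label) pairs into two tuples (hypothetical streams)
streams = [('http://example.com/a.mp4', '1080p'), ('http://example.com/b.mp4', '720p')]
urls, labels = zip(*streams)
print(labels)   # ('1080p', '720p')
print(urls[0])  # 'http://example.com/a.mp4'
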
Example No. 16
 def _execute_non_query(self, query, params=None, cursor=None):
     try:
         if cursor is None:
             cursor = self.get_cursor()
         if params is not None:
             cursor.execute(query, params)
         else:
             cursor.execute(query)
     except sql.Error as exc:
         common.error('SQLite error {}:', exc.args[0])
         raise SQLiteError
     except ValueError as exc_ve:
         common.error('Value {}', str(params))
         common.error('Value type {}', type(params))
         raise exc_ve
Example No. 17
 def _execute_query(self, query, params=None, cursor=None):
     try:
         if cursor is None:
             cursor = self.get_cursor()
         if params is not None:
             cursor.execute(query, params)
         else:
             cursor.execute(query)
         return cursor
     except sql.Error as e:
         common.error("SQLite error {}:".format(e.args[0]))
         raise SQLiteError
     except ValueError as exc_ve:
         common.error('Value {}'.format(str(params)))
         common.error('Value type {}'.format(type(params)))
         raise exc_ve
Example No. 18
 def _execute_query(self, query, params=None, cursor=None):
     try:
         if cursor is None:
             cursor = self.get_cursor()
         query = query.replace("?", "%s")  # SQLite uses '?' placeholders, MySQL uses '%s'
         if params is not None:
             cursor.execute(query, params)
         else:
             cursor.execute(query)
         return cursor
     except mysql.connector.Error as exc:
         common.error('MySQL error {}:', exc.args[0])
         raise MySQLError
     except ValueError as exc_ve:
         common.error('Value {}', str(params))
         common.error('Value type {}', type(params))
         raise exc_ve
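
A small sketch of the placeholder translation done above: queries are written with SQLite-style '?' markers and converted to the '%s' style that mysql-connector expects; the query and parameters here are hypothetical.

# Sketch: convert SQLite '?' placeholders to MySQL '%s' placeholders (hypothetical query)
query = 'SELECT value FROM app_config WHERE name = ? AND profile = ?'
params = ('esn', 'default')
mysql_query = query.replace('?', '%s')
print(mysql_query)  # SELECT value FROM app_config WHERE name = %s AND profile = %s
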
Example No. 19
 def _execute_non_query(self, query, params=None, cursor=None, **kwargs):
     try:
         if cursor is None:
             cursor = self.get_cursor()
         query = query.replace("?", "%s")  # SQLite uses '?' placeholders, MySQL uses '%s'
         if params is not None:
             results = cursor.execute(query, params, **kwargs)
         else:
             results = cursor.execute(query, **kwargs)
         if 'multi' in kwargs:
             for result in results:  # 'multi' makes execute lazy, iterate to actually run each SQL statement
                 pass
     except mysql.connector.Error as e:
         common.error("MySQL error {}:".format(e))
         raise MySQLError
     except ValueError as exc_ve:
         common.error('Value {}'.format(str(params)))
         common.error('Value type {}'.format(type(params)))
         raise exc_ve
Example No. 20
 def _execute_non_query(self, query, params=None, cursor=None, **kwargs):  # pylint: disable=arguments-differ
     try:
         if cursor is None:
             cursor = self.get_cursor()
         query = query.replace("?", "%s")  # SQLite uses '?' placeholders, MySQL uses '%s'
         if params is not None:
             results = cursor.execute(query, params, **kwargs)
         else:
             results = cursor.execute(query, **kwargs)
         if 'multi' in kwargs:
             # 'multi' makes execute lazy, iterate to actually run each SQL statement
             for result in results:  # pylint: disable=unused-variable
                 pass
     except mysql.connector.Error as exc:
         common.error('MySQL error {}:', exc)
         raise MySQLError
     except ValueError as exc_ve:
         common.error('Value {}', str(params))
         common.error('Value type {}', type(params))
         raise exc_ve
def auto_update_library(sync_with_mylist, silent):
    """
    Perform an auto update of the exported items to Kodi library,
    so check if there is new seasons/episodes.
    If sync_with_mylist is enabled the Kodi library will be also synchronized
    with the Netflix "My List".
    :param sync_with_mylist: True to enable sync with My List
    :param silent: don't display user interface while performing an operation
    :return: None
    """
    if _is_auto_update_library_running():
        return
    execute_lib_tasks_method = execute_library_tasks_silently if silent else execute_library_tasks
    common.info(
        'Starting auto update library - check updates for tv shows (sync with My List is {})',
        'ENABLED' if sync_with_mylist else 'DISABLED')
    g.SHARED_DB.set_value('library_auto_update_is_running', True)
    g.SHARED_DB.set_value('library_auto_update_start_time', datetime.now())
    try:
        videoids_to_update = []

        # Get the list of the exported items to Kodi library
        exported_tvshows_videoids_values = g.SHARED_DB.get_tvshows_id_list()
        exported_movies_videoids_values = g.SHARED_DB.get_movies_id_list()

        if sync_with_mylist:
            # Get My List videoids of the chosen profile
            # Use make_http_call instead of make_http because calling AddonSignals on the same instance causes problems
            mylist_video_id_list, mylist_video_id_list_type = common.make_http_call(
                'get_mylist_videoids_profile_switch', None)

            # Check if tv shows have been removed from the My List
            for videoid_value in exported_tvshows_videoids_values:
                if unicode(videoid_value) in mylist_video_id_list:
                    continue
                # The TV show no longer exists in My List, so remove it from the library
                videoid = common.VideoId.from_path(
                    [common.VideoId.SHOW, videoid_value])
                execute_lib_tasks_method(videoid, [remove_item])

            # Check if movies have been removed from the My List
            for videoid_value in exported_movies_videoids_values:
                if unicode(videoid_value) in mylist_video_id_list:
                    continue
                # The movie no longer exists in My List, so remove it from the library
                videoid = common.VideoId.from_path(
                    [common.VideoId.MOVIE, videoid_value])
                execute_lib_tasks_method(videoid, [remove_item])

            # Add missing tv shows / movies of My List to library
            for index, video_id in enumerate(mylist_video_id_list):
                if (int(video_id) not in exported_tvshows_videoids_values and
                        int(video_id) not in exported_movies_videoids_values):
                    videoids_to_update.append(
                        common.VideoId(
                            **{
                                ('movieid' if (mylist_video_id_list_type[index] == 'movie') else 'tvshowid'):
                                video_id
                            }))

        # Add the exported tv shows to be updated to the list..
        tvshows_videoids_to_upd = [
            common.VideoId.from_path([common.VideoId.SHOW, videoid_value])
            for videoid_value in g.SHARED_DB.get_tvshows_id_list(
                VidLibProp['exclude_update'], False)
        ]
        # ..and avoid any duplication caused by possible unexpected errors
        videoids_to_update.extend(
            list(set(tvshows_videoids_to_upd) - set(videoids_to_update)))

        # Add missing tv shows/movies or update existing tv shows
        _update_library(videoids_to_update, exported_tvshows_videoids_values,
                        silent)

        common.debug('Auto update of the library completed')
        g.SHARED_DB.set_value('library_auto_update_is_running', False)
        if not g.ADDON.getSettingBool('lib_auto_upd_disable_notification'):
            ui.show_notification(common.get_local_string(30220), time=5000)
        common.debug('Notify the service to ask Kodi to update the library')
        common.send_signal(common.Signals.LIBRARY_UPDATE_REQUESTED)
    except Exception:  # pylint: disable=broad-except
        import traceback
        common.error('An error has occurred in the library auto update')
        common.error(traceback.format_exc())
        g.SHARED_DB.set_value('library_auto_update_is_running', False)
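
A small standalone sketch of the set-difference step used in auto_update_library above to avoid queuing video ids that are already scheduled for update; the id values are hypothetical.

# Sketch: extend a list with only the ids not already present (hypothetical ids)
videoids_to_update = ['80001', '80002']
tvshows_videoids_to_upd = ['80002', '80003', '80004']
videoids_to_update.extend(list(set(tvshows_videoids_to_upd) - set(videoids_to_update)))
print(sorted(videoids_to_update))  # ['80001', '80002', '80003', '80004']
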
Example No. 22
            '/'.join(pathitems)))
    else:
        nav.execute(NAV_HANDLERS[root_handler], pathitems[1:],
                    g.REQUEST_PARAMS)


if __name__ == '__main__':
    # pylint: disable=broad-except
    # Initialize variables in common module scope
    # (necessary when reusing language invoker)
    common.info('Started (Version {})'.format(g.VERSION))
    common.info('URL is {}'.format(g.URL))
    success = False

    try:
        g.initial_addon_configuration()
        route(filter(None, g.PATH.split('/')))
        success = True
    except common.BackendNotReady:
        ui.show_backend_not_ready()
    except Exception as exc:
        import traceback
        common.error(traceback.format_exc())
        ui.show_addon_error_info(exc)

    if not success:
        xbmcplugin.endOfDirectory(g.PLUGIN_HANDLE, succeeded=success)

    g.CACHE.commit()
    common.log_time_trace()
Example No. 23
def extract_session_data(content, validate=False, update_profiles=False):
    """
    Call all the parsers we need to extract all
    the session relevant data from the HTML page
    """
    common.debug('Extracting session data...')
    react_context = extract_json(content, 'reactContext')
    if validate:
        validate_login(react_context)

    user_data = extract_userdata(react_context)
    if user_data.get('membershipStatus') == 'ANONYMOUS':
        # Possible known causes:
        # - The login password has been changed
        # - The 'Content-Type' specified in the login request does not match the data passed, or is no longer supported
        # - Expired profile cookies (not verified)
        # In these cases it is mandatory to log in again
        raise InvalidMembershipStatusAnonymous
    if user_data.get('membershipStatus') != 'CURRENT_MEMBER':
        # When NEVER_MEMBER it is possible that the account has not been confirmed or renewed
        common.error('Cannot login, the membership status is {}',
                     user_data.get('membershipStatus'))
        raise InvalidMembershipStatusError(user_data.get('membershipStatus'))

    api_data = extract_api_data(react_context)
    # Note: Falcor cache does not exist if membershipStatus is not CURRENT_MEMBER
    falcor_cache = extract_json(content, 'falcorCache')

    if update_profiles:
        parse_profiles(falcor_cache)

    if common.is_debug_verbose():
        # Only for debug purposes, not sure if it can be useful
        try:
            common.debug(
                'ReactContext profileGateState {} ({})', PROFILE_GATE_STATES[
                    react_context['models']['profileGateState']['data']],
                react_context['models']['profileGateState']['data'])
        except KeyError:
            common.error('ReactContext unknown profileGateState {}',
                         react_context['models']['profileGateState']['data'])

    # Profile idle timeout (not sure if it will be useful, kept for now for documentation purposes)
    # NOTE: On the website this value is used to update the profilesNewSession cookie expiration after a profile switch
    #       and also to update the expiration of this cookie on each website interaction.
    #       When the session is expired, 'profileGateState' will be 0 and the website automatically returns to the profiles page
    # g.LOCAL_DB.set_value('profile_gate_idle_timer', user_data.get('idle_timer', 30), TABLE_SESSION)

    # 21/05/2020 - Netflix has introduced a new paging type called "loco" similar to the old "lolomo"
    # Extract loco root id
    loco_root = falcor_cache['loco']['value'][1]
    g.LOCAL_DB.set_value('loco_root_id', loco_root, TABLE_SESSION)

    # Check if the profile session is still active
    #  (when a session expires on the website, the screen returns automatically to the profiles page)
    is_profile_session_active = 'componentSummary' in falcor_cache['locos'][
        loco_root]

    # Extract loco root request id
    if is_profile_session_active:
        component_summary = falcor_cache['locos'][loco_root][
            'componentSummary']['value']
        # Note: 18/06/2020 the request id is now equal to reactContext models/serverDefs/data/requestId
        g.LOCAL_DB.set_value('loco_root_requestid',
                             component_summary['requestId'], TABLE_SESSION)
    else:
        g.LOCAL_DB.set_value('loco_root_requestid', '', TABLE_SESSION)

    # Extract loco continueWatching id and index
    #   The following commented code was needed for update_loco_context in api_requests.py, but currently
    #   it no longer seems required to update the continueWatching list, so we keep it in case of future Netflix changes
    # -- INIT --
    # cw_list_data = jgraph_get('continueWatching', falcor_cache['locos'][loco_root], falcor_cache)
    # if cw_list_data:
    #     context_index = falcor_cache['locos'][loco_root]['continueWatching']['value'][2]
    #     g.LOCAL_DB.set_value('loco_continuewatching_index', context_index, TABLE_SESSION)
    #     g.LOCAL_DB.set_value('loco_continuewatching_id',
    #                          jgraph_get('componentSummary', cw_list_data)['id'], TABLE_SESSION)
    # elif is_profile_session_active:
    #     # Todo: In the new profiles, there is no 'continueWatching' context
    #     #  How get or generate the continueWatching context?
    #     #  NOTE: it was needed for update_loco_context in api_requests.py
    #     cur_profile = jgraph_get_path(['profilesList', 'current'], falcor_cache)
    #     common.warn('Context continueWatching not found in locos for profile guid {}.',
    #                 jgraph_get('summary', cur_profile)['guid'])
    #     g.LOCAL_DB.set_value('loco_continuewatching_index', '', TABLE_SESSION)
    #     g.LOCAL_DB.set_value('loco_continuewatching_id', '', TABLE_SESSION)
    # else:
    #     common.warn('Is not possible to find the context continueWatching, the profile session is no more active')
    #     g.LOCAL_DB.set_value('loco_continuewatching_index', '', TABLE_SESSION)
    #     g.LOCAL_DB.set_value('loco_continuewatching_id', '', TABLE_SESSION)
    # -- END --

    # Save only some info of the current profile from user data
    g.LOCAL_DB.set_value('build_identifier', user_data.get('BUILD_IDENTIFIER'),
                         TABLE_SESSION)
    if not g.LOCAL_DB.get_value('esn', table=TABLE_SESSION):
        g.LOCAL_DB.set_value('esn',
                             common.generate_android_esn() or user_data['esn'],
                             TABLE_SESSION)
    g.LOCAL_DB.set_value('locale_id',
                         user_data.get('preferredLocale').get('id', 'en-US'))
    # Save api urls
    for key, path in list(api_data.items()):
        g.LOCAL_DB.set_value(key, path, TABLE_SESSION)

    api_data['is_profile_session_active'] = is_profile_session_active
    return api_data
Example No. 24
        version = '_'.join(platform.mac_ver()[1])
        version = '' if version == '__' else ' ' + version
        return ua + ' (Mac OS X%s)' % version

    # unix
    ua += ' (Linux %s)' % platform.machine()
    dist = '/'.join(platform.linux_distribution()[:2])
    if dist != '/':
        ua += ' ' + dist
    return ua


try:
    _user_agent = _gen_ua()
except Exception as e:
    common.error('Something went wrong when constructing user agent: %s' % e)
    _user_agent = _default_ua

_common = dict(v=1,
               tid=config.tracking_id,
               cid=settings.client_id,
               ua=_user_agent,
               sr=_screen_size,
               ul=xbmc.getLanguage(xbmc.ENGLISH_NAME),
               an=common.addon_name,
               aid=common.addon_id,
               av=common.addon_version,
               aiid=_xbmc_version)

_ga_url = 'https://www.google-analytics.com/collect'
Example No. 25
def extract_session_data(content, validate=False, update_profiles=False):
    """
    Call all the parsers we need to extract all
    the session relevant data from the HTML page
    """
    common.debug('Extracting session data...')
    react_context = extract_json(content, 'reactContext')
    if validate:
        validate_login(react_context)

    user_data = extract_userdata(react_context)
    if user_data.get('membershipStatus') == 'ANONYMOUS':
        # Possible known causes:
        # - The login password has been changed
        # - The 'Content-Type' specified in the login request does not match the data passed, or is no longer supported
        # - Expired profile cookies (not verified)
        # In these cases it is mandatory to log in again
        raise InvalidMembershipStatusAnonymous
    if user_data.get('membershipStatus') != 'CURRENT_MEMBER':
        # When NEVER_MEMBER it is possible that the account has not been confirmed or renewed
        common.error('Cannot login, the membership status is {}',
                     user_data.get('membershipStatus'))
        raise InvalidMembershipStatusError(user_data.get('membershipStatus'))

    api_data = extract_api_data(react_context)
    # Note: Falcor cache does not exist if membershipStatus is not CURRENT_MEMBER
    falcor_cache = extract_json(content, 'falcorCache')

    if update_profiles:
        parse_profiles(falcor_cache)

    g.LOCAL_DB.set_value('is_loco_supported', user_data.get('isLocoSupported'), TABLE_SESSION)
    if user_data.get('isLocoSupported'):
        # 21/05/2020 - Netflix is introducing a new paging type called "loco", it is similar to "lolomo"
        # The lolomo data here is obtained by a separate request from update_lolomo_data in nfsession.py

        # Extract loco root id
        # NOTE: the loco root id is not the same as the lolomo root id
        loco_root = falcor_cache['loco']['value'][1]
        # g.LOCAL_DB.set_value('lolomo_root_id', loco_root, TABLE_SESSION)

        # Check if current 'profile session' is still active
        # Todo: 25/05/2020 - This does not work, currently the "locos" list is always empty
        is_profile_session_active = 'componentSummary' in falcor_cache['locos'][loco_root]

        # Extract loco continueWatching id and index
        # Todo: 25/05/2020 - Without the "locos" list it is not possible to get this data here
        # g.LOCAL_DB.set_value('lolomo_continuewatching_index', '', TABLE_SESSION)
        # g.LOCAL_DB.set_value('lolomo_continuewatching_id', '', TABLE_SESSION)
    else:
        # Extract lolomo root id
        lolomo_root = falcor_cache['lolomo']['value'][1]
        g.LOCAL_DB.set_value('lolomo_root_id', lolomo_root, TABLE_SESSION)

        # Check if current 'profile session' is still active
        # What 'profile session' means:
        # In a web browser, after you select a profile and then close the browser page,
        #   when you reopen it you will not be asked to select a profile again; this means that the same
        #   profile session is still active, and the lolomo root id (and child context ids) are still the same.
        #   One way to detect this here is to check whether there is a 'summary' entry in the lolomos dictionary.
        is_profile_session_active = 'summary' in falcor_cache['lolomos'][lolomo_root]

        # Extract lolomo continueWatching id and index
        cw_list_data = jgraph_get('continueWatching', falcor_cache['lolomos'][lolomo_root], falcor_cache)
        if cw_list_data:
            context_index = falcor_cache['lolomos'][lolomo_root]['continueWatching']['value'][2]
            g.LOCAL_DB.set_value('lolomo_continuewatching_index', context_index, TABLE_SESSION)
            g.LOCAL_DB.set_value('lolomo_continuewatching_id', jgraph_get('id', cw_list_data), TABLE_SESSION)
        elif is_profile_session_active:
            # Todo: In the new profiles, there is no 'continueWatching' context
            #  How to get or generate the continueWatching context?
            #  (needed to update lolomo list for watched state sync, see update_lolomo_context in api_requests.py)
            cur_profile = jgraph_get_path(['profilesList', 'current'], falcor_cache)
            common.warn('Context continueWatching not found in lolomos for profile guid {}.',
                        jgraph_get('summary', cur_profile)['guid'])
            g.LOCAL_DB.set_value('lolomo_continuewatching_index', '', TABLE_SESSION)
            g.LOCAL_DB.set_value('lolomo_continuewatching_id', '', TABLE_SESSION)
        else:
            common.warn('It is not possible to find the continueWatching context, the profile session is no longer active')

    # Save only some info of the current profile from user data
    g.LOCAL_DB.set_value('build_identifier', user_data.get('BUILD_IDENTIFIER'), TABLE_SESSION)
    if not g.LOCAL_DB.get_value('esn', table=TABLE_SESSION):
        g.LOCAL_DB.set_value('esn', common.generate_android_esn() or user_data['esn'], TABLE_SESSION)
    g.LOCAL_DB.set_value('locale_id', user_data.get('preferredLocale').get('id', 'en-US'))
    # Save api urls
    for key, path in list(api_data.items()):
        g.LOCAL_DB.set_value(key, path, TABLE_SESSION)

    api_data['is_profile_session_active'] = is_profile_session_active
    return api_data
Example No. 26
def _raise_if_error(decoded_response):
    if any(key in decoded_response for key in ['error', 'errordata']):
        common.error('Full MSL error information:')
        common.error(json.dumps(decoded_response))
        raise MSLError(_get_error_details(decoded_response))
    return decoded_response
    if 'darwin' in sys.platform:
        version = '_'.join(platform.mac_ver()[1])
        version = '' if version == '__' else ' '+version
        return ua + ' (Mac OS X%s)' % version

    # unix
    ua += ' (Linux %s)' % platform.machine()
    dist = '/'.join(platform.linux_distribution()[:2])
    if dist != '/':
        ua += ' ' + dist
    return ua

try:
    _user_agent = _gen_ua()
except Exception as e:
    common.error('Something went wrong when constructing user agent: %s' % e)
    _user_agent = _default_ua


_common = dict(
    v    = 1,
    tid  = config.tracking_id,
    cid  = settings.client_id,
    ua   = _user_agent,
    sr   = _screen_size,
    ul   = xbmc.getLanguage(xbmc.ENGLISH_NAME),
    an   = common.addon_name,
    aid  = common.addon_id,
    av   = common.addon_version,
    aiid = _xbmc_version
)
Example No. 28
    def auto_update_library(self, sync_with_mylist, show_prg_dialog=True, show_nfo_dialog=False, clear_on_cancel=False,
                            update_profiles=False):
        """
        Perform an auto update of the exported items into the Kodi library.
        - The main purpose is to check if there are new seasons/episodes.
        - If the "Sync Kodi library with My list" feature is enabled, the library will also be synchronized with My List.
        :param sync_with_mylist: if True, sync the Kodi library with Netflix My List
        :param show_prg_dialog: if True, a progress dialog window will be shown and errors will be notified to the user
        :param show_nfo_dialog: if True, ask the user whether to export NFO files (overrides custom NFO actions for the videoid)
        :param clear_on_cancel: if True, the entire library will be cleared when the user cancels the operation
        :param update_profiles: if True, the profiles will be updated before performing sync_with_mylist
        """
        if is_auto_update_library_running(show_prg_dialog):
            return
        common.info('Start auto-updating of Kodi library {}', '(with sync of My List)' if sync_with_mylist else '')
        G.SHARED_DB.set_value('library_auto_update_is_running', True)
        G.SHARED_DB.set_value('library_auto_update_start_time', datetime.now())
        try:
            # Get the full list of the exported tvshows/movies as id (VideoId.value)
            exp_tvshows_videoids_values = G.SHARED_DB.get_tvshows_id_list()
            exp_movies_videoids_values = G.SHARED_DB.get_movies_id_list()

            # Get the exported tv shows (to be updated) as dict (key=videoid, value=type of task)
            videoids_tasks = {
                common.VideoId.from_path([common.VideoId.SHOW, videoid_value]): self.export_new_item
                for videoid_value in G.SHARED_DB.get_tvshows_id_list(VidLibProp['exclude_update'], False)
            }
            if sync_with_mylist and update_profiles:
                # Before doing the sync with My List, try to update the profiles in the database
                # to sanity check the features that are linked to the profiles
                self.ext_func_req_profiles_info(update_database=True)  # pylint: disable=not-callable
                sync_with_mylist = G.ADDON.getSettingBool('lib_sync_mylist')
            # If enabled sync the Kodi library with Netflix My List
            if sync_with_mylist:
                self._sync_my_list_ops(videoids_tasks, exp_tvshows_videoids_values, exp_movies_videoids_values)

            # Show a warning message when there are more than 100 titles to be updated; making too many metadata
            # requests may cause the server to block HTTP communication or temporarily ban the account
            if show_prg_dialog:
                total_titles_upd = sum(task != self.remove_item for task in videoids_tasks.values())
                if total_titles_upd >= 100 and not ui.ask_for_confirmation(
                        common.get_local_string(30122),
                        common.get_local_string(30059).format(total_titles_upd)):
                    return
            # Start the update operations
            ret = self._update_library(videoids_tasks, exp_tvshows_videoids_values, show_prg_dialog, show_nfo_dialog,
                                       clear_on_cancel)
            if not ret:
                return
            request_kodi_library_update(scan=True, clean=True)
            # Save date for completed operation to compute next update schedule (used in library_updater.py)
            G.SHARED_DB.set_value('library_auto_update_last_start', datetime.now())
            common.info('Auto update of the Kodi library completed')
            if not G.ADDON.getSettingBool('lib_auto_upd_disable_notification'):
                ui.show_notification(common.get_local_string(30220), time=5000)
        except Exception as exc:  # pylint: disable=broad-except
            import traceback
            common.error('An error has occurred in the library auto update: {}', exc)
            common.error(G.py2_decode(traceback.format_exc(), 'latin-1'))
        finally:
            G.SHARED_DB.set_value('library_auto_update_is_running', False)
Example No. 29
def run(argv):
    # pylint: disable=broad-except,ungrouped-imports,too-many-branches
    # Initialize globals right away to avoid stale values from the last addon invocation.
    # Otherwise Kodi's reuseLanguageInvoker will cause some really quirky behavior!
    # PR: https://github.com/xbmc/xbmc/pull/13814
    g.init_globals(argv)

    reset_log_level_global_var()
    info('Started (Version {})'.format(g.VERSION_RAW))
    info('URL is {}'.format(g.URL))
    success = True

    window_cls = Window(10000)  # Kodi home window

    # If you use multiple Kodi profiles you need to distinguish the property of the current profile
    prop_nf_service_status = g.py2_encode('nf_service_status_' +
                                          get_current_kodi_profile_name())
    is_external_call = _check_addon_external_call(window_cls,
                                                  prop_nf_service_status)
    service_status = _get_service_status(window_cls, prop_nf_service_status)

    if service_status.get('status') != 'running':
        if not is_external_call:
            if service_status.get('status') == 'error':
                # The services are not started due to an error exception
                from resources.lib.kodi.ui import show_error_info
                show_error_info(
                    get_local_string(30105),
                    get_local_string(30240).format(
                        service_status.get('message')), False, False)
            else:
                # The services are not started yet
                from resources.lib.kodi.ui import show_backend_not_ready
                show_backend_not_ready()
        success = False

    if success:
        try:
            if _check_valid_credentials():
                if g.IS_ADDON_FIRSTRUN:
                    if check_addon_upgrade():
                        from resources.lib.config_wizard import run_addon_configuration
                        run_addon_configuration()
                route([part for part in g.PATH.split('/') if part])
            else:
                success = False
        except BackendNotReady:
            from resources.lib.kodi.ui import show_backend_not_ready
            show_backend_not_ready()
            success = False
        except InputStreamHelperError as exc:
            from resources.lib.kodi.ui import show_ok_dialog
            show_ok_dialog('InputStream Helper Add-on error', (
                'The operation has been cancelled.\r\n'
                'InputStream Helper has generated an internal error:\r\n{}\r\n\r\n'
                'Please report it to InputStream Helper github.'.format(exc)))
            success = False
        except HttpError401:
            # Http error 401 Client Error: Unauthorized for url ... issue (see _request in nfsession_requests.py)
            from resources.lib.kodi.ui import show_ok_dialog
            show_ok_dialog(get_local_string(30105), (
                'There was a communication problem with Netflix.\r\n'
                'This is a known and unresolvable issue, do not submit reports.\r\n'
                'You can try the operation again or exit.'))
            success = False
        except Exception as exc:
            import traceback
            from resources.lib.kodi.ui import show_addon_error_info
            error(g.py2_decode(traceback.format_exc(), 'latin-1'))
            show_addon_error_info(exc)
            success = False

    if not success:
        _handle_endofdirectory()
    log_time_trace()
Example No. 30
def export_all_new_episodes():
    """
    Update the local Kodi library with new episodes of every exported shows
    """
    from resources.lib.cache import CACHE_COMMON
    from resources.lib.database.db_exceptions import ProfilesMissing
    if _export_all_new_episodes_running():
        return
    common.log('Starting to export new episodes for all tv shows')
    g.SHARED_DB.set_value('library_export_new_episodes_running', True)
    g.SHARED_DB.set_value('library_export_new_episode_start_time',
                          datetime.now())
    # Get the list of the tvshows exported to kodi library
    exported_videoids_values = g.SHARED_DB.get_tvshows_id_list()
    # Get the list of the tvshows that are exported but excluded from updates
    excluded_videoids_values = g.SHARED_DB.get_tvshows_id_list(
        VidLibProp.exclude_update, True)

    # Before starting to get updated My List items, you have to select the owner account
    # TODO: in the future you can also add the possibility to synchronize from a chosen profile
    try:
        guid_owner_profile = g.LOCAL_DB.get_guid_owner_profile()
    except ProfilesMissing as exc:
        import traceback
        common.error(traceback.format_exc())
        ui.show_addon_error_info(exc)
        return
    if guid_owner_profile != g.LOCAL_DB.get_active_profile_guid():
        common.debug('Switching to owner account profile')
        api.activate_profile(guid_owner_profile)

    # Retrieve updated items from "my list"
    # Invalidate the My List cached data to force obtaining new data
    g.CACHE.invalidate_entry(CACHE_COMMON, 'my_list_items')
    mylist_videoids = api.mylist_items()

    # Check if any tvshow has been removed from My List
    for videoid_value in exported_videoids_values:
        if any(videoid.value == unicode(videoid_value)
               for videoid in mylist_videoids):
            continue
        # The tvshow no longer exists in My List, so remove it from the library
        videoid = common.VideoId.from_path(
            [common.VideoId.SHOW, videoid_value])
        execute_library_tasks_silently(videoid, [remove_item],
                                       sync_mylist=False)

    # Update or add tvshow in kodi library
    for videoid in mylist_videoids:
        # Only tvshows need to be updated
        if videoid.mediatype != common.VideoId.SHOW:
            continue
        if videoid.value in excluded_videoids_values:
            continue
        if videoid.value in exported_videoids_values:
            # It is possible that the user has chosen not to export nfo for a tvshow
            nfo_export = g.SHARED_DB.get_tvshow_property(
                videoid.value, VidLibProp.nfo_export, False)
            nfo_settings = nfo.NFOSettings(nfo_export)
        else:
            nfo_settings = nfo.NFOSettings()
        export_new_episodes(videoid, True, nfo_settings)
        # Add some randomness between show analyses to limit server load and ban risk
        xbmc.sleep(random.randint(1000, 5001))

    g.SHARED_DB.set_value('library_export_new_episodes_running', False)
    if not g.ADDON.getSettingBool('disable_library_sync_notification'):
        ui.show_notification(common.get_local_string(30220), time=5000)
    common.debug('Notify service to update the library')
    common.send_signal(common.Signals.LIBRARY_UPDATE_REQUESTED)
Example No. 31
def delete(account_hash):
    """Delete cookies for an account from the disk"""
    try:
        xbmcvfs.delete(cookie_filename(account_hash))
    except Exception as exc:  # pylint: disable=broad-except
        common.error('Failed to delete cookies on disk: {exc}', exc=exc)
    def __init__(self):
        super(AndroidMSLCrypto, self).__init__()
        self.crypto_session = None
        self.keyset_id = None
        self.key_id = None
        self.hmac_key_id = None
        try:
            self.crypto_session = xbmcdrm.CryptoSession(
                'edef8ba9-79d6-4ace-a3c8-27dcd51d21ed', 'AES/CBC/NoPadding',
                'HmacSHA256')
            common.debug('Widevine CryptoSession successfully constructed')
        except Exception:  # pylint: disable=broad-except
            import traceback
            common.error(g.py2_decode(traceback.format_exc(), 'latin-1'))
            raise MSLError('Failed to construct Widevine CryptoSession')

        drm_info = {
            'version':
            self.crypto_session.GetPropertyString('version'),
            'system_id':
            self.crypto_session.GetPropertyString('systemId'),
            #  'device_unique_id': self.crypto_session.GetPropertyByteArray('deviceUniqueId')
            'hdcp_level':
            self.crypto_session.GetPropertyString('hdcpLevel'),
            'hdcp_level_max':
            self.crypto_session.GetPropertyString('maxHdcpLevel'),
            'security_level':
            self.crypto_session.GetPropertyString('securityLevel')
        }

        if not drm_info['version']:
            # Possible cases where no data is obtained:
            # - Device with custom ROM or without Widevine support
            # - Using a Kodi debug build with an InputStream Adaptive release build (yes, users do it)
            raise MSLError(
                'It was not possible to get the data from Widevine CryptoSession.\r\n'
                'Your system is not Widevine certified or you have a wrong Kodi version installed.'
            )

        g.LOCAL_DB.set_value('drm_system_id', drm_info['system_id'],
                             TABLE_SESSION)
        g.LOCAL_DB.set_value('drm_security_level', drm_info['security_level'],
                             TABLE_SESSION)
        g.LOCAL_DB.set_value('drm_hdcp_level', drm_info['hdcp_level'],
                             TABLE_SESSION)

        common.debug('Widevine version: {}', drm_info['version'])
        if drm_info['system_id']:
            common.debug('Widevine CryptoSession system id: {}',
                         drm_info['system_id'])
        else:
            common.warn('Widevine CryptoSession system id not obtained!')
        common.debug('Widevine CryptoSession security level: {}',
                     drm_info['security_level'])
        if g.ADDON.getSettingBool('force_widevine_l3'):
            common.warn(
                'Widevine security level is forced to L3 by user settings!')
        common.debug('Widevine CryptoSession current hdcp level: {}',
                     drm_info['hdcp_level'])
        common.debug('Widevine CryptoSession max hdcp level supported: {}',
                     drm_info['hdcp_level_max'])
        common.debug('Widevine CryptoSession algorithms: {}',
                     self.crypto_session.GetPropertyString('algorithms'))