def chunked_request(self, endpoint, request_data, esn, disable_msl_switch=True, force_auth_credential=False):
    """
    POST an MSL request and return the 'result' field of the parsed chunked response.

    :param endpoint: target MSL endpoint URL
    :param request_data: payload to wrap into the MSL request
    :param esn: the ESN to use for the request
    :param disable_msl_switch: when True, skip the MSL profile-switch logic in the token check
    :param force_auth_credential: when True, force credential-based authentication
    :return: the 'result' value extracted from the chunked response
    """
    self._mastertoken_checks()
    auth_data = self._check_user_id_token(disable_msl_switch, force_auth_credential)
    LOG.debug('Chunked request will be executed with auth data: {}', auth_data)
    raw_response = self._post(endpoint, self.msl_request(request_data, esn, auth_data))
    # When no user id token was available, the one from the response is saved for the owner profile
    parsed = self._process_chunked_response(
        raw_response,
        save_uid_token_to_owner=auth_data['user_id_token'] is None)
    return parsed['result']
def export_item(self, job_data, library_home):
    """
    Create the STRM file (and optional NFO file) for an item and register it in the add-on library.

    :param job_data: dict describing the export job (folder names, filename, videoid, nfo data, flags)
    :param library_home: root path of the Kodi library folder
    """
    # Paths must be legal to ensure NFS compatibility
    destination = common.join_folders_paths(library_home,
                                            job_data['root_folder_name'],
                                            job_data['folder_name'])
    common.create_folder(destination)
    if job_data['create_strm_file']:
        strm_path = common.join_folders_paths(destination, job_data['filename'] + '.strm')
        # Register the videoid in the add-on DB before writing the file
        insert_videoid_to_db(job_data['videoid'], strm_path, job_data['nfo_data'] is not None)
        common.write_strm_file(job_data['videoid'], strm_path)
    if job_data['create_nfo_file']:
        nfo_path = common.join_folders_paths(destination, job_data['filename'] + '.nfo')
        common.write_nfo_file(job_data['nfo_data'], nfo_path)
    LOG.debug('Exported {}: {}', job_data['videoid'], job_data['title'])
def perform_key_handshake(self, data=None):  # pylint: disable=unused-argument
    """
    Perform a key handshake and initialize crypto keys.

    :param data: unused (kept for IPC slot signature compatibility)
    :return: True on success, False when no ESN is available
    """
    esn = get_esn()
    if not esn:
        LOG.warn('Cannot perform key handshake, missing ESN')
        return False
    # Censor the ESN in the log only when it was manually set in the settings
    LOG.info('Performing key handshake with ESN: {}',
             common.censure(esn) if G.ADDON.getSetting('esn') else esn)
    response = _process_json_response(
        self._post(ENDPOINTS['manifest'], self.handshake_request(esn)))
    decrypted_header = self.decrypt_header_data(response['headerdata'], False)
    self.crypto.parse_key_response(decrypted_header, esn, True)
    # Delete all the user id tokens (are correlated to the previous mastertoken)
    self.crypto.clear_user_id_tokens()
    LOG.debug('Key handshake successful')
    return True
def lazy_login_wrapper(*args, **kwargs):
    """Run the wrapped callable, performing a login first (or retrying after one) when needed."""
    try:
        # Bail out early when no credentials are stored
        if not _check_valid_credentials():
            return False
        return func(*args, **kwargs)
    except (NotLoggedInError, LoginValidateError):
        # Exceptions raised by nfsession: "login" / "assert_logged_in" / "website_extract_session_data"
        LOG.debug('Tried to perform an action without being logged in')
        try:
            from resources.lib.utils.api_requests import login
            if login(ask_credentials=not check_credentials()):
                LOG.debug('Account logged in, try executing again {}', func.__name__)
                return func(*args, **kwargs)
            return False
        except MissingCredentialsError:
            # Cancelled from user or left an empty field
            return False
def home(self, pathitems=None, cache_to_disc=True, is_autoselect_profile=False):  # pylint: disable=unused-argument
    """
    Show home listing.

    :param pathitems: unused (kept for navigation handler signature compatibility)
    :param cache_to_disc: forwarded to end_of_directory to control Kodi directory caching
    :param is_autoselect_profile: True when invoked by the profile auto-selection (skips profile switch)
    """
    if not is_autoselect_profile and 'switch_profile_guid' in self.params:
        # This is executed only when you have selected a profile from the profile list
        if not activate_profile(self.params['switch_profile_guid']):
            xbmcplugin.endOfDirectory(G.PLUGIN_HANDLE, succeeded=False)
            return
    LOG.debug('Showing home listing')
    list_data, extra_data = common.make_call('get_mainmenu')  # pylint: disable=unused-variable
    directory_title = (G.LOCAL_DB.get_profile_config('profileName', '???')
                       + ' - ' + common.get_local_string(30097))
    finalize_directory(convert_list_to_dir_items(list_data),
                       G.CONTENT_FOLDER,
                       title=directory_title)
    end_of_directory(True, cache_to_disc)
def do_POST(self):
    """Dispatch an IPC POST request to the matching handler based on the endpoint path."""
    LOG.debug('HTTP Server: received POST request {}', self.path)
    parsed_url = urlparse(self.path)
    # The last path segment is the function name, the rest identifies the endpoint
    endpoint, func_name = parsed_url.path.rsplit('/', 1)
    content_length = int(self.headers.get('content-length', 0))
    body = self.rfile.read(content_length) or None
    if endpoint == IPC_ENDPOINT_MSL:
        handle_msl_request(self, func_name, body)
    elif endpoint == IPC_ENDPOINT_CACHE:
        handle_cache_request(self, func_name, body)
    elif endpoint == IPC_ENDPOINT_NFSESSION:
        handle_request(self, self.server.netflix_session, func_name, body)
    elif endpoint == IPC_ENDPOINT_NFSESSION_TEST and LOG.is_enabled:
        # Test slots are reachable only while debug logging is enabled
        handle_request_test(self, self.server.netflix_session, func_name, body)
    else:
        self.send_error(404, 'Not found')
        self.end_headers()
def _perform_service_changes(previous_ver, current_ver):
    """
    Run one-time service upgrade operations for a version bump.

    :param previous_ver: previously installed add-on version string (may be None on first run)
    :param current_ver: version string being installed now
    """
    LOG.debug('Initialize service upgrade operations, from version {} to {})',
              previous_ver, current_ver)
    # Clear cache (prevents problems when netflix change data structures)
    G.CACHE.clear()
    # Delete all stream continuity data - if user has upgraded from Kodi 18 to Kodi 19
    if previous_ver and is_less_version(previous_ver, '1.13'):
        # There is no way to determine if the user has migrated from Kodi 18 to Kodi 19,
        # then we assume that add-on versions prior to 1.13 was on Kodi 18
        # The am_stream_continuity.py on Kodi 18 works differently and the existing data
        # can not be used on Kodi 19
        G.SHARED_DB.clear_stream_continuity()
    if previous_ver and is_less_version(previous_ver, '1.9.0'):
        # In the version 1.9.0 has been changed the COOKIE_ filename with a static filename
        from resources.lib.upgrade_actions import rename_cookie_file
        rename_cookie_file()
    if previous_ver and is_less_version(previous_ver, '1.12.0'):
        # In the version 1.13.0:
        # - 'force_widevine' on setting.xml has been moved
        #   as 'widevine_force_seclev' in TABLE_SESSION with different values:
        # force_widevine = G.ADDON.getSettingString('force_widevine')
        # # Old values: Disabled|Widevine L3|Widevine L3 (ID-4445)
        # # New values: Disabled|L3|L3 (ID 4445)
        # if force_widevine == 'Widevine L3':
        #     G.LOCAL_DB.set_value('widevine_force_seclev', 'L3', table=TABLE_SESSION)
        # elif force_widevine == 'Widevine L3 (ID-4445)':
        #     G.LOCAL_DB.set_value('widevine_force_seclev', 'L3 (ID 4445)', table=TABLE_SESSION)
        # - 'esn' on setting.xml is not more used but if was set the value need to be copied
        #   on 'esn' on TABLE_SESSION:
        # esn = G.ADDON.getSettingString('esn')
        # if esn:
        #     from resources.lib.utils.esn import set_esn
        #     set_esn(esn)
        # - 'suspend_settings_monitor' is not more used
        G.LOCAL_DB.delete_key('suspend_settings_monitor')
        # In the version 1.14.0 the new settings.xml format has been introduced
        # the migration of the settings (commented above) from this version is no more possible
        from resources.lib.kodi import ui
        ui.show_ok_dialog(
            'Netflix add-on upgrade',
            'This add-on upgrade has reset your ESN code, if you had set an ESN code manually '
            'you must re-enter it again in the Expert settings, otherwise simply ignore this message.'
        )
    # Always leave this to last - After the operations set current version
    G.LOCAL_DB.set_value('service_previous_version', current_ver)
def do_POST(self):
    """
    Handle POST IPC calls.

    Paths containing '/license' request a playback license; every other path is
    dispatched by name to one of the MSL handler's registered IPC slots.
    """
    try:
        url_parse = urlparse(self.path)
        LOG.debug('Handling HTTP POST IPC call to {}', url_parse.path)
        # BUG FIX: the substring test must target the path string; using
        # "in" against the ParseResult named tuple only matches when a whole
        # URL component equals '/license', so querystring-carrying requests
        # would be mis-routed
        if '/license' in url_parse.path:
            length = int(self.headers.get('content-length', 0))
            # Body format is '<challenge>!<base64-encoded sid>'
            data = self.rfile.read(length).decode('utf-8').split('!')
            b64license = self.server.msl_handler.get_license(
                challenge=data[0],
                sid=base64.standard_b64decode(data[1]).decode('utf-8'))
            self.send_response(200)
            self.end_headers()
            self.wfile.write(base64.standard_b64decode(b64license))
        else:
            func_name = self.path[1:]
            length = int(self.headers.get('content-length', 0))
            data = json.loads(self.rfile.read(length)) or None
            try:
                result = self.server.msl_handler.http_ipc_slots[func_name](data)
                if isinstance(result, dict) and common.IPC_EXCEPTION_PLACEHOLDER in result:
                    # The slot returned a serialized exception: signal it to the caller
                    self.send_response(500, json.dumps(result))
                    self.end_headers()
                    return
                self.send_response(200)
                self.end_headers()
                self.wfile.write(json.dumps(result).encode('utf-8'))
            except KeyError:
                # No slot registered under this name
                self.send_response(500, json.dumps(common.ipc_convert_exc_to_json(
                    class_name='SlotNotImplemented',
                    message='The specified slot {} does not exist'.format(func_name))))
                self.end_headers()
    except Exception as exc:  # pylint: disable=broad-except
        import traceback
        LOG.error(traceback.format_exc())
        self.send_response(500 if isinstance(exc, MSLError) else 400)
        self.end_headers()
def get_license(self, license_data):
    """
    Requests and returns a license for the given challenge and sid
    :param license_data: The license data provided by isa
    :return: Base64 representation of the license key or False unsuccessful
    """
    LOG.debug('Requesting license')
    challenge, sid = license_data.decode('utf-8').split('!')
    sid = base64.standard_b64decode(sid).decode('utf-8')
    timestamp = int(time.time() * 10000)
    xid = str(timestamp + 1610)
    request_params = [{'drmSessionId': sid,
                       'clientTime': int(timestamp / 10000),
                       'challengeBase64': challenge,
                       'xid': xid}]
    self.manifest_challenge = challenge
    endpoint_url = ENDPOINTS['license'] + create_req_params(0, 'prefetch/license')
    try:
        response = self.msl_requests.chunked_request(
            endpoint_url,
            self.msl_requests.build_request_data(self.last_license_url, request_params, 'drmSessionId'),
            get_esn())
    except MSLError as exc:
        # Error 1044 on Android is usually solvable by forcing Widevine L3
        if exc.err_number == '1044' and common.get_system_platform() == 'android':
            msg = ('This title is not available to watch instantly. Please try another title.\r\n'
                   'To try to solve this problem you can force "Widevine L3" from the add-on Expert settings.\r\n'
                   'More info in the Wiki FAQ on add-on GitHub.')
            raise MSLError(msg) from exc
        raise
    # This xid must be used also for each future Event request, until playback stops
    G.LOCAL_DB.set_value('xid', xid, TABLE_SESSION)
    self.licenses_xid.insert(0, xid)
    self.licenses_session_id.insert(0, sid)
    self.licenses_release_url.insert(0, response[0]['links']['releaseLicense']['href'])
    if self.msl_requests.msl_switch_requested:
        self.msl_requests.msl_switch_requested = False
        self.bind_events()
    return base64.standard_b64decode(response[0]['licenseResponseBase64'])
def remove_item(self, job_data, library_home=None):  # pylint: disable=unused-argument
    """
    Remove an item from the Kodi library: delete its files from disk and
    remove the add-on database references.

    :param job_data: dict with at least 'videoid', 'title' and 'file_path'
    :param library_home: unused (kept for task handler signature compatibility)
    """
    videoid = job_data['videoid']
    LOG.debug('Removing {} ({}) from add-on library', videoid, job_data['title'])
    try:
        # Remove the STRM file exported
        strm_path = xbmcvfs.translatePath(job_data['file_path'])
        common.delete_file_safe(strm_path)
        parent_folder = xbmcvfs.translatePath(os.path.dirname(strm_path))
        # Remove the NFO file of the related STRM file
        common.delete_file_safe(os.path.splitext(strm_path)[0] + '.nfo')
        dirs, files = common.list_dir(parent_folder)
        tvshow_nfo_file = common.join_folders_paths(parent_folder, 'tvshow.nfo')
        # Remove the tvshow NFO file (only when it is the last file in the folder)
        # (users have the option of removing even single seasons)
        if xbmcvfs.exists(tvshow_nfo_file) and not dirs and len(files) == 1:
            xbmcvfs.delete(tvshow_nfo_file)
            # Delete parent folder
            xbmcvfs.rmdir(parent_folder)
        # Delete parent folder when empty
        if not dirs and not files:
            xbmcvfs.rmdir(parent_folder)
        # Remove videoid records from add-on database
        remove_videoid_from_db(videoid)
    except ItemNotFound:
        LOG.warn('The videoid {} not exists in the add-on library database', videoid)
    except Exception as exc:  # pylint: disable=broad-except
        import traceback
        LOG.error(traceback.format_exc())
        ui.show_addon_error_info(exc)
def _perform_addon_changes(previous_ver, current_ver):
    """
    Run one-time add-on upgrade operations for a version bump.

    :param previous_ver: previously installed add-on version string (may be None on first run)
    :param current_ver: version string being installed now
    :return: True when the in-progress playback must be cancelled
    """
    cancel_playback = False
    LOG.debug('Initialize addon upgrade operations, from version {} to {})',
              previous_ver, current_ver)
    if previous_ver and is_less_version(previous_ver, '0.15.9'):
        import resources.lib.kodi.ui as ui
        ui.show_ok_dialog(
            'Netflix upgrade',
            'This update resets the settings to auto-update library.\r\n'
            'Therefore only in case you are using auto-update must be reconfigured.')
    if previous_ver and is_less_version(previous_ver, '1.7.0'):
        from resources.lib.upgrade_actions import migrate_library
        migrate_library()
        cancel_playback = True
    # Always leave this to last - After the operations set current version
    G.LOCAL_DB.set_value('addon_previous_version', current_ver)
    return cancel_playback
def key_request_data(self):
    """
    Return a key request dict for the Widevine CryptoSession.

    :return: list with a single dict in the scheme/keydata shape expected by the MSL handshake
    :raises MSLError: when the CryptoSession returns no key request
    """
    # No key update supported -> remove existing keys
    self.crypto_session.RemoveKeys()
    challenge = self.crypto_session.GetKeyRequest(  # pylint: disable=assignment-from-none
        bytes([10, 122, 0, 108, 56, 43]), 'application/xml', True, dict())
    if not challenge:
        raise MSLError('Widevine CryptoSession getKeyRequest failed!')
    LOG.debug('Widevine CryptoSession getKeyRequest successful. Size: {}', len(challenge))
    # Save the key request (challenge data) required for manifest requests
    # Todo: to be implemented if/when it becomes mandatory
    encoded_challenge = base64.standard_b64encode(challenge).decode('utf-8')
    # G.LOCAL_DB.set_value('drm_session_challenge', key_request, TABLE_SESSION)
    return [{'scheme': 'WIDEVINE', 'keydata': {'keyrequest': encoded_challenge}}]
def _get_owner_user_id_token(self):
    """A way to get the user token id of owner profile"""
    # In order to get a user id token of another (non-owner) profile you must make a request
    # with SWITCH_PROFILE authentication scheme (a custom authentication for netflix), and
    # this request can be directly included in the MSL manifest request.
    # But in order to execute this switch profile, you need to have the user id token of the
    # main (owner) profile. The only way (found to now) to get it immediately, is send a
    # logblob event request, and save the user id token obtained in the response.
    LOG.debug('Requesting logblog')
    query = urlencode({'reqAttempt': 1, 'reqPriority': 0, 'reqName': EVENT_BIND}).replace('%2F', '/')
    url = ENDPOINTS['logblobs'] + '?' + query
    response = self.chunked_request(
        url,
        self.build_request_data('/logblob', generate_logblobs_params()),
        get_esn(),
        force_auth_credential=True)
    LOG.debug('Response of logblob request: {}', response)
def activate_profile(self, guid):
    """
    Set the profile identified by guid as active.

    :param guid: GUID of the profile to activate
    :raises Warning: when a video is currently playing
    :raises InvalidProfilesError: when the profile guid is no longer valid (HTTP 401)
    """
    LOG.debug('Switching to profile {}', guid)
    if xbmc.Player().isPlayingVideo():
        # Change the current profile while a video is playing can cause problems with outgoing
        # HTTP requests (MSL/NFSession) causing a failure in the HTTP request or sending data
        # on the wrong profile
        raise Warning('It is not possible select a profile while a video is playing.')
    current_active_guid = G.LOCAL_DB.get_active_profile_guid()
    if guid == current_active_guid:
        LOG.info('The profile guid {} is already set, activation not needed.', guid)
        return
    timestamp = time.time()
    LOG.info('Activating profile {}', guid)
    # 20/05/2020 - The method 1 not more working for switching PIN locked profiles
    # INIT Method 1 - HTTP mode
    # response = self._get('switch_profile', params={'tkn': guid})
    # self.nfsession.auth_url = self.website_extract_session_data(response)['auth_url']
    # END Method 1
    # INIT Method 2 - API mode
    try:
        self.get_safe(endpoint='activate_profile',
                      params={'switchProfileGuid': guid,
                              '_': int(timestamp * 1000),
                              'authURL': self.auth_url})
    except HttpError401 as exc:
        # Profile guid not more valid
        # Native py3 exception chaining, consistent with the rest of the codebase
        # (replaces the legacy six-style raise_from helper)
        raise InvalidProfilesError('Unable to access to the selected profile.') from exc
    # Retrieve browse page to update authURL
    response = self.get_safe('browse')
    self.auth_url = website.extract_session_data(response)['auth_url']
    # END Method 2
    G.LOCAL_DB.switch_active_profile(guid)
    G.CACHE_MANAGEMENT.identifier_prefix = guid
    cookies.save(self.session.cookies)
def route(pathitems):
    """
    Route to the appropriate handler.

    :param pathitems: list of URL path segments (empty means the directory root)
    :return: True when a handler was executed, False for ignored invocations
    :raises InvalidPathError: when no root handler exists for the path
    """
    LOG.debug('Routing navigation request')
    root_handler = pathitems[0] if pathitems else G.MODE_DIRECTORY
    # Guard-clause dispatch: playback modes and the ignored 'extrafanart' call first
    if root_handler == G.MODE_PLAY:
        from resources.lib.navigation.player import play
        play(videoid=pathitems[1:])
        return True
    if root_handler == G.MODE_PLAY_STRM:
        from resources.lib.navigation.player import play_strm
        play_strm(videoid=pathitems[1:])
        return True
    if root_handler == 'extrafanart':
        LOG.warn('Route: ignoring extrafanart invocation')
        return False
    nav_handler = _get_nav_handler(root_handler)
    if not nav_handler:
        raise InvalidPathError('No root handler for path {}'.format('/'.join(pathitems)))
    _execute(nav_handler, pathitems[1:], G.REQUEST_PARAMS)
    return True
def _get_macos_uuid():
    """
    Return the macOS hardware UUID from the system profiler, falling back to the
    serial number; return None when neither can be obtained.
    """
    # pylint: disable=broad-except
    import subprocess
    sp_dict_values = None
    try:
        with subprocess.Popen(
                ['/usr/sbin/system_profiler', 'SPHardwareDataType', '-detaillevel', 'full', '-xml'],
                stdout=subprocess.PIPE) as proc:
            output_data = proc.communicate()[0].decode('utf-8')
            if output_data:
                sp_dict_values = _parse_osx_xml_plist_data(output_data)
    except Exception as exc:
        LOG.debug('Failed to fetch OSX/IOS system profile {}', exc)
    if sp_dict_values:
        # Membership test directly on the dict (the previous list(keys()) built a
        # throwaway list on every check)
        if 'UUID' in sp_dict_values:
            return sp_dict_values['UUID']
        if 'serialnumber' in sp_dict_values:
            return sp_dict_values['serialnumber']
    return None
def prefetch_login(self):
    """
    Check if we have stored credentials. If so, do the login before the user requests it.

    :return: True when already logged in or the login succeeded, False otherwise
    """
    from requests import exceptions
    try:
        common.get_credentials()
        if not self.is_logged_in():
            self.login()
        return True
    except MissingCredentialsError:
        # No stored credentials: nothing to prefetch
        pass
    except exceptions.RequestException as exc:
        # It was not possible to connect to the web service, no connection, network problem, etc
        import traceback
        LOG.error('Login prefetch: request exception {}', exc)
        LOG.debug(G.py2_decode(traceback.format_exc(), 'latin-1'))
    except Exception as exc:  # pylint: disable=broad-except
        LOG.warn('Login prefetch: failed {}', exc)
    return False
def route_search_nav(pathitems, perpetual_range_start, dir_update_listing, params):
    """
    Route 'search' navigation requests to the matching search operation.

    Any path value not matching a known operation is treated as a search query.
    """
    path = pathitems[2] if len(pathitems) > 2 else 'list'
    LOG.debug('Routing "search" navigation to: {}', path)
    succeeded = True
    if path == 'list':
        search_list()
    elif path == 'add':
        succeeded = search_add()
    elif path == 'edit':
        search_edit(params['row_id'])
    elif path == 'remove':
        search_remove(params['row_id'])
    elif path == 'clear':
        succeeded = search_clear()
    else:
        succeeded = search_query(path, perpetual_range_start, dir_update_listing)
    if not succeeded:
        # Signal to Kodi that no directory content was produced
        xbmcplugin.endOfDirectory(G.PLUGIN_HANDLE, succeeded=False)
def req_episodes(self, videoid, perpetual_range_start=None):
    """
    Retrieve the episodes of a season.

    :param videoid: a VideoId of mediatype SEASON
    :param perpetual_range_start: start index for the perpetual (paged) path request
    :return: an EpisodeList built from the path response
    :raises InvalidVideoId: when videoid is not a season
    """
    if videoid.mediatype != common.VideoId.SEASON:
        raise InvalidVideoId(f'Cannot request episode list for {videoid}')
    LOG.debug('Requesting episode list for {}', videoid)
    # Season summary + per-episode data + show-level art/title
    paths = [['seasons', videoid.seasonid, 'summary']]
    paths += build_paths(['seasons', videoid.seasonid, 'episodes', RANGE_PLACEHOLDER],
                         EPISODES_PARTIAL_PATHS)
    paths += build_paths(['videos', videoid.tvshowid], ART_PARTIAL_PATHS + [['title']])
    path_response = self.nfsession.perpetual_path_request(
        paths=paths,
        length_params=['stdlist_wid', ['seasons', videoid.seasonid, 'episodes']],
        perpetual_range_start=perpetual_range_start)
    return EpisodeList(videoid, path_response)
def logout(self):
    """Logout of the current account and reset the session"""
    LOG.debug('Logging out of current account')
    # Perform the website logout
    self.get('logout')
    G.settings_monitor_suspend(True)
    # Disable and reset auto-update / auto-sync features
    G.ADDON.setSettingInt('lib_auto_upd_mode', 1)
    G.ADDON.setSettingBool('lib_sync_mylist', False)
    G.SHARED_DB.delete_key('sync_mylist_profile_guid')
    # Disable and reset the profile guid of profile auto-selection
    G.LOCAL_DB.set_value('autoselect_profile_guid', '')
    # Disable and reset the selected profile guid for library playback
    G.LOCAL_DB.set_value('library_playback_profile_guid', '')
    G.settings_monitor_suspend(False)
    # Delete cookie and credentials
    self.session.cookies.clear()
    cookies.delete()
    common.purge_credentials()
    # Reset the ESN obtained from website/generated
    G.LOCAL_DB.set_value('esn', '', TABLE_SESSION)
    # Reinitialize the MSL handler (delete msl data file, then reset everything)
    common.send_signal(signal=common.Signals.REINITIALIZE_MSL_HANDLER, data=True)
    G.CACHE.clear(clear_database=True)
    LOG.info('Logout successful')
    ui.show_notification(common.get_local_string(30113))
    self._init_session()
    # Go to a fake page to clear screen
    common.container_update('path', True)
    # Open root page
    common.container_update(G.BASE_URL, True)
def _perform_service_changes(previous_ver, current_ver):
    """
    Run one-time service upgrade operations for a version bump.

    :param previous_ver: previously installed add-on version string (may be None on first run)
    :param current_ver: version string being installed now
    """
    LOG.debug('Initialize service upgrade operations, from version {} to {})',
              previous_ver, current_ver)
    # Clear cache (prevents problems when netflix change data structures)
    G.CACHE.clear()
    if previous_ver and is_less_version(previous_ver, '1.2.0'):
        # In the version 1.2.0 has been implemented a new cache management
        from resources.lib.upgrade_actions import delete_cache_folder
        delete_cache_folder()
        # In the version 1.2.0 has been implemented in auto-update mode setting
        # the option to disable the feature, shift the stored value by one
        try:
            lib_auto_upd_mode = G.ADDON.getSettingInt('lib_auto_upd_mode')
            with G.SETTINGS_MONITOR.ignore_events(1):
                G.ADDON.setSettingInt('lib_auto_upd_mode', lib_auto_upd_mode + 1)
        except TypeError:
            # In case of a previous rollback this could fails
            with G.SETTINGS_MONITOR.ignore_events(1):
                G.ADDON.setSettingInt('lib_auto_upd_mode', 1)
    if previous_ver and is_less_version(previous_ver, '1.9.0'):
        # In the version 1.9.0 has been changed the COOKIE_ filename with a static filename
        from resources.lib.upgrade_actions import rename_cookie_file
        rename_cookie_file()
    if previous_ver and is_less_version(previous_ver, '1.12.0'):
        # In the version 1.13.0:
        # - 'force_widevine' on setting.xml has been moved
        #   as 'widevine_force_seclev' in TABLE_SESSION with different values:
        force_widevine = G.ADDON.getSettingString('force_widevine')
        # Old values: Disabled|Widevine L3|Widevine L3 (ID-4445)
        # New values: Disabled|L3|L3 (ID 4445)
        if force_widevine == 'Widevine L3':
            G.LOCAL_DB.set_value('widevine_force_seclev', 'L3', table=TABLE_SESSION)
        elif force_widevine == 'Widevine L3 (ID-4445)':
            G.LOCAL_DB.set_value('widevine_force_seclev', 'L3 (ID 4445)', table=TABLE_SESSION)
        # - 'esn' on setting.xml is not more used but if was set the value need
        #   to be copied on 'esn' on TABLE_SESSION:
        esn = G.ADDON.getSettingString('esn')
        if esn:
            from resources.lib.utils.esn import set_esn
            set_esn(esn)
        # - 'suspend_settings_monitor' is not more used
        G.LOCAL_DB.delete_key('suspend_settings_monitor')
    # Always leave this to last - After the operations set current version
    G.LOCAL_DB.set_value('service_previous_version', current_ver)
def compile_jobs_data(self, videoid, task_type, nfo_settings=None):
    """
    Compile a list of jobs data based on the videoid.

    :param videoid: the VideoId to build jobs for
    :param task_type: one of the task handler methods (export_item / export_new_item / remove_item)
    :param nfo_settings: NFO export settings, forwarded to export job builders
    :return: list of job dicts, or None when metadata is unavailable or the case is unexpected
    """
    LOG.debug('Compiling list of jobs data for task handler "{}" and videoid "{}"',
              task_type.__name__, videoid)
    jobs_data = None
    try:
        if task_type == self.export_item:
            metadata = self.ext_func_get_metadata(videoid)  # pylint: disable=not-callable
            if videoid.mediatype == common.VideoId.MOVIE:
                jobs_data = [self._create_export_movie_job(videoid, metadata[0], nfo_settings)]
            if videoid.mediatype in common.VideoId.TV_TYPES:
                jobs_data = self._create_export_tvshow_jobs(videoid, metadata, nfo_settings)
        if task_type == self.export_new_item:
            metadata = self.ext_func_get_metadata(videoid, True)  # pylint: disable=not-callable
            jobs_data = self._create_export_new_episodes_jobs(videoid, metadata, nfo_settings)
        if task_type == self.remove_item:
            if videoid.mediatype == common.VideoId.MOVIE:
                jobs_data = [self._create_remove_movie_job(videoid)]
            if videoid.mediatype == common.VideoId.SHOW:
                jobs_data = self._create_remove_tvshow_jobs(videoid)
            if videoid.mediatype == common.VideoId.SEASON:
                jobs_data = self._create_remove_season_jobs(videoid)
            if videoid.mediatype == common.VideoId.EPISODE:
                jobs_data = [self._create_remove_episode_job(videoid)]
    except MetadataNotAvailable:
        LOG.warn('Unavailable metadata for videoid "{}", list of jobs not compiled', videoid)
        return None
    if jobs_data is None:
        LOG.error('Unexpected job compile case for task type "{}" and videoid "{}", '
                  'list of jobs not compiled', task_type.__name__, videoid)
    return jobs_data
def _check_addon_external_call(window_cls, prop_nf_service_status):
    """
    Check system to verify if the calls to the add-on are originated externally.

    :return: True when the call came from outside the add-on (and the freeze workaround ran)
    """
    # Calls made from outside do not respect/check whether the services required by the
    # add-on are actually operational, causing problems with the execution of the frontend.
    # A clear example are the Skin widgets, executed immediately at Kodi startup, causing
    # widgets not loaded, add-on warning messages, etc...
    # Cases where it can happen:
    # - Calls made by the Skin Widgets, Scripts, Kodi library
    # - Calls made by others Kodi windows (like file browser)
    # - Calls made by other add-ons
    # To try to solve the problem, when the service is not ready a loop will be started to
    # freeze the add-on instance until the service will be ready.
    is_other_plugin_name = getInfoLabel('Container.PluginName') != G.ADDON.getAddonInfo('id')
    limit_sec = 10
    # Note to Kodi boolean condition "Window.IsMedia":
    # All widgets will be either on Home or in a Custom Window, so "Window.IsMedia" will be false
    # When the user is browsing the plugin, Window.IsMedia will be true because video add-ons open
    # in MyVideoNav.xml (which is a Media window)
    # This is not a safe solution, because DEPENDS ON WHICH WINDOW IS OPEN,
    # for example it can fail if you open add-on video browser while widget is still loading.
    # Needed a proper solution by script.skinshortcuts / script.skin.helper.service, and forks
    if is_other_plugin_name or not getCondVisibility("Window.IsMedia"):
        monitor = Monitor()
        sec_elapsed = 0
        while not _get_service_status(window_cls, prop_nf_service_status).get('status') == 'running':
            if (sec_elapsed >= limit_sec
                    or monitor.abortRequested()
                    or monitor.waitForAbort(0.5)):
                break
            sec_elapsed += 0.5
        LOG.debug('Add-on was initiated by an external call - workaround enabled time elapsed {}s',
                  sec_elapsed)
        G.IS_ADDON_EXTERNAL_CALL = True
        return True
    return False
def _process_event_request(self, event_type, event_data, player_state):
    """
    Build and make the event post request.

    :param event_type: one of the EVENT_* constants
    :param event_data: dict with the manifest and event flags for the playing video
    :param player_state: current player state used to build the event params
    """
    if event_type == EVENT_START:
        # We get at every new video playback a fresh LoCo data
        self.loco_data = self.nfsession.get_loco_data()
    url = event_data['manifest']['links']['events']['href']
    from resources.lib.services.nfsession.msl.msl_request_builder import MSLRequestBuilder
    event_params = self._build_event_params(event_type, event_data, player_state,
                                            event_data['manifest'], self.loco_data)
    request_data = MSLRequestBuilder.build_request_data(url, event_params)
    # Request attempts can be made up to a maximum of 3 times per event
    LOG.info('EVENT [{}] - Executing request', event_type)
    endpoint_url = ENDPOINTS['events'] + create_req_params(
        20 if event_type == EVENT_START else 0, f'events/{event_type}')
    try:
        response = self.chunked_request(endpoint_url, request_data, get_esn(),
                                        disable_msl_switch=False)
        # Malformed/wrong content in requests are ignored without returning any error in the response or exception
        LOG.debug('EVENT [{}] - Request response: {}', event_type, response)
        if event_type == EVENT_STOP:
            if event_data['allow_request_update_loco']:
                if 'list_context_name' in self.loco_data:
                    self.nfsession.update_loco_context(
                        self.loco_data['root_id'],
                        self.loco_data['list_context_name'],
                        self.loco_data['list_id'],
                        self.loco_data['list_index'])
                else:
                    LOG.warn('EventsHandler: LoCo list not updated due to missing list context data')
                video_id = request_data['params']['sessionParams']['uiplaycontext']['video_id']
                self.nfsession.update_videoid_bookmark(video_id)
            self.loco_data = None
    except Exception as exc:  # pylint: disable=broad-except
        LOG.error('EVENT [{}] - The request has failed: {}', event_type, exc)
def _import_library_remove(self, remove_titles, remove_folders):
    """
    Ask the user whether to delete the STRM folders that could not be imported, then delete them.

    :param remove_titles: titles to show in the confirmation message
    :param remove_folders: folder paths to delete
    :return: True when the folders were deleted, False otherwise
    """
    if not remove_folders:
        return False
    # If there are STRM files that it was not possible to import them,
    # we will ask to user if you want to delete them
    tot_folders = len(remove_folders)
    if tot_folders > 50:
        # Keep the confirmation dialog readable: show only the first 50 titles
        remove_titles = remove_titles[:50] + ['...']
    message = (common.get_local_string(30246).format(tot_folders)
               + '[CR][CR]' + ', '.join(remove_titles))
    if not ui.ask_for_confirmation(common.get_local_string(30140), message):
        return False
    # Delete all folders
    LOG.info('Start deleting folders')
    with ui.ProgressDialog(True, max_value=tot_folders) as progress_bar:
        for folder_path in remove_folders:
            progress_bar.set_message('{}/{}'.format(progress_bar.value, tot_folders))
            LOG.debug('Deleting folder: {}', folder_path)
            common.delete_folder(folder_path)
            progress_bar.perform_step()
    return True
def clear(self, buckets=None, clear_database=True):
    """
    Clear the cache
    :param buckets: list of buckets to clear, if not specified clear all the cache
    :param clear_database: if True clear also the database data
    """
    LOG.debug('Performing cache clearing')
    if buckets is None:
        # Wipe every in-memory bucket at once
        self.memory_cache = {}
        if clear_database:
            self._clear_db()
        return
    # Clear only the specified buckets
    for bucket in buckets:
        self.memory_cache.pop(bucket['name'], None)
        if clear_database:
            self._clear_db(bucket)
def do_GET(self):
    """
    Load the XML manifest for the requested resource.

    Expects a path containing '/manifest' with an 'id' query parameter; any
    other path answers 404. MSL failures answer 500, other errors 400.
    """
    try:
        url_parse = urlparse(self.path)
        LOG.debug('Handling HTTP GET IPC call to {}', url_parse.path)
        # BUG FIX: the substring test must target the path string; using
        # "not in" against the ParseResult named tuple only matches when a
        # whole URL component equals '/manifest', so any manifest request
        # carrying extra path segments would get a spurious 404
        if '/manifest' not in url_parse.path:
            self.send_response(404)
            self.end_headers()
            return
        params = parse_qs(url_parse.query)
        data = self.server.msl_handler.load_manifest(int(params['id'][0]))
        self.send_response(200)
        self.send_header('Content-type', 'application/xml')
        self.end_headers()
        self.wfile.write(data)
    except Exception as exc:  # pylint: disable=broad-except
        import traceback
        LOG.error(traceback.format_exc())
        self.send_response(500 if isinstance(exc, MSLError) else 400)
        self.end_headers()
def lazy_login_wrapper(*args, **kwargs):
    """Run the wrapped callable; after a NotLoggedInError, try to log in and run it again."""
    if _check_valid_credentials():
        try:
            return func(*args, **kwargs)
        except NotLoggedInError:
            # Exception raised by nfsession: "login" / "assert_logged_in" / "website_extract_session_data"
            LOG.debug('Tried to perform an action without being logged in')
            try:
                from resources.lib.utils.api_requests import login
                if login(ask_credentials=not check_credentials()):
                    LOG.debug('Account logged in, try executing again {}', func.__name__)
                    return func(*args, **kwargs)
            except MissingCredentialsError:
                # Cancelled from user or left an empty field
                pass
            except LoginError as exc:
                # Login not valid
                from resources.lib.kodi.ui import show_ok_dialog
                show_ok_dialog(get_local_string(30008), str(exc))
    return False
def __init__(self, params):
    """Initialize the directory builder state from the navigation params dict."""
    LOG.debug('Initializing "Directory" with params: {}', params)
    self.params = params
    # After build url the param value is converted as string
    range_start = self.params.get('perpetual_range_start')
    self.perpetual_range_start = None if range_start == 'None' else range_start
    if 'dir_update_listing' in self.params:
        self.dir_update_listing = self.params['dir_update_listing'] == 'True'
    else:
        self.dir_update_listing = bool(self.perpetual_range_start)
    if self.perpetual_range_start == '0':
        # For cache identifier purpose
        self.perpetual_range_start = None
    if 'switch_profile_guid' in params:
        # NOTE(review): the guid compared against the active profile comes from the
        # 'guid' param while the activated one is 'switch_profile_guid' — confirm
        # these two params are intentionally distinct
        guid_passed = self.params.get('guid')
        guid_active = G.LOCAL_DB.get_active_profile_guid()
        if guid_passed != '' and guid_passed != guid_active:
            activate_profile(params['switch_profile_guid'])
def route(pathitems):
    """
    Route to the appropriate handler.

    :param pathitems: list of URL path segments (empty/None means the directory root)
    :return: True when a handler was executed, False for ignored invocations
    """
    LOG.debug('Routing navigation request')
    if not pathitems:
        root_handler = G.MODE_DIRECTORY
    else:
        if 'extrafanart' in pathitems:
            LOG.warn('Route: ignoring extrafanart invocation')
            return False
        root_handler = pathitems[0]
    if root_handler == G.MODE_PLAY:
        from resources.lib.navigation.player import play
        play(videoid=pathitems[1:])
    elif root_handler == G.MODE_PLAY_STRM:
        from resources.lib.navigation.player import play_strm
        play_strm(videoid=pathitems[1:])
    else:
        nav_handler = _get_nav_handler(root_handler, pathitems)
        _execute(nav_handler, pathitems[1:], G.REQUEST_PARAMS, root_handler)
    return True