def perpetual_path_request(
        self, paths, length_params, perpetual_range_start=None,
        request_size=apipaths.PATH_REQUEST_SIZE_PAGINATED,
        no_limit_req=False):
    """
    Perform a perpetual path request against the Shakti API to retrieve
    a possibly large video list.

    :param paths: The paths that compose the request
    :param length_params: A list of two values, e.g. ['stdlist', [...]]:
        1: A key of LENGTH_ATTRIBUTES that define where read the total number of objects
        2: A list of keys used to get the list of objects in the JSON data of received response
    :param perpetual_range_start: defines the starting point of the range of objects to be requested
    :param request_size: defines the size of the range, the total number of objects that will be received
    :param no_limit_req: if True, the perpetual cycle of requests will be 'unlimited'
    :return: Union of all JSON raw data received
    """
    # When the requested video list's size is larger than 'request_size',
    # multiple path requests will be executed with forward shifting range selectors
    # and the results will be combined into one path response.
    response_type, length_args = length_params
    context_name = length_args[0]
    # Callable that extracts the item count from a path response
    response_length = apipaths.LENGTH_ATTRIBUTES[response_type]
    # Note: when the request is made with 'genres' or 'seasons' context,
    # the response strangely does not respect the number of objects
    # requested, returning 1 more item, i couldn't understand why
    if context_name in ['genres', 'seasons']:
        request_size -= 1
    response_size = request_size + 1
    if context_name in ['genres', 'seasons']:
        response_size += 1
    # NOTE(review): if 'page_results' < 45 this evaluates to 0 requests — TODO confirm
    # the setting is constrained to >= 45 elsewhere
    number_of_requests = 100 if no_limit_req else int(
        G.ADDON.getSettingInt('page_results') / 45)
    perpetual_range_start = int(
        perpetual_range_start) if perpetual_range_start else 0
    range_start = perpetual_range_start
    range_end = range_start + request_size
    merged_response = {}
    for n_req in range(number_of_requests):
        path_response = self.path_request(
            _set_range_selector(paths, range_start, range_end))
        if not path_response:
            break
        if not common.check_path_exists(length_args, path_response):
            # It may happen that the number of items to be received
            # is equal to the number of the response_size
            # so a second round will be performed, which will return an empty list
            break
        # Accumulate every page into a single merged response
        common.merge_dicts(path_response, merged_response)
        response_count = response_length(path_response, *length_args)
        if response_count < response_size:
            # There are no other elements to request
            break
        range_start += response_size
        if n_req == (number_of_requests - 1):
            # Budget exhausted but more data exists: record where the next call should resume
            merged_response['_perpetual_range_selector'] = {
                'next_start': range_start
            }
            LOG.debug(
                '{} has other elements, added _perpetual_range_selector item',
                response_type)
        else:
            range_end = range_start + request_size
    if perpetual_range_start > 0:
        # Record the starting point of the previous page for backwards navigation
        previous_start = perpetual_range_start - (response_size *
                                                  number_of_requests)
        if '_perpetual_range_selector' in merged_response:
            merged_response['_perpetual_range_selector'][
                'previous_start'] = previous_start
        else:
            merged_response['_perpetual_range_selector'] = {
                'previous_start': previous_start
            }
    return merged_response
def __init__(self):
    """Construct the Widevine CryptoSession and store the DRM system info in the local DB."""
    super(AndroidMSLCrypto, self).__init__()
    # Widevine session handle; stays None if construction below fails
    self.crypto_session = None
    self.keyset_id = None
    self.key_id = None
    self.hmac_key_id = None
    try:
        # UUID is the well-known Widevine DRM system identifier
        self.crypto_session = xbmcdrm.CryptoSession(
            'edef8ba9-79d6-4ace-a3c8-27dcd51d21ed', 'AES/CBC/NoPadding',
            'HmacSHA256')
        LOG.debug('Widevine CryptoSession successful constructed')
    except Exception as exc:  # pylint: disable=broad-except
        import traceback
        LOG.error(G.py2_decode(traceback.format_exc(), 'latin-1'))
        raise_from(MSLError('Failed to construct Widevine CryptoSession'),
                   exc)

    # Query the DRM properties exposed by the device
    drm_info = {
        'version': self.crypto_session.GetPropertyString('version'),
        'system_id': self.crypto_session.GetPropertyString('systemId'),
        # 'device_unique_id': self.crypto_session.GetPropertyByteArray('deviceUniqueId')
        'hdcp_level': self.crypto_session.GetPropertyString('hdcpLevel'),
        'hdcp_level_max':
            self.crypto_session.GetPropertyString('maxHdcpLevel'),
        'security_level':
            self.crypto_session.GetPropertyString('securityLevel')
    }
    if not drm_info['version']:
        # Possible cases where no data is obtained:
        # - Device with custom ROM or without Widevine support
        # - Using Kodi debug build with a InputStream Adaptive release build (yes users do it)
        raise MSLError(
            'It was not possible to get the data from Widevine CryptoSession.\r\n'
            'Your system is not Widevine certified or you have a wrong Kodi version installed.'
        )
    # Persist DRM capabilities for use by other parts of the add-on
    G.LOCAL_DB.set_value('drm_system_id', drm_info['system_id'],
                         TABLE_SESSION)
    G.LOCAL_DB.set_value('drm_security_level', drm_info['security_level'],
                         TABLE_SESSION)
    G.LOCAL_DB.set_value('drm_hdcp_level', drm_info['hdcp_level'],
                         TABLE_SESSION)
    LOG.debug('Widevine version: {}', drm_info['version'])
    if drm_info['system_id']:
        LOG.debug('Widevine CryptoSession system id: {}',
                  drm_info['system_id'])
    else:
        LOG.warn('Widevine CryptoSession system id not obtained!')
    LOG.debug('Widevine CryptoSession security level: {}',
              drm_info['security_level'])
    if G.ADDON.getSettingBool('force_widevine_l3'):
        LOG.warn(
            'Widevine security level is forced to L3 by user settings!')
    LOG.debug('Widevine CryptoSession current hdcp level: {}',
              drm_info['hdcp_level'])
    LOG.debug('Widevine CryptoSession max hdcp level supported: {}',
              drm_info['hdcp_level_max'])
    LOG.debug('Widevine CryptoSession algorithms: {}',
              self.crypto_session.GetPropertyString('algorithms'))
def select_port(service):
    """Select an unused port on the host machine for a server and store it in the settings.

    :param service: service name; the port is stored under '<service>_service_port'
    :return: the selected port number
    """
    port = select_unused_port()
    G.LOCAL_DB.set_value('{}_service_port'.format(service.lower()), port)
    # Use the lazy-argument LOG style used throughout this code base,
    # instead of eagerly building the string with str.format()
    LOG.info('[{}] Picked Port: {}', service, port)
    return port
def call_initialize(self, data):
    """Initialize this manager with the playback data when the add-on starts a playback."""
    # Delegate to initialize() only when this manager is enabled
    self._call_if_enabled(self.initialize, data=data)
    LOG.debug('Initialized {}: {}', self.name, self)
def _play(videoid, is_played_from_strm=False):
    """Play an episode or movie as specified by the path.

    :param videoid: the VideoId of the movie/episode to play
    :param is_played_from_strm: True when playback was started from a STRM library file
    """
    is_upnext_enabled = G.ADDON.getSettingBool('UpNextNotifier_enabled')
    LOG.info('Playing {}{}{}',
             videoid,
             ' [STRM file]' if is_played_from_strm else '',
             ' [external call]' if G.IS_ADDON_EXTERNAL_CALL else '')
    # Profile switch when playing from a STRM file (library)
    if is_played_from_strm:
        if not _profile_switch():
            xbmcplugin.endOfDirectory(G.PLUGIN_HANDLE, succeeded=False)
            return
    # Get metadata of videoid
    try:
        metadata = api.get_metadata(videoid)
        LOG.debug('Metadata is {}', metadata)
    except MetadataNotAvailable:
        LOG.warn('Metadata not available for {}', videoid)
        # Fall back to empty metadata so the PIN check below still works
        metadata = [{}, {}]
    # Check parental control PIN
    pin_result = _verify_pin(metadata[0].get('requiresPin', False))
    if not pin_result:
        # pin_result None means the dialog was cancelled; any other falsy value is a wrong PIN
        if pin_result is not None:
            ui.show_notification(common.get_local_string(30106), time=8000)
        xbmcplugin.endOfDirectory(G.PLUGIN_HANDLE, succeeded=False)
        return
    # Generate the xbmcgui.ListItem to be played
    list_item = get_inputstream_listitem(videoid)
    # STRM file resume workaround (Kodi library)
    resume_position = _strm_resume_workaroud(is_played_from_strm, videoid)
    if resume_position == '':
        # Empty string signals that playback must be aborted — TODO confirm against the helper
        xbmcplugin.setResolvedUrl(handle=G.PLUGIN_HANDLE,
                                  succeeded=False,
                                  listitem=list_item)
        return
    info_data = None
    event_data = {}
    videoid_next_episode = None
    # Get Infolabels and Arts for the videoid to be played, and for the next video if it is an episode (for UpNext)
    if is_played_from_strm or is_upnext_enabled or G.IS_ADDON_EXTERNAL_CALL:
        if is_upnext_enabled and videoid.mediatype == common.VideoId.EPISODE:
            # When UpNext is enabled, get the next episode to play
            videoid_next_episode = _upnext_get_next_episode_videoid(
                videoid, metadata)
        info_data = infolabels.get_info_from_netflix(
            [videoid, videoid_next_episode] if videoid_next_episode else [videoid])
        info, arts = info_data[videoid.value]
        # When a item is played from Kodi library or Up Next add-on is needed set info and art to list_item
        list_item.setInfo('video', info)
        list_item.setArt(arts)
    # Get event data for videoid to be played (needed for sync of watched status with Netflix)
    if (G.ADDON.getSettingBool('ProgressManager_enabled')
            and videoid.mediatype in [common.VideoId.MOVIE, common.VideoId.EPISODE]):
        if not is_played_from_strm or is_played_from_strm and G.ADDON.getSettingBool(
                'sync_watched_status_library'):
            event_data = _get_event_data(videoid)
            event_data['videoid'] = videoid.to_dict()
            event_data['is_played_by_library'] = is_played_from_strm
    if 'raspberrypi' in common.get_system_platform():
        _raspberry_disable_omxplayer()
    # Start and initialize the action controller (see action_controller.py)
    LOG.debug('Sending initialization signal')
    common.send_signal(common.Signals.PLAYBACK_INITIATED, {
        'videoid': videoid.to_dict(),
        'videoid_next_episode': videoid_next_episode.to_dict() if videoid_next_episode else None,
        'metadata': metadata,
        'info_data': info_data,
        'is_played_from_strm': is_played_from_strm,
        'resume_position': resume_position,
        'event_data': event_data
    }, non_blocking=True)
    # Send callback after send the initialization signal
    # to give a bit of more time to the action controller (see note in initialize_playback of action_controller.py)
    xbmcplugin.setResolvedUrl(handle=G.PLUGIN_HANDLE,
                              succeeded=True,
                              listitem=list_item)
def init_globals(self, argv, reinitialize_database=False, reload_settings=False):
    """Initialized globally used module variables.
    Needs to be called at start of each plugin instance!

    :param argv: the plugin invocation arguments (sys.argv)
    :param reinitialize_database: force re-initialization of the database connection
    :param reload_settings: reload the settings-dependent globals (service side)
    """
    # IS_ADDON_FIRSTRUN: specifies if the add-on has been initialized for the first time
    # (reuseLanguageInvoker not used yet)
    self.IS_ADDON_FIRSTRUN = self.IS_ADDON_FIRSTRUN is None
    self.IS_ADDON_EXTERNAL_CALL = False
    # xbmcaddon.Addon must be created at every instance otherwise it does not read any new changes to the settings
    self.ADDON = xbmcaddon.Addon()
    self.URL = urlparse(argv[0])
    self.REQUEST_PATH = G.py2_decode(unquote(self.URL[2][1:]))
    try:
        self.PARAM_STRING = argv[2][1:]
    except IndexError:
        # No query string supplied in the invocation
        self.PARAM_STRING = ''
    self.REQUEST_PARAMS = dict(parse_qsl(self.PARAM_STRING))
    if self.IS_ADDON_FIRSTRUN:
        # Global variables that do not need to be generated at every instance
        self.ADDON_ID = self.py2_decode(self.ADDON.getAddonInfo('id'))
        self.PLUGIN = self.py2_decode(self.ADDON.getAddonInfo('name'))
        self.VERSION_RAW = self.py2_decode(self.ADDON.getAddonInfo('version'))
        self.VERSION = self.remove_ver_suffix(self.VERSION_RAW)
        self.ICON = self.py2_decode(self.ADDON.getAddonInfo('icon'))
        self.DEFAULT_FANART = self.py2_decode(self.ADDON.getAddonInfo('fanart'))
        self.ADDON_DATA_PATH = self.py2_decode(self.ADDON.getAddonInfo('path'))  # Add-on folder
        self.DATA_PATH = self.py2_decode(self.ADDON.getAddonInfo('profile'))  # Add-on user data folder
        self.CACHE_PATH = os.path.join(self.DATA_PATH, 'cache')
        self.COOKIE_PATH = os.path.join(self.DATA_PATH, 'COOKIE')
        try:
            # A numeric handle means we were invoked as a plugin
            self.PLUGIN_HANDLE = int(argv[1])
            self.IS_SERVICE = False
            self.BASE_URL = '{scheme}://{netloc}'.format(scheme=self.URL[0],
                                                         netloc=self.URL[1])
        except IndexError:
            # No handle argument: running as the background service
            self.PLUGIN_HANDLE = 0
            self.IS_SERVICE = True
            self.BASE_URL = '{scheme}://{netloc}'.format(scheme='plugin',
                                                         netloc=self.ADDON_ID)
        from resources.lib.common.kodi_ops import GetKodiVersion
        self.KODI_VERSION = GetKodiVersion()
        # Add absolute paths of embedded py packages (packages not supplied by Kodi)
        packages_paths = [
            os.path.join(self.ADDON_DATA_PATH, 'packages', 'mysql-connector-python')
        ]
        # On PY2 sys.path list can contains values as unicode type and string type at same time,
        # here we will add only unicode type so filter values by unicode.
        # This fixes comparison errors between str/unicode
        sys_path_filtered = [value for value in sys.path if isinstance(value, unicode)]
        for path in packages_paths:  # packages_paths has unicode type values
            path = G.py2_decode(xbmc.translatePath(path))
            if path not in sys_path_filtered:
                # Add embedded package path to python system directory
                # The "path" will add an unicode type to avoids problems with OS using symbolic characters
                sys.path.insert(0, path)
        # Initialize the log
        from resources.lib.utils.logging import LOG
        LOG.initialize(self.ADDON_ID, self.PLUGIN_HANDLE,
                       self.ADDON.getSettingString('debug_log_level'),
                       self.ADDON.getSettingBool('enable_timing'))
    self.IPC_OVER_HTTP = self.ADDON.getSettingBool('enable_ipc_over_http')
    self._init_database(self.IS_ADDON_FIRSTRUN or reinitialize_database)
    if self.IS_ADDON_FIRSTRUN or reload_settings:
        # Put here all the global variables that need to be updated on service side
        # when the user changes the add-on settings
        if self.IS_SERVICE:
            # Initialize the cache
            if reload_settings:
                self.CACHE_MANAGEMENT.load_ttl_values()
            else:
                from resources.lib.services.cache.cache_management import CacheManagement
                self.CACHE_MANAGEMENT = CacheManagement()
            # Reset the "settings monitor" of the service in case of add-on crash
            self.settings_monitor_suspend(False)
        from resources.lib.common.cache import Cache
        self.CACHE = Cache()
def compare_mastertoken(self, mastertoken):
    """Check if the new MasterToken is different from current due to renew."""
    # Nothing to do when the stored MasterToken is already the newest one
    if self._mastertoken_is_newer_that(mastertoken):
        return
    LOG.debug('MSL mastertoken is changed due to renew')
    self.set_mastertoken(mastertoken)
    self._save_msl_data()
def _get_manifest(self, viewable_id, esn):
    """Return the streaming manifest for a video, using the cache when still valid.

    :param viewable_id: id of the video to request the manifest for
    :param esn: the ESN used to perform the MSL request
    :return: the manifest JSON data
    """
    cache_identifier = f'{esn}_{viewable_id}'
    try:
        # The manifest must be requested once and maintained for its entire duration
        manifest = G.CACHE.get(CACHE_MANIFESTS, cache_identifier)
        expiration = int(manifest['expiration'] / 1000)
        if (expiration - time.time()) < 14400:
            # Some devices remain active even longer than 48 hours, if the manifest is at the limit of the deadline
            # when requested by am_stream_continuity.py / events_handler.py will cause problems
            # if it is already expired, so we guarantee a minimum of safety ttl of 4h (14400s = 4 hours)
            raise CacheMiss()
        if LOG.is_enabled:
            LOG.debug('Manifest for {} obtained from the cache', viewable_id)
            # Save the manifest to disk as reference
            common.save_file_def('manifest.json', json.dumps(manifest).encode('utf-8'))
        return manifest
    except CacheMiss:
        # Not cached (or too close to expiry): request a fresh manifest below
        pass
    isa_addon = xbmcaddon.Addon('inputstream.adaptive')
    hdcp_override = isa_addon.getSettingBool('HDCPOVERRIDE')
    hdcp_4k_capable = common.is_device_4k_capable() or G.ADDON.getSettingBool('enable_force_hdcp')
    # Advertised HDCP version list depends on 4K capability and the ISA override flag
    hdcp_version = []
    if not hdcp_4k_capable and hdcp_override:
        hdcp_version = ['1.4']
    if hdcp_4k_capable and hdcp_override:
        hdcp_version = ['2.2']
    LOG.info('Requesting manifest for {} with ESN {} and HDCP {}',
             viewable_id,
             common.censure(esn) if len(esn) > 50 else esn,
             hdcp_version)
    profiles = enabled_profiles()
    from pprint import pformat
    LOG.info('Requested profiles:\n{}', pformat(profiles, indent=2))
    params = {
        'type': 'standard',
        'viewableId': [viewable_id],
        'profiles': profiles,
        'flavor': 'PRE_FETCH',
        'drmType': 'widevine',
        'drmVersion': 25,
        'usePsshBox': True,
        'isBranching': False,
        'isNonMember': False,
        'isUIAutoPlay': False,
        'useHttpsStreams': True,
        'imageSubtitleHeight': 1080,
        'uiVersion': G.LOCAL_DB.get_value('ui_version', '', table=TABLE_SESSION),
        'uiPlatform': 'SHAKTI',
        'clientVersion': G.LOCAL_DB.get_value('client_version', '', table=TABLE_SESSION),
        'desiredVmaf': 'plus_lts',  # phone_plus_exp can be used to mobile, not tested
        'supportsPreReleasePin': True,
        'supportsWatermark': True,
        'supportsUnequalizedDownloadables': True,
        'showAllSubDubTracks': False,
        'titleSpecificData': {
            str(viewable_id): {
                'unletterboxed': True
            }
        },
        'videoOutputInfo': [{
            'type': 'DigitalVideoOutputDescriptor',
            'outputType': 'unknown',
            'supportedHdcpVersions': hdcp_version,
            'isHdcpEngaged': hdcp_override
        }],
        'preferAssistiveAudio': False
    }
    if 'linux' in common.get_system_platform() and 'arm' in common.get_machine():
        # 24/06/2020 To get until to 1080P resolutions under arm devices (ChromeOS), android excluded,
        # is mandatory to add the widevine challenge data (key request) to the manifest request.
        # Is not possible get the key request from the default_crypto, is needed to implement
        # the wv crypto (used for android) but currently InputStreamAdaptive support this interface only
        # under android OS.
        # As workaround: Initially we pass an hardcoded challenge data needed to play the first video,
        # then when ISA perform the license callback we replace it with the fresh license challenge data.
        params['challenge'] = self.manifest_challenge
    endpoint_url = ENDPOINTS['manifest'] + create_req_params(0, 'prefetch/manifest')
    manifest = self.msl_requests.chunked_request(
        endpoint_url,
        self.msl_requests.build_request_data('/manifest', params),
        esn,
        disable_msl_switch=False)
    if LOG.is_enabled:
        # Save the manifest to disk as reference
        common.save_file_def('manifest.json', json.dumps(manifest).encode('utf-8'))
    # Save the manifest to the cache to retrieve it during its validity
    expiration = int(manifest['expiration'] / 1000)
    G.CACHE.add(CACHE_MANIFESTS, cache_identifier, manifest, expires=expiration)
    return manifest
def _get_video_raw_data(self, videoids):
    """Retrieve raw data for specified video id's."""
    numeric_ids = [int(vid.value) for vid in videoids]
    LOG.debug('Requesting video raw data for {}', numeric_ids)
    paths = build_paths(['videos', numeric_ids], EVENT_PATHS)
    return self.nfsession.path_request(paths)
def _save_changed_stream(self, stype, stream):
    """Persist a changed stream selection for the active profile and current video."""
    LOG.debug('Save changed stream {} for {}', stream, stype)
    self.sc_settings[stype] = stream
    active_profile_guid = G.LOCAL_DB.get_active_profile_guid()
    G.SHARED_DB.set_stream_continuity(active_profile_guid,
                                      self.videoid_parent.value,
                                      self.sc_settings)
def _on_change(self):
    """React to add-on settings changes: update stored values, clear caches, reboot if needed."""
    LOG.debug('SettingsMonitor: settings have been changed, started checks')
    reboot_addon = False
    # NOTE(review): clean_cache is initialized but never set True in this variant — verify intent
    clean_cache = False
    use_mysql = G.ADDON.getSettingBool('use_mysql')
    use_mysql_old = G.LOCAL_DB.get_value('use_mysql', False,
                                         TABLE_SETTINGS_MONITOR)
    use_mysql_turned_on = use_mysql and not use_mysql_old
    LOG.debug('SettingsMonitor: Reloading global settings')
    G.init_globals(sys.argv,
                   reinitialize_database=use_mysql != use_mysql_old,
                   reload_settings=True)
    # Check the MySQL connection status after reinitialization of service global settings
    use_mysql_after = G.ADDON.getSettingBool('use_mysql')
    if use_mysql_turned_on and use_mysql_after:
        G.LOCAL_DB.set_value('use_mysql', True, TABLE_SETTINGS_MONITOR)
        ui.show_notification(G.ADDON.getLocalizedString(30202))
    if not use_mysql_after and use_mysql_old:
        G.LOCAL_DB.set_value('use_mysql', False, TABLE_SETTINGS_MONITOR)
    _esn_checks()
    # Check menu settings changes
    for menu_id, menu_data in iteritems(G.MAIN_MENU_ITEMS):
        # Check settings changes in show/hide menu
        if menu_data.get('has_show_setting', True):
            show_menu_new_setting = bool(
                G.ADDON.getSettingBool('_'.join(('show_menu', menu_id))))
            show_menu_old_setting = G.LOCAL_DB.get_value(
                'menu_{}_show'.format(menu_id), True, TABLE_SETTINGS_MONITOR)
            if show_menu_new_setting != show_menu_old_setting:
                G.LOCAL_DB.set_value('menu_{}_show'.format(menu_id),
                                     show_menu_new_setting,
                                     TABLE_SETTINGS_MONITOR)
                reboot_addon = True
        # Check settings changes in sort order of menu
        if menu_data.get('has_sort_setting'):
            menu_sortorder_new_setting = int(
                G.ADDON.getSettingInt('menu_sortorder_' + menu_data['path'][1]))
            menu_sortorder_old_setting = G.LOCAL_DB.get_value(
                'menu_{}_sortorder'.format(menu_id), 0, TABLE_SETTINGS_MONITOR)
            if menu_sortorder_new_setting != menu_sortorder_old_setting:
                G.LOCAL_DB.set_value('menu_{}_sortorder'.format(menu_id),
                                     menu_sortorder_new_setting,
                                     TABLE_SETTINGS_MONITOR)
                # We remove the cache to allow get the new results in the chosen order
                G.CACHE.clear([CACHE_COMMON, CACHE_MYLIST, CACHE_SEARCH])
    # Check changes on content profiles
    # This is necessary because it is possible that some manifests
    # could be cached using the previous settings (see msl_handler - load_manifest)
    menu_keys = [
        'enable_dolby_sound', 'enable_vp9_profiles', 'enable_hevc_profiles',
        'enable_hdr_profiles', 'enable_dolbyvision_profiles',
        'enable_force_hdcp', 'disable_webvtt_subtitle'
    ]
    # Build a fingerprint string of the boolean profile settings, e.g. '1010010'
    collect_int = ''
    for menu_key in menu_keys:
        collect_int += unicode(int(G.ADDON.getSettingBool(menu_key)))
    collect_int_old = G.LOCAL_DB.get_value('content_profiles_int', '',
                                           TABLE_SETTINGS_MONITOR)
    if collect_int != collect_int_old:
        G.LOCAL_DB.set_value('content_profiles_int', collect_int,
                             TABLE_SETTINGS_MONITOR)
        G.CACHE.clear([CACHE_MANIFESTS])
    # Check if Progress Manager settings is changed
    progress_manager_enabled = G.ADDON.getSettingBool('ProgressManager_enabled')
    progress_manager_enabled_old = G.LOCAL_DB.get_value(
        'progress_manager_enabled', False, TABLE_SETTINGS_MONITOR)
    if progress_manager_enabled != progress_manager_enabled_old:
        G.LOCAL_DB.set_value('progress_manager_enabled',
                             progress_manager_enabled,
                             TABLE_SETTINGS_MONITOR)
        common.send_signal(signal=common.Signals.SWITCH_EVENTS_HANDLER,
                           data=progress_manager_enabled)
    # Avoid perform these operations when the add-on is installed from scratch and there are no credentials
    if (clean_cache or reboot_addon) and not common.check_credentials():
        reboot_addon = False
    if reboot_addon:
        LOG.debug('SettingsMonitor: addon will be rebooted')
        # Open root page
        common.container_update(
            common.build_url(['root'], mode=G.MODE_DIRECTORY))
def create_database(config):
    """Create a new database.

    Creates the 'netflix_addon' MySQL database with all the tables, indexes
    and foreign keys used by the add-on.

    :param config: mysql.connector connection parameters; the 'database' key
                   is removed so the connection is made without a default schema
    """
    db_name = config.pop('database', None)
    LOG.debug('The MySQL database {} does not exist, creating a new one', db_name)
    conn = mysql.connector.connect(**config)
    cur = conn.cursor()
    # Create the schema (DDL statements in MySQL commit implicitly)
    schema = ('CREATE DATABASE netflix_addon '
              'CHARACTER SET utf8mb4 '
              'COLLATE utf8mb4_unicode_ci;')
    cur.execute(schema)
    # Profiles table (one row per Netflix profile, keyed by Guid)
    table = (
        'CREATE TABLE netflix_addon.profiles ('
        'ID INT(11) NOT NULL AUTO_INCREMENT,'
        'Guid VARCHAR(50) NOT NULL,'
        'SortOrder INT(11) NOT NULL,'
        'PRIMARY KEY (ID))'
        'ENGINE = INNODB, CHARACTER SET utf8mb4, COLLATE utf8mb4_unicode_ci;')
    alter_tbl = ('ALTER TABLE netflix_addon.profiles '
                 'ADD UNIQUE INDEX Guid(Guid);')
    cur.execute(table)
    cur.execute(alter_tbl)
    # Shared key/value configuration table
    table = (
        'CREATE TABLE netflix_addon.shared_app_config ('
        'ID INT(11) NOT NULL AUTO_INCREMENT,'
        'Name VARCHAR(100) NOT NULL,'
        'Value TEXT DEFAULT NULL,'
        'PRIMARY KEY (ID))'
        'ENGINE = INNODB, CHARACTER SET utf8mb4, COLLATE utf8mb4_unicode_ci;')
    alter_tbl = ('ALTER TABLE netflix_addon.shared_app_config '
                 'ADD UNIQUE INDEX Name_UNIQUE(Name);')
    cur.execute(table)
    cur.execute(alter_tbl)
    # Stream continuity (resume/stream settings) per profile and video,
    # with cascade delete/update against the profiles table
    table = (
        'CREATE TABLE netflix_addon.stream_continuity ('
        'ProfileGuid VARCHAR(50) NOT NULL,'
        'VideoID INT(11) NOT NULL,'
        'Value TEXT DEFAULT NULL,'
        'DateLastModified VARCHAR(50) NOT NULL,'
        'PRIMARY KEY (ProfileGuid, VideoID))'
        'ENGINE = INNODB, CHARACTER SET utf8mb4, COLLATE utf8mb4_unicode_ci;')
    alter_tbl = (
        'ALTER TABLE netflix_addon.stream_continuity '
        'ADD CONSTRAINT FK_streamcontinuity_ProfileGuid FOREIGN KEY (ProfileGuid)'
        'REFERENCES netflix_addon.profiles(Guid) ON DELETE CASCADE ON UPDATE CASCADE;'
    )
    cur.execute(table)
    cur.execute(alter_tbl)
    # Exported library: episodes
    table = (
        'CREATE TABLE netflix_addon.video_lib_episodes ('
        'EpisodeID INT(11) NOT NULL,'
        'SeasonID INT(11) NOT NULL,'
        'FilePath TEXT DEFAULT NULL,'
        'PRIMARY KEY (EpisodeID, SeasonID))'
        'ENGINE = INNODB, CHARACTER SET utf8mb4, COLLATE utf8mb4_unicode_ci;')
    cur.execute(table)
    # Exported library: movies
    table = (
        'CREATE TABLE netflix_addon.video_lib_movies ('
        'MovieID INT(11) NOT NULL,'
        'FilePath TEXT DEFAULT NULL,'
        'NfoExport VARCHAR(5) NOT NULL DEFAULT \'False\','
        'PRIMARY KEY (MovieID))'
        'ENGINE = INNODB, CHARACTER SET utf8mb4, COLLATE utf8mb4_unicode_ci;')
    cur.execute(table)
    # Exported library: seasons
    table = (
        'CREATE TABLE netflix_addon.video_lib_seasons ('
        'TvShowID INT(11) NOT NULL,'
        'SeasonID INT(11) NOT NULL,'
        'PRIMARY KEY (TvShowID, SeasonID))'
        'ENGINE = INNODB, CHARACTER SET utf8mb4, COLLATE utf8mb4_unicode_ci;')
    cur.execute(table)
    # Exported library: tv shows
    table = (
        'CREATE TABLE netflix_addon.video_lib_tvshows ('
        'TvShowID INT(11) NOT NULL,'
        'ExcludeUpdate VARCHAR(5) NOT NULL DEFAULT \'False\','
        'NfoExport VARCHAR(5) NOT NULL DEFAULT \'False\','
        'PRIMARY KEY (TvShowID))'
        'ENGINE = INNODB, CHARACTER SET utf8mb4, COLLATE utf8mb4_unicode_ci;')
    alter_tbl = ('ALTER TABLE netflix_addon.video_lib_tvshows '
                 'ADD UNIQUE INDEX UK_videolibtvshows_TvShowID(TvShowID);')
    cur.execute(table)
    cur.execute(alter_tbl)
    # Watched status overrides per profile and video,
    # with cascade delete/update against the profiles table
    table = (
        'CREATE TABLE netflix_addon.watched_status_override ('
        'ProfileGuid VARCHAR(50) NOT NULL,'
        'VideoID INT(11) NOT NULL,'
        'Value TEXT DEFAULT NULL,'
        'PRIMARY KEY (ProfileGuid, VideoID))'
        'ENGINE = INNODB, CHARACTER SET utf8mb4, COLLATE utf8mb4_unicode_ci;')
    alter_tbl = (
        'ALTER TABLE netflix_addon.watched_status_override '
        'ADD CONSTRAINT FK_watchedstatusoverride_ProfileGuid FOREIGN KEY (ProfileGuid)'
        'REFERENCES netflix_addon.profiles(Guid) ON DELETE CASCADE ON UPDATE CASCADE;'
    )
    cur.execute(table)
    cur.execute(alter_tbl)
    if conn and conn.is_connected():
        conn.close()
def _request(self, method, endpoint, session_refreshed, **kwargs):
    """Execute an HTTP request against a Netflix endpoint, with retry and session refresh.

    :param method: bound session method (self.session.get or .post)
    :param endpoint: key of ENDPOINTS describing the target address
    :param session_refreshed: True when this call follows a session refresh (prevents refresh loops)
    :return: parsed API data for API endpoints, otherwise the raw response content
    :raises HttpError401: when the final response status is 401
    """
    endpoint_conf = ENDPOINTS[endpoint]
    url = (_api_url(endpoint_conf['address'])
           if endpoint_conf['is_api_call'] else _document_url(
               endpoint_conf['address'], kwargs))
    data, headers, params = self._prepare_request_properties(
        endpoint_conf, kwargs)
    retry = 1
    while True:
        try:
            LOG.debug('Executing {verb} request to {url}',
                      verb='GET' if method == self.session.get else 'POST',
                      url=url)
            start = time.perf_counter()
            response = method(url=url,
                              verify=self.verify_ssl,
                              headers=headers,
                              params=params,
                              data=data,
                              timeout=8)
            LOG.debug('Request took {}s', time.perf_counter() - start)
            LOG.debug('Request returned status code {}', response.status_code)
            break
        except req_exceptions.ConnectionError as exc:
            LOG.error('HTTP request error: {}', exc)
            # Give up after 3 attempts total
            if retry == 3:
                raise
            retry += 1
            LOG.warn('Another attempt will be performed ({})', retry)
    # for redirect in response.history:
    #     LOG.warn('Redirected to: [{}] {}', redirect.status_code, redirect.url)
    if not session_refreshed:
        # We refresh the session when happen:
        # Error 404: It happen when Netflix update the build_identifier version and causes the api address to change
        # Error 401: This is a generic error, can happen when the http request for some reason has failed,
        # we allow the refresh only for shakti endpoint, sometimes for unknown reasons it is necessary to update
        # the session for the request to be successful
        if response.status_code == 404 or (response.status_code == 401
                                           and endpoint == 'shakti'):
            LOG.warn('Attempt to refresh the session due to HTTP error {}',
                     response.status_code)
            if self.try_refresh_session_data():
                # Retry once with the refreshed session (session_refreshed=True stops recursion)
                return self._request(method, endpoint, True, **kwargs)
    if response.status_code == 401:
        raise HttpError401
    response.raise_for_status()
    return (_raise_api_error(response.json() if response.content else {})
            if endpoint_conf['is_api_call'] else response.content)
def _on_change(self):
    """React to add-on settings changes: update stored values, clear caches, reboot if needed."""
    LOG.debug('SettingsMonitor: settings have been changed, started checks')
    reboot_addon = False
    clean_cache = False
    use_mysql = G.ADDON.getSettingBool('use_mysql')
    use_mysql_old = G.LOCAL_DB.get_value('use_mysql', False,
                                         TABLE_SETTINGS_MONITOR)
    use_mysql_turned_on = use_mysql and not use_mysql_old
    LOG.debug('SettingsMonitor: Reloading global settings')
    G.init_globals(sys.argv,
                   reinitialize_database=use_mysql != use_mysql_old,
                   reload_settings=True)
    # Check the MySQL connection status after reinitialization of service global settings
    use_mysql_after = G.ADDON.getSettingBool('use_mysql')
    if use_mysql_turned_on and use_mysql_after:
        G.LOCAL_DB.set_value('use_mysql', True, TABLE_SETTINGS_MONITOR)
        ui.show_notification(G.ADDON.getLocalizedString(30202))
    if not use_mysql_after and use_mysql_old:
        G.LOCAL_DB.set_value('use_mysql', False, TABLE_SETTINGS_MONITOR)
    _check_esn()
    # Check menu settings changes
    for menu_id, menu_data in iteritems(G.MAIN_MENU_ITEMS):
        # Check settings changes in show/hide menu
        if menu_data.get('has_show_setting', True):
            show_menu_new_setting = bool(
                G.ADDON.getSettingBool('_'.join(('show_menu', menu_id))))
            show_menu_old_setting = G.LOCAL_DB.get_value(
                'menu_{}_show'.format(menu_id), True, TABLE_SETTINGS_MONITOR)
            if show_menu_new_setting != show_menu_old_setting:
                G.LOCAL_DB.set_value('menu_{}_show'.format(menu_id),
                                     show_menu_new_setting,
                                     TABLE_SETTINGS_MONITOR)
                reboot_addon = True
        # Check settings changes in sort order of menu
        if menu_data.get('has_sort_setting'):
            menu_sortorder_new_setting = int(
                G.ADDON.getSettingInt('menu_sortorder_' + menu_data['path'][1]))
            menu_sortorder_old_setting = G.LOCAL_DB.get_value(
                'menu_{}_sortorder'.format(menu_id), 0, TABLE_SETTINGS_MONITOR)
            if menu_sortorder_new_setting != menu_sortorder_old_setting:
                G.LOCAL_DB.set_value('menu_{}_sortorder'.format(menu_id),
                                     menu_sortorder_new_setting,
                                     TABLE_SETTINGS_MONITOR)
                clean_cache = True
    # Checks for settings changes that require cache invalidation
    if not clean_cache:
        page_results = G.ADDON.getSettingInt('page_results')
        page_results_old = G.LOCAL_DB.get_value('page_results', 90,
                                                TABLE_SETTINGS_MONITOR)
        if page_results != page_results_old:
            G.LOCAL_DB.set_value('page_results', page_results,
                                 TABLE_SETTINGS_MONITOR)
            clean_cache = True
    _check_msl_profiles()
    _check_watched_status_sync()
    if clean_cache:
        # We remove the cache to allow get the new results with the new settings
        G.CACHE.clear([CACHE_COMMON, CACHE_MYLIST, CACHE_SEARCH])
    # Avoid perform these operations when the add-on is installed from scratch and there are no credentials
    if reboot_addon and not common.check_credentials():
        reboot_addon = False
    if reboot_addon:
        LOG.debug('SettingsMonitor: addon will be rebooted')
        # Open root page
        common.container_update(
            common.build_url(['root'], mode=G.MODE_DIRECTORY))
def __init__(self, server_address):
    """Construct the TCP server bound to *server_address* using the Netflix request handler."""
    LOG.info('Constructing CacheTCPServer')
    TCPServer.__init__(self, server_address, NetflixHttpRequestHandler)
def search_remove(row_id):
    """Delete the search history item identified by *row_id* and refresh the listing."""
    LOG.debug('Removing search item with ID {}', row_id)
    G.LOCAL_DB.delete_search_item(row_id)
    # Avoids selection back to the top
    common.json_rpc('Input.Down')
    common.container_refresh()
def req_subgenres(self, genre_id):
    """Retrieve sub-genres for the given genre."""
    LOG.debug('Requesting sub-genres of the genre {}', genre_id)
    range_selector = {'from': 0, 'to': 47}
    path = [['genres', genre_id, 'subgenres', range_selector, ['id', 'name']]]
    return SubgenreList(self.nfsession.path_request(path))
def _on_change(self):
    """React to add-on settings changes: update stored values, clear cache buckets, reboot if needed."""
    # Reinitialize the log settings
    LOG.initialize(G.ADDON_ID, G.PLUGIN_HANDLE,
                   G.ADDON.getSettingString('debug_log_level'),
                   G.ADDON.getSettingBool('enable_timing'))
    LOG.debug('SettingsMonitor: settings have been changed, started checks')
    reboot_addon = False
    # Cache bucket dicts collected here are cleared at the end (duplicates removed)
    clean_buckets = []
    use_mysql = G.ADDON.getSettingBool('use_mysql')
    use_mysql_old = G.LOCAL_DB.get_value('use_mysql', False,
                                         TABLE_SETTINGS_MONITOR)
    use_mysql_turned_on = use_mysql and not use_mysql_old
    # Update global settings
    G.IPC_OVER_HTTP = G.ADDON.getSettingBool('enable_ipc_over_http')
    if use_mysql != use_mysql_old:
        G.init_database()
        clean_buckets.append(
            CACHE_COMMON)  # Need to be cleaned to reload the Exported menu content
        G.CACHE_MANAGEMENT.load_ttl_values()
    # Verify the MySQL connection status after execute init_database()
    use_mysql_after = G.ADDON.getSettingBool('use_mysql')
    if use_mysql_turned_on and use_mysql_after:
        G.LOCAL_DB.set_value('use_mysql', True, TABLE_SETTINGS_MONITOR)
        ui.show_notification(G.ADDON.getLocalizedString(30202))
    if not use_mysql_after and use_mysql_old:
        G.LOCAL_DB.set_value('use_mysql', False, TABLE_SETTINGS_MONITOR)
    # Check menu settings changes
    for menu_id, menu_data in G.MAIN_MENU_ITEMS.items():
        # Check settings changes in show/hide menu
        if menu_data.get('has_show_setting', True):
            show_menu_new_setting = bool(
                G.ADDON.getSettingBool('_'.join(('show_menu', menu_id))))
            show_menu_old_setting = G.LOCAL_DB.get_value(
                'menu_{}_show'.format(menu_id), True, TABLE_SETTINGS_MONITOR)
            if show_menu_new_setting != show_menu_old_setting:
                G.LOCAL_DB.set_value('menu_{}_show'.format(menu_id),
                                     show_menu_new_setting,
                                     TABLE_SETTINGS_MONITOR)
                reboot_addon = True
        # Check settings changes in sort order of menu
        if menu_data.get('has_sort_setting'):
            menu_sortorder_new_setting = int(
                G.ADDON.getSettingInt('menu_sortorder_' + menu_data['path'][1]))
            menu_sortorder_old_setting = G.LOCAL_DB.get_value(
                'menu_{}_sortorder'.format(menu_id), 0, TABLE_SETTINGS_MONITOR)
            if menu_sortorder_new_setting != menu_sortorder_old_setting:
                G.LOCAL_DB.set_value('menu_{}_sortorder'.format(menu_id),
                                     menu_sortorder_new_setting,
                                     TABLE_SETTINGS_MONITOR)
                clean_buckets += [CACHE_COMMON, CACHE_MYLIST, CACHE_SEARCH]
    # Checks for settings changes that require cache invalidation
    page_results = G.ADDON.getSettingInt('page_results')
    page_results_old = G.LOCAL_DB.get_value('page_results', 90,
                                            TABLE_SETTINGS_MONITOR)
    if page_results != page_results_old:
        G.LOCAL_DB.set_value('page_results', page_results,
                             TABLE_SETTINGS_MONITOR)
        clean_buckets += [CACHE_COMMON, CACHE_MYLIST, CACHE_SEARCH]
    _check_msl_profiles(clean_buckets)
    _check_watched_status_sync()
    # Clean cache buckets if needed (to get new results and so on...)
    if clean_buckets:
        # Buckets are dicts (unhashable), so deduplicate via tuples of their items
        G.CACHE.clear([
            dict(t) for t in {tuple(d.items()) for d in clean_buckets}
        ])  # Remove duplicates
    # Avoid perform these operations when the add-on is installed from scratch and there are no credentials
    if reboot_addon and not common.check_credentials():
        reboot_addon = False
    if reboot_addon:
        LOG.debug('SettingsMonitor: addon will be rebooted')
        # Open root page
        common.container_update(
            common.build_url(['root'], mode=G.MODE_DIRECTORY))
def __init__(self, params): LOG.debug('Initializing "KeymapsActionExecutor" with params: {}', params) self.params = params
def try_refresh_session_data(self, raise_exception=False): """Refresh session data from the Netflix website""" from requests import exceptions try: self.auth_url = website.extract_session_data(self.get('browse'))['auth_url'] cookies.save(self.session.cookies) LOG.debug('Successfully refreshed session data') return True except MbrStatusError: raise except (WebsiteParsingError, MbrStatusAnonymousError) as exc: import traceback LOG.warn('Failed to refresh session data, login can be expired or the password has been changed ({})', type(exc).__name__) LOG.debug(G.py2_decode(traceback.format_exc(), 'latin-1')) self.session.cookies.clear() if isinstance(exc, MbrStatusAnonymousError): # This prevent the MSL error: No entity association record found for the user common.send_signal(signal=common.Signals.CLEAR_USER_ID_TOKENS) # Needed to do a new login common.purge_credentials() ui.show_notification(common.get_local_string(30008)) raise_from(NotLoggedInError, exc) except exceptions.RequestException: import traceback LOG.warn('Failed to refresh session data, request error (RequestException)') LOG.warn(G.py2_decode(traceback.format_exc(), 'latin-1')) if raise_exception: raise except Exception: # pylint: disable=broad-except import traceback LOG.warn('Failed to refresh session data, login expired (Exception)') LOG.debug(G.py2_decode(traceback.format_exc(), 'latin-1')) self.session.cookies.clear() if raise_exception: raise return False
def initialize(self, data): if not data['event_data']: LOG.warn('AMVideoEvents: disabled due to no event data') self.enabled = False return self.event_data = data['event_data']
def unregister_slot(callback, signal=None): """Remove a registered callback from AddonSignals""" name = signal if signal else callback.__name__ AddonSignals.unRegisterSlot(signaler_id=G.ADDON_ID, signal=name) LOG.debug('Unregistered AddonSignals slot {}'.format(name))
def _skip_section(self, section): LOG.debug('Entered section {}', section) if self.auto_skip: self._auto_skip(section) else: self._ask_to_skip(section)
def _notify_all(self, notification, data=None): LOG.debug('Notifying all action managers of {} (data={})', notification.__name__, data) for manager in self.action_managers: _notify_managers(manager, notification, data)
def create_database(db_file_path, db_filename): LOG.debug('The SQLite database {} is empty, creating tables', db_filename) if db_utils.LOCAL_DB_FILENAME == db_filename: _create_local_database(db_file_path) if db_utils.SHARED_DB_FILENAME == db_filename: _create_shared_database(db_file_path)
def onNotification(self, sender, method, data):  # pylint: disable=unused-argument,too-many-branches
    """
    Callback for Kodi notifications that handles and dispatches playback events

    :param sender: the notification sender (unused)
    :param method: the Kodi notification method name (e.g. 'Player.OnPlay')
    :param data: JSON string with the notification payload
    """
    # WARNING: Do not get playerid from 'data',
    # Because when Up Next add-on play a video while we are inside Netflix add-on and
    # not externally like Kodi library, the playerid become -1 this id does not exist
    if not self.is_tracking_enabled or not method.startswith('Player.'):
        return
    try:
        if method == 'Player.OnPlay':
            if self.init_count > 0:
                # In this case the user has chosen to play another video while another one is in playing,
                # then we send the missing Stop event for the current video
                self._on_playback_stopped()
            self._initialize_am()
        elif method == 'Player.OnAVStart':
            self._on_playback_started()
            # Start the background tick thread only if not already running
            if self._playback_tick is None or not self._playback_tick.is_alive():
                self._playback_tick = PlaybackTick(self.on_playback_tick)
                # Daemon thread so it never blocks Kodi shutdown
                self._playback_tick.setDaemon(True)
                self._playback_tick.start()
        elif method == 'Player.OnSeek':
            self._on_playback_seek(json.loads(data)['player']['time'])
        elif method == 'Player.OnPause':
            # Remember the pause so the next OnResume can be validated
            self._is_pause_called = True
            self._on_playback_pause()
        elif method == 'Player.OnResume':
            # Kodi call this event instead the "Player.OnStop" event when you try to play a video
            # while another one is in playing (also if the current video is in pause) (not happen on RPI devices)
            # Can be one of following cases:
            # - When you use ctx menu "Play From Here", this happen when click to next button
            # - When you use UpNext add-on
            # - When you play a non-Netflix video when a Netflix video is in playback in background
            # - When you play a video over another in playback (back in menus)
            if not self._is_pause_called:
                return
            if self.init_count == 0:
                # This should never happen, we have to avoid this event when you try to play a video
                # while another non-netflix video is in playing
                return
            self._is_pause_called = False
            self._on_playback_resume()
        elif method == 'Player.OnStop':
            self.is_tracking_enabled = False
            if self.active_player_id is None:
                # if playback does not start due to an error in streams initialization
                # OnAVStart notification will not be called, then active_player_id will be None
                LOG.debug('ActionController: Player.OnStop event has been ignored')
                LOG.warn('ActionController: Action managers disabled due to a playback initialization error')
                self.action_managers = None
                self.init_count -= 1
                return
            self._on_playback_stopped()
    except Exception:  # pylint: disable=broad-except
        # Any failure in an event handler must not leave tracking in a broken
        # state: log, stop the tick thread and reset the counters
        import traceback
        LOG.error(traceback.format_exc())
        self.is_tracking_enabled = False
        if self._playback_tick and self._playback_tick.is_alive():
            self._playback_tick.stop_join()
        self._playback_tick = None
        self.init_count = 0
def profiles(self, pathitems=None): # pylint: disable=unused-argument """Show profiles listing""" LOG.debug('Showing profiles listing') list_data, extra_data = common.make_call('get_profiles', {'request_update': True}) self._profiles(list_data, extra_data)
def on_playback_started(self, player_state): # pylint: disable=unused-argument LOG.debug('Sending initialization signal to Up Next Add-on') common.send_signal(common.Signals.UPNEXT_ADDON_INIT, self.upnext_info, non_blocking=True)
def delete(): """Delete cookies for an account from the disk""" try: xbmcvfs.delete(cookie_file_path()) except Exception as exc: # pylint: disable=broad-except LOG.error('Failed to delete cookies on disk: {exc}', exc=exc)
def remove_videoid_from_kodi_library(videoid):
    """
    Remove an item from the Kodi library database (not related files).

    For shows and seasons, the individual episodes are removed (never the
    whole TV show entry) by querying the episodes under the export folder.

    :param videoid: the VideoId of the item to remove
    """
    try:
        # Get a single file result by searching by videoid
        kodi_library_items = [get_library_item_by_videoid(videoid)]
        LOG.debug('Removing {} ({}) from Kodi library', videoid,
                  kodi_library_items[0].get('showtitle',
                                            kodi_library_items[0]['title']))
        media_type = videoid.mediatype
        if videoid.mediatype in [VideoId.SHOW, VideoId.SEASON]:
            # Retrieve the all episodes in the export folder
            tvshow_path = os.path.dirname(kodi_library_items[0]['file'])
            # JSON-RPC filter: all .strm files below the show's export folder
            filters = {
                'and': [{
                    'field': 'path',
                    'operator': 'startswith',
                    'value': tvshow_path
                }, {
                    'field': 'filename',
                    'operator': 'endswith',
                    'value': '.strm'
                }]
            }
            if videoid.mediatype == VideoId.SEASON:
                # Use the single file result to figure out what the season is,
                # then add a season filter to get only the episodes of the specified season
                filters['and'].append({
                    'field': 'season',
                    'operator': 'is',
                    'value': str(kodi_library_items[0]['season'])
                })
            kodi_library_items = get_library_items(VideoId.EPISODE, filters)
            media_type = VideoId.EPISODE
        # Map media type -> (JSON-RPC method, id parameter name)
        rpc_params = {
            'movie': ['VideoLibrary.RemoveMovie', 'movieid'],
            # We should never remove an entire show
            # 'show': ['VideoLibrary.RemoveTVShow', 'tvshowid'],
            # Instead we delete all episodes listed in the JSON query above
            'show': ['VideoLibrary.RemoveEpisode', 'episodeid'],
            'season': ['VideoLibrary.RemoveEpisode', 'episodeid'],
            'episode': ['VideoLibrary.RemoveEpisode', 'episodeid']
        }
        list_rpc_params = []
        # Collect multiple json-rpc commands
        for item in kodi_library_items:
            params = rpc_params[media_type]
            list_rpc_params.append({params[1]: item[params[1]]})
        rpc_method = rpc_params[media_type][0]
        # Execute all the json-rpc commands in one call
        json_rpc_multi(rpc_method, list_rpc_params)
    except ItemNotFound:
        LOG.warn('Cannot remove {} from Kodi library, item not present',
                 videoid)
    except KeyError as exc:
        # Unknown mediatype key in rpc_params: notify the user and log it
        from resources.lib.kodi import ui
        ui.show_notification(get_local_string(30120), time=7500)
        LOG.error('Cannot remove {} from Kodi library, mediatype not supported',
                  exc)