def __init__(self):
    """Migrate legacy 'onedrive.ini' account data into the AccountManager store.

    Each ini section is one drive; it becomes an account with a placeholder
    drive entry and zeroed token timestamps. On success the ini file is
    deleted and the 'migrated' addon setting is set.
    """
    profile_path = Utils.unicode(KodiUtils.translate_path(KodiUtils.get_addon_info('profile')))
    ini_path = os.path.join(profile_path, 'onedrive.ini')
    if os.path.exists(ini_path):
        config = ConfigParser()
        account_manager = AccountManager(profile_path)
        config.read(ini_path)
        for driveid in config.sections():
            Logger.notice('Migrating drive %s...' % driveid)
            account = { 'id' : driveid, 'name' : config.get(driveid, 'name')}
            # Drive 'type' marks this entry as coming from the legacy format.
            account['drives'] = [{ 'id' : driveid, 'name' : '', 'type' : 'migrated' }]
            # expires_in/date of 0 mark the tokens as already expired, so they
            # will be refreshed on first use.
            account['access_tokens'] = { 'access_token': config.get(driveid, 'access_token'), 'refresh_token': config.get(driveid, 'refresh_token'), 'expires_in': 0, 'date': 0 }
            try:
                account_manager.add_account(account)
            except Exception as e:
                raise UIException(32021, e)
        os.remove(ini_path)
        KodiUtils.set_addon_setting('migrated', 'true')
def start(self):
    """Bind the HTTP server and serve until shutdown.

    The chosen port is published via KodiUtils so other components can
    discover this service. serve_forever() blocks the calling thread.
    """
    port = self.get_port()
    KodiUtils.set_service_port(self.name, port)
    self._server = BaseServer((self._interface, port), self._handler, self, self.data)
    Logger.notice('Service \'%s\' started in port %s' % (self.name, port))
    self._server.serve_forever()
def route(self):
    """Dispatch the addon invocation to the method named by the 'action' param.

    Only addon parameters whose names match the target method's declared
    arguments are forwarded. Without an action the account list is shown.
    Errors go through _handle_exception; dialogs are always closed, and an
    interrupted export is unmarked so it is not stuck as 'exporting'.
    """
    try:
        Logger.debug(self._addon_params)
        self._action = Utils.get_safe_value(self._addon_params, 'action')
        if self._action:
            self._rename_action()
            method = getattr(self, self._action)
            arguments = {}
            # Forward only the params the target method actually declares.
            for name in inspect.getargspec(method)[0]:
                if name in self._addon_params:
                    arguments[name] = self._addon_params[name]
            method(**arguments)
        else:
            self.list_accounts()
    except Exception as ex:
        self._handle_exception(ex)
    finally:
        self._progress_dialog.close()
        self._progress_dialog_bg.close()
        self._export_progress_dialog_bg.close()
        if self._pin_dialog:
            self._pin_dialog.close()
        if self._exporting:
            # Clear the persisted 'exporting' flag of the export that was
            # running when this invocation ended/failed.
            self._export_manager = ExportManager(self._account_manager._addon_data_path)
            export = self._export_manager.load()[self._exporting]
            export['exporting'] = False
            self._export_manager.save()
def __init__(self, _base_path):
    """Open the exports databases and migrate any legacy config files.

    Legacy 'exports.cfg' and 'export-<id>.items' JSON files are imported
    into the key/value DBs. Migrated files are renamed '*.migrated';
    files that fail to parse are renamed '*.failed' and skipped.
    """
    self.exports_db = SimpleKeyValueDb(_base_path, 'exports')
    self.export_items_db = SimpleKeyValueDb(_base_path, 'export-items')
    # only if not migrated. ignore if fails to read.
    config_path = os.path.join(_base_path, 'exports.cfg')
    if os.path.exists(config_path):
        with KodiUtils.lock:
            try:
                with open(config_path, 'rb') as fo:
                    exports = json.loads(fo.read())
                for exportid in exports:
                    self.exports_db.set(exportid, exports[exportid])
                os.rename(config_path, config_path + '.migrated')
            except Exception as ex:
                Logger.debug("Error migrating exports.")
                Logger.debug(ex)
                os.rename(config_path, config_path + '.failed')
    # Migrate per-export item files: export-<exportid>.items
    for filename in os.listdir(_base_path):
        config_path = os.path.join(_base_path, filename)
        with KodiUtils.lock:
            try:
                if filename[:7] == "export-" and filename[-6:] == ".items":
                    exportid = filename.split(".")[0].split("-")[1]
                    Logger.debug(exportid)
                    with open(config_path, 'rb') as fo:
                        items_info = json.loads(fo.read())
                    self.export_items_db.set(exportid, items_info)
                    os.rename(config_path, config_path + '.migrated')
            except Exception as ex:
                Logger.debug("Error migrating export items from %s" % filename)
                Logger.debug(ex)
                os.rename(config_path, config_path + '.failed')
def do_GET(self):
    """Redirect (307) to an item's download URL.

    Expected path shape: /<service-name>/<driveid>/<item_driveid>/<item_id>.
    HTTPError codes from the provider are passed through; other errors
    return 500 with a stacktrace body. Malformed paths return 404.
    """
    Logger.debug(self.path)
    data = self.path.split('/')
    code = 307
    headers = {}
    content = Utils.get_file_buffer()
    if len(data) > 4 and data[1] == self.server.service.name:
        try:
            driveid = data[2]
            # self.server.data holds the provider class; instantiate per request.
            provider = self.server.data()
            account_manager = AccountManager(self.server.service.profile_path)
            provider.configure(account_manager, driveid)
            item = provider.get_item(item_driveid=data[3], item_id=data[4], include_download_info=True)
            headers['location'] = item['download_info']['url']
        except Exception as e:
            httpex = ExceptionUtils.extract_exception(e, HTTPError)
            if httpex:
                code = httpex.code
            else:
                code = 500
                ErrorReport.handle_exception(e)
                content.write(ExceptionUtils.full_stacktrace(e))
    else:
        code = 404
    self.write_response(code, content=content, headers=headers)
def __init__(self):
    """Migrate legacy 'onedrive.ini' account data into the AccountManager store.

    Each ini section is one drive; it becomes an account with a placeholder
    drive entry and zeroed token timestamps. On success the ini file is
    deleted and the 'migrated' addon setting is set.
    """
    profile_path = Utils.unicode(KodiUtils.translate_path(KodiUtils.get_addon_info('profile')))
    ini_path = os.path.join(profile_path, 'onedrive.ini')
    if os.path.exists(ini_path):
        config = ConfigParser()
        account_manager = AccountManager(profile_path)
        config.read(ini_path)
        for driveid in config.sections():
            Logger.notice('Migrating drive %s...' % driveid)
            account = {'id': driveid, 'name': config.get(driveid, 'name')}
            # Drive 'type' marks this entry as coming from the legacy format.
            account['drives'] = [{ 'id': driveid, 'name': '', 'type': 'migrated' }]
            # expires_in/date of 0 mark the tokens as already expired, so they
            # will be refreshed on first use.
            account['access_tokens'] = { 'access_token': config.get(driveid, 'access_token'), 'refresh_token': config.get(driveid, 'refresh_token'), 'expires_in': 0, 'date': 0 }
            try:
                account_manager.add_account(account)
            except Exception as e:
                raise UIException(32021, e)
        os.remove(ini_path)
        KodiUtils.set_addon_setting('migrated', 'true')
def create_nfo(item_id, item_driveid, nfo_path, provider):
    """Download an item's content and write it to nfo_path.

    Returns True on success, False if the download or the file write
    fails. Failures are logged, never raised to the caller.
    """
    url = provider.get_item(item_driveid=item_driveid, item_id=item_id, include_download_info=True)["download_info"]["url"]
    headers = {
        "Authorization": "Bearer %s" % provider.get_access_tokens()['access_token']
    }
    try:
        response = Request(url, None, headers).request()
    except Exception as e:
        # Fixed: was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt and hid the actual error. Log the cause too.
        Logger.error('Error on request to: %s' % url)
        Logger.error(e)
        return False
    f = None
    try:
        f = KodiUtils.file(nfo_path, 'w')
        f.write(response)
    except Exception as err:
        Logger.error(err)
        return False
    finally:
        # Close the file handle even when the write fails.
        if f:
            f.close()
    return True
def process_schedules(self, export_map, now, startup=False):
    """Run every export scheduled for this moment.

    On startup runs only startup-type schedules. Otherwise collects:
    'run_immediately' exports, daily schedules keyed by '<daily_type>HH:MM',
    and weekday schedules keyed by '<weekday+11>HH:MM'.
    """
    Logger.debug('now: %s, startup: %s' % (Utils.str(now), Utils.str(startup)))
    export_list = []
    if startup:
        export_list.extend(Utils.get_safe_value(export_map, self._startup_type, []))
    else:
        key = 'run_immediately'
        run_immediately_list = Utils.get_safe_value(export_map, key, [])
        export_list.extend(run_immediately_list)
        # HH:MM of the current minute; schedule keys embed this suffix.
        at = '%02d:%02d' % (now.hour, now.minute,)
        Logger.debug('at: %s' % Utils.str(at))
        daily_list = Utils.get_safe_value(export_map, Utils.str(ExportScheduleDialog._daily_type) + at, [])
        export_list.extend(daily_list)
        Logger.debug('daily_list: %s' % Utils.str(daily_list))
        # NOTE(review): weekday schedule type ids appear to be 11..17 for
        # Mon..Sun (weekday() is 0-based) — confirm against ExportScheduleDialog.
        weekday = now.weekday() + 11
        weekday_list = Utils.get_safe_value(export_map, Utils.str(weekday) + at, [])
        export_list.extend(weekday_list)
        Logger.debug('weekday_list: %s' % Utils.str(weekday_list))
    Logger.debug('export_list: %s' % Utils.str(export_list))
    for export in export_list:
        self.run_export(export)
def do_GET(self):
    """Redirect (307) to an item's download URL resolved over RPC.

    Expected path: /<service-name>/<addonid>/<driveid>/<item_driveid>/<item_id>.
    HTTPError codes from the RPC call are passed through; other errors
    return 500 with a stacktrace body. Malformed paths return 404.
    """
    Logger.debug(self.path)
    data = self.path.split('/')
    code = 307
    headers = {}
    content = Utils.get_file_buffer()
    if len(data) > 5 and data[1] == self.server.service.name:
        try:
            # Delegate item lookup to the owning addon via RPC.
            item = RpcUtil.rpc(data[2], 'get_item', kwargs = { 'driveid' : data[3], 'item_driveid' : data[4], 'item_id' : data[5], 'include_download_info' : True })
            headers['location'] = item['download_info']['url']
        except Exception as e:
            httpex = ExceptionUtils.extract_exception(e, HTTPError)
            if httpex:
                code = httpex.code
            else:
                code = 500
                content.write(ExceptionUtils.full_stacktrace(e))
    else:
        code = 404
    self.write_response(code, content=content, headers=headers)
def get_scheduled_export_map(self):
    """Group non-running exports by schedule key.

    Keys are the schedule type (plus 'at' time for non-startup types) and
    'run_immediately'. An export's run_immediately flag is consumed here:
    it is reset to False and persisted once the export is queued.
    """
    exports = self.export_manager.get_exports()
    export_map = {}
    for exportid in exports:
        export = exports[exportid]
        schedules = Utils.get_safe_value(export, 'schedules', [])
        exporting = Utils.get_safe_value(export, 'exporting', False)
        # Skip exports that are currently running.
        if not exporting:
            if Utils.get_safe_value(export, 'schedule', False) and schedules:
                for schedule in schedules:
                    key = Utils.str(Utils.get_safe_value(schedule, 'type', ''))
                    # Startup schedules have no time component in their key.
                    if key != self._startup_type:
                        key += Utils.get_safe_value(schedule, 'at', '')
                    export_map[key] = Utils.get_safe_value(export_map, key, [])
                    export_map[key].append(export)
            key = 'run_immediately'
            if Utils.get_safe_value(export, key, False):
                export_map[key] = Utils.get_safe_value(export_map, key, [])
                export_map[key].append(export)
                # One-shot flag: clear and persist so it runs only once.
                export[key] = False
                self.export_manager.save_export(export)
    Logger.debug('scheduled export_map: %s' % Utils.str(export_map))
    return export_map
def _execute_sql(self, query, data=None):
    """Execute a cache query, retrying while the database is locked.

    Expired cache rows are purged before each attempt. Retries up to 15
    times (0.3s apart) unless aborted; any other error stops immediately
    and is logged. Returns the first result row, or None.
    """
    result = None
    # Keep the last error in a variable that outlives the except blocks
    # ('except ... as error' unbinds the name after the block in Python 3).
    last_error = None
    con = self._get_connection()
    with con:
        retries = 0
        while retries < 15 and not self._abort:
            try:
                # Purge expired entries before running the actual query.
                con.execute("delete from cache where expiration < ?", (self._get_datetime(datetime.datetime.now()),))
                if isinstance(data, list):
                    result = con.executemany(query, data).fetchone()
                elif data:
                    result = con.execute(query, data).fetchone()
                else:
                    result = con.execute(query).fetchone()
                last_error = None
                break
            except sqlite3.OperationalError as error:
                last_error = error
                # Fixed: match the message text via str(error). The old code
                # tested '"_database is locked" in error' — a stray leading
                # underscore plus membership on the exception object, so the
                # lock condition was never detected and no retry happened.
                if "database is locked" in str(error):
                    retries += 1
                    # Fixed: '%d' (was '#d', which is not a format spec and
                    # left a spare argument, raising TypeError).
                    Logger.debug("Cache query retrying %d [%s]: %s" % (retries, query, str(data),))
                    self._monitor.waitForAbort(0.3)
                else:
                    break
            except Exception as error:
                last_error = error
                break
    if last_error:
        Logger.debug("Error executing cache query [%s]: %s" % (query, str(last_error),))
    con.close()
    return result
def do_POST(self):
    """Handle an RPC command POSTed as a Python-literal dict.

    The body must evaluate to a dict with 'method' and optional
    'args'/'kwargs'. The repr() of the RPC result is written back.
    Missing method -> 400; unknown service path -> 404; HTTPError codes
    pass through; other errors -> 500 with a stacktrace body.
    """
    content = Utils.get_file_buffer()
    data = self.path.split('/')
    if len(data) > 1 and data[1] == self.server.service.name:
        try:
            size = int(self.headers.getheader('content-length', 0))
            # SECURITY NOTE(review): eval() of the raw request body executes
            # arbitrary Python expressions. This is only tolerable if the
            # server is reachable solely by trusted local peers — confirm the
            # bind interface; a literal parser (ast.literal_eval/json) would
            # be safer if the protocol allows it.
            cmd = eval(self.rfile.read(size))
            method = Utils.get_safe_value(cmd, 'method')
            if method:
                code = 200
                args = Utils.get_safe_value(cmd, 'args', [])
                kwargs = Utils.get_safe_value(cmd, 'kwargs', {})
                Logger.debug('Command received:\n%s' % cmd)
                content.write(repr(self.server.data.rpc(method, args, kwargs)))
            else:
                code = 400
                content.write('Method required')
        except Exception as e:
            httpex = ExceptionUtils.extract_exception(e, HTTPError)
            if httpex:
                code = httpex.code
            else:
                code = 500
                content.write(ExceptionUtils.full_stacktrace(e))
    else:
        code = 404
    self.write_response(code, content=content)
def process_path(self, addon_name, drive_name, path):
    """Resolve a virtual path to a redirect (file) or a folder listing.

    A path not ending in '/' is treated as a file: warm the parent folder's
    children cache if needed, then 303-redirect to the download URL (or to
    the same path with a trailing slash when path is empty). A path ending
    in '/' returns the rendered folder (200). Unknown drives return 404.
    """
    headers = {}
    response = Utils.get_file_buffer()
    driveid = self.get_driveid(drive_name)
    if driveid:
        parts = self.path.split('/')
        # Non-empty last segment => request does not end with '/': a file.
        if parts[len(parts) - 1]:
            response_code = 303
            if path:
                key = '%s%s:children' % (driveid, path[0:path.rfind('/')],)
                Logger.debug('reading cache key: ' + key)
                children = self._children_cache.get(key)
                # None (cache miss) vs empty list (known-empty folder).
                if not children and type(children) is NoneType:
                    # Populate the cache for the parent folder first.
                    self.get_folder_items(driveid, path[0:path.rfind('/') + 1])
                url = self.get_download_url(driveid, path)
            else:
                url = self.path + '/'
            headers['location'] = url
        else:
            response_code = 200
            response.write(str(self.show_folder(driveid, path)))
    else:
        response_code = 404
        response.write('Drive "%s" does not exist for addon "%s"' % (drive_name, addon_name))
    return {
        'response_code': response_code,
        'content': response,
        'headers': headers
    }
def _select_stream_format(self, driveid, item_driveid=None, item_id=None, auto=False):
    """Pick a Google Drive transcoded stream and return its playable URL.

    Queries get_video_info for the item, offers the available formats
    (via dialog, or _auto_select_stream when auto=True), and returns the
    chosen stream URL with the response cookies appended as a '|cookie='
    suffix. Returns None when cancelled, when the last ('original')
    entry is chosen, or when no matching stream is found.
    """
    url = None
    if not auto:
        self._progress_dialog.update(0, self._addon.getLocalizedString(32009))
    self._provider.configure(self._account_manager, driveid)
    self._provider.get_item(item_driveid, item_id)
    request = Request(
        'https://drive.google.com/get_video_info',
        urllib.urlencode({'docid': item_id}), {
            'authorization': 'Bearer %s' % self._provider.get_access_tokens()['access_token']
        })
    response_text = request.request()
    # Response is urlencoded key/value pairs.
    response_params = dict(urlparse.parse_qsl(response_text))
    if not auto:
        self._progress_dialog.close()
    if Utils.get_safe_value(response_params, 'status', '') == 'ok':
        # fmt_list entries look like '<itag>/<resolution>/...'.
        fmt_list = Utils.get_safe_value(response_params, 'fmt_list', '').split(',')
        stream_formats = []
        for fmt in fmt_list:
            data = fmt.split('/')
            stream_formats.append(data[1])
        # Extra last entry (localized) to let the user skip transcoding.
        stream_formats.append(self._addon.getLocalizedString(32015))
        Logger.debug('Stream formats: %s' % Utils.str(stream_formats))
        select = -1
        if auto:
            select = self._auto_select_stream(stream_formats)
        else:
            select = self._dialog.select(self._addon.getLocalizedString(32016), stream_formats, 8000, 0)
        Logger.debug('Selected: %s' % Utils.str(select))
        if select == -1:
            self._cancel_operation = True
        elif select != len(stream_formats) - 1:
            data = fmt_list[select].split('/')
            # fmt_stream_map entries look like '<itag>|<url>'.
            fmt_stream_map = Utils.get_safe_value(response_params, 'fmt_stream_map', '').split(',')
            for fmt in fmt_stream_map:
                stream_data = fmt.split('|')
                if stream_data[0] == data[0]:
                    url = stream_data[1]
                    break
            if url:
                # Kodi URL option syntax: append cookies after '|'.
                cookie_header = ''
                for cookie in request.response_cookies:
                    if cookie_header:
                        cookie_header += ';'
                    cookie_header += cookie.name + '=' + cookie.value
                url += '|cookie=' + urllib.quote(cookie_header)
    return url
def checkpoint(self):
    """Run a WAL TRUNCATE checkpoint on the cache db and log the result row.

    The row returned by PRAGMA wal_checkpoint is (busy, log_pages,
    checkpointed_pages).
    """
    row = self._execute_sql("PRAGMA wal_checkpoint(TRUNCATE)")
    # Fixed: '%d' placeholders (were '#d', which is not a format spec — the
    # values were never interpolated and the surplus arguments raised
    # TypeError at runtime).
    Logger.debug("Db '%s' checkpoint: %d, %d, %d" % (
        self._name,
        row[0],
        row[1],
        row[2],
    ))
def fetch_tokens_info(self, pin_info, request_params=None):
    """Fetch OAuth tokens via the base class and adapt them for Dropbox.

    When tokens are returned, expires_in is set to ~10 years and a
    placeholder refresh token is stored — presumably because this Dropbox
    flow yields long-lived tokens without a refresh token (confirm against
    the provider base class / Dropbox OAuth docs).
    """
    # Fixed: mutable default argument ({}) was shared across all calls;
    # use None and create a fresh dict per call.
    if request_params is None:
        request_params = {}
    tokens_info = super(Dropbox, self).fetch_tokens_info(pin_info, request_params)
    if tokens_info:
        tokens_info['expires_in'] = 315360000
        tokens_info['refresh_token'] = '-'
    Logger.notice(tokens_info)
    return tokens_info
def get_source_id(ip):
    """Derive a numeric source id from a dotted IP address by summing its parts."""
    Logger.debug("ip is:" + ip)
    total = 0
    for octet in ip.split('.'):
        Logger.debug("part: " + octet)
        total += int(octet)
    return total
def start(self):
    """Run the service loop until self.abort is set or Kodi shuts down.

    Polls once per second via the system monitor; on exit the monitor and
    the provider instance are released before logging shutdown.
    """
    Logger.notice('Service \'%s\' started.' % self.name)
    monitor = KodiUtils.get_system_monitor()
    while not self.abort:
        # waitForAbort returns True when Kodi is shutting down.
        if monitor.waitForAbort(1):
            break
    del monitor
    del self.provider
    Logger.notice('Service stopped.')
def process_pending_changes(self, exportid, on_after_change=None, on_before_change=None):
    """Drain the export's pending-change queue, processing each change once.

    Folder-creating changes push their children's changes onto the front
    of the queue. Changes whose type ends in '_retry' are collected and
    persisted as retry changes. State (items info, pending changes) is
    persisted after every change so an interrupted run can resume.
    Returns the list of changes completed.
    """
    changes_done = []
    pending_changes = self.export_manager.get_pending_changes(exportid)
    if pending_changes:
        export = self.export_manager.get_exports()[exportid]
        Logger.debug('*** Processing all changes for export "%s" in %s' % (export['name'], export['destination_folder']))
        items_info = Utils.default(self.export_manager.get_items_info(exportid), {})
        retry_changes = []
        # Guard against processing (and re-queuing) the same change twice.
        processed_changes = set()
        while len(pending_changes) > 0:
            change = pending_changes.popleft()
            change_id = change['id']
            if change_id in processed_changes:
                continue
            processed_changes.add(change_id)
            if on_before_change:
                on_before_change(change, pending_changes, changes_done, retry_changes, export)
            change_type = self.process_change(change, items_info, export)
            # Persist progress after each change so a crash can resume.
            self.export_manager.save_items_info(exportid, items_info)
            self.export_manager.save_pending_changes(exportid, pending_changes)
            is_retry = False
            if change_type:
                if change_type[-6:] == "_retry":
                    is_retry = True
                    retry_changes.append(change)
                    Logger.debug('change marked for retry')
                else:
                    changes_done.append(change)
                    # A new folder (or an ignored one during a scheduled run)
                    # queues its children's changes at the queue front.
                    if change_type == 'create_folder' or (change_type == 'create_folder_ignored' and Utils.get_safe_value(change, 'origin', '') == 'schedule'):
                        before_add_item = lambda item: self.on_before_add_item(change, item)
                        pending_changes.extendleft(self.get_folder_changes(export['driveid'], change, before_add_item))
                        self.export_manager.save_pending_changes(exportid, pending_changes)
            if on_after_change:
                on_after_change(change, change_type, pending_changes, changes_done, retry_changes, export)
            if is_retry:
                self.export_manager.save_retry_changes(exportid, deque(retry_changes))
    return changes_done
def get_item(self, driveid, path):
    """Return the item at `path` for `driveid`, using the items cache when possible."""
    cache_key = '%s%s' % (driveid, path,)
    Logger.debug('Testing item from cache: %s' % cache_key)
    cached = self._items_cache.get(cache_key)
    if cached:
        return cached
    # Cache miss: validate the path and fetch from the provider.
    provider = self._get_provider()
    provider.configure(self._account_manager, driveid)
    self.is_path_possible(driveid, path)
    fetched = provider.get_item(path=path, include_download_info=True)
    Logger.debug('Saving item in cache: %s' % cache_key)
    self._items_cache.set(cache_key, fetched)
    return fetched
def track_progress(self):
    """Publish playback position/total as home-window properties while playing.

    Updates once per second; stops when playback ends or Kodi aborts.
    """
    Logger.debug('tracking progress started...')
    monitor = KodiUtils.get_system_monitor()
    while self.isPlaying():
        KodiUtils.set_home_property('dbresume_position', Utils.str(self.getTime()))
        KodiUtils.set_home_property('dbresume_total', Utils.str(self.getTotalTime()))
        # waitForAbort returns True when Kodi is shutting down.
        if monitor.waitForAbort(1):
            break
    del monitor
    Logger.debug('tracking progress finished')
def process_watch(self, export_map):
    """Apply provider change feeds to every watched export.

    Change feeds are fetched once per drive and shared by its exports.
    Exports with no items info yet are (re)run in full instead. Changes
    are applied with a retry pass: while any change succeeded and others
    asked for retry, the retry set is reprocessed. Finally, the Kodi
    video/music libraries are refreshed if any applied change requested it.
    """
    exports = Utils.get_safe_value(export_map, 'watch', [])
    update_library = {}
    changes_by_drive = {}
    for export in exports:
        item_id = export['id']
        driveid = export['driveid']
        # Fetch the change feed once per drive and reuse it.
        if driveid in changes_by_drive:
            changes = changes_by_drive[driveid]
        else:
            self.provider.configure(self._account_manager, export['driveid'])
            changes = self.provider.changes()
            changes_by_drive[driveid] = changes
        items_info = self.export_manager.get_items_info(item_id)
        if items_info:
            if changes and not Utils.get_safe_value(export, 'exporting', False):
                Logger.debug('*** Processing changes for export "%s" in %s' % (export['name'], export['destination_folder']))
                while True:
                    changes_retry = []
                    changes_done = []
                    for change in changes:
                        change_type = self.process_change(change, items_info, export)
                        if change_type and change_type != 'retry':
                            changes_done.append(change)
                            self.export_manager.save_items_info(item_id, items_info)
                            if Utils.get_safe_value(export, 'update_library', False):
                                update_library[Utils.get_safe_value(export, 'content_type', 'None')] = True
                        elif change_type and change_type == 'retry':
                            changes_retry.append(change)
                    # Applied changes are removed from the shared drive feed
                    # so other exports on the same drive don't reapply them.
                    for change in changes_done:
                        changes_by_drive[driveid].remove(change)
                    # Retry only while the last pass made progress.
                    if changes_done and changes_retry:
                        changes = changes_retry
                        Logger.debug('Retrying pending changes...')
                    else:
                        break
        else:
            # No items info yet: run the export from scratch.
            self.run_export(export)
    if update_library:
        if Utils.get_safe_value(update_library, 'video', False):
            KodiUtils.update_library('video')
        if Utils.get_safe_value(update_library, 'audio', False):
            KodiUtils.update_library('music')
def get_item(self, driveid, item_driveid=None, item_id=None, path=None, find_subtitles=False, include_download_info=False):
    """Fetch a Dropbox item's metadata (cached ~59s), optionally with subtitles.

    The item is addressed by item_id or path. When find_subtitles is True,
    the parent folder is searched by filename for .srt/.sub/.sbv matches,
    attached under item['subtitles'].
    """
    self._provider.configure(self._account_manager, driveid)
    item_driveid = Utils.default(item_driveid, driveid)
    cache_key = self._addonid + '-drive-' + driveid + '-item_driveid-' + Utils.str(item_driveid) + '-item_id-' + Utils.str(item_id) + '-path-' + Utils.str(path)
    f = self._cache.get(cache_key)
    if not f:
        # Dropbox addresses items by path; an id is used as the path, and
        # the API expects '' (not '/') for the root.
        if item_id:
            path = item_id
        elif path == '/':
            path = ''
        self._parameters['path'] = path
        f = self._provider.post('/files/get_metadata', parameters=self._parameters, headers=self._headers)
        # Keep the raw metadata briefly to absorb bursts of lookups.
        self._cache.set(cache_key, f, expiration=datetime.timedelta(seconds=59))
    item = self._extract_item(f, driveid, include_download_info)
    if find_subtitles:
        subtitles = []
        parent_path = Utils.get_parent_path(item['path_lower'])
        if parent_path == '/':
            parent_path = ''
        # Search the parent folder by the item's base name.
        self._parameters['path'] = parent_path
        self._parameters['query'] = urllib.quote(Utils.remove_extension(item['name']))
        self._parameters['mode'] = 'filename'
        del self._parameters['include_media_info']
        files = self._provider.post('/files/search', parameters=self._parameters, headers=self._headers)
        for f in files['matches']:
            subtitle = self._extract_item(f['metadata'], driveid, include_download_info)
            if subtitle['name_extension'] == 'srt' or subtitle['name_extension'] == 'sub' or subtitle['name_extension'] == 'sbv':
                subtitles.append(subtitle)
        if subtitles:
            item['subtitles'] = subtitles
    Logger.notice(item)
    return item
def get_folder_items(self, driveid, path):
    """List a folder's items, populating the items and children caches.

    `path` is expected to end with '/'; the cache key drops that trailing
    slash. On a cache miss the provider listing is stored under
    '<driveid><path>:items', each child under '<driveid><path><name>',
    and the quoted child names under '<driveid><path>:children'.
    """
    provider = self._get_provider()
    provider.configure(self._account_manager, driveid)
    # Strip the trailing '/' for cache keys; keep it for the root request.
    cache_path = path[:len(path)-1]
    request_path = cache_path if len(path) > 1 else path
    self.is_path_possible(driveid, request_path)
    key = '%s%s:items' % (driveid, cache_path,)
    items = self._items_cache.get(key)
    # None means cache miss; an empty list is a valid (cached) empty folder.
    if not items and type(items) is NoneType:
        items = provider.get_folder_items(path=request_path, include_download_info=True)
        self._items_cache.set(key, items)
        children_names = []
        cache_items = []
        for item in items:
            quoted_name = urllib.quote(Utils.str(item['name']))
            children_names.append(quoted_name)
            key = '%s%s%s' % (driveid, path, quoted_name,)
            Logger.debug('Adding item in cache for bulk: %s' % key)
            cache_items.append([key, item])
        # Bulk-store individual items to avoid one write per child.
        self._items_cache.setmany(cache_items)
        Logger.debug('Cache in bulk saved')
        key = '%s%s:children' % (driveid, cache_path,)
        Logger.debug('saving children names for: ' + key)
        self._children_cache.set(key, children_names)
    else:
        Logger.debug('items for %s served from cache' % path)
    return items
def _get_connection(self):
    """Open (creating if needed) the per-addon SQLite cache database.

    Ensures the profile folder exists, enables WAL journaling, and creates
    the 'cache' table on first use. Table creation errors are logged and
    ignored (another connection may have created it concurrently).
    Returns the open connection; the caller is responsible for closing it.
    """
    profile_path = KodiUtils.get_addon_info("profile", self._addonid)
    if not KodiUtils.file_exists(profile_path):
        KodiUtils.mkdirs(profile_path)
    db = KodiUtils.translate_path("%s/cache_%s.db" % (profile_path, self._name,))
    # isolation_level=None: autocommit, no implicit transactions.
    con = sqlite3.connect(db, timeout=30, isolation_level=None)
    con.execute('pragma journal_mode=wal;')
    rs = con.execute("select name from sqlite_master where type='table' AND name='cache'")
    if not rs.fetchone():
        try:
            con.execute("create table cache(key text unique, value text, expiration integer)")
        except Exception as ex:
            Logger.debug(ex)
    return con
def saveProgress(self):
    """Persist playback resume point / playcount to the Kodi library.

    Reads the home-window properties published during playback. Only acts
    when the playing addon is this addon and a library dbid is known.
    At >= 90% watched the item is marked played (resume cleared, playcount
    incremented); past 180 seconds the resume point is stored instead.
    """
    dbid = KodiUtils.get_home_property('dbid')
    addonid = KodiUtils.get_home_property('addonid')
    if addonid and addonid == self.addonid:
        if dbid:
            Logger.debug('ok to save dbid: ' + dbid)
            dbtype = KodiUtils.get_home_property('dbtype')
            position = KodiUtils.get_home_property('dbresume_position')
            total = KodiUtils.get_home_property('dbresume_total')
            if dbtype and position and total:
                position = float(position)
                total = float(total)
                percent = position / total * 100
                details = {}
                Logger.debug('position is %d of %d = %d percent' % (position, total, percent))
                if percent >= 90:
                    # Considered fully watched: clear resume, bump playcount.
                    position = 0
                    total = 0
                    details['resume'] = {'position': 0, 'total': 0}
                    details['lastplayed'] = KodiUtils.to_db_date_str(datetime.datetime.today())
                    details['playcount'] = int(KodiUtils.get_home_property('playcount')) + 1
                elif position > 180:
                    # Only store a resume point past the first 3 minutes.
                    details['resume'] = {
                        'position': position,
                        'total': total
                    }
                if details:
                    Logger.debug(KodiUtils.save_video_details(dbtype, dbid, details))
                    Logger.debug('details saved to db - %s: %s' % (dbid, Utils.str(details)))
def _run_export(self, driveid, item_id=None):
    """Run a single export: download the item's folder tree to its destination.

    Refuses (with a dialog) if the export is already marked 'exporting'.
    Otherwise marks it as exporting, validates the destination folder,
    optionally cleans the target folder, exports recursively via
    __export_folder, persists the exported items info, and optionally
    triggers a library update. The 'exporting' flag is cleared at the end.
    """
    export_manager = ExportManager(self._account_manager._addon_data_path)
    export = export_manager.load()[item_id]
    Logger.debug('Running export:')
    Logger.debug(export)
    if Utils.get_safe_value(export, 'exporting', False):
        # Already running: tell the user and bail out.
        self._dialog.ok(
            self._addon_name,
            self._common_addon.getLocalizedString(32059) + ' ' + self._common_addon.getLocalizedString(32038))
    else:
        export['exporting'] = True
        export_manager.save()
        export_folder = export['destination_folder']
        if xbmcvfs.exists(export_folder):
            self.get_provider().configure(self._account_manager, driveid)
            self._export_progress_dialog_bg.create(
                self._addon_name + ' ' + self._common_addon.getLocalizedString(32024),
                self._common_addon.getLocalizedString(32025))
            self._export_progress_dialog_bg.update(0)
            item = self.get_provider().get_item(export['item_driveid'], item_id)
            if self.cancel_operation():
                return
            if self._child_count_supported:
                self._exporting_target = int(item['folder']['child_count'])
            self._exporting_target += 1
            folder_name = Utils.unicode(item['name'])
            # Trailing '' join ensures the path ends with a separator.
            folder_path = os.path.join(os.path.join(export_folder, folder_name), '')
            # Proceed unless 'clean_folder' is enabled and the existing
            # folder could not be removed.
            if self._addon.getSetting('clean_folder') != 'true' or not xbmcvfs.exists(folder_path) or self._remove_folder(folder_path):
                self._exporting = item_id
                export_items_info = {}
                ExportManager.add_item_info(export_items_info, item_id, folder_name, folder_path, None)
                self.__export_folder(driveid, item, export_folder, export, export_items_info)
                export_manager.save_items_info(item_id, export_items_info)
                if Utils.get_safe_value(export, 'update_library', False) and self._content_type:
                    database = self._content_type
                    # Kodi names the audio library 'music'.
                    if database == 'audio':
                        database = 'music'
                    KodiUtils.update_library(database)
            else:
                error = self._common_addon.getLocalizedString(32066) % folder_path
                Logger.debug(error)
                self._dialog.ok(self._addon_name, error)
            self._export_progress_dialog_bg.close()
        else:
            error = self._common_addon.getLocalizedString(32026) % export_folder
            Logger.debug(error)
            self._dialog.ok(self._addon_name, error)
        export['exporting'] = False
        export_manager.save()
def get_subtitles(self, driveid, path):
    """Return subtitle items for the media at `path`, caching the result.

    Looks up '<driveid><path>-subtitles' in the items cache; on a miss
    the provider is queried and the subtitles list is cached under that
    key.
    """
    item = self.get_item(driveid, path)
    key = '%s%s-subtitles' % (driveid, path,)
    Logger.debug('Testing subtitles from cache: %s' % key)
    subtitles = self._items_cache.get(key)
    if not subtitles:
        provider = self._get_provider()
        provider.configure(self._account_manager, driveid)
        self.is_path_possible(driveid, path)
        item_driveid = Utils.default(Utils.get_safe_value(item, 'drive_id'), driveid)
        subtitles = provider.get_subtitles(item['parent'], item['name'], item_driveid)
        Logger.debug('Saving subtitles in cache: %s' % key)
        # Fixed: cache the subtitles under the subtitles key — previously the
        # media item itself was stored (set(key, item)), so the cache never
        # served subtitles and the provider was queried every time.
        self._items_cache.set(key, subtitles)
    return subtitles
def handle_resource_request(self, data):
    """Resolve a resource request to a 303 redirect toward the owning addon's
    service port, or a 404 response when the addon cannot be determined."""
    result = {'response_code': 404}
    if len(data) <= 2 or not data[2]:
        return result
    addonid = self.get_addonid(data[2])
    Logger.debug('Redirector - addon id: %s' % addonid)
    if not addonid:
        return result
    destination_port = KodiUtils.get_service_port(self.server.service.name, addonid)
    target = 'http://%s:%s' % (self.server.service._interface, destination_port,) + self.path
    Logger.debug('Redirector: %s' % target)
    result['response_code'] = 303
    result['headers'] = {'location': target}
    return result
def is_path_possible(self, driveid, path):
    """Check cached folder listings to see whether `path` can exist.

    Walks up the path one segment at a time; at the first ancestor with a
    cached children list, raises RequestException (404) if the child name
    is not listed, otherwise returns True. With no cached ancestor at all,
    returns True (cannot disprove the path).
    """
    index = path.rfind('/')
    while index >= 0:
        filename = path[index+1:]
        path = path[0:index]
        key = '%s%s:children' % (driveid, path,)
        Logger.debug('testing possible path key: ' + key)
        children = self._children_cache.get(key)
        # A cached list (even empty) is authoritative; None is a cache miss.
        if children or type(children) is list:
            if filename and not filename in children:
                Logger.debug('Not found. From cache.')
                raise RequestException('Not found. From cache.', HTTPError(self.path, 404, 'Not found.', None, None), 'Request URL: %s' % self.path, None)
            return True
        index = path.rfind('/')
    return True
def __init__(self, provider_class):
    """Set up the playback service for the given cloud provider class.

    Builds a URL regex that matches this addon's directory-listing service
    URLs and wires it into the player so played items can be recognized.
    """
    # Local import avoids a circular dependency at module load time —
    # confirm; this is the usual reason for function-scope imports here.
    from clouddrive.common.service.source import SourceService
    self.abort = False
    self._system_monitor = KodiUtils.get_system_monitor()
    self.provider = provider_class()
    self.addonid = KodiUtils.get_addon_info('id')
    # Kodi 17 (Krypton) detection via build version string.
    if KodiUtils.get_info_label('System.BuildVersion').startswith('17.'):
        KodiUtils.set_home_property('iskrypton', 'true')
    self.addon_name = KodiUtils.get_addon_info('name')
    self.url_pattern = 'http.*:%s/%s/%s/.*' % (KodiUtils.get_addon_setting('port_directory_listing'), SourceService.name, urllib.quote(self.addon_name))
    Logger.debug(self.url_pattern)
    self.player = KodiPlayer()
    self.player.set_source_url_matcher(re.compile(self.url_pattern))
    self.player.set_addonid(self.addonid)