def verify_kodi_defaults():
    ''' Make sure we have the kodi default folder in place.

        Copies the system default video library nodes into the user profile
        (falling back to a bare mkdir on failure) and forces a fixed sort
        order on the three standard node types.
    '''
    node_path = xbmc.translatePath("special://profile/library/video")

    if not os.path.exists(node_path):
        try:
            shutil.copytree(
                src=xbmc.translatePath("special://xbmc/system/library/video"),
                dst=xbmc.translatePath("special://profile/library/video"))
        except Exception as error:
            # Best effort: if the copy fails, at least create the folder.
            LOG.warning(error)
            xbmcvfs.mkdir(node_path)

    for index, node in enumerate(['movies', 'tvshows', 'musicvideos']):
        # Renamed from `file` to avoid shadowing the builtin.
        index_file = os.path.join(node_path, node, "index.xml")

        if xbmcvfs.exists(index_file):
            xml = etree.parse(index_file).getroot()
            # Force ordering 17/18/19 so these nodes sort together.
            xml.set('order', str(17 + index))
            tree = etree.ElementTree(xml)
            tree.write(index_file)

    playlist_path = xbmc.translatePath("special://profile/playlists/video")

    if not xbmcvfs.exists(playlist_path):
        xbmcvfs.mkdirs(playlist_path)
def get_credentials():
    """Load stored server credentials from data.json in the addon data folder.

    Returns a dict that always has a 'Servers' list, migrating legacy
    per-server keys in place as it goes.
    """
    # Warn (but do not abort) on unsupported interpreter versions.
    if (3, 0) <= sys.version_info < (3, 6):
        LOG.error("Python versions 3.0-3.5 are NOT supported.")

    if not xbmcvfs.exists(ADDON_DATA):
        xbmcvfs.mkdirs(ADDON_DATA)

    try:
        with open(os.path.join(ADDON_DATA, 'data.json'), 'rb') as infile:
            credentials = json.load(infile)
    except IOError:
        # Missing/unreadable file: start with empty credentials.
        credentials = {}

    credentials['Servers'] = credentials.get('Servers', [])

    # Migration for #145
    # TODO: CLEANUP for 1.0.0 release
    for server in credentials['Servers']:
        # Functionality removed in #60
        if 'RemoteAddress' in server:
            del server['RemoteAddress']

        # Prefer the manual address over the local one when both are present.
        if 'ManualAddress' in server:
            server['address'] = server['ManualAddress']
            del server['ManualAddress']
            # If manual is present, local should always be here, but better to be safe
            if 'LocalAddress' in server:
                del server['LocalAddress']
        elif 'LocalAddress' in server:
            server['address'] = server['LocalAddress']
            del server['LocalAddress']

        if 'LastConnectionMode' in server:
            del server['LastConnectionMode']

    return credentials
def _get_database(self):
    '''get reference to our sqllite _database - performs basic integrity check

    Returns an open sqlite3 connection, or None when the database cannot
    be created. A corrupt database file is deleted and recreated.
    '''
    addon = xbmcaddon.Addon(ADDON_ID)
    dbpath = addon.getAddonInfo('profile')
    dbfile = xbmc.translatePath("%s/simplecache.db" % dbpath)

    if not xbmcvfs.exists(dbpath):
        xbmcvfs.mkdirs(dbpath)
    del addon

    connection = None
    try:
        connection = sqlite3.connect(dbfile, timeout=30, isolation_level=None)
        # Basic integrity check: the table must exist and be readable.
        connection.execute('SELECT * FROM simplecache LIMIT 1')
        return connection
    except Exception as error:
        # our _database is corrupt or doesn't exist yet, we simply try to recreate it.
        # Log the original failure (previously swallowed) and release the
        # half-open connection before deleting the file underneath it.
        self._log_msg(
            "Database integrity check failed, recreating: %s" % str(error),
            xbmc.LOGWARNING)
        if connection is not None:
            try:
                connection.close()
            except Exception:
                pass
        if xbmcvfs.exists(dbfile):
            xbmcvfs.delete(dbfile)
        try:
            connection = sqlite3.connect(dbfile, timeout=30, isolation_level=None)
            connection.execute(
                """CREATE TABLE IF NOT EXISTS simplecache(
                id TEXT UNIQUE, expires INTEGER, data TEXT, checksum INTEGER)""")
            return connection
        except Exception as error:
            self._log_msg(
                "Exception while initializing _database: %s" % str(error),
                xbmc.LOGWARNING)
            self.close()
            return None
def writeConfig(cfile, value=''):
    """Write (or delete, when value == '') a config file, guarded by a
    crude lock file so concurrent writers don't interleave.

    Returns True once the write has completed.
    """
    cfgfile = os.path.join(configpath, cfile)
    cfglockfile = os.path.join(configpath, cfile + '.lock')

    if not xbmcvfs.exists(configpath):
        xbmcvfs.mkdirs(configpath)

    # Spin until we manage to take the lock.
    while True:
        if not xbmcvfs.exists(cfglockfile):
            # Take the lock by writing our timestamp into the lock file.
            l = xbmcvfs.File(cfglockfile, 'w')
            l.write(str(time.time()))
            l.close()

            if value == '':
                # Empty value means "remove the setting".
                xbmcvfs.delete(cfgfile)
            else:
                f = xbmcvfs.File(cfgfile, 'w')
                f.write(value.__str__())
                f.close()

            # Release the lock exactly once (the original deleted it twice,
            # a redundant copy-paste; the second delete was always a no-op).
            xbmcvfs.delete(cfglockfile)
            return True
        else:
            # Someone else holds the lock: break stale locks (> 0.1s old).
            l = xbmcvfs.File(cfglockfile)
            modified = l.read()
            modified = float(modified) if modified else 0
            l.close()
            if time.time() - modified > 0.1:
                xbmcvfs.delete(cfglockfile)
def writeConfig(cfile, value):
    """Write (or delete, when value == '') a config file under
    ``writeConfig.configPath``, guarded by a crude lock file.

    Returns True once the write has completed.
    """
    cfgfile = OSPJoin(writeConfig.configPath, cfile)
    cfglockfile = OSPJoin(writeConfig.configPath, cfile + '.lock')

    if not xbmcvfs.exists(writeConfig.configPath):
        xbmcvfs.mkdirs(writeConfig.configPath)

    # Spin until we manage to take the lock.
    while True:
        if not xbmcvfs.exists(cfglockfile):
            # Take the lock by writing our timestamp into the lock file.
            l = xbmcvfs.File(cfglockfile, 'w')
            l.write(str(time.time()))
            l.close()

            if value == '':
                # Empty value means "remove the setting".
                xbmcvfs.delete(cfgfile)
            else:
                f = xbmcvfs.File(cfgfile, 'w')
                f.write(value.__str__())
                f.close()

            xbmcvfs.delete(cfglockfile)
            return True
        else:
            # Someone else holds the lock: break stale locks (> 0.1s old).
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are not swallowed; a garbled lock timestamp still just retries.
            try:
                l = xbmcvfs.File(cfglockfile)
                modified = float(l.read())
                l.close()
                if time.time() - modified > 0.1:
                    xbmcvfs.delete(cfglockfile)
            except Exception:
                pass
def verify_kodi_defaults():
    ''' Make sure we have the kodi default folder in place.

        Mirrors the system default video library nodes into the user
        profile, recovering unparsable XML files from the defaults, then
        forces a fixed ordering on the three standard node types.
    '''
    source_base_path = xbmc.translatePath("special://xbmc/system/library/video")
    dest_base_path = xbmc.translatePath("special://profile/library/video")

    # Make sure the files exist in the local profile.
    # TODO: Investigate why this is needed.
    # I would think Kodi pulls data from the default profile
    # if we don't do this.
    for source_path, dirs, files in os.walk(source_base_path):
        relative_path = os.path.relpath(source_path, source_base_path)
        dest_path = os.path.join(dest_base_path, relative_path)

        if not os.path.exists(dest_path):
            os.mkdir(os.path.normpath(dest_path))

        for file_name in files:
            dest_file = os.path.join(dest_path, file_name)
            copy = False

            if not os.path.exists(dest_file):
                copy = True
            elif os.path.splitext(file_name)[1].lower() == '.xml':
                # Recover corrupted XML from the system default.
                try:
                    etree.parse(dest_file)
                except etree.ParseError:
                    LOG.warning("Unable to parse `{}`, recovering from default.".format(dest_file))
                    copy = True

            if copy:
                source_file = os.path.join(source_path, file_name)
                LOG.debug("Copying `{}` -> `{}`".format(source_file, dest_file))
                xbmcvfs.copy(source_file, dest_file)

    # This code seems to enforce a fixed ordering.
    # Is it really desirable to force this on users?
    # The default (system wide) order is [10, 20, 30] in Kodi 19.
    for index, node in enumerate(['movies', 'tvshows', 'musicvideos']):
        file_name = os.path.join(dest_base_path, node, "index.xml")

        if xbmcvfs.exists(file_name):
            try:
                tree = etree.parse(file_name)
            except etree.ParseError:
                LOG.error("Unable to parse `{}`".format(file_name))
                LOG.exception("We ensured the file was OK above, something is wrong!")
                # BUGFIX: `tree` is unbound here; without this `continue`
                # the fall-through `tree.getroot()` raised NameError.
                continue

            tree.getroot().set('order', str(17 + index))
            tree.write(file_name)

    playlist_path = xbmc.translatePath("special://profile/playlists/video")

    if not xbmcvfs.exists(playlist_path):
        xbmcvfs.mkdirs(playlist_path)
def SaveFile(filename, data, dirname=None):
    """Write *data* to *filename* as UTF-8 bytes; when *dirname* is given,
    sanitize the name, place the file inside it and create it if missing."""
    from contextlib import closing

    if dirname:
        filename = os.path.join(dirname, cleanName(filename))
        if not xbmcvfs.exists(dirname):
            xbmcvfs.mkdirs(cleanName(dirname.strip(), isfile=False))

    target = cleanName(filename, isfile=False)
    payload = bytearray(py2_decode(data).encode('utf-8'))
    with closing(xbmcvfs.File(target, 'w')) as fh:
        fh.write(payload)
def backup():
    ''' Jellyfin backup.

        Prompts for a backup folder name, then copies the addon data folder
        and the relevant Kodi databases into it.
    '''
    from helper.utils import delete_folder, copytree

    path = settings('backupPath')
    folder_name = "Kodi%s.%s" % (xbmc.getInfoLabel('System.BuildVersion')[:2],
                                 xbmc.getInfoLabel('System.Date(dd-mm-yy)'))
    folder_name = dialog("input", heading=translate(33089), defaultt=folder_name)

    if not folder_name:
        return

    # BUGFIX: this local used to be named `backup`, shadowing the function
    # itself and making the recursive retry below crash ('str' not callable).
    backup_path = os.path.join(path, folder_name)

    if xbmcvfs.exists(backup_path + '/'):
        if not dialog("yesno", heading="{jellyfin}", line1=translate(33090)):
            # User declined overwriting: ask for a different name.
            return backup()

        delete_folder(backup_path)

    addon_data = xbmc.translatePath("special://profile/addon_data/plugin.video.jellyfin")
    destination_data = os.path.join(backup_path, "addon_data", "plugin.video.jellyfin")
    destination_databases = os.path.join(backup_path, "Database")

    if not xbmcvfs.mkdirs(path) or not xbmcvfs.mkdirs(destination_databases):
        LOG.info("Unable to create all directories")
        dialog("notification", heading="{jellyfin}", icon="{jellyfin}",
               message=translate(33165), sound=False)
        return

    copytree(addon_data, destination_data)
    databases = Objects().objects

    db = xbmc.translatePath(databases['jellyfin'])
    xbmcvfs.copy(db, os.path.join(destination_databases, db.rsplit('\\', 1)[1]))
    LOG.info("copied jellyfin.db")

    db = xbmc.translatePath(databases['video'])
    filename = db.rsplit('\\', 1)[1]
    xbmcvfs.copy(db, os.path.join(destination_databases, filename))
    LOG.info("copied %s", filename)

    if settings('enableMusic.bool'):
        db = xbmc.translatePath(databases['music'])
        filename = db.rsplit('\\', 1)[1]
        xbmcvfs.copy(db, os.path.join(destination_databases, filename))
        LOG.info("copied %s", filename)

    LOG.info("backup completed")
    dialog("ok", heading="{jellyfin}", line1="%s %s" % (translate(33091), backup_path))
def save_sync(sync):
    """Stamp *sync* with the current UTC time and persist it as sync.json."""
    if not xbmcvfs.exists(ADDON_DATA):
        xbmcvfs.mkdirs(ADDON_DATA)

    sync['Date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')

    serialized = json.dumps(sync, sort_keys=True, indent=4, ensure_ascii=False)
    if isinstance(serialized, text_type):
        serialized = serialized.encode('utf-8')

    with open(os.path.join(ADDON_DATA, 'sync.json'), 'wb') as outfile:
        outfile.write(serialized)
def __init__(self, *args, **kwargs):
    # NOTE(review): super() is invoked on xbmcgui.WindowXMLDialog itself,
    # which skips WindowXMLDialog.__init__ and calls its parent's instead —
    # presumably deliberate (a common Kodi-addon idiom); confirm before changing.
    super(xbmcgui.WindowXMLDialog, self).__init__()
    # Action ids (10 = PREVIOUS_MENU, 13 = STOP) treated as "close dialog".
    self.action_exitkeys_id = [10, 13]
    # Home window handle, used for cross-window property storage.
    self.win = xbmcgui.Window(10000)
    self.build_colors_list()
    # -1 signals "no color selected yet".
    self.result = -1
    # check paths
    if xbmcvfs.exists(
            SKINCOLORFILE) and not xbmcvfs.exists(SKINCOLORFILES_PATH):
        xbmcvfs.mkdirs(SKINCOLORFILES_PATH)
    if not xbmcvfs.exists(COLORFILES_PATH):
        xbmcvfs.mkdirs(COLORFILES_PATH)
def __init__(self, output_path=None, forced=False):
    """Set up working/output folders (creating them if needed) and merge state.

    output_path defaults to the configured output_dir setting, falling back
    to the addon profile folder.
    """
    self.working_path = ADDON_PROFILE
    configured_dir = settings.get('output_dir', '').strip()
    self.output_path = output_path or xbmc.translatePath(configured_dir or self.working_path)

    # Ensure both folders exist before any merge work starts.
    for folder in (self.working_path, self.output_path):
        if not xbmcvfs.exists(folder):
            xbmcvfs.mkdirs(folder)

    self.forced = forced
    self.tmp_file = os.path.join(self.working_path, 'iptv_merge_tmp')
    self._playlist_epgs = []
def ask_to_share_log():
    """
    Ask the user if he wants to share his log directly by mail with
    a QR code or by sharing the pastebin URL by mail, on github or forum
    """
    r = xbmcgui.Dialog().yesno(Script.localize(LABELS['Information']),
                               Script.localize(30860))
    if not r:
        return

    if not xbmcvfs.exists(PROFILE):
        xbmcvfs.mkdirs(PROFILE)

    # Fixed `succes` typo and collapsed the two duplicated error branches
    # (read failure and upload failure showed the same dialog).
    success, data = read_log(LOGFILE)
    if success:
        success, data = post_log(clean_log(data))

    if not success:
        # `data` carries the error message on failure.
        xbmcgui.Dialog().ok(Script.localize(LABELS['Information']),
                            Script.localize(30862) + ': ' + data)
        return

    # `data` is the pastebin URL: render it as a mailto QR code.
    imagefile = os.path.join(xbmc.translatePath(PROFILE),
                             '%s.png' % str(data.split('/')[-1]))
    message = Script.localize(30861).replace("URL_TO_REPLACE", data)
    mail_url = 'mailto:[email protected]?subject=Kodi%20log&body=' + data
    qrIMG = pyqrcode.create(mail_url)
    qrIMG.png(imagefile, scale=10)
    qr = QRCode("script-loguploader-main.xml", CWD, "default",
                image=imagefile, text=message)
    qr.doModal()
    del qr
    xbmcvfs.delete(imagefile)
def save_sync(sync):
    """Stamp *sync* with the current UTC time and persist it as sync.json
    in the jellyfin addon-data folder (created on demand)."""
    path = xbmc.translatePath(
        "special://profile/addon_data/plugin.video.jellyfin/")
    if not xbmcvfs.exists(path):
        xbmcvfs.mkdirs(path)

    sync['Date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')

    serialized = json.dumps(sync, sort_keys=True, indent=4, ensure_ascii=False)
    if isinstance(serialized, text_type):
        serialized = serialized.encode('utf-8')

    with open(os.path.join(path, 'sync.json'), 'wb') as outfile:
        outfile.write(serialized)
def copytree(path, dest):
    ''' Copy folder content from one to another. '''
    dirs, files = xbmcvfs.listdir(path)

    if not xbmcvfs.exists(dest):
        xbmcvfs.mkdirs(dest)

    if dirs:
        copy_recursive(path, dirs, dest)

    # Copy each file at this level (renamed loop var to avoid builtin `file`).
    for name in files:
        copy_file(os.path.join(path, name), os.path.join(dest, name))

    LOG.info("Copied %s", path)
def save_credentials(credentials):
    """Persist *credentials* (an empty dict when falsy) as data.json."""
    credentials = credentials or {}

    if not xbmcvfs.exists(ADDON_DATA):
        xbmcvfs.mkdirs(ADDON_DATA)

    try:
        serialized = json.dumps(credentials, sort_keys=True, indent=4,
                                ensure_ascii=False)
        if isinstance(serialized, text_type):
            serialized = serialized.encode('utf-8')
        with open(os.path.join(ADDON_DATA, 'data.json'), 'wb') as outfile:
            outfile.write(serialized)
    except Exception:
        LOG.exception("Failed to save credentials:")
def get_sync():
    """Load sync.json from the addon data folder, guaranteeing the
    'Libraries', 'RestorePoint', 'Whitelist' and 'SortedViews' keys."""
    if not xbmcvfs.exists(ADDON_DATA):
        xbmcvfs.mkdirs(ADDON_DATA)

    try:
        with open(os.path.join(ADDON_DATA, 'sync.json'), 'rb') as infile:
            # BUGFIX: dropped `encoding='utf-8'` — the kwarg was removed in
            # Python 3.9, where it raised TypeError and silently reset sync.
            sync = json.load(infile)
    except Exception:
        sync = {}

    sync['Libraries'] = sync.get('Libraries', [])
    sync['RestorePoint'] = sync.get('RestorePoint', {})
    # Deduplicate the whitelist.
    sync['Whitelist'] = list(set(sync.get('Whitelist', [])))
    sync['SortedViews'] = sync.get('SortedViews', [])

    return sync
def download_subs(link, referrer, filename): """ Download selected subs :param link: str - a download link for the subs. :param referrer: str - a referer URL for the episode page (required by addic7ed.com). :param filename: str - the name of the video-file being played. The function must add a single ListItem instance with one property: label - the download location for subs. """ # Re-create a download location in a temporary folder if xbmcvfs.exists(temp_dir): shutil.rmtree(temp_dir) xbmcvfs.mkdirs(temp_dir) # Combine a path where to download the subs filename = os.path.splitext(filename)[0] + '.srt' subspath = os.path.join(temp_dir, filename) # Download the subs from addic7ed.com try: parser.download_subs(link, referrer, subspath) except Add7ConnectionError: logger.error('Unable to connect to addic7ed.com') dialog.notification(get_ui_string(32002), get_ui_string(32005), 'error') except DailyLimitError: dialog.notification(get_ui_string(32002), get_ui_string(32003), 'error', 3000) logger.error('Exceeded daily limit for subs downloads.') else: # Create a ListItem for downloaded subs and pass it # to the Kodi subtitles engine to move the downloaded subs file # from the temp folder to the designated # location selected by 'Subtitle storage location' option # in 'Settings > Video > Subtitles' section. # A 2-letter language code will be added to subs filename. list_item = xbmcgui.ListItem(label=subspath) xbmcplugin.addDirectoryItem(handle=handle, url=subspath, listitem=list_item, isFolder=False) dialog.notification(get_ui_string(32000), get_ui_string(32001), icon, 3000, False) logger.notice('Subs downloaded.')
def save_credentials(credentials):
    """Persist *credentials* (an empty dict when falsy) as data.json in the
    jellyfin addon-data folder (created on demand)."""
    credentials = credentials or {}
    path = xbmc.translatePath(
        "special://profile/addon_data/plugin.video.jellyfin/")

    if not xbmcvfs.exists(path):
        xbmcvfs.mkdirs(path)

    try:
        serialized = json.dumps(credentials, sort_keys=True, indent=4,
                                ensure_ascii=False)
        if isinstance(serialized, text_type):
            serialized = serialized.encode('utf-8')
        with open(os.path.join(path, 'data.json'), 'wb') as outfile:
            outfile.write(serialized)
    except Exception:
        LOG.exception("Failed to save credentials:")
def get_credentials():
    """Load stored server credentials, migrating from the legacy data.txt
    file and legacy per-server keys as needed.

    Always returns a dict with a 'Servers' list.
    """
    path = xbmc.translatePath(
        "special://profile/addon_data/plugin.video.jellyfin/")

    if not xbmcvfs.exists(path):
        xbmcvfs.mkdirs(path)

    try:
        with open(os.path.join(path, 'data.json'), 'rb') as infile:
            # BUGFIX: dropped the `encoding=` kwarg — removed in Python 3.9,
            # where it raised TypeError and forced the fallback path.
            credentials = json.load(infile)
    except Exception:
        # Fall back to the legacy data.txt, migrating it to data.json.
        try:
            with open(os.path.join(path, 'data.txt'), 'rb') as infile:
                credentials = json.load(infile)
            save_credentials(credentials)
            xbmcvfs.delete(os.path.join(path, 'data.txt'))
        except Exception:
            credentials = {}

    credentials['Servers'] = credentials.get('Servers', [])

    # Migration for #145
    # TODO: CLEANUP for 1.0.0 release
    for server in credentials['Servers']:
        # Functionality removed in #60
        if 'RemoteAddress' in server:
            del server['RemoteAddress']

        # Prefer the manual address over the local one when both are present.
        if 'ManualAddress' in server:
            server['address'] = server['ManualAddress']
            del server['ManualAddress']
            # If manual is present, local should always be here, but better to be safe
            if 'LocalAddress' in server:
                del server['LocalAddress']
        elif 'LocalAddress' in server:
            server['address'] = server['LocalAddress']
            del server['LocalAddress']

        if 'LastConnectionMode' in server:
            del server['LastConnectionMode']

    return credentials
def download_subs(link, referrer, filename):
    """
    Download selected subs

    :param link: str - a download link for the subs.
    :param referrer: str - a referer URL for the episode page
        (required by addic7ed.com).
    :param filename: str - the name of the video-file being played.

    The function must add a single ListItem instance with one property:
    label - the download location for subs.
    """
    # Re-create a download location in a temporary folder
    if xbmcvfs.exists(temp_dir):
        shutil.rmtree(temp_dir)
    xbmcvfs.mkdirs(temp_dir)
    # Combine a path where to download the subs.
    # BUGFIX: `filename[:-3] + 'srt'` assumed a 3-character extension and
    # corrupted names like "video.mpeg"; use os.path.splitext instead.
    subspath = os.path.join(temp_dir, os.path.splitext(filename)[0] + '.srt')
    # Download the subs from addic7ed.com
    try:
        parser.download_subs(link, referrer, subspath)
    except Add7ConnectionError:
        logger.error('Unable to connect to addic7ed.com')
        dialog.notification(get_ui_string(32002), get_ui_string(32005), 'error')
    except DailyLimitError:
        dialog.notification(get_ui_string(32002), get_ui_string(32003), 'error', 3000)
        logger.error('Exceeded daily limit for subs downloads.')
    else:
        # Create a ListItem for downloaded subs and pass it
        # to the Kodi subtitles engine to move the downloaded subs file
        # from the temp folder to the designated
        # location selected by 'Subtitle storage location' option
        # in 'Settings > Video > Subtitles' section.
        # A 2-letter language code will be added to subs filename.
        list_item = xbmcgui.ListItem(label=subspath)
        xbmcplugin.addDirectoryItem(handle=handle,
                                    url=subspath,
                                    listitem=list_item,
                                    isFolder=False)
        dialog.notification(get_ui_string(32000), get_ui_string(32001), icon, 3000, False)
        logger.notice('Subs downloaded.')
def get_sync():
    """Load sync.json from the addon data folder, guaranteeing the
    'Libraries', 'RestorePoint', 'Whitelist' and 'SortedViews' keys."""
    if (3, 0) <= sys.version_info < (3, 6):
        LOG.error("Python versions 3.0-3.5 are NOT supported.")

    if not xbmcvfs.exists(ADDON_DATA):
        xbmcvfs.mkdirs(ADDON_DATA)

    sync_file = os.path.join(ADDON_DATA, 'sync.json')
    try:
        with open(sync_file, 'rb') as infile:
            sync = json.load(infile)
    except Exception:
        sync = {}

    # Guarantee the expected keys; deduplicate the whitelist.
    sync.setdefault('Libraries', [])
    sync.setdefault('RestorePoint', {})
    sync['Whitelist'] = list(set(sync.get('Whitelist', [])))
    sync.setdefault('SortedViews', [])

    return sync
def get_fanart(item_id, path, server_id=None):
    ''' Get extra fanart for listitems. This is called by skinhelper.
        Images are stored locally, due to the Kodi caching system.
    '''
    # When no id was passed, try to recover it from the plugin path
    # (assumes the id is the second-to-last path segment — verify with caller).
    if not item_id and 'plugin.video.jellyfin' in path:
        item_id = path.split('/')[-2]

    if not item_id:
        return

    LOG.info("[ extra fanart ] %s", item_id)
    objects = Objects()
    list_li = []
    # Per-item cache folder under Kodi's thumbnails directory.
    directory = xbmc.translatePath("special://thumbnails/jellyfin/%s/" % item_id)
    server = TheVoid('GetServerAddress', {'ServerId': server_id}).get()

    if not xbmcvfs.exists(directory):
        # First request: fetch all backdrops from the server and cache them.
        xbmcvfs.mkdirs(directory)
        item = TheVoid('GetItem', {'ServerId': server_id, 'Id': item_id}).get()
        obj = objects.map(item, 'Artwork')
        backdrops = api.API(item, server).get_all_artwork(obj)
        tags = obj['BackdropTags']

        for index, backdrop in enumerate(backdrops):
            tag = tags[index]
            fanart = os.path.join(directory, "fanart%s.jpg" % tag)
            li = xbmcgui.ListItem(tag, path=fanart)
            xbmcvfs.copy(backdrop, fanart)
            list_li.append((fanart, li, False))
    else:
        # Serve the previously cached copies.
        LOG.debug("cached backdrop found")
        dirs, files = xbmcvfs.listdir(directory)

        for file in files:
            fanart = os.path.join(directory, file)
            li = xbmcgui.ListItem(file, path=fanart)
            list_li.append((fanart, li, False))

    xbmcplugin.addDirectoryItems(int(sys.argv[1]), list_li, len(list_li))
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
def get_sync():
    """Load sync.json from the jellyfin addon-data folder, guaranteeing the
    'Libraries', 'RestorePoint', 'Whitelist' and 'SortedViews' keys."""
    path = xbmc.translatePath(
        "special://profile/addon_data/plugin.video.jellyfin/")

    if not xbmcvfs.exists(path):
        xbmcvfs.mkdirs(path)

    try:
        with open(os.path.join(path, 'sync.json'), 'rb') as infile:
            # BUGFIX: dropped `encoding='utf-8'` — the kwarg was removed in
            # Python 3.9, where it raised TypeError and silently reset sync.
            sync = json.load(infile)
    except Exception:
        sync = {}

    sync['Libraries'] = sync.get('Libraries', [])
    sync['RestorePoint'] = sync.get('RestorePoint', {})
    # Deduplicate the whitelist.
    sync['Whitelist'] = list(set(sync.get('Whitelist', [])))
    sync['SortedViews'] = sync.get('SortedViews', [])

    return sync
def get_sync():
    """Load sync.json, guaranteeing the expected keys and filtering out
    malformed library ids left over from #494/#511."""
    if (3, 0) <= sys.version_info < (3, 6):
        LOG.error("Python versions 3.0-3.5 are NOT supported.")

    if not xbmcvfs.exists(ADDON_DATA):
        xbmcvfs.mkdirs(ADDON_DATA)

    sync_file = os.path.join(ADDON_DATA, 'sync.json')
    try:
        with open(sync_file, 'rb') as infile:
            sync = json.load(infile)
    except Exception:
        sync = {}

    sync.setdefault('Libraries', [])
    sync.setdefault('RestorePoint', {})
    sync['Whitelist'] = list(set(sync.get('Whitelist', [])))
    sync.setdefault('SortedViews', [])

    # Temporary cleanup from #494/#511, remove in a future version
    sync['Libraries'] = [
        lib_id for lib_id in sync['Libraries'] if ',' not in lib_id
    ]

    return sync
def _update_settings_xml():
    """
    This function writes a new ``resources/settings.xml`` file which contains
    all settings for this addon and its plugins.
    """
    try:
        xbmcvfs.mkdirs(common.settings_path)
    except OSError:
        # NOTE(review): xbmcvfs.mkdirs normally signals failure via its return
        # value rather than raising OSError — this guard may be dead; confirm.
        pass
    # Static header: addon-wide options plus hidden bookkeeping settings.
    new_xml = [
        '<?xml version="1.0" encoding="utf-8" standalone="yes"?>',
        '<settings>',
        '\t<category label="ResolveURL">',
        '\t\t<setting default="true" id="allow_universal" label="%s" type="bool"/>' % (common.i18n('enable_universal')),
        '\t\t<setting default="true" id="allow_popups" label="%s" type="bool"/>' % (common.i18n('enable_popups')),
        '\t\t<setting default="true" id="auto_pick" label="%s" type="bool"/>' % (common.i18n('auto_pick')),
        '\t\t<setting default="true" id="use_cache" label="%s" type="bool"/>' % (common.i18n('use_function_cache')),
        '\t\t<setting id="reset_cache" type="action" label="%s" action="RunPlugin(plugin://script.module.resolveurl/?mode=reset_cache)"/>' % (common.i18n('reset_function_cache')),
        '\t\t<setting id="personal_nid" label="Your NID" type="text" visible="false" default=""/>',
        '\t\t<setting id="last_ua_create" label="last_ua_create" type="number" visible="false" default="0"/>',
        '\t\t<setting id="current_ua" label="current_ua" type="text" visible="false" default=""/>',
        '\t\t<setting id="addon_debug" label="addon_debug" type="bool" visible="false" default="false"/>',
        '\t</category>',
        '\t<category label="%s">' % (common.i18n('universal_resolvers'))
    ]

    resolvers = relevant_resolvers(include_universal=True, include_disabled=True)
    resolvers = sorted(resolvers, key=lambda x: x.name.upper())
    # Universal resolvers go in their own category first.
    for resolver in resolvers:
        if resolver.isUniversal():
            new_xml.append('\t\t<setting label="%s" type="lsep"/>' % resolver.name)
            new_xml += ['\t\t' + line for line in resolver.get_settings_xml()]
    new_xml.append('\t</category>')
    new_xml.append('\t<category label="%s 1">' % (common.i18n('resolvers')))

    # Non-universal resolvers, chunked into numbered categories so no single
    # category exceeds MAX_SETTINGS entries (Kodi dialog limitation).
    i = 0
    cat_count = 2
    for resolver in resolvers:
        if not resolver.isUniversal():
            if i > MAX_SETTINGS:
                # Close the current category and open the next numbered one.
                new_xml.append('\t</category>')
                new_xml.append('\t<category label="%s %s">' % (common.i18n('resolvers'), cat_count))
                cat_count += 1
                i = 0
            new_xml.append('\t\t<setting label="%s" type="lsep"/>' % resolver.name)
            res_xml = resolver.get_settings_xml()
            new_xml += ['\t\t' + line for line in res_xml]
            # +1 accounts for the separator line added above.
            i += len(res_xml) + 1

    new_xml.append('\t</category>')
    new_xml.append('</settings>')

    # Read the existing file (if any) so we only rewrite when content changed.
    try:
        if six.PY3:
            with open(common.settings_file, 'r', encoding='utf-8') as f:
                old_xml = f.read()
        else:
            with open(common.settings_file, 'r') as f:
                old_xml = f.read()
    except:
        old_xml = u''
    old_xml = six.ensure_text(old_xml)
    new_xml = six.ensure_text('\n'.join(new_xml))

    if old_xml != new_xml:
        common.logger.log_debug('Updating Settings XML')
        try:
            if six.PY3:
                with open(common.settings_file, 'w', encoding='utf-8') as f:
                    f.write(new_xml)
            else:
                with open(common.settings_file, 'w') as f:
                    f.write(new_xml.encode('utf8'))
        except:
            # NOTE(review): try/except that only re-raises is a no-op wrapper.
            raise
    else:
        common.logger.log_debug('No Settings Update Needed')
def record_once_thread(programmeid, do_refresh=True, watch=False, remind=False,
                       channelid=None, channelname=None, start=None, stop=None,
                       play=False, title=None):
    """Record a single programme with ffmpeg, in time-limited segments.

    Resolves the stream URL from the local xmltv database, records the
    programme in parts of `part.length` seconds, optionally writes
    artwork/nfo/json sidecars, and optionally concatenates the segments.
    Only past programmes can be recorded; in-progress and future ones
    show an error dialog and return.
    """
    #TODO check for ffmpeg process already recording if job is re-added
    conn = sqlite3.connect(xbmc.translatePath(
        '%sxmltv.db' % plugin.addon.getAddonInfo('profile')),
        detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
    cursor = conn.cursor()
    if not check_has_db_filled_show_error_message_ifn(cursor):
        return
    # Collect programme metadata for the optional .json sidecar.
    programme = {}
    if channelid is not None:
        programme["channelid"] = channelid
    if start:
        programme["start"] = datetime2timestamp(start)
    if stop:
        programme["stop"] = datetime2timestamp(stop)
    nfo = {}
    nfo["programme"] = programme
    if not start and not stop:
        return
    local_starttime = utc2local(start)
    local_endtime = utc2local(stop)
    # Resolve the channel's stream url, trying progressively looser matches.
    if channelid:
        channel = cursor.execute(
            "SELECT name, url FROM streams WHERE tvg_id=? AND tvg_name=?",
            (channelid, channelname)).fetchone()
        if not channel:
            channel = cursor.execute(
                "SELECT name, url FROM streams WHERE tvg_id=? AND name=?",
                (channelid, channelname)).fetchone()
    else:
        channel = cursor.execute("SELECT name, url FROM streams WHERE name=?",
                                 (channelname, )).fetchone()
        if not channel:
            channel = cursor.execute(
                "SELECT name, url FROM streams WHERE tvg_name=?",
                (channelname, )).fetchone()
    if not channel:
        log("No channel {} {}".format(channelname, xbmc.LOGERROR))
        return
    name, url = channel
    if not channelname:
        channelname = name
    nfo["channel"] = {"channelname": channelname}
    if not url:
        log("No url for {} {}".format(channelname, xbmc.LOGERROR))
        return
    # Kodi-style "url|Header=value&..." — split off and decode any headers.
    url_headers = url.split('|', 1)
    url = url_headers[0]
    headers = {}
    if len(url_headers) == 2:
        sheaders = url_headers[1]
        aheaders = sheaders.split('&')
        if aheaders:
            for h in aheaders:
                k, v = h.split('=', 1)
                headers[k] = unquote_plus(v)
    # Build a filesystem-safe output file name.
    ftitle = sane_name(title)
    fchannelname = sane_name(channelname)
    fepisode = ""
    try:
        fepisode = re.search(r'(S\d+E\d+)|(S\d+|E\d+)',
                             xbmc.getInfoLabel("ListItem.Plot")).group(0)
    except:
        log("Not a series")
    folder = ""
    if (plugin.get_setting('subfolder', str) == 'true'):
        folder = fchannelname
    if ftitle:
        if fepisode:
            filename = "%s - %s - %s - %s" % (
                ftitle, fepisode, fchannelname,
                local_starttime.strftime("%Y-%m-%d %H-%M"))
        else:
            filename = "%s - %s - %s" % (
                ftitle, fchannelname,
                local_starttime.strftime("%Y-%m-%d %H-%M"))
    else:
        filename = "%s - %s" % (fchannelname,
                                local_starttime.strftime("%Y-%m-%d %H-%M"))
    # Pad the recording window with the configured margins.
    before = int(plugin.get_setting('minutes.before', str) or "0")
    after = int(plugin.get_setting('minutes.after', str) or "0")
    local_starttime = local_starttime - timedelta(minutes=before)
    local_endtime = local_endtime + timedelta(minutes=after)
    now = datetime.now()
    if (local_starttime < now) and (local_endtime > now):
        # Programme is currently airing: not supported, bail out.
        local_starttime = now
        # immediate = True
        past_recording = False
        xbmcgui.Dialog().ok(addon.getLocalizedString(30050),
                            addon.getLocalizedString(30051))
        return
    elif (local_starttime < now) and (local_endtime < now):
        # Fully in the past: this is the supported catch-up case.
        # immediate = True
        # local_starttime = now
        past_recording = True
    else:
        # Entirely in the future: not supported, bail out.
        # immediate = False
        past_recording = False
        xbmcgui.Dialog().ok(addon.getLocalizedString(30050),
                            addon.getLocalizedString(30051))
        return
    # Output locations: Kodi-visible folder and (possibly different) ffmpeg folder.
    kodi_recordings = xbmc.translatePath(plugin.get_setting('recordings', str))
    ffmpeg_recordings = plugin.get_setting('ffmpeg.recordings', str) or kodi_recordings
    dir = os.path.join(kodi_recordings, folder)
    ffmpeg_dir = os.path.join(ffmpeg_recordings, folder)
    xbmcvfs.mkdirs(dir)
    path = os.path.join(dir, filename)
    json_path = path + '.json'
    nfo_path = path + '.nfo'
    jpg_path = path + '.jpg'
    path = path + '.' + plugin.get_setting('ffmpeg.ext', str)
    path = path.replace("\\", "\\\\")
    ffmpeg = ffmpeg_location()
    if not ffmpeg:
        return
    # Get artwork
    if plugin.get_setting('artwork', bool):
        artwork_url = xbmc.getInfoLabel("ListItem.Icon")
        r = requests.get(artwork_url, stream=True)
        if r.status_code == 200:
            # Set decode_content value to True, otherwise the downloaded image file's size will be zero.
            r.raw.decode_content = True
            with open(jpg_path, 'wb') as f:
                shutil.copyfileobj(r.raw, f)
            log('Image sucessfully Downloaded: {}'.format(jpg_path))
        else:
            log('Image Couldn\'t be retreived')
    # Get and write info
    if plugin.get_setting('nfo', bool):
        plot = xbmc.getInfoLabel("ListItem.Plot")
        nfo_nfo = "Channel: {}\nTitle: {}\nStart: {} - End: {}\nPlot: {}".format(
            fchannelname, ftitle, start, stop, plot)
        nfo_nfo += "\n\nDownloaded using IPTV Archive Downloader\nhttps://github.com/tbrek/IPTV-Archive-Downloader"
        f = xbmcvfs.File(nfo_path, 'w')
        write_in_file(f, nfo_nfo)
        f.close()
    # Write JSON
    if plugin.get_setting('json', bool):
        json_nfo = json.dumps(nfo)
        f = xbmcvfs.File(json_path, 'w')
        write_in_file(f, json_nfo)
        f.close()
    # Make sure you're in the right timezone
    time_shift = int(plugin.get_setting('external.m3u.shift', str) or "0")
    utc = int(datetime2timestamp(local_starttime) - (3600 * time_shift))
    lutc = int(datetime2timestamp(local_endtime) - (3600 * time_shift))
    lengthSeconds = lutc - utc
    partLength = int(plugin.get_setting('part.length', str) or "3600")
    # log("Part length: {}s".format(partLength))
    # NOTE(review): `/` yields a float on Python 3, which would break the
    # range() below — this looks like Python 2 code; confirm target runtime.
    numberOfParts = (lutc - utc) / partLength
    # log("Number of parts: {}".format(numberOfParts))
    remainingSeconds = lengthSeconds - (numberOfParts * partLength)
    # log("Remaining seconds: {}".format(remainingSeconds))
    xbmcgui.Dialog().notification("{}: {}".format(
        addon.getLocalizedString(30053), channelname), title, sound=True)
    # Recording hour bits
    for part in range(0, numberOfParts):
        cmd = [ffmpeg]
        start = utc + (part * partLength)
        stop = start + partLength
        duration = partLength
        # log("Recordind part: {}/{}. Start: {}. Stop: {}".format(part,numberOfParts,start,stop))
        # log("Filename: {}_{}".format(filename,part))
        tempFilename = filename + "_" + "{}".format(part)
        cmd, ffmpeg_recording_path = getCmd(start, stop, cmd, past_recording,
                                            url, headers, ffmpeg_dir,
                                            tempFilename, duration)
        # log("Command: {}".format(cmd))
        recordSegment(cmd, ffmpeg_recording_path)
    # Recording remaining minutes
    if remainingSeconds != 0:
        cmd = [ffmpeg]
        start = utc + (partLength * numberOfParts)
        stop = start + remainingSeconds
        # log("Recording remaining seconds: {} from: {}".format(remainingSeconds, start))
        # log("Filename: {}_{}".format(filename,numberOfParts))
        tempFilename = filename + "_" + "{}".format(numberOfParts)
        cmd, ffmpeg_recording_path = getCmd(start, stop, cmd, past_recording,
                                            url, headers, ffmpeg_dir,
                                            tempFilename, remainingSeconds)
        recordSegment(cmd, ffmpeg_recording_path)
        numberOfParts += 1
    # Do you want to concat it all together
    if plugin.get_setting('join.segments', bool):
        # Concating fragments
        ffmpeg_recording_path = os.path.join(
            ffmpeg_dir, filename + '.' + plugin.get_setting('ffmpeg.ext', str))
        temp_file_path = os.path.join(
            ffmpeg_dir,
            filename + '-temp.' + plugin.get_setting('ffmpeg.ext', str))
        tempFile = open(temp_file_path, "wb")
        # Append every segment (filename_N.ts) in sorted order, deleting as we go.
        for fileName in sorted(os.listdir(ffmpeg_dir)):
            if fileName.startswith(filename + "_") and fileName.endswith(".ts"):
                # log("Joining: {}".format(fileName))
                temp = open(ffmpeg_dir + "/" + fileName, "rb")
                # tempFile.write(temp.read())
                shutil.copyfileobj(temp, tempFile)
                temp.close()
                os.remove(ffmpeg_dir + "/" + fileName)
        tempFile.close()
        # Fixing timestamps
        # log("Fixing timestamps from: {}".format(temp_file_path))
        # log("New file: {}".format(ffmpeg_recording_path))
        cmd = [ffmpeg]
        cmd.append("-i")
        cmd.append(temp_file_path)
        probe_cmd = cmd
        cmd = probe_cmd + \
            ["-fflags", "+genpts", "-vcodec", "copy", "-acodec", "copy"]
        if (plugin.get_setting('ffmpeg.pipe', str) == 'true'
                ) and not (windows() and
                           (plugin.get_setting('task.scheduler', str) == 'true')):
            # Pipe mode: ffmpeg writes mpegts to stdout, we stream it to the file.
            cmd = cmd + ['-f', 'mpegts', '-']
        else:
            cmd.append(ffmpeg_recording_path)
        # log("Command: {}".format(cmd))
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=False)
        f = xbmcvfs.File(ffmpeg_recording_path, "w")
        f.write(bytearray(repr(p.pid).encode('utf-8')))
        f.close()
        video = xbmcvfs.File(ffmpeg_recording_path, "w")
        # playing = False
        # Stream ffmpeg's stdout into the final recording file in ~1MB chunks.
        while True:
            data = p.stdout.read(1000000)
            if data:
                video.write(bytearray(data))
            else:
                break
        video.close()
        os.remove(temp_file_path)
    xbmcgui.Dialog().ok(
        addon.getLocalizedString(30054),
        "{}: {} - {}".format(addon.getLocalizedString(30055), channelname, title))
    if do_refresh:
        refresh()
def download_media(url, path, file_name, translations, progress=None):
    """Download ``url`` into ``path``/``file_name`` with a Kodi progress dialog.

    :param url: source URL; may carry extra request headers after a ``|``
        separator as ``key=val&key2=val2`` (URL-quoted values).
    :param path: destination directory (Kodi ``special://`` paths accepted).
    :param file_name: target base name; the extension is appended from the
        URL/response via ``get_extension``.
    :param translations: object exposing an ``i18n`` callable for localized
        message lookups.
    :param progress: a ``PROGRESS`` constant; when ``None`` it is read from
        the ``down_progress`` add-on setting.

    Never raises to the caller: every failure is logged and surfaced as a
    Kodi notification.
    """
    try:
        # FIX: bind i18n before anything that can fail.  The generic handler
        # below calls i18n(...); previously an early failure (e.g. in
        # kodi.get_setting) raised NameError in the handler, hiding the
        # real exception.
        i18n = translations.i18n
        if progress is None:
            progress = int(kodi.get_setting('down_progress'))
        active = not progress == PROGRESS.OFF
        background = progress == PROGRESS.BACKGROUND
        with kodi.ProgressDialog(kodi.get_name(), i18n('downloading') % (file_name), background=background, active=active) as pd:
            # Optional headers ride along after a '|' in the URL.
            try:
                headers = dict([
                    item.split('=')
                    for item in (url.split('|')[1]).split('&')
                ])
                for key in headers:
                    headers[key] = urllib.unquote(headers[key])
            except:
                headers = {}
            if 'User-Agent' not in headers:
                headers['User-Agent'] = BROWSER_UA

            request = urllib2.Request(url.split('|')[0], headers=headers)
            response = urllib2.urlopen(request)

            if 'Content-Length' in response.info():
                content_length = int(response.info()['Content-Length'])
            else:
                content_length = 0  # unknown length -> no percentage shown

            file_name += '.' + get_extension(url, response)
            full_path = os.path.join(path, file_name)
            logger.log('Downloading: %s -> %s' % (url, full_path),
                       log_utils.LOGDEBUG)

            path = kodi.translate_path(xbmc.makeLegalFilename(path))
            # Best-effort directory creation: VFS first, OS fallback; actual
            # existence is verified explicitly right after.
            try:
                try:
                    xbmcvfs.mkdirs(path)
                except:
                    os.makedirs(path)
            except Exception as e:
                logger.log('Path Create Failed: %s (%s)' % (e, path),
                           log_utils.LOGDEBUG)

            if not path.endswith(os.sep):
                path += os.sep
            if not xbmcvfs.exists(path):
                raise Exception(i18n('failed_create_dir'))

            file_desc = xbmcvfs.File(full_path, 'w')
            total_len = 0
            cancel = False
            try:
                # Stream in CHUNK_SIZE pieces so large files never sit fully
                # in memory; bail out promptly on user cancel.
                while True:
                    data = response.read(CHUNK_SIZE)
                    if not data:
                        break
                    if pd.is_canceled():
                        cancel = True
                        break
                    total_len += len(data)
                    if not file_desc.write(data):
                        raise Exception(i18n('failed_write_file'))
                    percent_progress = (
                        total_len
                    ) * 100 / content_length if content_length > 0 else 0
                    logger.log(
                        'Position : %s / %s = %s%%' %
                        (total_len, content_length, percent_progress),
                        log_utils.LOGDEBUG)
                    pd.update(percent_progress)
            finally:
                # FIX: close the file even when read/write raises; the handle
                # previously leaked on error, leaving a locked partial file.
                file_desc.close()

            if not cancel:
                kodi.notify(msg=i18n('download_complete') % (file_name),
                            duration=5000)
                logger.log('Download Complete: %s -> %s' % (url, full_path),
                           log_utils.LOGDEBUG)
    except Exception as e:
        logger.log(
            'Error (%s) during download: %s -> %s' % (str(e), url, file_name),
            log_utils.LOGERROR)
        kodi.notify(msg=i18n('download_error') % (str(e), file_name),
                    duration=5000)
def makeDataDir():
    """Ensure the add-on profile directory exists, creating it if absent."""
    profile_dir_missing = not xbmcvfs.exists(_addon_profile_)
    if profile_dir_missing:
        xbmcvfs.mkdirs(_addon_profile_)
def xmltv():
    '''Rebuild the add-on's `streams` table from the configured M3U playlists.

    Reads up to two playlists (settings suffixed "1" and "2"), parses every
    #EXTINF entry for its tvg-* attributes, then drops and repopulates the
    `streams` table in the profile's xmltv.db.  Progress is reported through
    a background dialog.  Returns None.
    '''
    load_groups = plugin.get_storage('load_groups')
    load_channels = {}
    dialog = xbmcgui.DialogProgressBG()
    dialog.create("IPTV Recorder", get_string("Loading data..."))
    profilePath = xbmc.translatePath(plugin.addon.getAddonInfo('profile'))
    xbmcvfs.mkdirs(profilePath)
    # shifts maps tvg_id -> time shift (hours, float); built here but consumed
    # elsewhere — NOTE(review): appears unused within this function, confirm.
    shifts = {}
    streams_to_insert = []
    # Two playlist slots, selected by the 'external.m3u.<x>' mode setting:
    #   "0" -> take the playlist configured in pvr.iptvsimple (slot 1 only)
    #   "1" -> take it from pvr.iptvarchive (slot 1 only)
    #   "2" -> explicit local file setting
    #   else -> explicit URL setting
    for x in ["1", "2"]:
        dialog.update(0, message=get_string("Finding streams"))
        mode = plugin.get_setting('external.m3u.' + x, str)
        if mode == "0":
            if x == "1":
                try:
                    # m3uPathType "0" means local path, otherwise URL.
                    m3uPathType = xbmcaddon.Addon('pvr.iptvsimple').getSetting(
                        'm3uPathType')
                    if m3uPathType == "0":
                        path = xbmcaddon.Addon('pvr.iptvsimple').getSetting(
                            'm3uPath')
                    else:
                        path = xbmcaddon.Addon('pvr.iptvsimple').getSetting(
                            'm3uUrl')
                except:
                    # pvr.iptvsimple not installed / setting missing.
                    path = ""
            else:
                path = ""
        elif mode == "1":
            if x == "1":
                try:
                    m3uPathType = xbmcaddon.Addon(
                        'pvr.iptvarchive').getSetting('m3uPathType')
                    if m3uPathType == "0":
                        path = xbmcaddon.Addon('pvr.iptvarchive').getSetting(
                            'm3uPath')
                    else:
                        path = xbmcaddon.Addon('pvr.iptvarchive').getSetting(
                            'm3uUrl')
                except:
                    path = ""
            else:
                path = ""
        elif mode == "2":
            path = plugin.get_setting('external.m3u.file.' + x, str)
        else:
            path = plugin.get_setting('external.m3u.url.' + x, str)
        if path:
            # Snapshot the playlist into our own profile before parsing.
            m3uFile = 'special://profile/addon_data/plugin.video.iptv.archive.downloader/channels' + x + '.m3u'
            xbmcvfs.copy(path, m3uFile)
            f = open(xbmc.translatePath(m3uFile), 'rb')
            data = f.read()
            data = data.decode('utf8')
            # User-configured shift (hours) for this slot; a playlist-level
            # tvg-shift in the #EXTM3U header is added on top of it.
            settings_shift = float(
                plugin.get_setting('external.m3u.shift.' + x, str))
            global_shift = settings_shift
            header = re.search('#EXTM3U(.*)', data)
            if header:
                tvg_shift = re.search('tvg-shift="(.*?)"', header.group(1))
                if tvg_shift:
                    tvg_shift = tvg_shift.group(1)
                if tvg_shift:
                    global_shift = float(tvg_shift) + settings_shift
            # Each entry: (attributes line, following URL line).
            channels = re.findall(
                '#EXTINF:(.*?)(?:\r\n|\r|\n)(.*?)(?:\r\n|\r|\n|$)',
                data,
                flags=(re.I | re.DOTALL))
            total = len(channels)
            i = 0
            for channel in channels:
                name = None
                # The display name is everything after the last comma — but
                # only once tvg-* attributes (which may contain commas) are
                # stripped from consideration.
                if ',' in re.sub('tvg-[a-z]+"[^"]*"', '', channel[0],
                                 flags=re.I):
                    name = channel[0].rsplit(',', 1)[-1].strip()
                    # Characters that interfere with later handling are
                    # removed from the display name.
                    name = name.replace('+', '')
                    name = name.replace(':', '')
                    name = name.replace('#', '')
                    #name = name.encode("utf8")
                tvg_name = re.search('tvg-name="(.*?)"', channel[0],
                                     flags=re.I)
                if tvg_name:
                    # Empty attribute values collapse to None.
                    tvg_name = tvg_name.group(1) or None
                #else:
                #tvg_name = name
                tvg_id = re.search('tvg-id="(.*?)"', channel[0], flags=re.I)
                if tvg_id:
                    tvg_id = tvg_id.group(1) or None
                tvg_logo = re.search('tvg-logo="(.*?)"', channel[0],
                                     flags=re.I)
                if tvg_logo:
                    tvg_logo = tvg_logo.group(1) or None
                # Default to the playlist-wide shift; a per-channel
                # tvg-shift (plus the settings shift) overrides it below.
                shifts[tvg_id] = global_shift
                tvg_shift = re.search('tvg-shift="(.*?)"', channel[0],
                                      flags=re.I)
                if tvg_shift:
                    tvg_shift = tvg_shift.group(1)
                if tvg_shift and tvg_id:
                    shifts[tvg_id] = float(tvg_shift) + settings_shift
                url = channel[1]
                # Optional user regex rewrite of stream URLs.
                search = plugin.get_setting('m3u.regex.search', str)
                replace = plugin.get_setting('m3u.regex.replace', str)
                if search:
                    url = re.sub(search, replace, url)
                groups = re.search('group-title="(.*?)"', channel[0],
                                   flags=re.I)
                if groups:
                    groups = groups.group(1) or None
                streams_to_insert.append(
                    (name, tvg_name, tvg_id, tvg_logo, groups, url.strip(),
                     i))
                i += 1
                percent = 0 + int(100.0 * i / total)
                dialog.update(percent, message=get_string("Finding streams"))
    # NOTE(review): disabled migration code kept for reference; it references
    # `conn`, which is only created further down, so it could not run here
    # as-is.
    '''
    missing_streams = conn.execute('SELECT name, tvg_name FROM streams WHERE tvg_id IS null OR tvg_id IS ""').fetchall()
    sql_channels = conn.execute('SELECT id, name FROM channels').fetchall()
    lower_channels = {x[1].lower():x[0] for x in sql_channels}
    for name, tvg_name in missing_streams:
        if tvg_name:
            tvg_id = None
            _tvg_name = tvg_name.replace("_"," ").lower()
            if _tvg_name in lower_channels:
                tvg_id = lower_channels[_tvg_name]
            conn.execute("UPDATE streams SET tvg_id=? WHERE tvg_name=?", (tvg_id, tvg_name))
        elif name.lower() in lower_channels:
            tvg_id = lower_channels[name.lower()]
            conn.execute("UPDATE streams SET tvg_id=? WHERE name=?", (tvg_id, name))
    '''
    # Mark channels whose group the user chose to load.
    # NOTE(review): load_channels is populated but not used afterwards in
    # this function — presumably consumed via storage elsewhere; confirm.
    for _, _, tvg_id, _, groups, _, _ in streams_to_insert:
        if groups in load_groups:
            load_channels[tvg_id] = ""
    dialog.update(0, message=get_string("Creating database"))
    databasePath = os.path.join(profilePath, 'xmltv.db')
    conn = sqlite3.connect(databasePath,
                           detect_types=sqlite3.PARSE_DECLTYPES)
    conn.execute('PRAGMA foreign_keys = ON')
    conn.row_factory = sqlite3.Row
    # Full rebuild: drop and recreate rather than diffing.
    conn.execute('DROP TABLE IF EXISTS streams')
    conn.execute(
        'CREATE TABLE IF NOT EXISTS streams(uid INTEGER PRIMARY KEY ASC, name TEXT, tvg_name TEXT, tvg_id TEXT, tvg_logo TEXT, groups TEXT, url TEXT, tv_number INTEGER)'
    )
    dialog.update(0, message=get_string("Updating database"))
    conn.executemany(
        "INSERT OR IGNORE INTO streams(name, tvg_name, tvg_id, tvg_logo, groups, url, tv_number) VALUES (?, ?, ?, ?, ?, ?, ?)",
        streams_to_insert)
    conn.commit()
    conn.close()
    dialog.update(100, message=get_string("Finished loading data"))
    time.sleep(1)
    dialog.close()
    return
def Download(url,lang):
    # Download a subtitle from a zimuku detail-page URL and return a list
    # containing the local path of the chosen subtitle file (empty list on
    # any failure).  `lang` is accepted but not used in this function.
    # Python 2 code: note the `raise TypeError, "..."` statement below.
    # May rebind the module-global resource host when the site redirects
    # the download link to an absolute URL.
    global ZIMUKU_RESOURCE_BASE
    # Start from a clean temp directory: create it if needed, then purge
    # any leftover files from a previous run.
    if not xbmcvfs.exists(__temp__.replace('\\','/')):
        xbmcvfs.mkdirs(__temp__)
    _dirs, files = xbmcvfs.listdir(__temp__)
    for file in files:
        xbmcvfs.delete(os.path.join(__temp__, file.decode('utf-8')))
    subtitle_list = []
    exts = ( ".srt", ".sub", ".smi", ".ssa", ".ass", ".sup" )
    # Some exts may cause fatal failure/ crash with some coding except from UTF-8.
    supported_archive_exts = ( ".zip", ".7z", ".tar", ".bz2", ".rar", ".gz", ".xz", ".iso", ".tgz", ".tbz2", ".cbr" )
    #self_archive_exts = ( ".zip", ".rar" )
    log( sys._getframe().f_code.co_name ,"Download page: %s" % (url))
    try:
        # Subtitle detail page.
        _headers, data = get_page(url)
        soup = BeautifulSoup(data, 'html.parser')
        url = soup.find("li", class_="dlsub").a.get('href').encode('utf-8')
        if not ( url.startswith('http://') or url.startswith('https://')):
            # Relative link: resolve against the known resource host.
            url = urlparse.urljoin(ZIMUKU_RESOURCE_BASE, url)
        else:
            # Absolute link: remember its scheme://host as the new
            # resource base for subsequent requests.
            ZIMUKU_RESOURCE_BASE = "{host_info.scheme}://{host_info.netloc}".format(host_info=urlparse.urlparse(url))
        log( sys._getframe().f_code.co_name ,"Download links: %s" % (url))
        # Subtitle download-list page.
        _headers, data = get_page(url)
        soup = BeautifulSoup(data, 'html.parser')
        links = soup.find("div", {"class":"clearfix"}).find_all('a')
    except:
        # Log the failing frame/line from the traceback and give up.
        log( sys.exc_info()[2].tb_frame.f_code.co_name, "Error (%d) [%s]" % ( sys.exc_info()[2].tb_lineno, sys.exc_info()[1] ), level=xbmc.LOGERROR )
        return []
    filename, data = DownloadLinks(links, url)
    if filename == '':
        # No file received.
        return []
    if filename.endswith(exts):
        # Plain subtitle file: store it and return directly.
        tempfile = store_file(filename, data)
        subtitle_list.append(tempfile)
    elif filename.endswith(supported_archive_exts):
        tempfile = store_file(filename, data)
        # libarchive requires the access to the file, so sleep a while to ensure the file.
        xbmc.sleep(500)
        # Import here to avoid waste.
        import zimuku_archive
        # NOTE(review): `list` shadows the builtin from here on.
        archive_path, list = zimuku_archive.unpack(tempfile)
        if len(list) == 1:
            # Single file in the archive: no need to ask the user.
            subtitle_list.append( os.path.join( archive_path, list[0] ).replace('\\','/'))
        elif len(list) > 1:
            # hack to fix encoding problem of zip file in Kodi 18
            # ('PK' magic identifies a zip; re-decode CP437 names as GBK).
            if __kodi__['major'] >= 18 and data[:2] == 'PK':
                try:
                    dlist = [x.encode('CP437').decode('gbk') for x in list]
                except:
                    dlist = list
            else:
                dlist = list
            # Dialog title is user-facing Chinese text:
            # "Please choose the subtitle inside the archive".
            sel = xbmcgui.Dialog().select('请选择压缩包中的字幕', dlist)
            if sel == -1:
                # Dialog cancelled: default to the first entry.
                sel = 0
            subtitle_list.append( os.path.join( archive_path, list[sel] ).replace('\\','/'))
    else:
        log(sys._getframe().f_code.co_name, "Unsupported file: %s" % (filename), level=xbmc.LOGWARNING)
        raise TypeError, "Unsupported file compressed format! Please try another subtitle."
    if len(subtitle_list) > 0:
        log(sys._getframe().f_code.co_name, "Get subtitle file: %s" % (subtitle_list[0]), level=xbmc.LOGINFO)
    return subtitle_list