def getDBlocation(retvar):
    """Resolve the on-disk location of the 'tv' / 'movie' database files.

    Three folders are involved: the addon default (``dbpath``), the folder
    recorded on the previous run (``old_dbfolder`` config) and an optional
    user-selected custom folder (``dbfolder`` setting).  When the folder
    changed since last run, existing databases are migrated; when a custom
    folder is active, the newer of original/custom copies is kept.

    :param retvar: 'tv' or 'movie' — selects which database path to return
    :return: full path to the requested database file
    """
    # Is the user-defined custom database folder enabled?
    custdb = addon.getSetting('customdbfolder') == 'true'
    old_dbpath = xbmc.translatePath(getConfig('old_dbfolder')).decode('utf-8')
    cur_dbpath = dbpath
    if not old_dbpath:
        # first run: nothing recorded yet, assume the default folder
        old_dbpath = cur_dbpath
    if custdb:
        cur_dbpath = xbmc.translatePath(addon.getSetting('dbfolder')).decode('utf-8')
    else:
        # keep the setting in sync with the default path
        addon.setSetting('dbfolder', dbpath)
    orgDBfile = {'tv': os.path.join(dbpath, 'tv.db'), 'movie': os.path.join(dbpath, 'movies.db')}
    oldDBfile = {'tv': os.path.join(old_dbpath, 'tv.db'), 'movie': os.path.join(old_dbpath, 'movies.db')}
    DBfile = {'tv': os.path.join(cur_dbpath, 'tv.db'), 'movie': os.path.join(cur_dbpath, 'movies.db')}
    if old_dbpath != cur_dbpath:
        # folder changed since the last run -> migrate databases if present
        Log('DBPath changed')
        if xbmcvfs.exists(oldDBfile['tv']) and xbmcvfs.exists(oldDBfile['movie']):
            if not xbmcvfs.exists(cur_dbpath):
                xbmcvfs.mkdir(cur_dbpath)
            # only copy when the target databases are not already there
            if not xbmcvfs.exists(DBfile['tv']) or not xbmcvfs.exists(DBfile['movie']):
                copyDB(oldDBfile, DBfile)
        writeConfig('old_dbfolder', cur_dbpath)
    if custdb:
        # Sum of the two mtimes is used as a crude "which copy is newer"
        # marker; refresh the custom copies when the originals are newer.
        org_fileacc = int(xbmcvfs.Stat(orgDBfile['tv']).st_mtime() + xbmcvfs.Stat(orgDBfile['movie']).st_mtime())
        cur_fileacc = int(xbmcvfs.Stat(DBfile['tv']).st_mtime() + xbmcvfs.Stat(DBfile['movie']).st_mtime())
        if org_fileacc > cur_fileacc:
            copyDB(orgDBfile, DBfile, True)
    return DBfile[retvar]
def WriteDomToXmlFile(self, dom, filepath, oldfilestat):
    """Serialize *dom* to *filepath*, guarding against concurrent edits.

    If *oldfilestat* (the Stat taken when the file was read) is supplied,
    the current on-disk size and mtime must still match it, otherwise a
    FileWriteCollision is raised.  After writing, the file size must match
    the XML length or a FileWriteCollision is raised as well.

    :param dom: ElementTree to serialize
    :param filepath: destination path
    :param oldfilestat: xbmcvfs.Stat snapshot from read time, or falsy to skip checks
    :return: result of self.writeFile, or False if serialization failed
    :raises self.FileWriteCollision: when the file changed since it was read
    """
    root = dom.getroot()
    self.prettyPrintXML(root)
    xml = ET.tostring(root, encoding='UTF-8')
    if not xml:
        xbmc.log("{0} XML creation failed".format(addon_name), xbmc.LOGDEBUG)
        return False
    #if tracing: xbmc.log("{0} created xml is {1}".format(addon_name, str(xml)), xbmc.LOGDEBUG)
    if oldfilestat:
        # compare the snapshot taken at read time with the file's state now
        currentfilestat = xbmcvfs.Stat(filepath)
        # if tracing: xbmc.log("%s %s size read %d now %d" % (addon_name, filepath,currentfilestat.st_size(),oldfilestat.st_size() ),xbmc.LOGDEBUG)
        if currentfilestat.st_size() != oldfilestat.st_size():
            raise self.FileWriteCollision("size changed since read")
        # if tracing: xbmc.log("%s %s modified time read %d now %d" % (addon_name, filepath,currentfilestat.st_mtime(), oldfilestat.st_mtime() ),xbmc.LOGDEBUG)
        if currentfilestat.st_mtime() != oldfilestat.st_mtime():
            raise self.FileWriteCollision("modified time changed since read")
    result = self.writeFile(filepath, xml)
    # post-write sanity check: on-disk size must equal the XML byte length
    readStat = xbmcvfs.Stat(filepath)
    # if tracing: xbmc.log("%s %s file size %d vs, the XML %d" % (addon_name,filepath,readStat.st_size(),len(xml)),xbmc.LOGDEBUG)
    if readStat.st_size() != len(xml):
        raise self.FileWriteCollision("after-write file size not same as the XML")
    if result:
        xbmc.log("{0} succesfully updated {1}".format(addon_name, filepath), xbmc.LOGDEBUG)
    return result
def isdir(folder):
    """Return True if *folder* is a directory.

    Checks the path as given first; if that fails (e.g. a special:// path
    that must be resolved), it is run through xbmc.translatePath and
    checked again.

    Fixes over the original:
    - removed the unused ``import utils``
    - the first Stat is now guarded, so a non-existent raw path falls
      through to the translated check instead of raising
    - the translated path was previously passed through translatePath a
      second time inside Stat (redundant double translation)

    :param folder: directory path, optionally with a trailing separator
    :return: True if the path resolves to a directory
    """
    import stat
    import xbmc
    if folder.endswith('\\') or folder.endswith('/'):
        folder = folder[:-1]
    try:
        if stat.S_ISDIR(xbmcvfs.Stat(folder).st_mode()):
            return True
    except Exception:
        pass  # raw path not statable; try the translated form below
    folder = xbmc.translatePath(folder)
    if folder.endswith('\\') or folder.endswith('/'):
        folder = folder[:-1]
    return stat.S_ISDIR(xbmcvfs.Stat(folder).st_mode())
def recordAction(action):
    """Append *action* with a timestamp to the action log, if enabled.

    Rotates the log when it exceeds ~1MB (roughly 10000 entries): the old
    rotated copy is deleted and the current log renamed in its place.
    All failures are logged via errorTrace and never propagate.

    :param action: text to record
    """
    log = getActionLogName(False)
    old_log = getActionLogName(True)
    addon = xbmcaddon.Addon("service.zomboided.tools")
    if addon.getSetting("enable_action_log") == "true":
        try:
            if xbmcvfs.exists(log):
                size = xbmcvfs.Stat(log).st_size()
                if size > 1024000:
                    # Limit log files to 1MB...this allow for ~10000 entries
                    debugTrace("Action log size is " + str(size) + ", starting new action log")
                    if xbmcvfs.exists(old_log):
                        xbmcvfs.delete(old_log)
                    xbmcvfs.rename(log, old_log)
        except Exception as e:
            errorTrace("common.py", "Couldn't manage existing action log file")
            errorTrace("common.py", str(e))
        try:
            # 'with' guarantees the handle is closed even if the write raises
            # (the original leaked the handle on a failed write)
            with open(log, 'a+') as log_file:
                time = datetime.datetime.fromtimestamp(now())
                log_file.write(str(time) + " " + action + "\n")
        except Exception as e:
            errorTrace("common.py", "Couldn't record action")
            errorTrace("common.py", str(e))
def checkCache(self, cache_name, life=3600):
    """Return cached data when the cache entry is younger than *life* seconds.

    :param cache_name: name of the cache file (appended to cache_location)
    :param life: max age in seconds before the entry is considered stale
    :return: (success, data) tuple from readCache, or (False, None) when
             the cache is disabled, missing, or stale.

    Fixes over the original:
    - the stale branch used to fall off the end and implicitly return
      bare None, breaking callers that unpack a (flag, data) tuple;
      it now returns (False, None) like every other failure path
    - the repeated ``self.cache_location + cache_name`` concatenation
      is hoisted into one variable
    """
    if self.cache_location is None:
        return (False, None)
    cache_path = self.cache_location + cache_name
    if not xbmcvfs.exists(cache_path):
        printDebug.debug("CACHE [%s]: does not exist" % cache_name)
        return (False, None)
    printDebug.debug("CACHE [%s]: exists" % cache_name)
    now = int(round(time.time(), 0))
    modified = int(xbmcvfs.Stat(cache_path).st_mtime())
    printDebug.debug("CACHE [%s]: mod[%s] now[%s] diff[%s]"
                     % (cache_name, modified, now, now - modified))
    if (modified < 0) or (now - modified) > life:
        # stale (or bogus mtime): remove the entry and report a miss
        printDebug.debug("CACHE [%s]: too old, delete" % cache_name)
        if xbmcvfs.delete(cache_path):
            printDebug.debug("CACHE [%s]: deleted" % cache_name)
        else:
            printDebug.debug("CACHE [%s]: not deleted" % cache_name)
        return (False, None)
    printDebug.debug("CACHE [%s]: current" % cache_name)
    return self.readCache(cache_name)
def isUpdated(self, channelsLastUpdated, programLastUpdate):
    """Report whether the XMLTV source must be (re)loaded.

    Always True when no previous update timestamp exists or the file is
    gone; otherwise True only if the file was modified afterwards.
    """
    if channelsLastUpdated is None or not xbmcvfs.exists(self.xmltvFile):
        return True
    mtime = xbmcvfs.Stat(self.xmltvFile).st_mtime()
    return datetime.datetime.fromtimestamp(mtime) > channelsLastUpdated
def _optimize_file_size(self):
    """Trim old cache rows once the DB file exceeds max_file_size_kb.

    Keeps the newest rows and deletes everything older (ORDER BY time
    DESC, OFFSET crop_count selects every row past the crop_count newest).

    Fixes over the original:
    - ``crop_count = max(item_count, item_count - 150)`` always evaluated
      to ``item_count``, so the DELETE matched zero rows and the whole
      optimization was a no-op; ``max(0, item_count - 150)`` drops the
      150 oldest rows per pass as intended
    - ``int(None)`` raises TypeError, not ValueError, so that case is
      now caught too
    """
    if self.max_file_size_kb <= 0:
        return  # size limiting disabled
    if not xbmcvfs.exists(self.filename):
        return
    file_size_kb = (xbmcvfs.Stat(self.filename).st_size() // 1024)
    if file_size_kb >= self.max_file_size_kb:
        self._open()
        result = self._execute(
            False, 'SELECT COUNT(key) FROM %s' % self.table_name
        )
        item_count = result.fetchone()[0]
        try:
            item_count = int(item_count)
        except (TypeError, ValueError):
            item_count = 0
        # keep the newest (item_count - 150) rows, never a negative count
        crop_count = max(0, item_count - 150)
        result = self._execute(
            True,
            'DELETE FROM %s WHERE key in '
            '(SELECT key FROM %s ORDER BY time DESC LIMIT -1 OFFSET %d)'
            % (self.table_name, self.table_name, crop_count)
        )
        self._close()
def isfile(filename):
    """Return True when *filename* exists and is a regular file."""
    if not exists(filename):
        #raise Exception('sfile.isfile error %s does not exists' % filename)
        return False
    import stat
    mode = xbmcvfs.Stat(filename).st_mode()
    return stat.S_ISREG(mode)
def isdir(folder):
    """Return True when *folder* exists and is a directory."""
    if not exists(folder):
        #raise Exception('sfile.isdir error %s does not exists' % folder)
        return False
    import stat
    mode = xbmcvfs.Stat(folder).st_mode()
    return stat.S_ISDIR(mode)
def DB_Path_Check(db_path):
    """
If you need to find out the current "real" database in use then this is the function
for you. It will scan for a specific database type (e.g. addons) and return the
path to the one which was last updated. This is particularly useful if the system
has previously updated to a newer version rather than a fresh install or if they've
installed a "build" which contained old databases.

CODE: DB_Path_Check(db_path)

AVAILABLE VALUES:

    (*) db_path  -  This is the string the database starts with. If you want to find
    the path for the addons*.db you would use "addons" as the value, if you wanted to
    find the path of the MyVideos*.db you would use "myvideos" etc. - it is not case
    sensitive.

EXAMPLE CODE:
dbpath = koding.DB_Path_Check(db_path='addons')
dialog.ok('ADDONS DB','The path to the current addons database is:',dbpath)
~"""
    finalfile = 0
    # Fix: initialise so a run with no matching *.db returns None instead
    # of crashing with NameError on an unbound 'gooddb'.
    gooddb = None
    dirs, databasepath = xbmcvfs.listdir(DATABASE)
    for item in databasepath:
        if item.lower().endswith('.db') and item.lower().startswith(db_path.lower()):
            mydb = os.path.join(DATABASE, item)
            lastmodified = xbmcvfs.Stat(mydb).st_mtime()
            if lastmodified > finalfile:
                # newest-modified database wins
                finalfile = lastmodified
                gooddb = mydb
    if gooddb is None:
        return None
    return Physical_Path(gooddb)
def serve_image(self):
    """Serve an image over HTTP for the current request path.

    When PIL is available the image is built dynamically (JPEG);
    otherwise the static addon icon (PNG) is streamed as a fallback.
    Writes status line, headers and body to self.wfile.
    """
    if pil_loaded:
        # dynamic path: render the image for self.path in memory
        image_bytes = build_image(self.path)
        self.send_response(200)
        self.send_header('Content-type', 'image/jpeg')
        self.send_header('Content-Length', str(len(image_bytes)))
        self.end_headers()
        self.wfile.write(image_bytes)
    else:
        # fallback: stream the bundled addon icon from disk
        image_path = xbmc.translatePath(
            "special://home/addons/plugin.video.jellycon/icon.png").decode(
            'utf-8')
        self.send_response(200)
        self.send_header('Content-type', 'image/png')
        # raw mtime used as the Last-Modified value (not RFC 1123 formatted)
        modified = xbmcvfs.Stat(image_path).st_mtime()
        self.send_header('Last-Modified', "%s" % modified)
        image = xbmcvfs.File(image_path)
        size = image.size()
        self.send_header('Content-Length', str(size))
        self.end_headers()
        self.wfile.write(image.readBytes())
        image.close()
        del image
def _getThemesToUpload(self, id, themes):
    """Filter *themes* down to the files eligible for upload.

    A theme is skipped when its media type (video/audio) is globally
    disabled, when it exceeds the per-type size cap (100MB for video,
    20MB for audio), or when it is implausibly small (<19460 bytes).
    Returns the surviving theme paths.
    """
    eligible = []
    for theme in themes:
        if Settings.isVideoFile(theme):
            # all videos may be disabled globally
            if not self.isVideoEnabled:
                continue
            sizeLimit = 104857600  # anything over 100MB is too large for a theme
        else:
            # all audio may be disabled globally
            if not self.isAudioEnabled:
                continue
            sizeLimit = 20971520  # audio files have a smaller 20MB limit
        fileSize = xbmcvfs.Stat(theme).st_size()
        if fileSize > sizeLimit:
            log("UploadThemes: Theme %s too large %s" % (theme, fileSize))
            continue
        if fileSize < 19460:
            log("UploadThemes: Theme %s too small %s" % (theme, fileSize))
            continue
        # passed every exclusion rule - keep it
        eligible.append(theme)
    return eligible
def get_cache(key, ttl=None):
    """ Get an item from the cache

    :type key: list[str]  # joined with '.' to form the cache file name
    :type ttl: int        # max age in seconds; None disables expiry
    :return: the decoded JSON value, or None on miss/expiry/decode error
    """
    import time
    fullpath = os.path.join(get_cache_path(), '.'.join(key))
    if not xbmcvfs.exists(fullpath):
        return None
    if ttl and time.mktime(
            time.localtime()) - xbmcvfs.Stat(fullpath).st_mtime() > ttl:
        return None
    import json
    fdesc = xbmcvfs.File(fullpath, 'r')
    try:
        value = json.load(fdesc)
        _LOGGER.debug('Fetching %s from cache', fullpath)
        return value
    except (ValueError, TypeError):
        return None
    finally:
        # Fix: the handle used to leak when json.load raised
        fdesc.close()
def _setSkinArtistImages(self, artist):
    """Collect image URLs for *artist* from all providers, download any
    not already cached, and periodically refresh the skin slideshow.

    Aborts early whenever playback content changes or the addon
    configuration is reloaded mid-run.

    :param artist: artist name used for provider lookups
    """
    common.debug("Collecting images for artist: %s" % str(artist))
    images = []
    params = {}
    kontor = 0  # count of successfully downloaded images
    params['lang'] = self.__BIOLANGUAGE
    params['artist'] = artist
    params['infodir'] = self.dir_cache
    params['exclusionsfile'] = os.path.join(self.dir_cache, "_exclusions.nfo")
    for key in self.PROVIDERS.keys():
        if self._isPlaybackChanged():
            common.debug(
                "Cancel collecting images due to the change of player content"
            )
            return
        common.debug('Identifying images by provider: [%s]' % key)
        # per-provider settings (fetch-all flag and API key)
        params['getall'] = common.setting(key + "_all")
        params['clientapikey'] = common.setting(key + "_apikey")
        content = self.PROVIDERS[key].getImageList(params)
        if content is not None and len(content) > 0:
            images.extend(content)
        if self._reload:
            # settings changed mid-run: stop querying further providers
            self._reload = False
            common.debug(
                "Cancel collecting images due to the addon configuration update"
            )
            break
    common.trace("Downloading images for artist [%s]" % str(artist))
    _, cachefiles = xbmcvfs.listdir(self.dir_cache)
    for url in images:
        if self._isPlaybackChanged():
            common.debug(
                "Cancel downloading images due to the change of player content"
            )
            break
        common.trace('Checking image URL: %s' % url)
        # cache file name is a hash of the URL plus its image extension
        cachepath = utilities.ItemHashWithPath(
            url, self.dir_cache) + utilities.ImageType(url)
        if os.path.split(
                cachepath
        )[1] not in cachefiles and not xbmc.abortRequested and not self._isPlaybackChanged(
        ):
            common.trace('Downloading image file: %s' % cachepath)
            urldata = common.urlcall(url, output='binary', certver=self.__SSLCHECK)
            success = utilities.WriteFile(urldata, cachepath) if urldata else False
            if success and xbmcvfs.Stat(cachepath).st_size() < 999:
                # files under ~1KB are treated as bogus downloads and removed
                utilities.DeleteFile(cachepath)
            elif success:
                kontor += 1
                # refresh the slideshow every 5 downloads, or immediately
                # after the first image when the cache held none before
                if (kontor % 5 == 0) or (kontor == 1 and len([
                        f for f in cachefiles if os.path.splitext(f)[1] != ".nfo"
                ]) == 0):
                    self._setSkinSlideshow(None, self.dir_cache)
    common.trace("Images setup is done")
def check_date(file, cache_days=3):
    """Return True while *file* is younger than *cache_days* days.

    :param file: path to the cached file (must exist)
    :param cache_days: max age in days before the cache is considered
        stale (new parameter; default preserves the old hard-coded 3)
    :return: True when still fresh, False otherwise (the original
        implicitly returned None on the stale path)
    """
    st = xbmcvfs.Stat(file)
    modified = st.st_mtime()
    current = round(time.time())
    age = current - modified
    if (age / 3600) < 24 * cache_days:
        return True
    return False
def getfiledatetime(self, filename):
    """Return *filename*'s modification time as a datetime.

    Falls back to the sentinel string "0000-00-00 00:00:00" when the
    file cannot be stat'ed.

    :param filename: path to inspect
    :return: datetime on success, sentinel str on failure
    """
    filedatetime = "0000-00-00 00:00:00"
    try:
        filedatetime = datetime.datetime.fromtimestamp(
            xbmcvfs.Stat(filename).st_mtime())
    except Exception:
        # narrowed from bare 'except:', which also swallowed
        # SystemExit/KeyboardInterrupt
        pass
    return filedatetime
def get_mtime(self, filename):
    """
    Returns last modification time.

    :rtype: int Timestamp
    """
    full_path = os.path.join(self.path, filename)
    return xbmcvfs.Stat(full_path).st_mtime()
def load(self):
    """Load persisted JSON state from self.filename into self._data.

    An absent or empty file yields an empty dict.

    Fix: the debug message contained no '{filename}' placeholder, so the
    .format(filename=...) call was a silent no-op and the log never showed
    which store was loaded.
    """
    if xbmcvfs.exists(self.filename) and xbmcvfs.Stat(self.filename).st_size() > 0:
        with open(self.filename, 'r') as jsonfile:
            self._data = json.load(jsonfile)
        logger.log_debug('JSONStore Load |{filename}|'.format(
            filename=self.filename.encode("utf-8")))
    else:
        self._data = dict()
def fetchFile(self):
    """Fetch self.fileUrl into self.filePath when the remote copy is newer.

    Downloads (or copies, for TYPE_LOCAL_COPY sources) into a temporary
    file first and atomically swaps it in only when it looks valid
    (>10 bytes).

    :return: FETCH_OK on successful download, FETCH_NOT_NEEDED when the
             local copy is current, FETCH_ERROR when the download produced
             an implausibly small file.
    """
    retVal = self.FETCH_NOT_NEEDED
    fetch = False
    logger.debug(
        'Remote File : [%s] LocalFile : [%s]' % (self.fileUrl, self.filePath),
        __name__)
    try:
        if not os.path.exists(
                self.filePath):  # always fetch if file doesn't exist!
            fetch = True
        else:
            # compare remote vs. local modification time; fetch only when
            # the remote copy is strictly newer
            self.fileStat = xbmcvfs.Stat(self.fileUrl)
            remoteModTime = self.fileStat.st_mtime()
            modTime = os.path.getmtime(self.filePath)
            logger.debug(
                'Mod Time : Remote File [%s] Local File [%s]' %
                (datetime.datetime.fromtimestamp(remoteModTime),
                 datetime.datetime.fromtimestamp(modTime)), __name__)
            if (remoteModTime > modTime):
                fetch = True
            else:
                fetch = False
        if fetch:
            # stage into a temp file so a failed transfer never clobbers
            # the existing local copy
            tmpFile = os.path.join(self.basePath, self.fileName + '_tmp')
            if self.fileType == self.TYPE_LOCAL_COPY:
                logger.debug(
                    'file is in remote location: %s' % self.fileUrl, __name__)
                if not xbmcvfs.copy(self.fileUrl, tmpFile):
                    logger.error('Remote file couldn\'t be copied: %s' %
                                 self.fileUrl)
            else:
                f = open(tmpFile, 'wb')
                logger.debug('file is on the internet: %s' % self.fileUrl,
                             __name__)
                data = client.request(self.fileUrl)
                f.write(data)
                f.close()
            logger.debug(
                'file %s size %s' %
                (self.fileName, os.path.getsize(tmpFile)), __name__)
            # anything <= 10 bytes is treated as a failed transfer
            if os.path.getsize(tmpFile) > 10:
                if os.path.exists(self.filePath):
                    os.remove(self.filePath)
                os.rename(tmpFile, self.filePath)
                retVal = self.FETCH_OK
                logger.debug('file %s was downloaded' % self.filePath,
                             __name__)
            else:
                retVal = self.FETCH_ERROR
        else:
            logger.debug('not fetching : %s' % self.fileUrl, __name__)
    except:
        # best-effort: log the traceback but never raise to the caller
        import traceback
        traceback.print_exc()
        pass
    return retVal
def get_file_size(filename, is_rar):
    """Return the size of *filename* in bytes, or -1 on failure.

    :param filename: path of the file (or rar archive)
    :param is_rar: True to read the size from inside a rar archive
    :return: size in bytes; -1 when the size cannot be determined
    """
    try:
        if is_rar:
            file_size = get_file_size_from_rar(filename)
            # 'is None' instead of '== None'; None means the rar helper failed
            return -1 if file_size is None else file_size
        else:
            return xbmcvfs.Stat(filename).st_size()
    except Exception:
        # narrowed from bare 'except:'; stat/rar failures yield -1 by design
        return -1
def _get_folder_list(self, path):
    """List the direct children of *path* as name/path/size dicts.

    Directory entries get a trailing separator appended (via
    tools.ensure_path_is_dir) before joining, while 'name' is reported
    without it.
    """
    folders, files = xbmcvfs.listdir(path)
    entries = [tools.ensure_path_is_dir(entry) for entry in folders]
    entries.extend(files)
    listing = []
    for entry in entries:
        display_name = entry[:-1] if entry.endswith(("\\", "/")) else entry
        full_path = os.path.join(path, entry)
        listing.append({
            "name": display_name,
            "path": full_path,
            "size": xbmcvfs.Stat(full_path).st_size()
        })
    return listing
def file_info(path, silent=False, vfs=True):
    """Info for a file or folder (an ``ls -l``-style line).

    @param path: path
    @type path: str
    @rtype: str
    @return: info line "<type><perms> <nlink> <uid> <gid> <size> <mtime> <path>",
             or False on any error (or when the vfs backend is unavailable)
    """
    path = encode(path)
    try:
        if xbmc_vfs and vfs:
            if not exists(path):
                return False
            import datetime
            stat = xbmcvfs.Stat(path)
            # Lookup tables: octal permission digit -> rwx triplet, and
            # leading mode digits -> file-type character
            dic_perm = {'7': 'rwx', '6': 'rw-', '5': 'r-x', '4': 'r--', '3': '-wx', '2': '-w-', '1': '--x', '0': '---'}
            dic_type = {'01': '-', '02': 'l', '03': 'm', '04': 'd'}
            perm = str(oct(stat.st_mode()))
            # Normalize the octal representation across Py2 ('...L') and Py3 ('0o...')
            if perm.startswith('0o'): perm = perm.replace('o', '')
            if perm.endswith('L'): perm = perm[:-1]
            file_type = dic_type.get(perm[:2], '')  # map leading digits through the type table
            perm = perm[-3:]
            perm = ''.join(dic_perm.get(x, x) for x in perm)  # map each digit through the permission table
            try:
                # pwd is NOT available on every platform (e.g. Windows)
                import pwd
                uid = find_single_match(str(pwd.getpwuid(stat.st_uid())), "pw_name='([^']+)'")
                if not uid: uid = stat.st_uid()
                gid = find_single_match(str(pwd.getpwuid(stat.st_gid())), "pw_name='([^']+)'")
                if not gid: gid = stat.st_gid()
            except:
                # fall back to numeric ids when name resolution fails
                uid = stat.st_uid()
                gid = stat.st_gid()
            try:
                # the timestamp conversion may fail on bogus mtimes
                mod_time = stat.st_mtime()
                mod_time = datetime.datetime.fromtimestamp(mod_time).strftime('%Y-%m-%d %H:%M')
            except:
                mod_time = '0000-00-00 00:00'  # fallback date on error
            # Assemble the response line
            res = '%s%s %s %s %s %s %s %s' % (file_type, perm, stat.st_nlink(), uid, gid, stat.st_size(), mod_time, path)
            # Encode the result; it is unicode on Py2.  Windows + Py2 needs an extra check
            res = encode(res)
            if not PY3 and isinstance(res, unicode):
                res = res.encode("utf-8", "ignore")
            return res
        # no vfs backend: deliberately jump to the error handler below
        raise
    except:
        logger.error("File_Stat no soportado: %s" % path)
        if not silent:
            logger.error(traceback.format_exc())
        return False
def clean_cache():
    """ delete cache items that have expired """
    subdirs, files = xbmcvfs.listdir(CACHE_DIR)
    for name in files:
        full_path = os.path.join(CACHE_DIR, name)
        modified = datetime.fromtimestamp(xbmcvfs.Stat(full_path).st_mtime())
        # anything older than the caching window gets removed
        if datetime.now() - modified > CACHING_DURATION:
            xbmcvfs.delete(full_path)
def isdir(folder):
    """Return True when *folder* exists and is a directory.

    A single trailing path separator (either flavour) is stripped first.
    """
    if folder.endswith(('\\', '/')):
        folder = folder[:-1]
    if not exists(folder):
        #raise Exception('sfile.isdir error %s does not exists' % folder)
        return False
    import stat
    return stat.S_ISDIR(xbmcvfs.Stat(folder).st_mode())
def isfile(path):
    """Return True when *path* is a regular file.

    Uses the xbmcvfs stat when available; falls back to os.path.isfile
    when the vfs helpers are not importable/defined.
    """
    if not exists(path):
        return False
        #raise Exception('sfile.isFile error %s does not exists' % path)
    try:
        import stat
        mode = xbmcvfs.Stat(xbmcvfs_path(path)).st_mode()
        return stat.S_ISREG(mode)
    except (ImportError, NameError):
        # vfs machinery unavailable -> plain filesystem check
        return os.path.isfile(get_path(path))
def _refreshJobs(self):
    """Reload the cron job list when the cron file changed on disk."""
    cron_stat = xbmcvfs.Stat(xbmcvfs.translatePath(self.CRONFILE))
    if cron_stat.st_mtime() <= self.last_read:
        return
    utils.log("File update, loading new jobs")
    # pick up the new definitions and remember when we read them
    self.jobs = self._readCronFile()
    self.last_read = time.time()
def GetFolderSize(path):
    """Recursively sum the sizes (bytes) of all files below *path*."""
    total = 0.0
    subdirs, files = xbmcvfs.listdir(path)
    for name in subdirs:
        # descend into each sub-folder
        total += GetFolderSize(os.path.join(path, name))
    for name in files:
        total += xbmcvfs.Stat(os.path.join(path, name)).st_size()
    return total
def _refreshJobs(self):
    """Reload cron jobs if cron.xml was modified after the last read."""
    cron_path = xbmc.translatePath(utils.data_dir() + "cron.xml")
    if xbmcvfs.Stat(cron_path).st_mtime() > self.last_read:
        utils.log("File update, loading new jobs", xbmc.LOGDEBUG)
        # re-read the job definitions and remember when we did
        self.jobs = self._readCronFile()
        self.last_read = time.time()
def makeSTRM(filepath, filename, url):
    """Write *url* into a .strm file under settings.STRM_LOC.

    Creates the directory tree level by level (needed for smb/nfs
    shares), writes the URL as UTF-8 bytes, and for 'Audio' paths
    returns the file's mtime alongside the path.

    :param filepath: relative folder below STRM_LOC
    :param filename: strm file name without extension
    :param url: plugin URL to store in the file
    :return: (fullpath, mtime) — mtime is None unless the path contains 'Audio'
    """
    addon_log('makeSTRM')
    name_orig, plugin_url = parseMediaListURL(url)
    mtime = None
    filepath = multiRstrip(filepath)
    filepath = completePath(os.path.join(settings.STRM_LOC, filepath))
    if not xbmcvfs.exists(filepath):
        # split on whichever separator flavour the path uses, then create
        # each missing level one at a time (mkdirs is unreliable on network shares)
        dirs = filepath.replace(settings.STRM_LOC, '').split('\\') if filepath.find('\\') != -1 else filepath.replace(settings.STRM_LOC, '').split('/')
        dirs = filter(None, dirs)
        filepath = settings.STRM_LOC
        for dir in dirs:
            filepath = completePath(os.path.join(filepath, dir))
            if not xbmcvfs.exists(filepath):
                xbmcvfs.mkdir(filepath)
    if not settings.STRM_LOC.startswith('smb:') and not settings.STRM_LOC.startswith('nfs:'):
        # local target: normalize through translatePath
        fullpath = '{0}.strm'.format(py2_decode(os.path.normpath(xbmc.translatePath(os.path.join(filepath, filename)))))
    else:
        # network share: keep the vfs-style path untouched
        fullpath = '{0}{1}.strm'.format(filepath, filename)
    # if xbmcvfs.exists(fullpath):
    #     if settings.CLEAR_STRMS == 'true':
    #         x = 0
    #         #xbmcvfs.delete(fullpath)
    #     else:
    #         return fullpath
    # if fullpath.find('Audio') > 0:
    #     try:
    #         if xbmcvfs.exists(fullpath):
    #             return fullpath, None
    #     except:
    #         if xbmcvfs.exists(fullpath):
    #             return fullpath, None
    try:
        # open (retried once below on failure)
        fullpath = fullpath
        fle = xbmcvfs.File(fullpath, 'w')
    except:
        fullpath = fullpath
        fle = xbmcvfs.File(fullpath, 'w')
    fle.write(bytearray(url, 'utf-8'))
    fle.close()
    del fle
    try:
        # only Audio strm files report their mtime back to the caller
        if fullpath.find('Audio') > 0:
            mtime = xbmcvfs.Stat(fullpath).st_mtime()
    except OSError:
        pass
    return fullpath, mtime
def login(self, user, password, forceLogin=False): """ Logs in to the platform, fetches cookie headers and checks if the login succeeded :param user: Username/E-Mail :type user: string :param password: Password :type password: string :returns: bool -- Login succeeded """ # check if the suer is already logged in if forceLogin is False and path.isfile(self.session_file): file_time = xbmcvfs.Stat(self.session_file).st_mtime() if (time.time() - file_time) / 3600 < 24 and self.get_session( ).cookies.get('displayname'): return True else: self.clear_session() # get contents of login page res = self.get_session().get(self.constants.get_login_link()) for i in [0, 1]: soup = BeautifulSoup(res.text, 'html.parser') # find all <input/> items in the login form & grep their data payload = {} for item in soup.find(id='login').find_all('input'): if item.attrs.get('name') and ( item.attrs.get('name').startswith('xsrf') or item.attrs.get('name') == 'tid'): payload[item.attrs.get('name')] = item.attrs.get( 'value', '') # overwrite user & password fields with our settings data if i == 0: payload['pw_usr'] = user payload['hidden_pwd'] = '' else: payload['hidden_usr'] = user payload['pw_pwd'] = password # persist the session # payload['persist_session'] = 1 # add empyt sumbit field (it is the value of the button in the page...) payload['pw_submit'] = '' # do the login & read the incoming html <title/> # attribute to determine of the login was successfull res = self.get_session().post(self.constants.get_login_endpoint(), data=payload) success = self._session.cookies.get_dict().get('displayname') if success: self.save_session() return True return False