def Init(self, reset, convert):
    """
    Startup of the database system (legacy API).

    Args:
        reset(bool): if `True` the database file is deleted and regenerated

        convert(bool): passed through to the recursive call in case the
            database file cannot be opened and a full reset is forced
    """
    self.logger.info(
        'Using SQLite version {}, python library sqlite3 version {}',
        sqlite3.sqlite_version, sqlite3.version)
    if not mvutils.dir_exists(self.settings.datapath):
        os.mkdir(self.settings.datapath)
    # remove old versions
    mvutils.file_remove(
        os.path.join(self.settings.datapath, 'filmliste-v1.db'))
    # idiom fix: `reset == True` replaced by `reset is True`
    if reset is True or not mvutils.file_exists(self.dbfile):
        self.logger.info(
            '===== RESET: Database will be deleted and regenerated =====')
        mvutils.file_remove(self.dbfile)
        self.conn = sqlite3.connect(self.dbfile, timeout=60)
        self._handle_database_initialization()
    else:
        try:
            self.conn = sqlite3.connect(self.dbfile, timeout=60)
        except sqlite3.DatabaseError as err:
            self.logger.error(
                'Error while opening database: {}. trying to fully reset the Database...',
                err)
            return self.Init(reset=True, convert=convert)
    # 3x speed-up, check mode 'WAL'
    self.conn.execute('pragma journal_mode=off')
    # that is a bit dangerous :-) but faaaast
    self.conn.execute('pragma synchronous=off')
    self.conn.create_function('UNIX_TIMESTAMP', 0, UNIX_TIMESTAMP)
    self.conn.create_aggregate('GROUP_CONCAT', 1, GROUP_CONCAT)
    return True
def _decompress_gz(self, sourcefile, destfile): blocksize = 8192 # pylint: disable=broad-except try: with open(destfile, 'wb') as dstfile, gzip.open(sourcefile) as srcfile: for data in iter(lambda: srcfile.read(blocksize), b''): dstfile.write(data) except Exception as err: self.logger.error('gz decompression of "{}" to "{}" failed: {}', sourcefile, destfile, err) if mvutils.find_gzip() is not None: gzip_binary = mvutils.find_gzip() self.logger.debug( 'Trying to decompress gzip file "{}" using {}...', sourcefile, gzip_binary) try: mvutils.file_remove(destfile) retval = subprocess.call([gzip_binary, '-d', sourcefile]) self.logger.debug('Calling {} -d {} returned {}', gzip_binary, sourcefile, retval) return retval except Exception as err: self.logger.error( 'gz commandline decompression of "{}" to "{}" failed: {}', sourcefile, destfile, err) raise return 0
def delete_list(self, full):
    """
    Deletes locally stored database update files

    Args:
        full(bool): Deletes the full lists if `True`
    """
    _, compressed, extracted, _ = self._get_update_info(full)
    self.logger.info('Cleaning up downloads...')
    for filename in (compressed, extracted):
        mvutils.file_remove(filename)
def init(self, reset=False, convert=False):
    """
    Startup of the database system

    Args:
        reset(bool, optional): if `True` the database will be
            cleaned up and recreated. Default is `False`

        convert(bool, optional): if `True` the database will be
            converted in case it is older than the supported
            version. If `False` a UI message will be displayed
            to the user informing that the database will be
            converted. Default is `False`
    """
    self.logger.info(
        'Using SQLite version {}, python library sqlite3 version {}',
        sqlite3.sqlite_version, sqlite3.version)
    if not mvutils.dir_exists(self.settings.datapath):
        os.mkdir(self.settings.datapath)
    # drop databases left behind by deprecated schema versions
    mvutils.file_remove(
        os.path.join(self.settings.datapath, 'filmliste-v1.db'))
    if reset is True or not mvutils.file_exists(self.dbfile):
        self.logger.info(
            '===== RESET: Database will be deleted and regenerated =====')
        self.exit()
        mvutils.file_remove(self.dbfile)
        substituted = self._handle_update_substitution()
        self.conn = sqlite3.connect(self.dbfile, timeout=60)
        if not substituted:
            self._handle_database_initialization()
    else:
        self._handle_update_substitution()
        try:
            self.conn = sqlite3.connect(self.dbfile, timeout=60)
        except sqlite3.DatabaseError as err:
            self.logger.error(
                'Error while opening database: {}. trying to fully reset the Database...',
                err)
            return self.init(reset=True, convert=convert)
    # journal_mode=off gives roughly a 3x speed-up (mode 'WAL' worth checking)
    self.conn.execute('pragma journal_mode=off')
    # synchronous=off is a bit dangerous :-) but faaaast
    self.conn.execute('pragma synchronous=off')
    self.conn.create_function('UNIX_TIMESTAMP', 0, get_unix_timestamp)
    self.conn.create_aggregate('GROUP_CONCAT', 1, GroupConcatClass)
    return True
def load_cache(self, reqtype, condition):
    """
    Load a cached response for the given request type and condition.

    Returns the cached data list, or `None` if caching is disabled,
    no cache file exists, or the cache does not match the request
    (type, condition) or is older than the last database update.
    """
    started = time.time()
    if not self.settings.getCaching():
        self.logger.debug('loading cache is disabled')
        return None
    #
    cachefile = os.path.join(self.settings.getDatapath(), reqtype + '.cache')
    if not mvutils.file_exists(cachefile):
        self.logger.debug('no cache file request "{}" and condition "{}"',
                          reqtype, condition)
        return None
    #
    last_update = self.settings.getLastUpdate()
    try:
        with closing(open(cachefile, encoding='utf-8')) as json_file:
            cached = json.load(json_file)
        if isinstance(cached, dict):
            if cached.get('type', '') != reqtype:
                self.logger.debug('no matching cache for type {} vs {}',
                                  cached.get('type', ''), reqtype)
                return None
            if cached.get('condition', '') != condition:
                self.logger.debug('no matching cache for condition {} vs {}',
                                  cached.get('condition', ''), condition)
                return None
            if int(last_update) != cached.get('time', 0):
                self.logger.debug('outdated cache')
                return None
            payload = cached.get('data', [])
            if isinstance(payload, list):
                self.logger.debug(
                    'return cache after {} sec for request "{}" and condition "{}"',
                    (time.time() - started), reqtype, condition)
                return payload
    # pylint: disable=broad-except
    except Exception as err:
        # a corrupt cache file is removed before re-raising
        self.logger.error('Failed to load cache file {}: {}', cachefile, err)
        mvutils.file_remove(cachefile)
        raise
    self.logger.debug('no cache found')
    return None
def removeDownloads(self):
    """Remove the downloaded film list files (compressed and extracted)."""
    for path in (self._compressedFilename, self._filename):
        mvutils.file_remove(path)
def _download(self, url, compressedFilename, targetFilename):
    """
    Download the film list archive from `url` and decompress it.

    Args:
        url(str): download source URL

        compressedFilename(str): path the archive is downloaded to

        targetFilename(str): path the decompressed list is written to

    Returns:
        `True` if decompression succeeded and the target file exists

    Raises:
        URLError, ExitRequested, Exception: download or decompression
        failures are logged, shown to the user and re-raised
    """
    # cleanup downloads
    start = time.time()
    self.logger.debug('Cleaning up old downloads...')
    mvutils.file_remove(compressedFilename)
    mvutils.file_remove(targetFilename)
    #
    # download filmliste
    self.notifier.show_download_progress()
    # pylint: disable=broad-except
    try:
        self.logger.debug('Trying to download {} from {}...',
                          os.path.basename(compressedFilename), url)
        self.notifier.update_download_progress(0, url)
        mvutils.url_retrieve(
            url,
            filename=compressedFilename,
            reporthook=self.notifier.hook_download_progress,
            aborthook=self.monitor.abort_requested)
        self.logger.debug('downloaded {} in {} sec',
                          compressedFilename, (time.time() - start))
    except URLError as err:
        self.logger.error('Failure downloading {} - {}', url, err)
        self.notifier.close_download_progress()
        self.notifier.show_download_error(url, err)
        raise
    except ExitRequested as err:
        self.logger.error(
            'Immediate exit requested. Aborting download of {}', url)
        self.notifier.close_download_progress()
        self.notifier.show_download_error(url, err)
        raise
    except Exception as err:
        self.logger.error('Failure writing {}', url)
        self.notifier.close_download_progress()
        self.notifier.show_download_error(url, err)
        raise
    # decompress filmliste
    start = time.time()
    # bugfix: retval was previously unbound when no decompressor was
    # available ("should never reach" branch), causing a NameError at
    # the final return; initialize to a failure code instead
    retval = -1
    try:
        if self.use_xz is True:
            self.logger.debug('Trying to decompress xz file...')
            retval = subprocess.call(
                [mvutils.find_xz(), '-d', compressedFilename])
            self.logger.debug('decompress xz {} in {} sec',
                              retval, (time.time() - start))
        elif UPD_CAN_BZ2 is True:
            self.logger.debug('Trying to decompress bz2 file...')
            retval = self._decompress_bz2(compressedFilename, targetFilename)
            self.logger.debug('decompress bz2 {} in {} sec',
                              retval, (time.time() - start))
        elif UPD_CAN_GZ is True:
            self.logger.debug('Trying to decompress gz file...')
            retval = self._decompress_gz(compressedFilename, targetFilename)
            self.logger.debug('decompress gz {} in {} sec',
                              retval, (time.time() - start))
        else:
            # should never be reached - extractor availability is
            # verified before the download starts
            self.logger.error('No suitable decompressor available')
    except Exception as err:
        self.logger.error('Failure decompress {}', err)
        self.notifier.close_download_progress()
        self.notifier.show_download_error('decompress failed', err)
        raise
    self.notifier.close_download_progress()
    return retval == 0 and mvutils.file_exists(targetFilename)
def get_newest_list(self, full):
    """
    Downloads the database update file

    Args:
        full(bool): Downloads the full list if `True`

    Returns:
        `True` if the list was downloaded and decompressed successfully
    """
    (url, compfile, destfile, _) = self._get_update_info(full)
    if url is None:
        self.logger.error(
            'No suitable archive extractor available for this system')
        self.notifier.show_missing_extractor_error()
        return False
    # cleanup downloads
    self.logger.info('Cleaning up old downloads...')
    mvutils.file_remove(compfile)
    mvutils.file_remove(destfile)
    # download filmliste
    self.notifier.show_download_progress()
    # pylint: disable=broad-except
    try:
        self.logger.info('Trying to download {} from {}...',
                         os.path.basename(compfile), url)
        self.notifier.update_download_progress(0, url)
        mvutils.url_retrieve(
            url,
            filename=compfile,
            reporthook=self.notifier.hook_download_progress,
            aborthook=self.monitor.abort_requested
        )
    except URLError as err:
        self.logger.error('Failure downloading {} - {}', url, err)
        self.notifier.close_download_progress()
        self.notifier.show_download_error(url, err)
        return False
    except ExitRequested as err:
        self.logger.error(
            'Immediate exit requested. Aborting download of {}', url)
        self.notifier.close_download_progress()
        self.notifier.show_download_error(url, err)
        return False
    except Exception as err:
        self.logger.error('Failure writing {}', url)
        self.notifier.close_download_progress()
        self.notifier.show_download_error(url, err)
        return False
    # decompress filmliste
    # bugfix: retval was previously unbound when no decompressor was
    # available ("should never reach" branch), causing a NameError at
    # the final return; initialize to a failure code instead
    retval = -1
    if self.use_xz is True:
        self.logger.info('Trying to decompress xz file...')
        retval = subprocess.call([mvutils.find_xz(), '-d', compfile])
        self.logger.info('Return {}', retval)
    elif UPD_CAN_BZ2 is True:
        self.logger.info('Trying to decompress bz2 file...')
        retval = self._decompress_bz2(compfile, destfile)
        self.logger.info('Return {}', retval)
    elif UPD_CAN_GZ is True:
        self.logger.info('Trying to decompress gz file...')
        retval = self._decompress_gz(compfile, destfile)
        self.logger.info('Return {}', retval)
    else:
        # should never be reached - extractor availability was
        # checked at the top of this method
        self.logger.error('No suitable decompressor available')
    self.notifier.close_download_progress()
    return retval == 0 and mvutils.file_exists(destfile)
def reset(self):
    """Delete the current and the previous-version database files."""
    mvutils.file_remove(self.dbfile)
    # last version
    legacy_db = os.path.join(self.settings.getDatapath(), 'filmliste-v2.db')
    mvutils.file_remove(legacy_db)
    self.conn = None
def DeleteList(self, full):
    """Delete the locally stored database update files (legacy API)."""
    _, compressed, extracted, _ = self._get_update_info(full)
    self.logger.info('Cleaning up downloads...')
    for path in (compressed, extracted):
        mvutils.file_remove(path)
def GetNewestList(self, full):
    """
    Download the newest film list from one of the mirrors (legacy API).

    Fetches the mirror list XML from the update URL, sorts mirrors by
    priority (with random jitter to spread load), downloads the list
    from the first working mirror and decompresses it.

    Args:
        full(bool): downloads the full list if `True`

    Returns:
        `True` if a list was downloaded and decompressed successfully
    """
    (url, compfile, destfile, _) = self._get_update_info(full)
    if url is None:
        self.logger.error(
            'No suitable archive extractor available for this system')
        self.notifier.ShowMissingExtractorError()
        return False
    # get mirrorlist
    self.logger.info('Opening {}', url)
    try:
        data = urllib2.urlopen(url).read()
    except urllib2.URLError as err:
        self.logger.error('Failure opening {}', url)
        self.notifier.ShowDownloadError(url, err)
        return False
    root = etree.fromstring(data)
    urls = []
    for server in root.findall('Server'):
        try:
            mirror_url = server.find('URL').text
            priority = server.find('Prio').text
            # random jitter spreads load between equal-priority mirrors
            urls.append((self._get_update_url(mirror_url),
                         float(priority) + random.random() * 1.2))
            self.logger.info('Found mirror {} (Priority {})',
                             mirror_url, priority)
        except AttributeError:
            # skip malformed <Server> entries missing URL or Prio
            pass
    urls = sorted(urls, key=itemgetter(1))
    urls = [url[0] for url in urls]
    # cleanup downloads
    self.logger.info('Cleaning up old downloads...')
    mvutils.file_remove(compfile)
    mvutils.file_remove(destfile)
    # download filmliste
    self.notifier.ShowDownloadProgress()
    lasturl = ''
    for url in urls:
        try:
            lasturl = url
            self.logger.info('Trying to download {} from {}...',
                             os.path.basename(compfile), url)
            self.notifier.UpdateDownloadProgress(0, url)
            mvutils.url_retrieve(
                url,
                filename=compfile,
                reporthook=self.notifier.HookDownloadProgress,
                aborthook=self.monitor.abortRequested)
            break
        except urllib2.URLError as err:
            self.logger.error('Failure downloading {}', url)
            self.notifier.CloseDownloadProgress()
            self.notifier.ShowDownloadError(lasturl, err)
            return False
        except ExitRequested as err:
            self.logger.error(
                'Immediate exit requested. Aborting download of {}', url)
            self.notifier.CloseDownloadProgress()
            self.notifier.ShowDownloadError(lasturl, err)
            return False
        except Exception as err:
            # typo fix: log message previously read 'Failure writng {}'
            self.logger.error('Failure writing {}', url)
            self.notifier.CloseDownloadProgress()
            self.notifier.ShowDownloadError(lasturl, err)
            return False
    # decompress filmliste
    # bugfix: retval was previously unbound when no decompressor was
    # available ("should never reach" branch), causing a NameError at
    # the final return; initialize to a failure code instead
    retval = -1
    if self.use_xz is True:
        self.logger.info('Trying to decompress xz file...')
        retval = subprocess.call([mvutils.find_xz(), '-d', compfile])
        self.logger.info('Return {}', retval)
    elif upd_can_bz2 is True:
        self.logger.info('Trying to decompress bz2 file...')
        retval = self._decompress_bz2(compfile, destfile)
        self.logger.info('Return {}', retval)
    elif upd_can_gz is True:
        self.logger.info('Trying to decompress gz file...')
        retval = self._decompress_gz(compfile, destfile)
        self.logger.info('Return {}', retval)
    else:
        # should never be reached - extractor availability was
        # checked at the top of this method
        self.logger.error('No suitable decompressor available')
    self.notifier.CloseDownloadProgress()
    return retval == 0 and mvutils.file_exists(destfile)
def init(self, reset=False, convert=False, failedCount=0):
    """
    Startup of the database system

    Args:
        reset(bool, optional): if `True` the database will be cleaned up
            and recreated. Default is `False`

        convert(bool, optional): if `True` the database will be converted
            in case it is older than the supported version. If `False`
            a UI message will be displayed to the user informing that
            the database will be converted. Default is `False`

        failedCount(int, optional): number of consecutive failed recovery
            attempts; incremented on each recursive retry, giving up
            after 3 failures. Default is `0`
    """
    self.logger.info(
        'Using SQLite version {}, python library sqlite3 version {}',
        sqlite3.sqlite_version, sqlite3.version)
    if not mvutils.dir_exists(self.settings.datapath):
        os.mkdir(self.settings.datapath)
    # remove old versions
    mvutils.file_remove(
        os.path.join(self.settings.datapath, 'filmliste-v1.db'))
    if reset is True or not mvutils.file_exists(self.dbfile):
        self.logger.info(
            '===== RESET: Database will be deleted and regenerated =====')
        self.exit()
        mvutils.file_remove(self.dbfile)
        if self._handle_update_substitution():
            # a substitute database was installed; just open it
            self.conn = sqlite3.connect(self.dbfile, timeout=60)
        else:
            # no substitute available: create an empty schema from scratch
            self.conn = sqlite3.connect(self.dbfile, timeout=60)
            self._handle_database_initialization()
    else:
        try:
            if self._handle_update_substitution():
                self._handle_not_update_to_date_dbfile()
            self.conn = sqlite3.connect(self.dbfile, timeout=60)
        except sqlite3.DatabaseError as err:
            # opening failed: retry once with a forced full reset
            self.logger.error(
                'Error while opening database: {}. trying to fully reset the Database...',
                err)
            return self.init(reset=True, convert=convert)
    try:
        # 3x speed-up, check mode 'WAL'
        self.conn.execute('pragma journal_mode=off')
        # check if DB is ready or broken
        cursor = self.conn.cursor()
        cursor.execute('SELECT * FROM `status` LIMIT 1')
        rs = cursor.fetchall()
        ##
        self.logger.info('Current DB Status Last modified {} ({})',
                         time.ctime(rs[0][0]), rs[0][0])
        self.logger.info('Current DB Status Last lastupdate {} ({})',
                         time.ctime(rs[0][2]), rs[0][2])
        self.logger.info('Current DB Status Last filmupdate {} ({})',
                         time.ctime(rs[0][3]), rs[0][3])
        self.logger.info('Current DB Status Last fullupdate {}',
                         rs[0][4])
        ##
        cursor.close()
    except sqlite3.DatabaseError as err:
        # the status query failed, i.e. the database is broken:
        # retry with a full reset, giving up after 3 attempts
        failedCount += 1
        if (failedCount > 3):
            self.logger.error(
                'Failed to restore database, please uninstall plugin, delete user profile and reinstall'
            )
            raise err
        self.logger.error(
            'Error on first query: {}. trying to fully reset the Database...trying {} times',
            err, failedCount)
        return self.init(reset=True, convert=convert,
                         failedCount=failedCount)
    # that is a bit dangerous :-) but faaaast
    self.conn.execute('pragma synchronous=off')
    self.conn.create_function('UNIX_TIMESTAMP', 0, get_unix_timestamp)
    self.conn.create_aggregate('GROUP_CONCAT', 1, GroupConcatClass)
    return True