def migrateExtendedSearch(self):
    """
    Migrate legacy extended-search entries to the new search config format.

    Reads `recent_ext_searches.json` from the addon data path, converts each
    entry into an ExtendedSearchModel dict, writes them to `searchConfig.json`
    and renames the legacy file to `*.json.bk` so the migration runs only once.
    """
    import resources.lib.mvutils as mvutils
    oldExtSearchFilename = os.path.join(
        appContext.MVSETTINGS.getDatapath(), 'recent_ext_searches.json')
    self.logger.debug("migrateExtendedSearch {}",
                      mvutils.file_exists(oldExtSearchFilename))
    # Guard clause: nothing to migrate without the legacy file.
    if not mvutils.file_exists(oldExtSearchFilename):
        return
    oldData = mvutils.loadJsonFile(oldExtSearchFilename)
    self.logger.debug("Found legacy ext search entries to be migrated")
    # Fixed: compare against None with `is not` (PEP 8), not `!=`.
    if oldData is not None:
        newData = []
        # ids are generated from the current epoch second, incremented per entry
        lid = int(time.time())
        for entry in oldData:
            searchText = entry.get('search')
            esm = ExtendedSearchModel.ExtendedSearchModel(searchText)
            esm.setId(lid)
            lid += 1
            # legacy entries only carried the search string, so it is
            # reused as title and description
            esm.setTitle(searchText)
            esm.setDescription(searchText)
            esm.setWhen(int(entry.get('when')))
            newData.append(esm.toDict())
        #
        newExtSearchFilename = os.path.join(
            appContext.MVSETTINGS.getDatapath(), 'searchConfig.json')
        mvutils.saveJsonFile(newExtSearchFilename, newData)
        self.logger.debug(
            "Migrated {} legacy ext search entries to new format",
            len(newData))
    # Rename (not delete) the legacy file so the data is kept as a backup.
    oldExtSearchFilenameBackup = os.path.join(
        appContext.MVSETTINGS.getDatapath(),
        'recent_ext_searches.json.bk')
    mvutils.file_rename(oldExtSearchFilename,
                        oldExtSearchFilenameBackup)
示例#2
0
    def Init(self, reset, convert):
        """
        Open (or reset and recreate) the SQLite film database.

        Args:
            reset(bool): if `True` the database file is deleted and
                regenerated from scratch

            convert(bool): passed through on the recovery path; a corrupt
                database triggers `Init(reset=True, convert=convert)`

        Returns `True` once a usable connection is established.
        """
        self.logger.info(
            'Using SQLite version {}, python library sqlite3 version {}',
            sqlite3.sqlite_version, sqlite3.version)
        if not mvutils.dir_exists(self.settings.datapath):
            os.mkdir(self.settings.datapath)

        # remove old versions
        mvutils.file_remove(
            os.path.join(self.settings.datapath, 'filmliste-v1.db'))

        # Fixed: `reset is True` (PEP 8) keeps the legacy semantics of only
        # resetting on an explicit boolean True, without the `== True` smell.
        if reset is True or not mvutils.file_exists(self.dbfile):
            self.logger.info(
                '===== RESET: Database will be deleted and regenerated =====')
            mvutils.file_remove(self.dbfile)
            self.conn = sqlite3.connect(self.dbfile, timeout=60)
            self._handle_database_initialization()
        else:
            try:
                self.conn = sqlite3.connect(self.dbfile, timeout=60)
            except sqlite3.DatabaseError as err:
                # Corrupt file: fall back to a full reset and propagate its result.
                self.logger.error(
                    'Error while opening database: {}. trying to fully reset the Database...',
                    err)
                return self.Init(reset=True, convert=convert)

        self.conn.execute(
            'pragma journal_mode=off')  # 3x speed-up, check mode 'WAL'
        self.conn.execute('pragma synchronous=off'
                          )  # that is a bit dangerous :-) but faaaast
        self.conn.create_function('UNIX_TIMESTAMP', 0, UNIX_TIMESTAMP)
        self.conn.create_aggregate('GROUP_CONCAT', 1, GROUP_CONCAT)
        return True
示例#3
0
 def _handle_update_substitution(self):
     """Swap a downloaded native update file in as the active database.

     Returns the result of the rename when an update file is present,
     otherwise `False`.
     """
     updfile = os.path.join(self.settings.datapath, DATABASE_AKT)
     sqlfile = os.path.join(self.settings.datapath, 'filmliste-v2.db')
     # Guard clause: nothing to substitute without the update file.
     if not mvutils.file_exists(updfile):
         return False
     self.logger.info('Native update file found. Updating database...')
     return mvutils.file_rename(updfile, sqlfile)
示例#4
0
	def _file_remove( self, name ):
		"""Delete the file *name* if it exists; `True` on success, `False` otherwise."""
		if not mvutils.file_exists( name ):
			return False
		try:
			os.remove( name )
		except OSError as err:
			# removal failed (permissions, locked file, ...) - log and report failure
			self.logger.error( 'Failed to remove {}: error {}', name, err )
			return False
		return True
示例#5
0
 def _file_remove(self, name):
     """Remove *name* from disk; return `True` when deleted, `False` when
     the file is missing or the removal raised an `OSError`."""
     removed = False
     if mvutils.file_exists(name):
         try:
             os.remove(name)
             removed = True
         except OSError as err:
             self.logger.error('Failed to remove {}: error {}', name, err)
     return removed
示例#6
0
    def init(self, reset=False, convert=False):
        """
        Startup of the database system

        Args:
            reset(bool, optional): if `True` the database
                will be cleaned up and recreated. Default
                is `False`

            convert(bool, optional): if `True` the database
                will be converted in case it is older than
                the supported version. If `False` a UI message
                will be displayed to the user informing that
                the database will be converted. Default is
                `False`

        Returns `True` once a usable connection is established.
        """
        self.logger.info(
            'Using SQLite version {}, python library sqlite3 version {}',
            sqlite3.sqlite_version, sqlite3.version)
        if not mvutils.dir_exists(self.settings.datapath):
            os.mkdir(self.settings.datapath)

        # remove old versions
        mvutils.file_remove(
            os.path.join(self.settings.datapath, 'filmliste-v1.db'))

        if reset is True or not mvutils.file_exists(self.dbfile):
            self.logger.info(
                '===== RESET: Database will be deleted and regenerated =====')
            self.exit()
            mvutils.file_remove(self.dbfile)
            # Fixed duplication: both arms of the old if/else opened the same
            # connection; only the schema initialization depends on whether a
            # native update file was substituted in.
            substituted = self._handle_update_substitution()
            self.conn = sqlite3.connect(self.dbfile, timeout=60)
            if not substituted:
                self._handle_database_initialization()
        else:
            self._handle_update_substitution()
            try:
                self.conn = sqlite3.connect(self.dbfile, timeout=60)
            except sqlite3.DatabaseError as err:
                # Corrupt file: retry once with a full reset and return its result.
                self.logger.error(
                    'Error while opening database: {}. trying to fully reset the Database...',
                    err)
                return self.init(reset=True, convert=convert)

        # 3x speed-up, check mode 'WAL'
        self.conn.execute('pragma journal_mode=off')
        # that is a bit dangerous :-) but faaaast
        self.conn.execute('pragma synchronous=off')
        self.conn.create_function('UNIX_TIMESTAMP', 0, get_unix_timestamp)
        self.conn.create_aggregate('GROUP_CONCAT', 1, GroupConcatClass)
        return True
 def getConnection(self):
     """Lazily open and cache the SQLite connection; subsequent calls reuse it."""
     if self.conn is not None:
         return self.conn
     if not mvutils.file_exists(self.dbfile):
         # the DB has not been generated yet - flag it for the caller
         self.settings.setDatabaseStatus('UNINIT')
         self.logger.debug('Missing StoreSQLite DBFile: {}',
                           self.dbfile)
     self.conn = sqlite3.connect(self.dbfile, timeout=60)
     # speed-oriented connection setup
     for pragma in ('pragma synchronous=off',
                    'pragma journal_mode=off',
                    'pragma page_size=16384',
                    'pragma encoding="UTF-8"'):
         self.conn.execute(pragma)
     return self.conn
示例#8
0
 def getConnection(self):
     """Return the cached SQLite connection, opening and configuring it on first use."""
     if self.conn is not None:
         return self.conn
     if not mvutils.file_exists(self.dbfile):
         # database file never generated - mark the state for the caller
         self.settings.setDatabaseStatus('UNINIT')
         self.logger.debug('Missing StoreSQLite DBFile: {}',
                           self.dbfile)
     connection = sqlite3.connect(self.dbfile, timeout=60)
     # speed-oriented connection setup
     for pragma in ('pragma synchronous=off',
                    'pragma journal_mode=off',
                    'pragma page_size=65536',
                    'pragma encoding="UTF-8"'):
         connection.execute(pragma)
     # SQL helpers used by the addon's queries
     connection.create_function('UNIX_TIMESTAMP', 0, get_unix_timestamp)
     connection.create_aggregate('GROUP_CONCAT', 1, GroupConcatClass)
     self.conn = connection
     return self.conn
 def load_cache(self, reqtype, condition):
     """
     Load a cached query result from disk.

     Args:
         reqtype(str): request type; also the cache file basename
             (`<reqtype>.cache` in the data path)

         condition(str): query condition the cache entry must match

     Returns the cached list on a hit, or `None` when caching is
     disabled, the file is missing, or the entry does not match
     type/condition or is older than the last database update.
     Re-raises (after deleting the file) if the cache cannot be read.
     """
     start = time.time()
     if not self.settings.getCaching():
         self.logger.debug('loading cache is disabled')
         return None
     #
     filename = os.path.join(self.settings.getDatapath(),
                             reqtype + '.cache')
     if not mvutils.file_exists(filename):
         self.logger.debug('no cache file request "{}" and condition "{}"',
                           reqtype, condition)
         return None
     #
     # a cache entry is only valid for the database state it was built from
     dbLastUpdate = self.settings.getLastUpdate()
     try:
         with closing(open(filename, encoding='utf-8')) as json_file:
             data = json.load(json_file)
             if isinstance(data, dict):
                 # validate the envelope: type, condition and build time
                 # must all match, otherwise treat as a miss
                 if data.get('type', '') != reqtype:
                     self.logger.debug(
                         'no matching cache for type {} vs {}',
                         data.get('type', ''), reqtype)
                     return None
                 if data.get('condition', '') != condition:
                     self.logger.debug(
                         'no matching cache for condition {} vs {}',
                         data.get('condition', ''), condition)
                     return None
                 if int(dbLastUpdate) != data.get('time', 0):
                     self.logger.debug('outdated cache')
                     return None
                 data = data.get('data', [])
                 if isinstance(data, list):
                     self.logger.debug(
                         'return cache after {} sec for request "{}" and condition "{}"',
                         (time.time() - start), reqtype, condition)
                     return data
     # pylint: disable=broad-except
     except Exception as err:
         # unreadable cache: remove it so the next run rebuilds it,
         # then propagate the error to the caller
         self.logger.error('Failed to load cache file {}: {}', filename,
                           err)
         mvutils.file_remove(filename)
         raise
     self.logger.debug('no cache found')
     return None
示例#10
0
	def Init( self, reset = False ):
		"""
		Open (or reset and recreate) the SQLite film database.

		Args:
			reset(bool, optional): if `True` the database file is deleted
				and regenerated. Default is `False`
		"""
		self.logger.info( 'Using SQLite version {}, python library sqlite3 version {}', sqlite3.sqlite_version, sqlite3.version )
		if not mvutils.dir_exists( self.settings.datapath ):
			os.mkdir( self.settings.datapath )
		# `is True` (PEP 8) keeps the explicit-boolean reset semantics
		if reset is True or not mvutils.file_exists( self.dbfile ):
			self.logger.info( '===== RESET: Database will be deleted and regenerated =====' )
			self._file_remove( self.dbfile )
			self.conn = sqlite3.connect( self.dbfile, timeout = 60 )
			self._handle_database_initialization()
		else:
			try:
				self.conn = sqlite3.connect( self.dbfile, timeout = 60 )
			except sqlite3.DatabaseError as err:
				self.logger.error( 'Error while opening database: {}. trying to fully reset the Database...', err )
				# Fixed: return here so the recovery call does not fall through
				# and re-apply the pragmas below a second time (or crash if the
				# recursive attempt could not establish a connection).
				return self.Init( reset = True )

		self.conn.execute( 'pragma journal_mode=off' )	# 3x speed-up, check mode 'WAL'
		self.conn.execute( 'pragma synchronous=off' )	# that is a bit dangerous :-) but faaaast

		self.conn.create_function( 'UNIX_TIMESTAMP', 0, UNIX_TIMESTAMP )
		self.conn.create_aggregate( 'GROUP_CONCAT', 1, GROUP_CONCAT )
示例#11
0
    def import_database(self, full):
        """
        Performs a database update when a
        downloaded update file is available

        Args:
            full(bool): Perform full update if `True`
        """
        (_, _, destfile, avgrecsize) = self._get_update_info(full)
        if not mvutils.file_exists(destfile):
            self.logger.error('File {} does not exists', destfile)
            return False
        # estimate number of records in update file
        fileSizeInByte = mvutils.file_size(destfile)
        records = int(fileSizeInByte / avgrecsize)
        self.logger.info( 'Starting import of {} records from {}', records, destfile )
        if not self.database.ft_init():
            self.logger.warn(
                'Failed to initialize update. Maybe a concurrency problem?')
            return False
        
        # pylint: disable=broad-except
        try:
            starttime = time.time()
            flsm = 0
            flts = 0
            ####
            flsm = 0
            sender = ""
            thema = ""
            self.notifier.show_update_progress()
            (self.tot_chn, self.tot_shw, self.tot_mov) = self._update_start(full)
            
            
            # stream the update file in 512000-byte chunks, split on the
            # ',"X":' record separator
            ufp = UpdateFileParser.UpdateFileParser(self.logger, 512000, destfile)
            ufp.init()
            fileHeader = ufp.next(',"X":');
            ### META
            ## {"Filmliste":["30.08.2020, 11:13","30.08.2020, 09:13","3","MSearch [Vers.: 3.1.139]","d93c9794acaf3e482d42c24e513f78a8"],"Filmliste":["Sender","Thema","Titel","Datum","Zeit","Dauer","Größe [MB]","Beschreibung","Url","Website","Url Untertitel","Url RTMP","Url Klein","Url RTMP Klein","Url HD","Url RTMP HD","DatumL","Url History","Geo","neu"]
            # this is the timestamp of this database update
            #value = jsonDoc['Filmliste'][0]
            # NOTE(review): fixed slice assumes the header starts with the
            # '{"Filmliste":["' prefix shown above - confirm against the
            # parser output before changing the file format
            value = fileHeader[15:32]
            #self.logger.info( 'update date ' + value )
            try:
                fldt = datetime.datetime.strptime(
                    value.strip(), "%d.%m.%Y, %H:%M")
                flts = int(time.mktime(fldt.timetuple()))
                self.database.update_status(filmupdate=flts)
                self.logger.info(
                    'Filmliste dated {}', value.strip())
            except TypeError:
                # pylint: disable=line-too-long
                # SEE: https://forum.kodi.tv/showthread.php?tid=112916&pid=1214507#pid1214507
                # Wonderful. His name is also Leopold
                try:
                    flts = int(time.mktime(time.strptime(
                        value.strip(), "%d.%m.%Y, %H:%M")))
                    self.database.update_status(
                        filmupdate=flts)
                    self.logger.info(
                        'Filmliste dated {}', value.strip())
                    # pylint: disable=broad-except
                except Exception as err:
                    # If the universe hates us...
                    self.logger.debug(
                        'Could not determine date "{}" of filmliste: {}', value.strip(), err)
            except ValueError as err:
                # unparseable date: import proceeds without a film list timestamp
                pass            

            ###
            # each chunk is one film record; rebuild it into a standalone
            # JSON object before decoding
            while (True):
                aPart = ufp.next(',"X":');
                if (len(aPart) == 0):
                    break;
                ##
                aPart = '{"X":' + aPart;
                if (not(aPart.endswith("}"))):
                    aPart = aPart + "}";
                ##
                jsonDoc = json.loads(aPart)
                jsonDoc = jsonDoc['X']
                self._init_record()
                # behaviour of the update list
                # empty sender/thema fields mean "same as previous record"
                if (len(jsonDoc[0]) > 0):
                    sender = jsonDoc[0]
                else:
                    jsonDoc[0] = sender
                # same for thema
                if (len(jsonDoc[1]) > 0):
                    thema = jsonDoc[1]
                else:
                    jsonDoc[1] = thema
                ##
                self._add_value( jsonDoc )
                self._end_record(records)
                if self.count % 100 == 0 and self.monitor.abort_requested():
                    # kodi is shutting down. Close all
                    self._update_end(full, 'ABORTED')
                    self.notifier.close_update_progress()
                    return True                

            ufp.close()        
            self._update_end(full, 'IDLE')
            self.logger.info('{} records processed',self.count)
            self.logger.info(
                'Import of {} in update cycle {} finished. Duration: {} seconds',
                destfile,
                self.cycle,
                int(time.time() - starttime)
            )
            self.notifier.close_update_progress()
            return True
        except KeyboardInterrupt:
            self._update_end(full, 'ABORTED')
            self.logger.info('Update cycle {} interrupted by user', self.cycle)
            self.notifier.close_update_progress()
            return False
        except DatabaseCorrupted as err:
            self.logger.error('{} on update cycle {}', err, self.cycle)
            self.notifier.close_update_progress()
        except DatabaseLost as err:
            self.logger.error('{} on update cycle {}', err, self.cycle)
            self.notifier.close_update_progress()
        except Exception as err:
            self.logger.error(
                'Error {} while processing {} on update cycle {}', err, destfile, self.cycle)
            self._update_end(full, 'ABORTED')
            self.notifier.close_update_progress()
        return False
示例#12
0
	def Import( self, full ):
		"""
		Import the downloaded film list into the database via ijson streaming.

		Args:
			full(bool): perform a full update if `True`

		Returns `True` when the import finished (or was cleanly aborted by a
		shutdown request), `False` on failure.
		"""
		( _, _, destfile, avgrecsize ) = self._get_update_info( full )
		if not mvutils.file_exists( destfile ):
			self.logger.error( 'File {} does not exists', destfile )
			return False
		# estimate number of records in update file
		records = int( mvutils.file_size( destfile ) / avgrecsize )
		if not self.db.ftInit():
			self.logger.warn( 'Failed to initialize update. Maybe a concurrency problem?' )
			return False
		try:
			self.logger.info( 'Starting import of approx. {} records from {}', records, destfile )
			with open( destfile, 'r' ) as file:
				parser = ijson.parse( file )
				flsm = 0
				flts = 0
				( self.tot_chn, self.tot_shw, self.tot_mov ) = self._update_start( full )
				self.notifier.ShowUpdateProgress()
				for prefix, event, value in parser:
					if ( prefix, event ) == ( "X", "start_array" ):
						self._init_record()
					elif ( prefix, event ) == ( "X", "end_array" ):
						self._end_record( records )
						if self.count % 100 == 0 and self.monitor.abortRequested():
							# kodi is shutting down. Close all
							self._update_end( full, 'ABORTED' )
							self.notifier.CloseUpdateProgress()
							return True
					elif ( prefix, event ) == ( "X.item", "string" ):
						if value is not None:
							self._add_value( value.strip() )
						else:
							self._add_value( "" )
					elif ( prefix, event ) == ( "Filmliste", "start_array" ):
						flsm += 1
					elif ( prefix, event ) == ( "Filmliste.item", "string" ):
						flsm += 1
						if flsm == 2 and value is not None:
							# this is the timestamp of this database update
							try:
								fldt = datetime.datetime.strptime( value.strip(), "%d.%m.%Y, %H:%M" )
								flts = int( time.mktime( fldt.timetuple() ) )
								self.db.UpdateStatus( filmupdate = flts )
								self.logger.info( 'Filmliste dated {}', value.strip() )
							except TypeError:
								# SEE: https://forum.kodi.tv/showthread.php?tid=112916&pid=1214507#pid1214507
								# Wonderful. His name is also Leopold
								try:
									flts = int( time.mktime( time.strptime( value.strip(), "%d.%m.%Y, %H:%M" ) ) )
									self.db.UpdateStatus( filmupdate = flts )
									self.logger.info( 'Filmliste dated {}', value.strip() )
								except Exception as err:
									# If the universe hates us...
									self.logger.debug( 'Could not determine date "{}" of filmliste: {}', value.strip(), err )
							except ValueError as err:
								pass

			self._update_end( full, 'IDLE' )
			self.logger.info( 'Import of {} finished', destfile )
			self.notifier.CloseUpdateProgress()
			return True
		except KeyboardInterrupt:
			self._update_end( full, 'ABORTED' )
			self.logger.info( 'Interrupted by user' )
			self.notifier.CloseUpdateProgress()
			return True
		except DatabaseCorrupted as err:
			self.logger.error( '{}', err )
			self.notifier.CloseUpdateProgress()
		except DatabaseLost as err:
			self.logger.error( '{}', err )
			self.notifier.CloseUpdateProgress()
		except Exception as err:
			# Fixed log message typo: 'wile' -> 'while'
			self.logger.error( 'Error {} while processing {}', err, destfile )
			self._update_end( full, 'ABORTED' )
			self.notifier.CloseUpdateProgress()
		return False
示例#13
0
	def GetNewestList( self, full ):
		"""
		Download and decompress the newest film list from a mirror.

		Args:
			full(bool): download the full list if `True`

		Returns `True` when a mirror download and the decompression both
		succeeded, `False` otherwise.
		"""
		( url, compfile, destfile, _ ) = self._get_update_info( full )
		if url is None:
			self.logger.error( 'No suitable archive extractor available for this system' )
			self.notifier.ShowMissingExtractorError()
			return False

		# get mirrorlist
		self.logger.info( 'Opening {}', url )
		try:
			data = urllib2.urlopen( url ).read()
		except urllib2.URLError as err:
			self.logger.error( 'Failure opening {}', url )
			self.notifier.ShowDownloadError( url, err )
			return False
		root = etree.fromstring ( data )
		urls = []
		for server in root.findall( 'Server' ):
			try:
				URL = server.find( 'URL' ).text
				Prio = server.find( 'Prio' ).text
				# random jitter spreads the load between equal-priority mirrors
				urls.append( ( self._get_update_url( URL ), float( Prio ) + random.random() * 1.2 ) )
				self.logger.info( 'Found mirror {} (Priority {})', URL, Prio )
			except AttributeError:
				# mirror entry without URL/Prio - skip it
				pass
		urls = sorted( urls, key = itemgetter( 1 ) )
		urls = [ url[0] for url in urls ]

		# cleanup downloads
		self.logger.info( 'Cleaning up old downloads...' )
		self._file_remove( compfile )
		self._file_remove( destfile )

		# download filmliste
		self.notifier.ShowDownloadProgress()
		lasturl = ''
		for url in urls:
			try:
				lasturl = url
				self.logger.info( 'Trying to download {} from {}...', os.path.basename( compfile ), url )
				self.notifier.UpdateDownloadProgress( 0, url )
				mvutils.url_retrieve( url, filename = compfile, reporthook = self.notifier.HookDownloadProgress, aborthook = self.monitor.abortRequested )
				break
			except urllib2.URLError as err:
				self.logger.error( 'Failure downloading {}', url )
				self.notifier.CloseDownloadProgress()
				self.notifier.ShowDownloadError( lasturl, err )
				return False
			except ExitRequested as err:
				self.logger.error( 'Immediate exit requested. Aborting download of {}', url )
				self.notifier.CloseDownloadProgress()
				self.notifier.ShowDownloadError( lasturl, err )
				return False
			except Exception as err:
				# Fixed log message typo: 'writng' -> 'writing'
				self.logger.error( 'Failure writing {}', url )
				self.notifier.CloseDownloadProgress()
				self.notifier.ShowDownloadError( lasturl, err )
				return False

		# decompress filmliste
		if self.use_xz is True:
			self.logger.info( 'Trying to decompress xz file...' )
			retval = subprocess.call( [ mvutils.find_xz(), '-d', compfile ] )
			self.logger.info( 'Return {}', retval )
		elif upd_can_bz2 is True:
			self.logger.info( 'Trying to decompress bz2 file...' )
			retval = self._decompress_bz2( compfile, destfile )
			self.logger.info( 'Return {}', retval )
		elif upd_can_gz is True:
			self.logger.info( 'Trying to decompress gz file...' )
			retval = self._decompress_gz( compfile, destfile )
			self.logger.info( 'Return {}', retval )
		else:
			# should never reach; fixed: `retval` was unbound here, making the
			# final `return retval == 0 ...` raise a NameError
			retval = -1

		self.notifier.CloseDownloadProgress()
		return retval == 0 and mvutils.file_exists( destfile )
    def get_newest_list(self, full):
        """
        Downloads the database update file

        Args:
            full(bool): Downloads the full list if `True`

        Returns `True` when download and decompression succeeded,
        `False` otherwise.
        """
        (url, compfile, destfile, _) = self._get_update_info(full)
        if url is None:
            self.logger.error(
                'No suitable archive extractor available for this system')
            self.notifier.show_missing_extractor_error()
            return False

        # cleanup downloads
        self.logger.info('Cleaning up old downloads...')
        mvutils.file_remove(compfile)
        mvutils.file_remove(destfile)

        # download filmliste
        self.notifier.show_download_progress()

        # pylint: disable=broad-except
        try:
            self.logger.info('Trying to download {} from {}...',
                             os.path.basename(compfile), url)
            self.notifier.update_download_progress(0, url)
            mvutils.url_retrieve(
                url,
                filename=compfile,
                reporthook=self.notifier.hook_download_progress,
                aborthook=self.monitor.abort_requested
            )
        except URLError as err:
            self.logger.error('Failure downloading {} - {}', url, err)
            self.notifier.close_download_progress()
            self.notifier.show_download_error(url, err)
            return False
        except ExitRequested as err:
            self.logger.error(
                'Immediate exit requested. Aborting download of {}', url)
            self.notifier.close_download_progress()
            self.notifier.show_download_error(url, err)
            return False
        except Exception as err:
            self.logger.error('Failure writing {}', url)
            self.notifier.close_download_progress()
            self.notifier.show_download_error(url, err)
            return False

        # decompress filmliste
        if self.use_xz is True:
            self.logger.info('Trying to decompress xz file...')
            retval = subprocess.call([mvutils.find_xz(), '-d', compfile])
            self.logger.info('Return {}', retval)
        elif UPD_CAN_BZ2 is True:
            self.logger.info('Trying to decompress bz2 file...')
            retval = self._decompress_bz2(compfile, destfile)
            self.logger.info('Return {}', retval)
        elif UPD_CAN_GZ is True:
            self.logger.info('Trying to decompress gz file...')
            retval = self._decompress_gz(compfile, destfile)
            self.logger.info('Return {}', retval)
        else:
            # should never reach; fixed: `retval` was unbound here, making
            # the final `return retval == 0 ...` raise a NameError
            retval = -1

        self.notifier.close_download_progress()
        return retval == 0 and mvutils.file_exists(destfile)
    def import_database(self, full):
        """
        Performs a database update when a
        downloaded update file is available

        Args:
            full(bool): Perform full update if `True`
        """
        (_, _, destfile, avgrecsize) = self._get_update_info(full)
        if not mvutils.file_exists(destfile):
            self.logger.error('File {} does not exists', destfile)
            return False
        # estimate number of records in update file
        records = int(mvutils.file_size(destfile) / avgrecsize)
        if not self.database.ft_init():
            self.logger.warn(
                'Failed to initialize update. Maybe a concurrency problem?')
            return False
        
        # pylint: disable=broad-except
        try:
            starttime = time.time()
            # the whole file is loaded at once; _object_pairs_hook turns the
            # JSON object into a list of (key, value) tuples so the duplicate
            # "Filmliste" keys survive parsing
            with closing( open(destfile, 'r', encoding="utf-8") ) as updatefile:
                jsonDoc = json.load( updatefile, object_pairs_hook=self._object_pairs_hook )
                self.logger.info( 'Starting import of {} records from {}', (len(jsonDoc)-2), destfile )
                flsm = 0
                flts = 0
                (self.tot_chn, self.tot_shw, self.tot_mov) = self._update_start(full)
                self.notifier.show_update_progress()
                
                ####
                flsm = 0
                sender = ""
                thema = ""
                ### ROOT LIST
                for atuple in jsonDoc:
                    if (atuple[0] == 'Filmliste' and flsm == 0):
                        ### META
                        ### "Filmliste":["23.04.2020, 18:23","23.04.2020, 16:23","3","MSearch [Vers.: 3.1.129]","3c90946f05eb1e2fa6cf2327cca4f1d4"],
                        flsm +=1
                        # this is the timestamp of this database update
                        value = atuple[1][0]
                        try:
                            fldt = datetime.datetime.strptime(
                                value.strip(), "%d.%m.%Y, %H:%M")
                            flts = int(time.mktime(fldt.timetuple()))
                            self.database.update_status(filmupdate=flts)
                            self.logger.info(
                                'Filmliste dated {}', value.strip())
                        except TypeError:
                            # pylint: disable=line-too-long
                            # SEE: https://forum.kodi.tv/showthread.php?tid=112916&pid=1214507#pid1214507
                            # Wonderful. His name is also Leopold
                            try:
                                flts = int(time.mktime(time.strptime(
                                    value.strip(), "%d.%m.%Y, %H:%M")))
                                self.database.update_status(
                                    filmupdate=flts)
                                self.logger.info(
                                    'Filmliste dated {}', value.strip())
                                # pylint: disable=broad-except
                            except Exception as err:
                                # If the universe hates us...
                                self.logger.debug(
                                    'Could not determine date "{}" of filmliste: {}', value.strip(), err)
                        except ValueError as err:
                            # unparseable date: continue without a timestamp
                            pass
                                        
                    elif (atuple[0] == 'filmliste' and flsm == 1):
                        flsm +=1
                        # VOID - we do not need column names
                        # "Filmliste":["Sender","Thema","Titel","Datum","Zeit","Dauer","Größe [MB]","Beschreibung","Url","Website","Url Untertitel","Url RTMP","Url Klein","Url RTMP Klein","Url HD","Url RTMP HD","DatumL","Url History","Geo","neu"],
                    elif (atuple[0] == 'X'):
                        self._init_record()
                        # behaviour of the update list
                        # empty sender/thema fields mean "same as previous record"
                        if (len(atuple[1][0]) > 0):
                            sender = atuple[1][0]
                        else:
                            atuple[1][0] = sender
                        # same for thema
                        if (len(atuple[1][1]) > 0):
                            thema = atuple[1][1]
                        else:
                            atuple[1][1] = thema
                        ##
                        self._add_value( atuple[1] )
                        self._end_record(records)
                        if self.count % 100 == 0 and self.monitor.abort_requested():
                            # kodi is shutting down. Close all
                            self._update_end(full, 'ABORTED')
                            self.notifier.close_update_progress()
                            return True
                    
            self._update_end(full, 'IDLE')
            self.logger.info('{} records processed',self.count)
            self.logger.info(
                'Import of {} in update cycle {} finished. Duration: {} seconds',
                destfile,
                self.cycle,
                int(time.time() - starttime)
            )
            self.notifier.close_update_progress()
            return True
        except KeyboardInterrupt:
            self._update_end(full, 'ABORTED')
            self.logger.info('Update cycle {} interrupted by user', self.cycle)
            self.notifier.close_update_progress()
            return False
        except DatabaseCorrupted as err:
            self.logger.error('{} on update cycle {}', err, self.cycle)
            self.notifier.close_update_progress()
        except DatabaseLost as err:
            self.logger.error('{} on update cycle {}', err, self.cycle)
            self.notifier.close_update_progress()
        except Exception as err:
            self.logger.error(
                'Error {} while processing {} on update cycle {}', err, destfile, self.cycle)
            self._update_end(full, 'ABORTED')
            self.notifier.close_update_progress()
        return False
    def Import(self, full):
        """
        Import a previously downloaded film list file into the database
        using a streaming JSON parser (ijson).

        Args:
            full(bool): if `True` the full list file is imported,
                otherwise the differential update file

        Returns:
            bool: `True` on successful import or on a cleanly handled
                Kodi shutdown during import; `False` on failure or on
                user interruption (Ctrl-C / KeyboardInterrupt)
        """
        # resolve target file and average record size for this cycle
        (_, _, destfile, avgrecsize) = self._get_update_info(full)
        if not mvutils.file_exists(destfile):
            self.logger.error('File {} does not exists', destfile)
            return False
        # estimate number of records in update file
        records = int(mvutils.file_size(destfile) / avgrecsize)
        if not self.db.ftInit():
            # another process may already be running an update
            self.logger.warn(
                'Failed to initialize update. Maybe a concurrency problem?')
            return False
        try:
            self.logger.info('Starting import of approx. {} records from {}',
                             records, destfile)
            with open(destfile, 'r') as file:
                # stream-parse the file: each film record is an array
                # under the key "X"; list metadata sits under "Filmliste"
                parser = ijson.parse(file)
                flsm = 0  # counts "Filmliste" header items seen so far
                flts = 0  # epoch timestamp of the film list, once parsed
                (self.tot_chn, self.tot_shw,
                 self.tot_mov) = self._update_start(full)
                self.notifier.ShowUpdateProgress()
                for prefix, event, value in parser:
                    if (prefix, event) == ("X", "start_array"):
                        # beginning of one film record
                        self._init_record()
                    elif (prefix, event) == ("X", "end_array"):
                        # record complete - store it
                        self._end_record(records)
                        # poll Kodi's abort flag only every 100 records
                        if self.count % 100 == 0 and self.monitor.abortRequested(
                        ):
                            # kodi is shutting down. Close all
                            self._update_end(full, 'ABORTED')
                            self.notifier.CloseUpdateProgress()
                            return True
                    elif (prefix, event) == ("X.item", "string"):
                        # one field of the current film record
                        if value is not None:
                            self._add_value(value.strip())
                        else:
                            self._add_value("")
                    elif (prefix, event) == ("Filmliste", "start_array"):
                        flsm += 1
                    elif (prefix, event) == ("Filmliste.item", "string"):
                        flsm += 1
                        if flsm == 2 and value is not None:
                            # this is the timestamp of this database update
                            try:
                                fldt = datetime.datetime.strptime(
                                    value.strip(), "%d.%m.%Y, %H:%M")
                                flts = int(time.mktime(fldt.timetuple()))
                                self.db.UpdateStatus(filmupdate=flts)
                                self.logger.info('Filmliste dated {}',
                                                 value.strip())
                            except TypeError:
                                # SEE: https://forum.kodi.tv/showthread.php?tid=112916&pid=1214507#pid1214507
                                # Wonderful. His name is also Leopold
                                try:
                                    flts = int(
                                        time.mktime(
                                            time.strptime(
                                                value.strip(),
                                                "%d.%m.%Y, %H:%M")))
                                    self.db.UpdateStatus(filmupdate=flts)
                                    self.logger.info('Filmliste dated {}',
                                                     value.strip())
                                except Exception as err:
                                    # If the universe hates us...
                                    self.logger.debug(
                                        'Could not determine date "{}" of filmliste: {}',
                                        value.strip(), err)
                            except ValueError as err:
                                # unparsable date - proceed without it
                                pass

            # flush any pending batched inserts before finishing
            self.db.ftFlushInsert()
            self._update_end(full, 'IDLE')
            self.logger.info('Import of {} in update cycle {} finished',
                             destfile, self.cycle)
            self.notifier.CloseUpdateProgress()
            return True
        except KeyboardInterrupt:
            self._update_end(full, 'ABORTED')
            self.logger.info('Update cycle {} interrupted by user', self.cycle)
            self.notifier.CloseUpdateProgress()
            return False
        except DatabaseCorrupted as err:
            self.logger.error('{} on update cycle {}', err, self.cycle)
            self.notifier.CloseUpdateProgress()
        except DatabaseLost as err:
            self.logger.error('{} on update cycle {}', err, self.cycle)
            self.notifier.CloseUpdateProgress()
        except Exception as err:
            self.logger.error(
                'Error {} while processing {} on update cycle {}', err,
                destfile, self.cycle)
            self._update_end(full, 'ABORTED')
            self.notifier.CloseUpdateProgress()
        return False
    def _importFile(self, targetFilename):
        """
        Import a decompressed film list file into the database by
        splitting it into '","X":'-delimited chunks and parsing each
        chunk as a single JSON film record.

        Args:
            targetFilename(str): path of the decompressed film list file

        Returns:
            bool: `False` if the file does not exist; on success the
                method falls off the end (returns `None`)

        Raises:
            Exception: on a user/Kodi requested abort during the import,
                or re-raises any unrecoverable processing error
        """
        #
        if not mvutils.file_exists(targetFilename):
            self.logger.error('File {} does not exists', targetFilename)
            return False
        # estimate number of records in update file
        # (600 bytes is an assumed average record size)
        fileSizeInByte = mvutils.file_size(targetFilename)
        records = int(fileSizeInByte / 600)
        # NOTE(review): if the file is smaller than 600 bytes, records
        # becomes 0 and the percent computation below would divide by
        # zero - in practice film lists are far larger; verify upstream
        self.logger.info('Starting import of approximately {} records from {}',
                         records, targetFilename)
        #
        # pylint: disable=broad-except
        try:
            flsm = 0
            flts = 0  # epoch timestamp of the film list, once parsed
            #
            # the update list omits repeated channel/show values, so the
            # last seen values are carried forward between records
            sender = ""
            thema = ""
            self.notifier.show_update_progress()
            #
            # read the file in 512000 byte chunks, splitting on the
            # record separator ',"X":'
            ufp = UpdateFileParser.UpdateFileParser(self.logger, 512000,
                                                    targetFilename)
            ufp.init()
            fileHeader = ufp.next(',"X":')
            # META
            # {"Filmliste":["30.08.2020, 11:13","30.08.2020, 09:13","3","MSearch [Vers.: 3.1.139]","d93c9794acaf3e482d42c24e513f78a8"],"Filmliste":["Sender","Thema","Titel","Datum","Zeit","Dauer","Größe [MB]","Beschreibung","Url","Website","Url Untertitel","Url RTMP","Url Klein","Url RTMP Klein","Url HD","Url RTMP HD","DatumL","Url History","Geo","neu"]
            # this is the timestamp of this database update
            # value = jsonDoc['Filmliste'][0]
            # extract the fixed-position date string from the header
            value = fileHeader[15:32]
            # self.logger.debug( 'update date ' + value )
            try:
                fldt = datetime.datetime.strptime(value.strip(),
                                                  "%d.%m.%Y, %H:%M")
                flts = int(time.mktime(fldt.timetuple()))
                self.logger.debug('Filmliste dated {}', value.strip())
                self.database.set_status('UPDATING', pFilmupdate=flts)
            except TypeError:
                # pylint: disable=line-too-long
                # SEE: https://forum.kodi.tv/showthread.php?tid=112916&pid=1214507#pid1214507
                # Wonderful. His name is also Leopold
                try:
                    flts = int(
                        time.mktime(
                            time.strptime(value.strip(), "%d.%m.%Y, %H:%M")))
                    self.database.set_status('UPDATING', pFilmupdate=flts)
                    self.logger.debug('Filmliste dated {}', value.strip())
                    # pylint: disable=broad-except
                except Exception as err:
                    # If the universe hates us...
                    self.logger.debug(
                        'Could not determine date "{}" of filmliste: {}',
                        value.strip(), err)
            except ValueError as err:
                # unparsable date - proceed without it
                pass

            #
            # rows buffered for batched database inserts
            recordArray = []
            #
            while (True):
                aPart = ufp.next(',"X":')
                if (len(aPart) == 0):
                    # end of file reached
                    break
                #
                # re-wrap the chunk so it parses as a standalone JSON
                # object: {"X": [ ...record fields... ]}
                aPart = '{"X":' + aPart
                if (not (aPart.endswith("}"))):
                    aPart = aPart + "}"
                #
                jsonDoc = json.loads(aPart)
                jsonDoc = jsonDoc['X']
                self._init_record()
                # behaviour of the update list
                # empty channel field means "same as previous record"
                if (len(jsonDoc[0]) > 0):
                    sender = jsonDoc[0][:32]
                else:
                    jsonDoc[0] = sender
                # same for thema
                if (len(jsonDoc[1]) > 0):
                    thema = jsonDoc[1][:128]
                else:
                    jsonDoc[1] = thema
                #
                self.film['channel'] = sender
                self.film['show'] = thema
                self.film["title"] = jsonDoc[2][:128]
                #
                # convert DD.MM.YYYY (+ optional HH:MM:SS) to ISO format
                if len(jsonDoc[3]) == 10:
                    self.film["aired"] = jsonDoc[3][6:] + '-' + jsonDoc[3][
                        3:5] + '-' + jsonDoc[3][:2]
                    if (len(jsonDoc[4]) == 8):
                        self.film[
                            "aired"] = self.film["aired"] + " " + jsonDoc[4]
                #
                if len(jsonDoc[5]) > 0:
                    self.film["duration"] = jsonDoc[5]
                if len(jsonDoc[7]) > 0:
                    self.film["description"] = jsonDoc[7][:1024]
                self.film["url_video"] = jsonDoc[8]
                self.film["website"] = jsonDoc[9]
                self.film["url_sub"] = jsonDoc[10]
                # SD/HD urls may be stored as suffix patches of the base url
                self.film["url_video_sd"] = self._make_url(jsonDoc[12])
                self.film["url_video_hd"] = self._make_url(jsonDoc[14])
                if len(jsonDoc[16]) > 0:
                    self.film["airedepoch"] = int(jsonDoc[16])
                self.film["geo"] = jsonDoc[18]
                #
                # check if the movie is there
                #
                # stable record id: md5 over channel+show+title+video url
                checkString = sender + thema + self.film["title"] + self.film[
                    'url_video']
                idhash = hashlib.md5(checkString.encode('utf-8')).hexdigest()
                #
                # short show id: first 8 hex chars of the show name's md5
                showid = hashlib.md5(thema.encode('utf-8')).hexdigest()
                showid = showid[:8]
                #
                recordArray.append(
                    (idhash, int(time.time()), self.film['channel'], showid,
                     self.film['show'], self.film["title"],
                     self.film['airedepoch'],
                     mvutils.make_duration(self.film['duration']),
                     self.film['description'], self.film['url_sub'],
                     self.film['url_video'], self.film['url_video_sd'],
                     self.film['url_video_hd']))
                self.count = self.count + 1
                # check
                # flush the buffer every batch-size records
                if self.count % self.settings.getDatabaseImportBatchSize(
                ) == 0:
                    if self.monitor.abort_requested():
                        # kodi is shutting down. Close all
                        self._update_end()
                        self.notifier.close_update_progress()
                        raise Exception('User requested Abort')
                    else:
                        # run insert
                        try:
                            (ai, au) = self.database.import_films(recordArray)
                            self.insertCount += ai
                            self.updateCount += au
                        except Exception as err:
                            self.logger.error('Error in data import: {}', err)
                            self.errorCount = self.errorCount + 1
                        recordArray = []
                        # update status
                        percent = int(self.count * 100 / records)
                        percent = percent if percent <= 100 else 100
                        self.logger.debug(
                            'In progress (%d%%): insert:%d, update:%d' %
                            (percent, self.insertCount, self.updateCount))
                        self.notifier.update_update_progress(
                            percent, self.count, self.insertCount,
                            self.updateCount)
            # flush the remaining partial batch
            if len(recordArray) > 0:
                try:
                    (ai, au) = self.database.import_films(recordArray)
                    self.insertCount += ai
                    self.updateCount += au
                except Exception as err:
                    self.logger.error('Error in data import: {}', err)
                    self.errorCount = self.errorCount + 1
            #
            ufp.close()
            self.notifier.close_update_progress()
            if self.errorCount > 0:
                self.logger.warn('Update finished with error(s)')
        except Exception as err:
            self.logger.error('Error {} while processing {}', err,
                              targetFilename)
            self._update_end()
            self.database.set_status('ABORTED')
            self.notifier.close_update_progress()
            raise
    def GetNewestList(self, full):
        """
        Download the newest film list from the best available mirror
        and decompress it into the local data path.

        Args:
            full(bool): if `True` the full film list is downloaded,
                otherwise the differential update list

        Returns:
            bool: `True` if the list was downloaded and decompressed
                successfully, `False` otherwise
        """
        (url, compfile, destfile, _) = self._get_update_info(full)
        if url is None:
            self.logger.error(
                'No suitable archive extractor available for this system')
            self.notifier.ShowMissingExtractorError()
            return False

        # get mirrorlist
        self.logger.info('Opening {}', url)
        try:
            data = urllib2.urlopen(url).read()
        except urllib2.URLError as err:
            self.logger.error('Failure opening {}', url)
            self.notifier.ShowDownloadError(url, err)
            return False
        root = etree.fromstring(data)
        urls = []
        for server in root.findall('Server'):
            try:
                URL = server.find('URL').text
                Prio = server.find('Prio').text
                # add random jitter so load is spread across mirrors
                # that share the same priority
                urls.append((self._get_update_url(URL),
                             float(Prio) + random.random() * 1.2))
                self.logger.info('Found mirror {} (Priority {})', URL, Prio)
            except AttributeError:
                # incomplete <Server> entry - skip it
                pass
        urls = sorted(urls, key=itemgetter(1))
        urls = [url[0] for url in urls]

        # BUGFIX: guard against an empty mirror list. Previously the
        # code fell through to the decompression step without having
        # downloaded anything and crashed on an unbound 'retval'.
        if not urls:
            self.logger.error('No usable mirrors found in mirror list')
            return False

        # cleanup downloads
        self.logger.info('Cleaning up old downloads...')
        mvutils.file_remove(compfile)
        mvutils.file_remove(destfile)

        # download filmliste
        self.notifier.ShowDownloadProgress()
        lasturl = ''
        for url in urls:
            try:
                lasturl = url
                self.logger.info('Trying to download {} from {}...',
                                 os.path.basename(compfile), url)
                self.notifier.UpdateDownloadProgress(0, url)
                mvutils.url_retrieve(
                    url,
                    filename=compfile,
                    reporthook=self.notifier.HookDownloadProgress,
                    aborthook=self.monitor.abortRequested)
                break
            except urllib2.URLError as err:
                self.logger.error('Failure downloading {}', url)
                self.notifier.CloseDownloadProgress()
                self.notifier.ShowDownloadError(lasturl, err)
                return False
            except ExitRequested as err:
                self.logger.error(
                    'Immediate exit requested. Aborting download of {}', url)
                self.notifier.CloseDownloadProgress()
                self.notifier.ShowDownloadError(lasturl, err)
                return False
            except Exception as err:
                # BUGFIX: log message typo corrected ('writng')
                self.logger.error('Failure writing {}', url)
                self.notifier.CloseDownloadProgress()
                self.notifier.ShowDownloadError(lasturl, err)
                return False

        # decompress filmliste
        # BUGFIX: initialize retval so the final check below cannot
        # raise NameError if no decompressor branch is taken
        retval = -1
        if self.use_xz is True:
            self.logger.info('Trying to decompress xz file...')
            retval = subprocess.call([mvutils.find_xz(), '-d', compfile])
            self.logger.info('Return {}', retval)
        elif upd_can_bz2 is True:
            self.logger.info('Trying to decompress bz2 file...')
            retval = self._decompress_bz2(compfile, destfile)
            self.logger.info('Return {}', retval)
        elif upd_can_gz is True:
            self.logger.info('Trying to decompress gz file...')
            retval = self._decompress_gz(compfile, destfile)
            self.logger.info('Return {}', retval)
        else:
            # should never reach: _get_update_info already verified an
            # extractor is available
            pass

        self.notifier.CloseDownloadProgress()
        return retval == 0 and mvutils.file_exists(destfile)
# --- (scraper artifact removed: "示例#19" / "0") ---
    def _download(self, url, compressedFilename, targetFilename):
        """
        Download a compressed film list and decompress it.

        Args:
            url(str): download url of the compressed film list
            compressedFilename(str): local path for the downloaded
                compressed file
            targetFilename(str): local path for the decompressed result

        Returns:
            bool: `True` if download and decompression succeeded and
                the target file exists, `False` otherwise

        Raises:
            URLError: on download failure
            ExitRequested: if Kodi requests immediate shutdown
            Exception: on any other download or decompression error
        """
        # cleanup downloads
        start = time.time()
        self.logger.debug('Cleaning up old downloads...')
        mvutils.file_remove(compressedFilename)
        mvutils.file_remove(targetFilename)
        #
        # download filmliste
        self.notifier.show_download_progress()

        # pylint: disable=broad-except
        try:
            self.logger.debug('Trying to download {} from {}...',
                              os.path.basename(compressedFilename), url)
            self.notifier.update_download_progress(0, url)
            mvutils.url_retrieve(
                url,
                filename=compressedFilename,
                reporthook=self.notifier.hook_download_progress,
                aborthook=self.monitor.abort_requested)
            self.logger.debug('downloaded {} in {} sec', compressedFilename,
                              (time.time() - start))
        except URLError as err:
            self.logger.error('Failure downloading {} - {}', url, err)
            self.notifier.close_download_progress()
            self.notifier.show_download_error(url, err)
            raise
        except ExitRequested as err:
            self.logger.error(
                'Immediate exit requested. Aborting download of {}', url)
            self.notifier.close_download_progress()
            self.notifier.show_download_error(url, err)
            raise
        except Exception as err:
            self.logger.error('Failure writing {}', url)
            self.notifier.close_download_progress()
            self.notifier.show_download_error(url, err)
            raise
        # decompress filmliste
        start = time.time()
        # BUGFIX: initialize retval so the final check below cannot
        # raise NameError if no decompressor branch is taken
        retval = -1
        try:
            if self.use_xz is True:
                self.logger.debug('Trying to decompress xz file...')
                retval = subprocess.call(
                    [mvutils.find_xz(), '-d', compressedFilename])
                self.logger.debug('decompress xz {} in {} sec', retval,
                                  (time.time() - start))
            elif UPD_CAN_BZ2 is True:
                self.logger.debug('Trying to decompress bz2 file...')
                retval = self._decompress_bz2(compressedFilename,
                                              targetFilename)
                self.logger.debug('decompress bz2 {} in {} sec', retval,
                                  (time.time() - start))
            elif UPD_CAN_GZ is True:
                self.logger.debug('Trying to decompress gz file...')
                retval = self._decompress_gz(compressedFilename,
                                             targetFilename)
                self.logger.debug('decompress gz {} in {} sec', retval,
                                  (time.time() - start))
            else:
                # should never reach
                pass
        except Exception as err:
            self.logger.error('Failure decompress {}', err)
            self.notifier.close_download_progress()
            self.notifier.show_download_error('decompress failed', err)
            raise

        self.notifier.close_download_progress()
        return retval == 0 and mvutils.file_exists(targetFilename)
# --- (scraper artifact removed: "示例#20" / "0") ---
    def init(self, reset=False, convert=False, failedCount=0):
        """
        Startup of the database system

        Args:
            reset(bool, optional): if `True` the database
                will be cleaned up and recreated. Default
                is `False`

            convert(bool, optional): if `True` the database
                will be converted in case it is older than
                the supported version. If `False` a UI message
                will be displayed to the user informing that
                the database will be converted. Default is
                `False`

            failedCount(int, optional): internal retry counter
                incremented on each recursive re-initialization
                after a database error. Default is `0`

        Returns:
            bool: `True` once the database connection is established
                and verified

        Raises:
            sqlite3.DatabaseError: if the database could not be
                restored after more than 3 reset attempts
        """
        self.logger.info(
            'Using SQLite version {}, python library sqlite3 version {}',
            sqlite3.sqlite_version, sqlite3.version)
        if not mvutils.dir_exists(self.settings.datapath):
            os.mkdir(self.settings.datapath)

        # remove old versions
        mvutils.file_remove(
            os.path.join(self.settings.datapath, 'filmliste-v1.db'))

        if reset is True or not mvutils.file_exists(self.dbfile):
            self.logger.info(
                '===== RESET: Database will be deleted and regenerated =====')
            # close any open connection before removing the file
            self.exit()
            mvutils.file_remove(self.dbfile)
            if self._handle_update_substitution():
                # a prepared replacement database was dropped in place
                self.conn = sqlite3.connect(self.dbfile, timeout=60)
            else:
                # create a fresh, empty database schema
                self.conn = sqlite3.connect(self.dbfile, timeout=60)
                self._handle_database_initialization()
        else:
            try:
                if self._handle_update_substitution():
                    self._handle_not_update_to_date_dbfile()
                self.conn = sqlite3.connect(self.dbfile, timeout=60)
            except sqlite3.DatabaseError as err:
                self.logger.error(
                    'Error while opening database: {}. trying to fully reset the Database...',
                    err)
                # recurse once with reset=True to rebuild from scratch
                return self.init(reset=True, convert=convert)
        try:

            # 3x speed-up, check mode 'WAL'
            self.conn.execute('pragma journal_mode=off')
            # check if DB is ready or broken
            # (probe query against the status table)
            cursor = self.conn.cursor()
            cursor.execute('SELECT * FROM `status` LIMIT 1')
            rs = cursor.fetchall()
            ##
            self.logger.info('Current DB Status Last modified {} ({})',
                             time.ctime(rs[0][0]), rs[0][0])
            self.logger.info('Current DB Status Last lastupdate {} ({})',
                             time.ctime(rs[0][2]), rs[0][2])
            self.logger.info('Current DB Status Last filmupdate {} ({})',
                             time.ctime(rs[0][3]), rs[0][3])
            self.logger.info('Current DB Status Last fullupdate {}', rs[0][4])
            ##
            cursor.close()
        except sqlite3.DatabaseError as err:
            # corrupted database: retry with a full reset, but give up
            # after 3 failed attempts to avoid endless recursion
            failedCount += 1
            if (failedCount > 3):
                self.logger.error(
                    'Failed to restore database, please uninstall plugin, delete user profile and reinstall'
                )
                raise err
            self.logger.error(
                'Error on first query: {}. trying to fully reset the Database...trying {} times',
                err, failedCount)
            return self.init(reset=True,
                             convert=convert,
                             failedCount=failedCount)
        # that is a bit dangerous :-) but faaaast
        self.conn.execute('pragma synchronous=off')
        self.conn.create_function('UNIX_TIMESTAMP', 0, get_unix_timestamp)
        self.conn.create_aggregate('GROUP_CONCAT', 1, GroupConcatClass)
        return True