Example #1
 def _download( self, src, dst, dst2 ):
     if not xbmc.abortRequested:
         tmpname = xbmc.translatePath('special://profile/addon_data/%s/temp/%s' % ( addonname , xbmc.getCacheThumbName(src) ))
         lw.log( ['the tmpname is ' + tmpname] )
         if xbmcvfs.exists(tmpname):
             success, loglines = deleteFile( tmpname )
             lw.log( loglines )
         success, loglines, urldata = imgURL.Get( src, params=self.params )
         lw.log( loglines )
         if success:
             success, loglines = writeFile( urldata, tmpname )
             lw.log( loglines )
         if not success:
             return False
         if xbmcvfs.Stat( tmpname ).st_size() > 999:
             image_ext = getImageType( tmpname )
             if not xbmcvfs.exists( dst + image_ext ):
                 lw.log( ['copying %s to %s' % (tmpname, dst2 + image_ext)] )
                 xbmcvfs.copy( tmpname, dst2 + image_ext )
                 lw.log( ['moving %s to %s' % (tmpname, dst + image_ext)] )
                 xbmcvfs.rename( tmpname, dst + image_ext )
                 return True
             else:
                 lw.log( ['image already exists, deleting temporary file'] )
                 success, loglines = deleteFile( tmpname )
                 lw.log( loglines )
                 return False
         else:
             success, loglines = deleteFile( tmpname )
             lw.log( loglines )
             return False
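
All of these examples call a writeFile helper that returns a (success, loglines) tuple rather than raising on failure. The helper itself is not shown on this page; the sketch below is an assumption inferred purely from how it is called in the examples (data, destination path, optional write mode), not the project's actual implementation.

import traceback

# Hypothetical writeFile helper: the signature and return values are
# inferred from the calls in these examples, not taken from the real code.
def writeFile(data, filepath, wtype='w'):
    loglines = []
    try:
        # 'w' writes text, 'wb' writes binary payloads such as images or zips
        with open(filepath, wtype) as f:
            f.write(data)
    except (IOError, OSError):
        loglines.append('unable to write file ' + filepath)
        loglines.append(traceback.format_exc())
        return False, loglines
    loglines.append('successfully wrote file ' + filepath)
    return True, loglines
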
Example #2
File: sat.py Project: pkscout/exautomation
 def Download(self):
     loglines = []
     url_params = {}
     dlist = []
     if self.REMOTEFILTER != 'all':
         url_params['fromDate'] = self.REMOTEFILTER
         loglines.append('getting SAT files newer than ' +
                         self.REMOTEFILTER)
     else:
         loglines.append('getting all SAT files')
     success, uloglines, json_data = self.JSONURL.Post(self.LISTURL,
                                                       params=url_params,
                                                       data=self.PAYLOAD)
     if self.DEBUG:
         loglines.extend(uloglines)
     if success:
         if json_data['files'] is None:
             loglines.append('no SAT files to download today')
             return False, loglines
         else:
             filequeue = []
             for file in json_data['files']:
                 filequeue.append(file['fileName'])
             if not filequeue:
                 loglines.append('No SAT files to download today')
                 return False, loglines
             for file in filequeue:
                 url_params = {}
                 url_params['filename'] = file
                 loglines.append('getting URL for file ' + file)
                 success, uloglines, file = self.JSONURL.Post(
                     self.FILEURL, params=url_params, data=self.PAYLOAD)
                 if self.DEBUG:
                     loglines.extend(uloglines)
                 try:
                     loglines.append('retrieving file from ' +
                                     file['fileUrl'])
                 except TypeError:
                     loglines.append('file is not json data, aborting')
                     return False, loglines
                 if '.zip' in file['fileName']:
                     success, uloglines, urldata = self.ZIPURL.Get(
                         file['fileUrl'])
                     writetype = 'wb'
                 else:
                     success, uloglines, urldata = self.TEXTURL.Get(
                         file['fileUrl'])
                     writetype = 'w'
                 if self.DEBUG:
                     loglines.extend(uloglines)
                 loglines.append('saving file ' + file['fileName'])
                 success, wloglines = writeFile(
                     urldata,
                     os.path.join(self.LOCALDOWNLOADPATH, file['fileName']),
                     writetype)
                 if self.DEBUG:
                     loglines.extend(wloglines)
                 dlist.append(file['fileName'])
             return dlist, loglines
     return False, loglines
Example #3
 def _setPID(self):
     basetime = time.time()
     while os.path.isfile(pidfile):
         if time.time() - basetime > config.Get('aborttime'):
             err_str = 'taking too long for previous process to close - aborting attempt'
             lw.log([err_str])
             sys.exit(err_str)
     lw.log(['setting PID file'])
     success, loglines = writeFile(pid, pidfile, wtype='w')
     lw.log(loglines)
Example #4
 def _setPID( self ):
     basetime = time.time()
     while os.path.isfile( pidfile ):
         time.sleep( random.randint( 1, 3 ) )
         if time.time() - basetime > config.Get( 'aborttime' ):
             err_str = 'taking too long for previous process to finish - aborting attempt to run automation'
             lw.log( [err_str], 'info' )
             sys.exit( err_str )
     lw.log( ['setting PID file'] )
     success, loglines = writeFile( pid, pidfile, wtype='w' )
     lw.log( loglines )        
Example #5
 def _generate_hashlist( self ):
     hashmap = self._get_artists_hashmap()
     hashmap_str = ''
     for key, value in hashmap.iteritems():
         hashmap_str = hashmap_str + value + '\t' + key + '\n'
     success, log_line = writeFile( hashmap_str, self.HASHLISTFILE )
     if success:
         lw.log( log_line )
         message = smartUTF8( language(30311) )
     else:
         lw.log( ['unable to write hash list file out to disk'] )
         message = smartUTF8( language(30312) )
Example #6
 def _create_speedfan_log(self):
     log_name = os.path.join(
         self.DATAROOT,
         'SFLog%s.csv' % datetime.datetime.now().strftime("%Y%m%d"))
     header = 'Seconds'
     data_row = '12345'
     for mapping in self.MAPPINGS:
         lw.log(['trying vcgencmd ' + mapping[1]])
         output = os.popen("/opt/vc/bin/vcgencmd %s" %
                           mapping[1]).readline().strip()
         lw.log(['got %s from vcgencmd' % output])
         num_output = replaceWords(output, self.STRIPTEXT)
         header = header + '\t' + mapping[0]
         data_row = data_row + '\t' + num_output
     file_text = header
     for x in range(0, self.MINLINES):
         file_text = file_text + '\n' + data_row
     success, loglines = writeFile(file_text, log_name, wtype='w')
     lw.log(loglines)
Example #7
 def _write_nfofile( self, nfotemplate, ep_info, newnfoname ):
     newnfopath = os.path.join( self.FOLDERPATH, newnfoname )
     replacement_dic = {
         '[SEASON]': ep_info['season'],
         '[EPISODE]' : ep_info['episode'],
         '[TITLE]' : ep_info['title'],
         '[DESC]' : ep_info['description'],
         '[AIRDATE]' : ep_info["airdate"]}
     exists, loglines = checkPath( newnfopath, create=False )
     lw.log( loglines )
     if exists:
         success, loglines = deleteFile( newnfopath )
         lw.log( loglines )
     loglines, fin = readFile( nfotemplate )
     lw.log( loglines )
     if fin:
         newnfo = replaceWords( fin, replacement_dic )
         success, loglines = writeFile( newnfo, newnfopath )
         lw.log( loglines )
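
Examples #6 and #7 also depend on a replaceWords helper that is not shown here. A minimal sketch, assuming it simply applies every search-to-replacement pair in the supplied dictionary to the source string:

# Hypothetical replaceWords helper, inferred only from its usage above.
def replaceWords(original, replacement_dic):
    for search, replacement in replacement_dic.items():
        # replace each occurrence of the search text with its substitute
        original = original.replace(search, replacement)
    return original
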
Example #8
                lw.log( ['%s %s to %s' % (self.MIGRATETYPE, folder, new_folder)] )
                for file in files:
                    old_file = os.path.join(old_folder, file)
                    new_file = os.path.join(new_folder, file)
                    if self.MIGRATETYPE == 'move':
                        xbmcvfs.copy( old_file, new_file  )
                        xbmcvfs.delete( old_file )
                    elif self.MIGRATETYPE == 'copy':                
                        xbmcvfs.copy( old_file, new_file )
                    else:
                        test_str = test_str + old_file + ' to ' + new_file + '\n'
                if self.MIGRATETYPE == 'move':
                    xbmcvfs.rmdir( old_folder )
                count += 1
        if self.MIGRATETYPE == 'test':
            success, loglines = writeFile( test_str, os.path.join( self.MIGRATEFOLDER, '_migrationtest.txt' ) )
            lw.log( loglines )
        pDialog.close()


    def _remove_trailing_dot( self, thename ):
        if thename[-1] == '.' and len( thename ) > 1 and self.ENDREPLACE != '.':
            return self._remove_trailing_dot( thename[:-1] + self.ENDREPLACE )
        else:
            return thename


if ( __name__ == "__main__" ):
    lw.log( ['script version %s started' % addonversion], xbmc.LOGNOTICE )
    lw.log( ['debug logging set to %s' % logdebug], xbmc.LOGNOTICE )
    Main()
Example #9
 def _update_check_file( self, version, message ):
     success, loglines = writeFile( version, self.CHECKFILE )
     lw.log( loglines )
     if success:
         lw.log( [message] )
Example #10
     return
 for folder in folders:
     if type == 'cache':
         old_folder = os.path.join( old_loc, folder )
         new_folder = os.path.join( new_loc, folder )
     elif type == 'local':
         old_folder = os.path.join( old_loc, smartUTF8(folder).decode('utf-8'), self.FANARTFOLDER )
         new_folder = os.path.join( new_loc, itemHash(folder) )
     try:
         dirs, old_files = xbmcvfs.listdir( old_folder )
     except Exception as e:
         lw.log( ['unexpected error while getting directory list', e] )
         old_files = []
     exclude_path = os.path.join( old_folder, '_exclusions.nfo' )
     if old_files and type == 'cache' and not xbmcvfs.exists(exclude_path):
         success, loglines = writeFile( '', exclude_path )
         lw.log( loglines )
     for old_file in old_files:
         if old_file.endswith( '.nfo' ) and not old_file == '_exclusions.nfo':
             exists, loglines = checkPath( new_folder )
             lw.log( loglines )
             new_file = old_file.strip('_')
             if new_file == 'artistimagesfanarttv.nfo':
                 new_file = 'fanarttvartistimages.nfo'
             elif new_file == 'artistimageshtbackdrops.nfo':
                 new_file = 'htbackdropsartistimages.nfo'
             elif new_file == 'artistimageslastfm.nfo':
                 new_file = 'lastfmartistimages.nfo'
             elif new_file == 'artistbio.nfo':
                 new_file = 'lastfmartistbio.nfo'
             elif new_file == 'artistsalbums.nfo':