def package_extracted(self, pypack):
    """Post-extraction hook: once no RAR parts remain in the extraction
    output folder, hand the folder to Finished().

    :param pypack: package object whose archives were just extracted
    """
    x = False  # flag: True when archive parts are still present
    download_folder = self.pyload.config['general']['download_folder']
    extract_destination = self.pyload.config.getPlugin(
        "ExtractArchive", "destination")
    extract_subfolder = self.pyload.config.getPlugin(
        "ExtractArchive", "subfolder")
    # determine output folder
    folder = save_join(download_folder, pypack.folder,
                       extract_destination, "")  #: force trailing slash
    if extract_subfolder is True:
        folder = save_join(folder, pypack.folder)
    if self.get_config('delete_extracted') is True:
        self.log_debug("MKV-Checkup (package_extracted)")
        # NOTE(review): the unconditional break leaves os.walk() after the
        # first directory, so only the top level of `folder` is inspected
        for root, dirs, files in os.walk(folder):
            for name in files:
                if name.endswith((".rar", ".r0", ".r12")):
                    self.log_debug("Hier sind noch Archive")
                    x = True
                    break
            break
        if x == False:
            self.log_debug("Hier sind keine Archive")
            self.Finished(folder)
    else:
        # deletion check disabled -> process the folder unconditionally
        # NOTE(review): else-pairing reconstructed from whitespace-mangled
        # source (matches the sibling package_finished variant) -- confirm
        self.Finished(folder)
def downloads():
    """Render the downloads overview page.

    Lists the download folder's first-level sub-directories (with the
    files inside each) and loose top-level files.

    :return: rendered 'downloads.html' response, or the base error page
             when the download directory does not exist
    """
    root = PYLOAD.getConfigValue("general", "download_folder")
    if not isdir(root):
        return base([_('Download directory not found.')])
    data = {'folder': [], 'files': []}
    items = listdir(fs_encode(root))
    for item in sorted([fs_decode(x) for x in items]):
        if isdir(save_join(root, item)):
            folder = {'name': item, 'path': item, 'files': []}
            files = listdir(save_join(root, item))
            for file in sorted([fs_decode(x) for x in files]):
                try:
                    if isfile(save_join(root, item, file)):
                        folder['files'].append(file)
                except Exception:
                    # best-effort: skip entries that cannot be stat'ed or
                    # decoded (was a bare `except:` which also swallowed
                    # KeyboardInterrupt/SystemExit)
                    pass
            data['folder'].append(folder)
        elif isfile(join(root, item)):
            data['files'].append(item)
    return render_to_response('downloads.html', {'files': data}, [pre_processor])
def package_extracted(self, pypack):
    """Post-extraction hook: once no RAR parts remain in the extraction
    output folder, hand the folder to Finished().

    :param pypack: package object whose archives were just extracted
    """
    x = False  # flag: True when archive parts are still present
    download_folder = self.pyload.config['general']['download_folder']
    extract_destination = self.pyload.config.getPlugin("ExtractArchive", "destination")
    extract_subfolder = self.pyload.config.getPlugin("ExtractArchive", "subfolder")
    # determine output folder
    folder = save_join(download_folder, pypack.folder, extract_destination, "")  #: force trailing slash
    if extract_subfolder is True:
        folder = save_join(folder, pypack.folder)
    if self.get_config('delete_extracted') is True:
        self.log_debug("MKV-Checkup (package_extracted)")
        # NOTE(review): the unconditional break leaves os.walk() after the
        # first directory, so only the top level of `folder` is inspected
        for root, dirs, files in os.walk(folder):
            for name in files:
                if name.endswith((".rar", ".r0", ".r12")):
                    self.log_debug("Hier sind noch Archive")
                    x = True
                    break
            break
        if x == False:
            self.log_debug("Hier sind keine Archive")
            self.Finished(folder)
    else:
        # deletion check disabled -> process the folder unconditionally
        # NOTE(review): else-pairing reconstructed from whitespace-mangled
        # source -- confirm against upstream
        self.Finished(folder)
def list(self, password=None):
    """Return the paths of all files contained in the archive.

    Runs the external unrar tool in bare-listing mode ("vb" keeps full
    paths, "lb" lists names only) and joins each entry with self.out.

    :param password: optional archive password
    :return: list of paths (duplicates removed via an intermediate set)
    :raises ArchiveError: if the archive file cannot be opened
    """
    command = "vb" if self.fullpath else "lb"
    p = self.call_cmd(command, "-v", fs_encode(self.filename), password=password)
    out, err = p.communicate()
    if "Cannot open" in err:
        raise ArchiveError(_("Cannot open file"))
    if err.strip():  #: only log error at this point
        self.manager.logError(err.strip())
    result = set()
    if not self.fullpath and self.VERSION.startswith('5'):
        # NOTE: Unrar 5 always list full path
        # so strip each entry down to its basename before joining
        for f in fs_decode(out).splitlines():
            f = save_join(self.out, os.path.basename(f.strip()))
            if os.path.isfile(f):
                result.add(save_join(self.out, os.path.basename(f)))
    else:
        for f in fs_decode(out).splitlines():
            f = f.strip()
            result.add(save_join(self.out, f))
    return list(result)
def packageFinished(self, pypack):
    """Verify downloaded files of a finished package against checksum
    container files (e.g. .sfv/.md5) found among the package's links.

    :param pypack: finished package object
    """
    download_folder = save_join(self.config['general']['download_folder'], pypack.folder, "")
    for link in pypack.getChildren().itervalues():
        # extension (without the dot) decides whether this is a hash file
        file_type = splitext(link["name"])[1][1:].lower()
        #self.logDebug(link, file_type)
        if file_type not in self.formats:
            continue
        hash_file = fs_encode(save_join(download_folder, link["name"]))
        if not isfile(hash_file):
            self.logWarning("File not found: %s" % link["name"])
            continue
        with open(hash_file) as f:
            text = f.read()
        # each match yields a (name, hash) pair for one listed file
        for m in re.finditer(self.regexps.get(file_type, self.regexps['default']), text):
            data = m.groupdict()
            self.logDebug(link["name"], data)
            local_file = fs_encode(save_join(download_folder, data["name"]))
            # map container extension to hash algorithm (fallback: extension itself)
            algorithm = self.methods.get(file_type, file_type)
            checksum = computeChecksum(local_file, algorithm)
            if checksum == data["hash"]:
                self.logInfo('File integrity of "%s" verified by %s checksum (%s).' %
                             (data["name"], algorithm, checksum))
            else:
                self.logWarning("%s checksum for file %s does not match (%s != %s)" %
                                (algorithm, data["name"], checksum, data["hash"]))
def process(self, pyfile):
    """Download the best-quality media stream found on the page.

    Scrapes the page for addMediaStream() calls, picks the highest
    quality variant and downloads it over HTTP or RTMP.

    :param pyfile: file object describing the page URL
    """
    site = self.load(pyfile.url)
    avail_videos = re.findall(r"""mediaCollection.addMediaStream\(0, ([0-9]*), "([^\"]*)", "([^\"]*)", "[^\"]*"\);""", site)
    avail_videos.sort(key=lambda videodesc: int(videodesc[0]), reverse=True)  # The higher the number, the better the quality
    quality, url, playpath = avail_videos[0]
    pyfile.name = re.search(r"<h1>([^<]*)</h1>", site).group(1)
    if url.startswith("http"):
        # Best quality is available over HTTP. Very rare.
        self.download(url)
    else:
        pyfile.setStatus("downloading")
        download_folder = self.config['general']['download_folder']
        location = save_join(download_folder, pyfile.package().folder)
        if not os.path.exists(location):
            # octal permission string from config, e.g. "0755"
            os.makedirs(location, int(self.core.config["permission"]["folder"], 8))
            if self.core.config["permission"]["change_dl"] and os.name != "nt":
                try:
                    uid = getpwnam(self.config["permission"]["user"])[2]
                    gid = getgrnam(self.config["permission"]["group"])[2]
                    chown(location, uid, gid)
                except Exception, e:
                    self.log.warning(_("Setting User and Group failed: %s") % str(e))
        # keep the playpath's extension for the local file name
        output_file = save_join(location, save_path(pyfile.name)) + os.path.splitext(playpath)[1]
        RTMP.download_rtmp_stream(url, playpath=playpath, output_file=output_file)
def package_extracted(self, pypack):
    """FileBot hook: hand the extraction output folder to Finished()
    when it no longer contains any RAR archive parts."""
    download_folder = self.config['general']['download_folder']
    extract_destination = self.core.api.getConfigValue("ExtractArchive", "destination", section='plugin')
    extract_subfolder = self.core.api.getConfigValue("ExtractArchive", "subfolder", section='plugin')
    # build the output folder; the empty last component forces a trailing slash
    folder = save_join(download_folder, pypack.folder, extract_destination, "")
    if extract_subfolder:
        folder = save_join(folder, pypack.folder)
    self.core.log.debug("FileBot-Hook: MKV-Checkup (package_extracted)")
    archives_left = False
    # only the first directory level is inspected (break after one walk step)
    for top, subdirs, filenames in os.walk(folder):
        for filename in filenames:
            if filename.endswith((".rar", ".r0", ".r12")):
                self.core.log.debug("Hier sind noch Archive")
                archives_left = True
                break
        break
    if not archives_left:
        self.core.log.debug("Hier sind keine Archive")
        self.Finished(folder)
def downloads():
    """Render the downloads overview page: first-level folders (with
    their files) and loose top-level files of the download directory.

    :return: rendered 'downloads.html' response, or the base error page
             when the download directory is missing
    """
    root = PYLOAD.getConfigValue("general", "download_folder")
    if not isdir(root):
        return base([_('Download directory not found.')])
    data = {
        'folder': [],
        'files': []
    }
    items = listdir(fs_encode(root))
    for item in sorted([fs_decode(x) for x in items]):
        if isdir(save_join(root, item)):
            folder = {
                'name': item,
                'path': item,
                'files': []
            }
            files = listdir(save_join(root, item))
            for file in sorted([fs_decode(x) for x in files]):
                try:
                    if isfile(save_join(root, item, file)):
                        folder['files'].append(file)
                except:
                    # NOTE(review): bare except silently skips undecodable
                    # or unstat-able entries -- consider `except Exception`
                    pass
            data['folder'].append(folder)
        elif isfile(join(root, item)):
            data['files'].append(item)
    return render_to_response('downloads.html', {'files': data}, [pre_processor])
def downloadFinished(self, pyfile):
    """Invoke every configured 'download_finished' script with the
    finished file's id, name, local path, plugin name and URL."""
    general = self.config['general']
    if general['folder_per_package']:
        target_folder = save_join(general['download_folder'], pyfile.package().folder)
    else:
        target_folder = general['download_folder']
    # local path of the finished file (same for every script invocation)
    local_path = save_join(target_folder, pyfile.name)
    for script in self.scripts['download_finished']:
        self.callScript(script, pyfile.id, pyfile.name, local_path,
                        pyfile.pluginname, pyfile.url)
def packageFinished(self, pack): files = {} fid_dict = {} for fid, data in pack.getChildren().iteritems(): if re.search("\.\d{3}$", data['name']): if data['name'][:-4] not in files: files[data['name'][:-4]] = [] files[data['name'][:-4]].append(data['name']) files[data['name'][:-4]].sort() fid_dict[data['name']] = fid download_folder = self.config['general']['download_folder'] if self.config['general']['folder_per_package']: download_folder = save_join(download_folder, pack.folder) for name, file_list in files.iteritems(): self.logInfo(_("Starting merging of"), name) with open(save_join(download_folder, name), "wb") as final_file: for splitted_file in file_list: self.logDebug("Merging part", splitted_file) pyfile = self.core.files.getFile(fid_dict[splitted_file]) pyfile.setStatus("processing") try: with open(save_join(download_folder, splitted_file), "rb") as s_file: size_written = 0 s_file_size = int( os.path.getsize( os.path.join(download_folder, splitted_file))) while True: f_buffer = s_file.read(self.BUFFER_SIZE) if f_buffer: final_file.write(f_buffer) size_written += self.BUFFER_SIZE pyfile.setProgress( (size_written * 100) / s_file_size) else: break self.logDebug("Finished merging part", splitted_file) except Exception, e: traceback.print_exc() finally: pyfile.setProgress(100) pyfile.setStatus("finished") pyfile.release()
def loadEpisodesOnDisk(self, showDir):
    """Recursively scan *showDir* and record the (season, episode)
    numbers of episode files already present on disk.

    Results are stored as keys of self.episodesOnDisk (value 1).

    :param showDir: directory of a show to scan
    """
    # store season and episode number of already available episodes
    try:
        for entry in os.listdir(showDir):
            if os.path.isfile(save_join(showDir, entry)):
                seasonNum, episodeNum = extractSeasonAndEpisodeNum(entry)
                if seasonNum > -1 and episodeNum > -1:
                    self.episodesOnDisk[(seasonNum, episodeNum)] = 1
            else:
                # non-files are assumed to be season sub-directories
                self.loadEpisodesOnDisk(save_join(showDir, entry))
    except Exception, e:
        # best-effort scan: unreadable/missing dirs are ignored
        # NOTE(review): this also hides parse errors -- consider OSError
        pass
def run_tesser(self, subset=False, digits=True, lowercase=True, uppercase=True):
    """Prepare the temporary image/text files used for a tesseract OCR
    run; bails out (returns None) when they cannot be created.

    :param subset: restrict tesseract's character set (see flags below)
    :param digits: allow digits in the recognized text
    :param lowercase: allow lowercase letters
    :param uppercase: allow uppercase letters
    """
    #tmpTif = tempfile.NamedTemporaryFile(suffix=".tif")
    try:
        # open+close just creates the empty files tesseract will use
        tmpTif = open(save_join("tmp", "tmpTif_%s.tif" % self.__name__), "wb")
        tmpTif.close()
        #tmpTxt = tempfile.NamedTemporaryFile(suffix=".txt")
        tmpTxt = open(save_join("tmp", "tmpTxt_%s.txt" % self.__name__), "wb")
        tmpTxt.close()
    except IOError, e:
        self.logError(e)
        return
def packageFinished(self, pack): files = {} fid_dict = {} for fid, data in pack.getChildren().iteritems(): if re.search("\.\d{3}$", data['name']): if data['name'][:-4] not in files: files[data['name'][:-4]] = [] files[data['name'][:-4]].append(data['name']) files[data['name'][:-4]].sort() fid_dict[data['name']] = fid download_folder = self.config['general']['download_folder'] if self.config['general']['folder_per_package']: download_folder = save_join(download_folder, pack.folder) for name, file_list in files.iteritems(): self.logInfo(_("Starting merging of"), name) with open(save_join(download_folder, name), "wb") as final_file: for splitted_file in file_list: self.logDebug("Merging part", splitted_file) pyfile = self.core.files.getFile(fid_dict[splitted_file]) pyfile.setStatus("processing") try: with open(save_join(download_folder, splitted_file), "rb") as s_file: size_written = 0 s_file_size = int(os.path.getsize(os.path.join(download_folder, splitted_file))) while True: f_buffer = s_file.read(self.BUFFER_SIZE) if f_buffer: final_file.write(f_buffer) size_written += self.BUFFER_SIZE pyfile.setProgress((size_written * 100) / s_file_size) else: break self.logDebug("Finished merging part", splitted_file) except Exception, e: print_exc() finally: pyfile.setProgress(100) pyfile.setStatus("finished") pyfile.release()
def decrypt(self, pyfile):
    """Turn a playlist or channel URL into pyLoad packages of videos.

    :param pyfile: file object whose url matches self.__pattern__
    """
    m = re.match(self.__pattern__, pyfile.url)
    m_id = m.group('ID')
    m_type = m.group('TYPE')
    if m_type == "playlist":
        self.logDebug("Url recognized as Playlist")
        p_info = self.getPlaylistInfo(m_id)
        # single playlist: tuple of (id, name, owner); None when lookup failed
        playlists = [(m_id, ) + p_info] if p_info else None
    else:
        self.logDebug("Url recognized as Channel")
        playlists = self.getPlaylists(m_id)
        self.logDebug("%s playlist\s found on channel \"%s\"" % (len(playlists), m_id))
    if not playlists:
        self.fail(_("No playlist available"))
    for p_id, p_name, p_owner in playlists:
        p_videos = self.getVideos(p_id)
        p_folder = save_join(self.config['general']['download_folder'], p_owner, p_name)
        self.logDebug("%s video\s found on playlist \"%s\"" % (len(p_videos), p_name))
        self.packages.append(
            (p_name, p_videos, p_folder))  #: folder is NOT recognized by pyload 0.4.9!
def decrypt(self, pyfile):
    """Convert a local CCF container into a DLC container via the
    jdownloader web service and queue the resulting DLC file.

    :param pyfile: file object whose url is the local CCF file path
    """
    fs_filename = fs_encode(pyfile.url.strip())
    opener = urllib2.build_opener(
        MultipartPostHandler.MultipartPostHandler)
    # upload the CCF container; the service answers with DLC markup
    dlc_content = opener.open(
        'http://service.jdownloader.net/dlcrypt/getDLC.php', {
            'src': "ccf",
            'filename': "test.ccf",
            'upload': open(fs_filename, "rb")
        }).read()
    download_folder = self.config['general']['download_folder']
    dlc_file = save_join(download_folder, "tmp_%s.dlc" % pyfile.name)
    try:
        dlc = re.search(r'<dlc>(.+)</dlc>', dlc_content, re.S).group(1).decode('base64')
    except AttributeError:
        # no <dlc> element in the response
        self.fail(_("Container is corrupted"))
    with open(dlc_file, "w") as tempdlc:
        tempdlc.write(dlc)
    # hand the temporary DLC file back to pyLoad for decryption
    self.urls = [dlc_file]
def download(self, url, get={}, post={}, ref=True, cookies=True, disposition=False):
    """Downloads the content at url to download folder

    :param url:
    :param get:
    :param post:
    :param ref:
    :param cookies:
    :param disposition: if True and server provides content-disposition header\
    the filename will be changed if needed
    :return: The location where the file was saved
    """
    # NOTE(review): get/post are mutable default arguments; harmless only
    # if never mutated -- confirm in the full implementation
    self.checkForSameFiles()
    self.pyfile.setStatus("downloading")
    download_folder = self.config['general']['download_folder']
    location = save_join(download_folder, self.pyfile.package().folder)
    if not exists(location):
        # octal permission string from config, e.g. "0755"
        makedirs(location, int(self.core.config["permission"]["folder"], 8))
        if self.core.config["permission"]["change_dl"] and os.name != "nt":
            try:
                uid = getpwnam(self.config["permission"]["user"])[2]
                gid = getgrnam(self.config["permission"]["group"])[2]
                chown(location, uid, gid)
            except Exception, e:
                self.log.warning(_("Setting User and Group failed: %s") % str(e))
    # NOTE(review): the visible block ends after directory preparation;
    # the actual HTTP transfer presumably follows in the full file
def _copyChunks(self): init = fs_encode(self.info.getChunkName(0)) #initial chunk name if self.info.getCount() > 1: fo = open(init, "rb+") #first chunkfile for i in range(1, self.info.getCount()): #input file fo.seek( self.info.getChunkRange(i - 1)[1] + 1) #seek to beginning of chunk, to get rid of overlapping chunks fname = fs_encode("%s.chunk%d" % (self.filename, i)) fi = open(fname, "rb") buf = 32 * 1024 while True: #copy in chunks, consumes less memory data = fi.read(buf) if not data: break fo.write(data) fi.close() if fo.tell() < self.info.getChunkRange(i)[1]: fo.close() remove(init) self.info.remove() #there are probably invalid chunks raise Exception("Downloaded content was smaller than expected. Try to reduce download connections.") remove(fname) #remove chunk fo.close() if self.nameDisposition and self.disposition: self.filename = save_join(dirname(self.filename), self.nameDisposition) move(init, fs_encode(self.filename)) self.info.remove() #remove info file
def checkForSameFiles(self, starting=False):
    """Check whether the same file was or is being downloaded within the
    same package, and skip the current download in that case.

    :param starting: indicates that the current download is going to start
    :raises SkipDownload: when a duplicate or existing file is detected
    """
    pack = self.pyfile.package()
    # 1) duplicates among the files currently cached in memory
    for pyfile in self.core.files.cache.values():
        if pyfile != self.pyfile and pyfile.name == self.pyfile.name and pyfile.package().folder == pack.folder:
            if pyfile.status in (0, 12):  #finished or downloading
                raise SkipDownload(pyfile.pluginname)
            elif pyfile.status in (
                    5, 7) and starting:  #a download is waiting/starting and was apparently started before
                raise SkipDownload(pyfile.pluginname)
    download_folder = self.config['general']['download_folder']
    location = save_join(download_folder, pack.folder, self.pyfile.name)
    # 2) a file of (at least) the expected size already on disk
    if starting and self.core.config['download']['skip_existing'] and exists(location):
        size = os.stat(location).st_size
        if size >= self.pyfile.size:
            raise SkipDownload("File exists.")
    # 3) duplicates recorded in the database
    pyfile = self.core.db.findDuplicates(self.pyfile.id, self.pyfile.package().folder, self.pyfile.name)
    if pyfile:
        if exists(location):
            raise SkipDownload(pyfile[0])
        self.log.debug("File %s not skipped, because it does not exists." % self.pyfile.name)
def _copyChunks(self): init = fs_encode(self.info.getChunkName(0)) #initial chunk name if self.info.getCount() > 1: fo = open(init, "rb+") #first chunkfile for i in range(1, self.info.getCount()): #input file fo.seek( self.info.getChunkRange(i - 1)[1] + 1 ) #seek to beginning of chunk, to get rid of overlapping chunks fname = fs_encode("%s.chunk%d" % (self.filename, i)) fi = open(fname, "rb") buf = 32 * 1024 while True: #copy in chunks, consumes less memory data = fi.read(buf) if not data: break fo.write(data) fi.close() if fo.tell() < self.info.getChunkRange(i)[1]: fo.close() remove(init) self.info.remove() #there are probably invalid chunks raise Exception( "Downloaded content was smaller than expected. Try to reduce download connections." ) remove(fname) #remove chunk fo.close() if self.nameDisposition and self.disposition: self.filename = save_join(dirname(self.filename), self.nameDisposition) move(init, fs_encode(self.filename)) self.info.remove() #remove info file
def packageFinished(self, pypack):
    """Run every configured 'package_finished' script with the package's
    name, folder path, password and id."""
    # package folder path is identical for every script, compute it once
    base_folder = self.config['general']['download_folder']
    package_folder = save_join(base_folder, pypack.folder)
    for script in self.scripts['package_finished']:
        self.callScript(script, pypack.name, package_folder,
                        pypack.password, pypack.id)
def package_extracted(self, pypack):
    """Run every configured 'package_extracted' script with the
    package's id, name and download folder."""
    general = self.config['general']
    download_folder = general['download_folder']
    if general['folder_per_package']:
        download_folder = save_join(download_folder, pypack.folder)
    for script in self.scripts['package_extracted']:
        self.callScript(script, pypack.id, pypack.name, download_folder)
def downloadFinished(self, pyfile):
    """Trigger FileBot processing when a finished file's extension is in
    the configured comma-separated extension list.

    :param pyfile: finished download object
    """
    # extension without the leading dot, e.g. "mkv"
    ext = os.path.splitext(pyfile.name)[1].replace('.', '')
    # string.split(s, sep) is a deprecated module function; use the
    # equivalent str method instead
    extensions = self.getConf("extensions").split(',')
    if ext in extensions:
        package = pyfile.package()
        folder = save_join(self.config['general']['download_folder'], package.folder)
        self.Finished(folder)
def process(self, pyfile):
    """Download the best-quality media stream found on the page.

    Scrapes the page for addMediaStream() calls, picks the highest
    quality variant and downloads it over HTTP or RTMP.

    :param pyfile: file object describing the page URL
    """
    site = self.load(pyfile.url)
    avail_videos = re.findall(
        r"""mediaCollection.addMediaStream\(0, ([0-9]*), "([^\"]*)", "([^\"]*)", "[^\"]*"\);""",
        site)
    avail_videos.sort(
        key=lambda videodesc: int(videodesc[0]),
        reverse=True)  # The higher the number, the better the quality
    quality, url, playpath = avail_videos[0]
    pyfile.name = re.search(r"<h1>([^<]*)</h1>", site).group(1)
    if url.startswith("http"):
        # Best quality is available over HTTP. Very rare.
        self.download(url)
    else:
        pyfile.setStatus("downloading")
        download_folder = self.config['general']['download_folder']
        location = save_join(download_folder, pyfile.package().folder)
        if not os.path.exists(location):
            # octal permission string from config, e.g. "0755"
            os.makedirs(location,
                        int(self.core.config["permission"]["folder"], 8))
            if self.core.config["permission"][
                    "change_dl"] and os.name != "nt":
                try:
                    uid = getpwnam(self.config["permission"]["user"])[2]
                    gid = getgrnam(self.config["permission"]["group"])[2]
                    chown(location, uid, gid)
                except Exception, e:
                    self.log.warning(
                        _("Setting User and Group failed: %s") % str(e))
        # keep the playpath's extension for the local file name
        output_file = save_join(location, save_path(
            pyfile.name)) + os.path.splitext(playpath)[1]
        RTMP.download_rtmp_stream(url, playpath=playpath, output_file=output_file)
def storeNameInfo(self, episodesInfo):
    """Compute the target file path of each episode according to the
    configured naming format and store it keyed by the episode's url."""
    for ep in episodesInfo:
        # substitution values shared by every format variant
        info = {'season': ep['season'],
                'episode': ep['episode'],
                'name': ep['name']}
        season_dir = "Season %02d" % ep['season']
        if self.format == "{show name}/Season 01/S01E01 - {episode name}":
            filepath = save_join(ep['showDir'], season_dir,
                                 "S%(season)02dE%(episode)02d - %(name)s" % info)
        elif self.format == "{show name}/Season 01/1x01 - {episode name}":
            filepath = save_join(ep['showDir'], season_dir,
                                 "%(season)02dx%(episode)02d - %(name)s" % info)
        elif self.format == "{show name}/S01E01 - {episode name}":
            filepath = save_join(ep['showDir'],
                                 "S%(season)02dE%(episode)02d - %(name)s" % info)
        else:
            filepath = save_join(ep['showDir'],
                                 "%(season)02dx%(episode)02d - %(name)s" % info)
        # save url for lookup later
        self.hook.setStorage(ep['url'], filepath)
def downloadFinished(self, pyfile):
    """Trigger FileBot processing when a finished file's extension is in
    the configured comma-separated extension list.

    :param pyfile: finished download object
    """
    # extension without the leading dot, e.g. "mkv"
    ext = os.path.splitext(pyfile.name)[1].replace('.', '')
    # string.split(s, sep) is a deprecated module function; use the
    # equivalent str method instead
    extensions = self.getConf("extensions").split(',')
    if ext in extensions:
        package = pyfile.package()
        folder = save_join(self.config['general']['download_folder'], package.folder)
        self.Finished(folder)
def downloadFinished(self, pyfile):
    """Rename and move a finished episode file to its stored target
    path, then try to clean up the now-empty source directory.

    :param pyfile: finished download object
    """
    if not self.getConf('renameAndMoveFile'):
        return
    # try to get the target filename (and path) (without extension) from storage.
    # If finished file was added by this hook, then there is a target filename
    targetfile = self.getStorage(pyfile.url)
    if targetfile:
        self.delStorage(pyfile.url)
        # append filename extension
        ext = os.path.splitext(pyfile.name)[1]
        targetfile = targetfile + ext
        # get full path of source file
        downloadDir = self.core.api.getConfigValue('general', 'download_folder')
        packageDir = self.core.api.getPackageInfo(pyfile.packageid).folder
        sourcefile = save_join(downloadDir, packageDir, pyfile.name)
        if exists(sourcefile):
            if not exists(targetfile):
                # create target dir
                targetpath = os.path.split(targetfile)[0]
                if not exists(targetpath):
                    try:
                        os.makedirs(targetpath, 0755)
                    except:
                        # best-effort; move below will fail visibly instead
                        pass
                # rename and move file
                move(sourcefile, targetfile)
                # check if moved
                if exists(targetfile):
                    self.logInfo('Moved %(sourcefile)s to %(targetfile)s' % {
                        'sourcefile': sourcefile,
                        'targetfile': targetfile
                    })
                    # try to delete source dir if moved successfully
                    try:
                        os.rmdir(os.path.split(sourcefile)[0])
                    except OSError:
                        # directory not empty -- leave it
                        pass
                else:
                    self.logInfo(
                        'Failed to move %(sourcefile)s to %(targetfile)s' % {
                            'sourcefile': pyfile.name,
                            'targetfile': targetfile
                        })
            else:
                self.logWarning(
                    'File \'%(targetfile)s\' already exists. \'%(sourcefile)s\' will not be moved.' % {
                        'sourcefile': pyfile.name,
                        'targetfile': targetfile
                    })
def packageDeleted(self, pid):
    """Run every configured 'package_deleted' script with the removed
    package's id, name, download folder and password."""
    pack = self.core.api.getPackageInfo(pid)
    general = self.config['general']
    download_folder = general['download_folder']
    if general['folder_per_package']:
        download_folder = save_join(download_folder, pack.folder)
    for script in self.scripts['package_deleted']:
        self.callScript(script, pack.id, pack.name, download_folder,
                        pack.password)
def periodical(self):
    """Periodic task: read the series config file and synchronize every
    active show with its source site."""
    if not self.configIsValid():
        self.setConfig("activated", False)
        self.logWarning('deactivated because of invalid config.')
        return
    # sleep 10 secs in case pc just woke up and
    # network connection is not yet established
    sleep(10)
    # prepare names of preferred hosters
    self.preferredHosters = self.getConfig(
        "preferredHosters").strip().split(',')
    if len(self.preferredHosters[0]) == 0:
        # NOTE(review): an empty config yields a dict here while the
        # non-empty case keeps a list -- only len() is used on it later,
        # so this works, but it looks unintentional
        self.preferredHosters = {}
    if len(self.preferredHosters) > 0:
        for i in range(len(self.preferredHosters)):
            self.preferredHosters[i] = self.preferredHosters[i].strip()
    seriesCfg = ConfigParser.RawConfigParser()
    parsedCfgFiles = seriesCfg.read(self.getConfig("showsCfgFile"))
    self.logInfo('Processed config file(s): %s' % str.join(',', parsedCfgFiles))
    self.printSeriesCfgInfo(seriesCfg)
    # one section per show
    for showName in seriesCfg.sections():
        if not seriesCfg.getboolean(showName, 'active'):
            continue
        if not self.seriesCfgIsValid(seriesCfg, showName):
            continue
        self.logInfo('Syncronizing %s' % showName)
        showUrl = seriesCfg.get(showName, 'url')
        showHdPreferred = seriesCfg.getboolean(showName, 'hdPreferred')
        showExclEpisodes = re.findall(
            r'\w+', seriesCfg.get(showName, 'excludedEpisodes').lower())
        showExclSeasons = re.findall(
            r'\w+', seriesCfg.get(showName, 'excludedSeasons'))
        showDirFmt = self.getConf('format')
        showsBaseDir = self.getConfig("showsBaseDir")
        showDir = save_join(
            self.core.api.getConfigValue('general', 'download_folder'),
            showsBaseDir, save_path(showName))
        queue = seriesCfg.getboolean(showName, 'queue')
        self.logDebug("%s queue=%s" % (showName, queue))
        show = Show(self, showName, showDir, showUrl, showHdPreferred,
                    showExclSeasons, showExclEpisodes, showDirFmt, queue)
        self.logDebug(show)
        show.syncronize()
    self.logInfo('Finished')
def getDeleteFiles(self):
    """Return the extracted archive file plus all sibling multipart
    volumes belonging to the same archive set."""
    folder, base = os.path.split(self.filename)
    # the actually extracted file is always part of the result
    files = [self.filename]
    # two parts belong together when normalizing their multipart suffix
    # to ".rar" yields the same name
    canonical = re.sub(self.re_multipart, ".rar", base)
    for candidate in os.listdir(folder):
        if not self.isMultipart(candidate):
            continue
        if re.sub(self.re_multipart, ".rar", candidate) == canonical:
            files.append(save_join(folder, os.path.basename(candidate)))
    return files
def decrypt(self, pyfile):
    """Expand a YouTube channel or playlist URL into pyLoad packages of
    watch URLs, one package per playlist.

    :param pyfile: file object whose url matches self.__pattern__
    """
    match = re.match(self.__pattern__, pyfile.url)
    m_id = match.group("ID")
    m_type = match.group("TYPE")
    if m_type == "user":
        self.logDebug("Url recognized as Channel")
        user = m_id
        channel = self.getChannel(user)
        if channel:
            playlists = self.getPlaylists(channel["id"])
            self.logDebug('%s playlist\s found on channel "%s"' % (len(playlists), channel["title"]))
            relatedplaylist = {
                p_name: self.getPlaylist(p_id)
                for p_name, p_id in channel["relatedPlaylists"].iteritems()
            }
            self.logDebug("Channel's related playlists found = %s" % relatedplaylist.keys())
            relatedplaylist["uploads"]["title"] = "Unplaylisted videos"
            relatedplaylist["uploads"]["checkDups"] = True  #: checkDups flag
            # add related playlists (likes, uploads, ...) when enabled in config
            for p_name, p_data in relatedplaylist.iteritems():
                if self.getConfig(p_name):
                    p_data["title"] += " of " + user
                    playlists.append(p_data)
        else:
            playlists = []
    else:
        self.logDebug("Url recognized as Playlist")
        playlists = [self.getPlaylist(m_id)]
    if not playlists:
        self.fail("No playlist available")
    addedvideos = []
    urlize = lambda x: "https://www.youtube.com/watch?v=" + x
    for p in playlists:
        p_name = p["title"]
        p_videos = self.getVideosId(p["id"])
        p_folder = save_join(self.config["general"]["download_folder"], p["channelTitle"], p_name)
        self.logDebug('%s video\s found on playlist "%s"' % (len(p_videos), p_name))
        if not p_videos:
            continue
        elif "checkDups" in p:
            # skip videos already queued via a regular playlist
            p_urls = [urlize(v_id) for v_id in p_videos if v_id not in addedvideos]
            self.logDebug('%s video\s available on playlist "%s" after duplicates cleanup' % (len(p_urls), p_name))
        else:
            p_urls = map(urlize, p_videos)
        self.packages.append((p_name, p_urls, p_folder))  #: folder is NOT recognized by pyload 0.4.9!
        addedvideos.extend(p_videos)
def decrypt(self, pyfile):
    """Expand a YouTube channel or playlist URL into pyLoad packages of
    watch URLs, one package per playlist.

    :param pyfile: file object whose url matches self.__pattern__
    """
    match = re.match(self.__pattern__, pyfile.url)
    m_id = match.group("ID")
    m_type = match.group("TYPE")
    if m_type == "user":
        self.logDebug("Url recognized as Channel")
        user = m_id
        channel = self.getChannel(user)
        if channel:
            playlists = self.getPlaylists(channel["id"])
            self.logDebug("%s playlist\s found on channel \"%s\"" % (len(playlists), channel["title"]))
            relatedplaylist = {p_name: self.getPlaylist(p_id)
                               for p_name, p_id in channel["relatedPlaylists"].iteritems()}
            self.logDebug("Channel's related playlists found = %s" % relatedplaylist.keys())
            relatedplaylist["uploads"]["title"] = "Unplaylisted videos"
            relatedplaylist["uploads"]["checkDups"] = True  #: checkDups flag
            # add related playlists (likes, uploads, ...) when enabled in config
            for p_name, p_data in relatedplaylist.iteritems():
                if self.getConfig(p_name):
                    p_data["title"] += " of " + user
                    playlists.append(p_data)
        else:
            playlists = []
    else:
        self.logDebug("Url recognized as Playlist")
        playlists = [self.getPlaylist(m_id)]
    if not playlists:
        self.fail("No playlist available")
    addedvideos = []
    urlize = lambda x: "https://www.youtube.com/watch?v=" + x
    for p in playlists:
        p_name = p["title"]
        p_videos = self.getVideosId(p["id"])
        p_folder = save_join(self.config['general']['download_folder'], p["channelTitle"], p_name)
        self.logDebug("%s video\s found on playlist \"%s\"" % (len(p_videos), p_name))
        if not p_videos:
            continue
        elif "checkDups" in p:
            # skip videos already queued via a regular playlist
            p_urls = [urlize(v_id) for v_id in p_videos if v_id not in addedvideos]
            self.logDebug("%s video\s available on playlist \"%s\" after duplicates cleanup" % (len(p_urls), p_name))
        else:
            p_urls = map(urlize, p_videos)
        self.packages.append((p_name, p_urls, p_folder))  #: folder is NOT recognized by pyload 0.4.9!
        addedvideos.extend(p_videos)
def _load2disk(self):
    """Load the container to disk if it is stored remotely and rewrite
    pyfile.url to the resulting local path.

    :raises: self.fail() with the IOError text when writing fails
    """
    if self.pyfile.url.startswith("http"):
        # last URL path segment becomes the local file name
        self.pyfile.name = re.findall("([^\/=]+)", self.pyfile.url)[-1]
        content = self.load(self.pyfile.url)
        self.pyfile.url = save_join(self.core.config.get("general", "download_folder"), self.pyfile.name)
        try:
            with open(self.pyfile.url, "wb") as f:
                f.write(content)
        except IOError, e:
            self.fail(str(e))
def getDeleteFiles(self): dir, name = os.path.split(self.filename) # actually extracted file files = [self.filename] # eventually Multipart Files files.extend( save_join(dir, os.path.basename(file)) for file in filter(self.isMultipart, os.listdir(dir)) if re.sub(self.re_multipart, ".rar", name) == re.sub( self.re_multipart, ".rar", file)) return files
def packageFinished(self, pypack):
    """FileBot hook: hand the package folder to Finished() once it
    contains no RAR archive parts."""
    download_folder = self.config['general']['download_folder']
    folder = save_join(download_folder, pypack.folder)
    self.core.log.debug("FileBot-Hook: MKV-Checkup (packageFinished)")
    archives_found = False
    # only the first directory level is inspected (break after one walk step)
    for top, subdirs, names in os.walk(folder):
        for entry in names:
            if entry.endswith((".rar", ".r0", ".r12")):
                self.core.log.debug("Hier sind noch Archive")
                archives_found = True
                break
        break
    if not archives_found:
        self.core.log.debug("Hier sind keine Archive")
        self.Finished(folder)
def packageFinished(self, pack): files = {} fid_dict = {} for fid, data in pack.getChildren().iteritems(): if re.search("\.[0-9]{3}$", data["name"]): if data["name"][:-4] not in files: files[data["name"][:-4]] = [] files[data["name"][:-4]].append(data["name"]) files[data["name"][:-4]].sort() fid_dict[data["name"]] = fid download_folder = self.core.config['general']['download_folder'] if self.core.config['general']['folder_per_package']: download_folder = save_join(download_folder, pack.folder) for name, file_list in files.iteritems(): self.core.log.info("Starting merging of %s" % name) final_file = open(join(download_folder, fs_encode(name)), "wb") for splitted_file in file_list: self.core.log.debug("Merging part %s" % splitted_file) pyfile = self.core.files.getFile(fid_dict[splitted_file]) pyfile.setStatus("processing") try: s_file = open(os.path.join(download_folder, splitted_file), "rb") size_written = 0 s_file_size = int( os.path.getsize( os.path.join(download_folder, splitted_file))) while True: f_buffer = s_file.read(BUFFER_SIZE) if f_buffer: final_file.write(f_buffer) size_written += BUFFER_SIZE pyfile.setProgress( (size_written * 100) / s_file_size) else: break s_file.close() self.core.log.debug("Finished merging part %s" % splitted_file) except Exception, e: print traceback.print_exc() finally: pyfile.setProgress(100)
def list(self, password=None):
    """Return the paths of all files contained in the archive, parsed
    from the external tool's listing output via self.re_filelist.

    :param password: optional archive password
    :return: list of paths (duplicates removed via an intermediate set)
    :raises ArchiveError: when the file cannot be opened or the tool
        exits with a return code greater than 1
    """
    # NOTE(review): both ternary branches yield "l", so self.fullpath has
    # no effect here -- possibly a copy/paste remnant; confirm intent
    command = "l" if self.fullpath else "l"
    p = self.call_cmd(command, fs_encode(self.filename), password=password)
    out, err = p.communicate()
    if "Can not open" in err:
        raise ArchiveError(_("Cannot open file"))
    if p.returncode > 1:
        raise ArchiveError(_("Process return code: %d") % p.returncode)
    result = set()
    for groups in self.re_filelist.findall(out):
        # last capture group holds the file name column
        f = groups[-1].strip()
        result.add(save_join(self.out, f))
    return list(result)
def package_finished(self, pypack):
    """Package-finished hook: hand the package folder to Finished() once
    no RAR archive parts remain in it.

    :param pypack: finished package object
    """
    download_folder = self.pyload.config['general']['download_folder']
    folder = save_join(download_folder, pypack.folder)
    if self.get_config('delete_extracted') is True:
        x = False  # flag: True when archive parts are still present
        self.log_debug("MKV-Checkup (packageFinished)")
        # NOTE(review): the unconditional break leaves os.walk() after the
        # first directory, so only the top level of `folder` is inspected
        for root, dirs, files in os.walk(folder):
            for name in files:
                if name.endswith((".rar", ".r0", ".r12")):
                    self.log_debug("Hier sind noch Archive")
                    x = True
                    break
            break
        if x == False:
            self.log_debug("Hier sind keine Archive")
            self.Finished(folder)
    else:
        # deletion check disabled -> process the folder unconditionally
        # NOTE(review): else-pairing reconstructed from whitespace-mangled
        # source -- confirm against upstream
        self.Finished(folder)
def listContent(self):
    """Populate self.files with the paths of all files in the archive.

    Runs unrar in bare-listing mode ("vb" keeps full paths, "lb" lists
    names only) and joins each entry with self.out.

    :raises ArchiveError: if the archive file cannot be opened
    """
    command = "vb" if self.fullpath else "lb"
    p = self.call_unrar(command, "-v", self.file, password=self.password)
    out, err = p.communicate()
    if "Cannot open" in err:
        raise ArchiveError("Cannot open file")
    if err.strip():  #: only log error at this point
        self.m.logError(err.strip())
    result = set()
    for f in decode(out).splitlines():
        f = f.strip()
        result.add(save_join(self.out, f))
    self.files = result
def handleDLCLinks(self):
    """Search the page for a DLC container link, download the container
    to the download folder and return its local path.

    :return: list with the local DLC file path, or an empty list when no
             DLC link was found
    """
    self.logDebug("Search for DLC links")
    package_links = []
    m = re.search(self.DLC_LINK_REGEX, self.html)
    if m is not None:
        container_url = self.DLC_DOWNLOAD_URL + "?id=%s&dlc=1" % self.fileid
        self.logDebug("Downloading DLC container link [%s]" % container_url)
        try:
            dlc = self.load(container_url)
            dlc_filename = self.fileid + ".dlc"
            dlc_filepath = save_join(self.config['general']['download_folder'], dlc_filename)
            with open(dlc_filepath, "wb") as f:
                f.write(dlc)
            package_links.append(dlc_filepath)
        except Exception:
            self.fail(_("Unable to download DLC container"))
    return package_links
def periodical(self):
    """Poll the configured hot folder: import a watched link file and
    every container file dropped into the folder as new packages."""
    folder = fs_encode(self.getConfig('folder'))
    file = fs_encode(self.getConfig('file'))
    try:
        if not os.path.isdir(os.path.join(folder, "finished")):
            os.makedirs(os.path.join(folder, "finished"))
        if self.getConfig('watch_file'):
            with open(file, "a+") as f:
                f.seek(0)
                content = f.read().strip()
            if content:
                # truncate the watch file after reading its contents
                f = open(file, "wb")
                f.close()
                name = "%s_%s.txt" % (file, time.strftime("%H-%M-%S_%d%b%Y"))
                with open(save_join(folder, "finished", name), "wb") as f:
                    f.write(content)
                self.core.api.addPackage(f.name, [f.name], 1)
        for f in os.listdir(folder):
            path = os.path.join(folder, f)
            # skip directories, backup (~), and hidden/temp (#, .) entries
            if not os.path.isfile(path) or f.endswith("~") or f.startswith(
                    "#") or f.startswith("."):
                continue
            newpath = os.path.join(
                folder, "finished",
                f if self.getConfig('keep') else "tmp_" + f)
            move(path, newpath)
            self.logInfo(_("Added %s from HotFolder") % f)
            self.core.api.addPackage(f, [newpath], 1)
    except (IOError, OSError), e:
        self.logError(e)
def packageFinished(self, pack):
    """Merge numbered split parts (*.000, *.001, ...) of a finished package.

    Groups the package's child files by base name, then concatenates each
    group (in suffix order) into one output file in the download folder,
    updating the per-part pyfile progress along the way.

    :param pack: finished package object
    """
    files = {}     # base name -> sorted list of part file names
    fid_dict = {}  # part file name -> file id
    for fid, data in pack.getChildren().iteritems():
        # Parts are named "<base>.NNN" with a three-digit numeric suffix.
        if re.search(r"\.[0-9]{3}$", data["name"]):
            base = data["name"][:-4]
            if base not in files:
                files[base] = []
            files[base].append(data["name"])
            files[base].sort()
            fid_dict[data["name"]] = fid

    download_folder = self.core.config['general']['download_folder']
    if self.core.config['general']['folder_per_package']:
        download_folder = save_join(download_folder, pack.folder)

    for name, file_list in files.iteritems():
        self.core.log.info("Starting merging of %s" % name)
        final_file = open(join(download_folder, fs_encode(name)), "wb")
        try:
            for splitted_file in file_list:
                self.core.log.debug("Merging part %s" % splitted_file)
                pyfile = self.core.files.getFile(fid_dict[splitted_file])
                pyfile.setStatus("processing")
                try:
                    s_path = os.path.join(download_folder, splitted_file)
                    s_file = open(s_path, "rb")
                    try:
                        size_written = 0
                        s_file_size = int(os.path.getsize(s_path))
                        while True:
                            f_buffer = s_file.read(BUFFER_SIZE)
                            if not f_buffer:
                                break
                            final_file.write(f_buffer)
                            # Count actual bytes written: the last chunk is
                            # usually shorter than BUFFER_SIZE, which made
                            # the original progress overshoot.
                            size_written += len(f_buffer)
                            pyfile.setProgress((size_written * 100) / s_file_size)
                    finally:
                        s_file.close()
                    self.core.log.debug("Finished merging part %s" % splitted_file)
                except Exception:
                    # Best-effort per part: log and continue with the rest.
                    traceback.print_exc()
                finally:
                    pyfile.setProgress(100)
        finally:
            # The original leaked this handle; close it even on error.
            final_file.close()
def handleDLCLinks(self):
    """Download the DLC container for this file, if one is advertised.

    :return: list with the path of the saved .dlc file (empty when the
             page carries no DLC link)
    """
    self.logDebug("Search for DLC links")
    package_links = []
    match = re.search(self.DLC_LINK_REGEX, self.html)
    if not match:
        return package_links
    container_url = self.DLC_DOWNLOAD_URL + "?id=%s&dlc=1" % self.fileid
    self.logDebug("Downloading DLC container link [%s]" % container_url)
    try:
        dlc = self.load(container_url)
        dlc_filepath = save_join(
            self.config['general']['download_folder'],
            self.fileid + ".dlc")
        with open(dlc_filepath, "wb") as f:
            f.write(dlc)
        package_links.append(dlc_filepath)
    except Exception:
        self.fail(_("Unable to download DLC container"))
    return package_links
def decrypt(self, pyfile):
    """Convert a local CCF container into a DLC container.

    Uploads the CCF file to the jdownloader conversion service, extracts
    the base64-encoded DLC payload from the response, writes it to a
    temporary .dlc file and exposes that file via self.urls.
    """
    ccf_path = fs_encode(pyfile.url.strip())
    opener = build_opener(MultipartPostHandler)
    params = {'src'     : "ccf",
              'filename': "test.ccf",
              'upload'  : open(ccf_path, "rb")}
    response = opener.open('http://service.jdownloader.net/dlcrypt/getDLC.php', params)
    dlc_content = response.read()

    dlc_file = save_join(self.config['general']['download_folder'],
                         "tmp_%s.dlc" % pyfile.name)

    payload = re.search(r'<dlc>(.+)</dlc>', dlc_content, re.S)
    if payload is None:
        # No <dlc> element in the service response.
        self.fail(_("Container is corrupted"))
    dlc = payload.group(1).decode('base64')

    with open(dlc_file, "w") as tempdlc:
        tempdlc.write(dlc)

    self.urls = [dlc_file]
def decrypt(self, pyfile):
    """Decrypt a YouTube playlist or channel URL into video packages.

    Resolves the URL to one or more playlists, fetches each playlist's
    videos and appends one package per playlist.
    """
    match = re.match(self.__pattern__, pyfile.url)
    m_id = match.group('ID')

    if match.group('TYPE') == "playlist":
        self.logDebug("Url recognized as Playlist")
        p_info = self.getPlaylistInfo(m_id)
        if p_info:
            playlists = [(m_id,) + p_info]
        else:
            playlists = None
    else:
        self.logDebug("Url recognized as Channel")
        playlists = self.getPlaylists(m_id)
        self.logDebug("%s playlist\s found on channel \"%s\"" % (len(playlists), m_id))

    if not playlists:
        self.fail(_("No playlist available"))

    download_root = self.config['general']['download_folder']
    for p_id, p_name, p_owner in playlists:
        p_videos = self.getVideos(p_id)
        p_folder = save_join(download_root, p_owner, p_name)
        self.logDebug("%s video\s found on playlist \"%s\"" % (len(p_videos), p_name))
        #: folder is NOT recognized by pyload 0.4.9!
        self.packages.append((p_name, p_videos, p_folder))
def periodical(self):
    """Poll the hot folder: archive the watch file and queue new files.

    The optional watch file's content is saved into 'finished' under a
    timestamped name and queued, then the watch file is truncated. Other
    regular files in the folder are moved into 'finished' and queued.
    """
    folder = fs_encode(self.getConfig('folder'))
    file = fs_encode(self.getConfig('file'))
    try:
        finished = os.path.join(folder, "finished")
        if not os.path.isdir(finished):
            os.makedirs(finished)

        if self.getConfig('watch_file'):
            with open(file, "a+") as f:
                f.seek(0)
                content = f.read().strip()
            if content:
                # Truncate the watch file before archiving its content.
                f = open(file, "wb")
                f.close()
                name = "%s_%s.txt" % (file, time.strftime("%H-%M-%S_%d%b%Y"))
                with open(save_join(folder, "finished", name), "wb") as f:
                    f.write(content)
                self.core.api.addPackage(f.name, [f.name], 1)

        for f in os.listdir(folder):
            path = os.path.join(folder, f)
            # Skip directories, backups (~), and hidden/lock files (#, .).
            if not os.path.isfile(path) or f.endswith("~") or f.startswith(("#", ".")):
                continue
            newpath = os.path.join(
                folder, "finished",
                f if self.getConfig('keep') else "tmp_" + f)
            move(path, newpath)
            self.logInfo(_("Added %s from HotFolder") % f)
            self.core.api.addPackage(f, [newpath], 1)
    except (IOError, OSError) as e:
        self.logError(e)
def removePlugins(self, type_plugins):
    """Delete the given plugins from disk.

    :param type_plugins: iterable of (type, name) tuples
    :return: set of (type, name) tuples successfully removed, or None
             when called with an empty/falsy argument
    """
    if not type_plugins:
        return

    removed = set()
    self.logDebug("Requested deletion of plugins: %s" % type_plugins)

    # Loop-invariant: the bundled plugin directory.
    rootplugins = os.path.join(pypath, "module", "plugins")

    for plugin_type, plugin_name in type_plugins:
        for plugin_dir in ("userplugins", rootplugins):
            py_filename = save_join(plugin_dir, plugin_type, plugin_name + ".py")
            pyc_filename = py_filename + "c"

            if plugin_type == "hook":
                # NOTE: executed once per directory, so a hook may be
                # deactivated twice; kept as in the original (second call
                # only produces a debug log entry on failure).
                try:
                    self.manager.deactivateHook(plugin_name)
                except Exception as e:
                    self.logDebug(e)

            for filename in (py_filename, pyc_filename):
                if not exists(filename):
                    continue
                try:
                    os.remove(filename)
                except OSError as e:
                    self.logError(_("Error removing: %s") % filename, e)
                else:
                    removed.add((plugin_type, plugin_name))

    # The caller (_updatePlugins) iterates over this result, so the set of
    # removed plugins must be returned (missing in the original).
    return removed
def downloadFinished(self, pyfile):
    """Run every configured 'download_finished' script for this file."""
    # The full path is identical for every script, so compute it once.
    download_path = save_join(
        self.core.config['general']['download_folder'],
        pyfile.package().folder,
        pyfile.name)
    for script in self.scripts['download_finished']:
        self.callScript(script, pyfile.pluginname, pyfile.url,
                        pyfile.name, pyfile.id, download_path, pyfile.id)
def packageFinished(self, pypack):
    """Run every configured 'package_finished' script for this package."""
    # The package folder is identical for every script; compute it once.
    folder = save_join(self.core.config['general']['download_folder'],
                       pypack.folder)
    for script in self.scripts['package_finished']:
        self.callScript(script, pypack.name, folder, pypack.id)
def _updatePlugins(self, data):
    """Check for plugin updates and install newer versions.

    ``data`` is the raw update-server response split into records:
    index 0 is the download URL template, index 1 the pipe-separated
    field schema, the rest are plugin records, optionally followed by a
    ``BLACKLIST`` marker and blacklisted records.

    Blacklisted plugins are removed from disk; remaining records whose
    version is newer than the installed one are fetched and written to
    ``userplugins``.
    """
    exitcode = 0
    updated = []

    url = data[0]
    schema = data[1].split('|')

    # Pattern used to confirm a downloaded file really carries the
    # advertised __version__.
    VERSION = re.compile(r'__version__.*=.*("|\')([\d.]+)')

    if "BLACKLIST" in data:
        blacklist = data[data.index('BLACKLIST') + 1:]
        updatelist = data[2:data.index('BLACKLIST')]
    else:
        blacklist = []
        updatelist = data[2:]

    # Turn each pipe-separated record into a dict keyed by the schema.
    updatelist = [dict(zip(schema, x.split('|'))) for x in updatelist]
    blacklist = [dict(zip(schema, x.split('|'))) for x in blacklist]

    if blacklist:
        # (type, name-without-extension) pairs of blacklisted plugins.
        type_plugins = [(plugin['type'], plugin['name'].rsplit('.', 1)[0]) for plugin in blacklist]

        # Protect UpdateManager from self-removing
        try:
            type_plugins.remove(("hook", "UpdateManager"))
        except ValueError:
            pass

        # Drop blacklisted plugins from the pending update list.
        for t, n in type_plugins:
            for idx, plugin in enumerate(updatelist):
                if n == plugin['name'] and t == plugin['type']:
                    updatelist.pop(idx)
                    break

        # removePlugins() is expected to yield the (type, name) pairs it
        # actually deleted from disk.
        for t, n in self.removePlugins(sorted(type_plugins)):
            self.logInfo(_("Removed blacklisted plugin: [%(type)s] %(name)s") % {
                'type': t,
                'name': n,
            })

    for plugin in sorted(updatelist, key=itemgetter("type", "name")):
        filename = plugin['name']
        prefix = plugin['type']
        version = plugin['version']

        # Derive the plugin's registry name from its file name.
        if filename.endswith(".pyc"):
            name = filename[:filename.find("_")]
        else:
            name = filename.replace(".py", "")

        #@TODO: Remove in 0.4.10
        # The server sends plural type names ("hooks"); the plugin
        # manager uses the singular form.
        if prefix.endswith("s"):
            type = prefix[:-1]
        else:
            type = prefix

        plugins = getattr(self.core.pluginManager, "%sPlugins" % type)

        # oldver is None when the plugin is not installed yet.
        oldver = float(plugins[name]['v']) if name in plugins else None
        newver = float(version)

        if not oldver:
            msg = "New plugin: [%(type)s] %(name)s (v%(newver).2f)"
        elif newver > oldver:
            msg = "New version of plugin: [%(type)s] %(name)s (v%(oldver).2f -> v%(newver).2f)"
        else:
            continue  # installed version is current or newer

        self.logInfo(_(msg) % {'type' : type,
                               'name' : name,
                               'oldver': oldver,
                               'newver': newver})
        try:
            content = getURL(url % plugin)
            m = VERSION.search(content)

            if m and m.group(2) == version:
                # Only install when the embedded __version__ matches the
                # version advertised by the server.
                with open(save_join("userplugins", prefix, filename), "wb") as f:
                    f.write(content)

                updated.append((prefix, name))
            else:
                raise Exception, _("Version mismatch")

        except Exception, e:
            self.logError(_("Error updating plugin: %s") % filename, e)
    # NOTE(review): ``exitcode`` and ``updated`` are unused in the visible
    # portion of this function — presumably consumed further down in the
    # original file; verify against upstream before relying on them.
def doDownload(self, url):
    """Download one XDCC pack over IRC.

    Parses an ``xdcc://server/channel/bot/#pack`` style URL, connects to
    the IRC server, requests the pack from the bot, idles until the DCC
    SEND offer arrives, then downloads the file into the package folder.

    :param url: xdcc:// link
    :return: final path of the downloaded file
    """
    self.pyfile.setStatus("waiting")  # real link

    download_folder = self.config['general']['download_folder']
    location = join(download_folder, self.pyfile.package().folder.decode(sys.getfilesystemencoding()))
    if not exists(location):
        makedirs(location)

    m = re.search(r'xdcc://(.*?)/#?(.*?)/(.*?)/#?(\d+)/?', url)
    server = m.group(1)
    chan = m.group(2)
    bot = m.group(3)
    pack = m.group(4)
    nick = self.getConfig('nick')
    ident = self.getConfig('ident')
    real = self.getConfig('realname')

    # server is "host:port" or a bare host (default IRC port 6667).
    temp = server.split(':')
    ln = len(temp)
    if ln == 2:
        host, port = temp
    elif ln == 1:
        host, port = temp[0], 6667
    else:
        self.fail("Invalid hostname for IRC Server (%s)" % server)

    #######################
    # CONNECT TO IRC AND IDLE FOR REAL LINK
    dl_time = time.time()

    sock = socket.socket()
    sock.connect((host, int(port)))
    if nick == "pyload":
        # Derive a quasi-unique nick to avoid collisions.
        nick = "pyload-%d" % (time.time() % 1000)  # last 3 digits
    sock.send("NICK %s\r\n" % nick)
    sock.send("USER %s %s bla :%s\r\n" % (ident, host, real))
    time.sleep(3)
    sock.send("JOIN #%s\r\n" % chan)
    sock.send("PRIVMSG %s :xdcc send #%s\r\n" % (bot, pack))

    # IRC recv loop
    readbuffer = ""
    done = False
    retry = None
    m = None
    while True:
        # done is set if we got our real link
        if done:
            break

        if retry:
            # Bot asked us to wait; re-request the pack when the retry
            # time has passed.
            if time.time() > retry:
                retry = None
                dl_time = time.time()
                sock.send("PRIVMSG %s :xdcc send #%s\r\n" % (bot, pack))
        else:
            if (dl_time + self.timeout) < time.time():  # todo: add in config
                sock.send("QUIT :byebye\r\n")
                sock.close()
                self.fail("XDCC Bot did not answer")

        # Non-blocking poll: skip the recv when no data is readable.
        fdset = select([sock], [], [], 0)
        if sock not in fdset[0]:
            continue

        readbuffer += sock.recv(1024)
        # Keep the trailing (possibly incomplete) line in the buffer.
        temp = readbuffer.split("\n")
        readbuffer = temp.pop()

        for line in temp:
            # NOTE(review): ``self.debug is 2`` relies on CPython small-int
            # interning; ``== 2`` would be the correct comparison.
            if self.debug is 2:
                print "*> " + unicode(line, errors='ignore')
            line = line.rstrip()
            first = line.split()

            if first[0] == "PING":
                sock.send("PONG %s\r\n" % first[1])

            if first[0] == "ERROR":
                self.fail("IRC-Error: %s" % line)

            # IRC line shape: ":origin action target :text"
            msg = line.split(None, 3)
            if len(msg) != 4:
                continue

            msg = {
                "origin": msg[0][1:],
                "action": msg[1],
                "target": msg[2],
                "text": msg[3][1:]
            }

            # Answer CTCP queries addressed to our nick.
            if nick == msg["target"][0:len(nick)] and "PRIVMSG" == msg["action"]:
                if msg["text"] == "\x01VERSION\x01":
                    self.logDebug("XDCC: Sending CTCP VERSION.")
                    sock.send("NOTICE %s :%s\r\n" % (msg['origin'], "pyLoad! IRC Interface"))
                elif msg["text"] == "\x01TIME\x01":
                    self.logDebug("Sending CTCP TIME.")
                    sock.send("NOTICE %s :%d\r\n" % (msg['origin'], time.time()))
                elif msg["text"] == "\x01LAG\x01":
                    pass  # don't know how to answer

            # Everything below only applies to messages from the bot
            # addressed to us.
            if not (bot == msg["origin"][0:len(bot)]
                    and nick == msg["target"][0:len(nick)]
                    and msg["action"] in ("PRIVMSG", "NOTICE")):
                continue

            # NOTE(review): same ``is`` vs ``==`` caveat as above.
            if self.debug is 1:
                print "%s: %s" % (msg["origin"], msg["text"])

            if "You already requested that pack" in msg["text"]:
                retry = time.time() + 300

            if "you must be on a known channel to request a pack" in msg["text"]:
                self.fail("Wrong channel")

            # DCC offer: "\x01DCC SEND <name> <ip-as-int> <port> [<size>]\x01"
            m = re.match('\x01DCC SEND (.*?) (\d+) (\d+)(?: (\d+))?\x01', msg["text"])
            if m:
                done = True

    # get connection data
    ip = socket.inet_ntoa(struct.pack('L', socket.ntohl(int(m.group(2)))))
    port = int(m.group(3))
    packname = m.group(1)

    # NOTE(review): groups() always has length 4 for this pattern (the
    # optional group is None when absent), so this branch always runs and
    # int(None) would raise if no size was sent — verify against upstream.
    if len(m.groups()) > 3:
        self.req.filesize = int(m.group(4))

    self.pyfile.name = packname
    filename = save_join(location, packname)
    self.logInfo("XDCC: Downloading %s from %s:%d" % (packname, ip, port))

    self.pyfile.setStatus("downloading")

    newname = self.req.download(ip, port, filename, sock, self.pyfile.setProgress)
    if newname and newname != filename:
        self.logInfo("%(name)s saved as %(newname)s" % {"name": self.pyfile.name, "newname": newname})
        filename = newname

    # kill IRC socket
    # sock.send("QUIT :byebye\r\n")
    sock.close()

    self.lastDownload = filename
    return self.lastDownload