def eval(self, script, engine=None):  #: engine can be a jse name """string""" or an AbstractEngine """class"""
    JSE = self.get(engine)
    if not JSE:
        raise TypeError("engine")

    script = encode(script)
    out, err = JSE.eval(script)
    results = [out]

    if self.core.config.get("general", "debug"):
        if err:
            self.core.log.debug(JSE._name + ":", err)

        engines = self.find()
        engines.remove(JSE)
        for E in engines:
            out, err = E.eval(script)
            res = err or out
            self.core.log.debug(E._name + ":", res)
            results.append(res)

        if len(results) > 1 and len(uniqify(results)) > 1:
            self.core.log.warning("JS output of two or more engines mismatch")

    return results[0]

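Every snippet in this collection leans on pyLoad's `uniqify` helper to drop duplicates while preserving order. Its implementation is not shown here; a minimal order-preserving sketch that matches how it is used would be:

def uniqify(seq):
    """Remove duplicates from an iterable, keeping the first occurrence of each item (sketch, not the pyLoad source)."""
    seen = set()
    result = []
    for item in seq:
        if item not in seen:
            seen.add(item)
            result.append(item)
    return result
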
def decrypt(cls, core, url_or_urls, password=None):
    """Static method to decrypt urls or content. Can be used by other plugins.
    To decrypt file content prefix the string with ``CONTENT_PREFIX `` as seen above.

    :param core: pyLoad `Core`, needed in decrypt context
    :param url_or_urls: List of urls or single url/ file content
    :param password: optional password used for decrypting
    :raises Exception: No decryption errors are cascaded
    :return: List of decrypted urls, all package info removed
    """
    urls = to_list(url_or_urls)
    p = cls(core, password)
    try:
        result = p._decrypt(urls)
    finally:
        p.clean()

    ret = []
    for url_or_pack in result:
        if isinstance(url_or_pack, Package):  # package
            ret.extend(url_or_pack.getAllURLs())
        else:  # single url
            ret.append(url_or_pack)

    # eliminate duplicates
    return uniqify(ret)

def decrypt(cls, core, url_or_urls, password=None):
    """Static method to decrypt urls or content. Can be used by other plugins.
    To decrypt file content prefix the string with ``CONTENT_PREFIX `` as seen above.

    :param core: pyLoad `Core`, needed in decrypt context
    :param url_or_urls: List of urls or single url/ file content
    :param password: optional password used for decrypting
    :raises Exception: No decryption errors are cascaded
    :return: List of decrypted urls, all package info removed
    """
    urls = to_list(url_or_urls)
    p = cls(core, password)
    try:
        result = p._decrypt(urls)
    finally:
        p.clean()

    ret = []
    for url_or_pack in result:
        if isinstance(url_or_pack, Package):  # package
            ret.extend(url_or_pack.getAllURLs())
        elif isinstance(url_or_pack, LinkStatus):  # link
            ret.append(url_or_pack.url)
        else:
            core.log.debug("Invalid decrypter result: " + url_or_pack)

    return uniqify(ret)

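As the docstring notes, this classmethod is meant to be callable from other plugins. A hedged sketch of how another plugin might invoke it from one of its own methods; the crypter class name and URL are placeholders, only `decrypt` and `addFiles` come from the snippets above:

# Hypothetical caller inside another plugin; SomeFolderCrypter and the URL are illustrative only.
links = SomeFolderCrypter.decrypt(self.core, "http://example.com/folder/xyz", password="secret")
self.core.api.addFiles(self.pid, links)  # the returned list is already deduplicated plain urls
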
def addPassword(self, password):
    """ Adds a password to saved list"""
    try:
        self.passwords = uniqify([password] + self.passwords)

        file = fs_encode(self.getConfig('passwordfile'))
        with open(file, "wb") as f:
            for pw in self.passwords:
                f.write(pw + '\n')

    except IOError, e:
        self.logError(e)

def _extract(self, pyfile, archive, password):
    name = os.path.basename(archive.filename)

    pyfile.setStatus("processing")

    encrypted = False
    try:
        self.logDebug("Password: %s" % (password or "None provided"))

        passwords = uniqify([password] + self.getPasswords(False)) if self.getConfig('usepasswordfile') else [password]
        for pw in passwords:
            try:
                if self.getConfig('test') or self.repair:
                    pyfile.setCustomStatus(_("archive testing"))
                    if pw:
                        self.logDebug("Testing with password: %s" % pw)
                    pyfile.setProgress(0)
                    archive.verify(pw)
                    pyfile.setProgress(100)
                else:
                    archive.check(pw)

                self.addPassword(pw)
                break

            except PasswordError:
                if not encrypted:
                    self.logInfo(name, _("Password protected"))
                    encrypted = True

            except CRCError, e:
                self.logDebug(name, e)
                self.logInfo(name, _("CRC Error"))

                if self.repair:
                    self.logWarning(name, _("Repairing..."))

                    pyfile.setCustomStatus(_("archive repairing"))
                    pyfile.setProgress(0)
                    repaired = archive.repair()
                    pyfile.setProgress(100)

                    if not repaired and not self.getConfig('keepbroken'):
                        raise CRCError("Archive damaged")

                    self.addPassword(pw)
                    break

                raise CRCError("Archive damaged")

    except ArchiveError, e:
        raise ArchiveError(e)

def checkHTML(self, html, url):
    """Parses html content or any arbitrary text for links and returns result of `checkURLs`

    :param html: html source
    :return:
    """
    urls = []

    if html:
        urls += [x[0] for x in urlmatcher.findall(html)]

    if url:
        page = getURL(url)
        urls += [x[0] for x in urlmatcher.findall(page)]

    return self.checkLinks(uniqify(urls))

def getEvents(self, uuid):
    events = []
    validUuid = False
    for client in self.clients:
        if client.uuid == uuid:
            client.lastActive = time()
            validUuid = True
            while client.newEvents():
                events.append(client.popEvent().toList())
            break
    if not validUuid:
        self.newClient(uuid)
        events = [ReloadAllEvent("queue").toList(), ReloadAllEvent("collector").toList()]
    return uniqify(events)

def getEvents(self, uuid):
    events = []
    validUuid = False
    for client in self.clients:
        if client.uuid == uuid:
            client.lastActive = time.time()
            validUuid = True
            while client.newEvents():
                events.append(client.popEvent().toList())
            break
    if not validUuid:
        self.newClient(uuid)
        events = [
            ReloadAllEvent("queue").toList(),
            ReloadAllEvent("collector").toList()
        ]
    return uniqify(events)

def assignJob(self):
    """assign a job to a thread if possible"""
    if self.pause or not self.core.api.isTimeDownload():
        return

    #if self.downloaded > 20:
    #    if not self.cleanPyCurl(): return

    free = [x for x in self.threads if not x.active]

    inuse = [(x.active.pluginname, x.active.plugin.getDownloadLimit()) for x in self.threads if x.active and x.active.hasPlugin()]
    inuse = [(x[0], x[1], len([y for y in self.threads if y.active and y.active.pluginname == x[0]])) for x in inuse]
    occ = tuple(sorted(uniqify([x[0] for x in inuse if 0 < x[1] <= x[2]])))

    job = self.core.files.getJob(occ)
    if job:
        try:
            job.initPlugin()
        except Exception, e:
            self.log.critical(str(e))
            print_exc()
            job.setStatus("failed")
            job.error = str(e)
            job.release()
            return

        spaceLeft = free_space(self.core.config["general"]["download_folder"]) / 1024 / 1024
        if spaceLeft < self.core.config["general"]["min_free_space"]:
            self.log.warning(_("Not enough space left on device"))
            self.pause = True

        if free and not self.pause:
            thread = free[0]
            #self.downloaded += 1
            thread.put(job)
        else:
            #put job back
            if occ not in self.core.files.jobCache:
                self.core.files.jobCache[occ] = []
            self.core.files.jobCache[occ].append(job.id)

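To make the `occ` computation above concrete, here is a standalone sketch with made-up plugin data; the plugin names and limits are invented, and `uniqify` stands in for pyLoad's helper:

def uniqify(seq):  # stand-in for pyLoad's order-preserving dedup helper
    seen = set()
    return [x for x in seq if x not in seen and not seen.add(x)]

# (pluginname, download limit, threads currently active for that plugin) -- made-up values
inuse = [("uploaded.to", 1, 2), ("rapidgator.net", 0, 1), ("mega.co.nz", 3, 1)]

# a plugin counts as occupied once its limit is set (> 0) and already reached
occ = tuple(sorted(uniqify([x[0] for x in inuse if 0 < x[1] <= x[2]])))
print(occ)  # ('uploaded.to',) -- limit 1 with 2 active threads; the other two still have capacity
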
def decrypt(cls, core, url_or_urls):
    """Static method to decrypt urls or content. Can be used by other plugins.
    To decrypt file content prefix the string with ``CONTENT_PREFIX `` as seen above.

    :param core: pyLoad `Core`, needed in decrypt context
    :param url_or_urls: List of urls or single url/ file content
    :return: List of decrypted urls, all package info removed
    """
    urls = to_list(url_or_urls)
    p = cls(core)
    try:
        result = p.processDecrypt(urls)
    finally:
        p.clean()

    ret = []
    for url_or_pack in result:
        if isinstance(url_or_pack, Package):  # package
            ret.extend(url_or_pack.getAllURLs())
        else:  # single url
            ret.append(url_or_pack)

    # eliminate duplicates
    return uniqify(ret)

def searchSuggestions(self, pattern):
    names = self.core.db.getMatchingFilenames(pattern, self.primaryUID)
    # TODO: stemming and reducing the names to provide better suggestions
    return uniqify(names)

    finally:
        if plugin:
            plugin.clean()
        self.progress.done += len(urls)

    result.extend(plugin_result)

# clear the progress
self.progress = None

# generated packages
packs = {}
# urls without package
urls = []

# merge urls and packages
for p in result:
    if isinstance(p, Package):
        if p.name in packs:
            packs[p.name].urls.extend(p.urls)
        else:
            if not p.name:
                urls.extend(p.links)
            else:
                packs[p.name] = p
    else:
        urls.append(p)

urls = uniqify(urls)

return urls, packs.values()

            raise CRCError("Archive damaged")

except ArchiveError, e:
    raise ArchiveError(e)

pyfile.setCustomStatus(_("extracting"))
pyfile.setProgress(0)

if not encrypted or not self.getConfig('usepasswordfile'):
    self.logDebug("Extracting using password: %s" % (password or "None"))
    archive.extract(password)
else:
    for pw in filter(None, uniqify([password] + self.getPasswords(False))):
        try:
            self.logDebug("Extracting using password: %s" % pw)
            archive.extract(pw)
            self.addPassword(pw)
            break

        except PasswordError:
            self.logDebug("Password was wrong")
    else:
        raise PasswordError

pyfile.setProgress(100)
pyfile.setStatus("processing")

                self.addPassword(pw)
                break

            raise CRCError("Archive damaged")

except ArchiveError, e:
    raise ArchiveError(e)

pyfile.setCustomStatus(_("extracting"))
pyfile.setProgress(0)

if not encrypted or not self.getConfig('usepasswordfile'):
    self.logDebug("Extracting using password: %s" % (password or "None"))
    archive.extract(password)
else:
    for pw in filter(None, uniqify([password] + self.getPasswords(False))):
        try:
            self.logDebug("Extracting using password: %s" % pw)
            archive.extract(pw)
            self.addPassword(pw)
            break

        except PasswordError:
            self.logDebug("Password was wrong")
    else:
        raise PasswordError

pyfile.setProgress(100)
pyfile.setStatus("processing")

class DecrypterThread(BaseThread):
    """thread for decrypting"""

    def __init__(self, manager, data, pid):
        """constructor"""
        BaseThread.__init__(self, manager)

        self.data = data
        self.pid = pid

        self.start()

    def run(self):
        plugin_map = {}
        for url, plugin in self.data:
            if plugin in plugin_map:
                plugin_map[plugin].append(url)
            else:
                plugin_map[plugin] = [url]

        self.decrypt(plugin_map)

    def decrypt(self, plugin_map):
        pack = self.m.core.files.getPackage(self.pid)
        result = []

        for name, urls in plugin_map.iteritems():
            klass = self.m.core.pluginManager.loadClass("crypter", name)
            plugin = klass(self.m.core, pack, pack.password)
            plugin_result = []

            try:
                try:
                    plugin_result = plugin._decrypt(urls)
                except Retry:
                    sleep(1)
                    plugin_result = plugin._decrypt(urls)
            except Exception, e:
                plugin.logError(_("Decrypting failed"), e)
                if self.m.core.debug:
                    print_exc()
                    self.writeDebugReport(plugin.__name__, plugin=plugin)

            plugin.logDebug("Decrypted", plugin_result)
            result.extend(plugin_result)

        #TODO package names are optional
        result = uniqify(result)
        pack_names = {}
        urls = []

        for p in result:
            if isinstance(p, Package):
                if p.name in pack_names:
                    pack_names[p.name].urls.extend(p.urls)
                else:
                    pack_names[p.name] = p
            else:
                urls.append(p)

        if urls:
            self.log.info(_("Decrypted %(count)d links into package %(name)s") % {"count": len(urls), "name": pack.name})
            self.m.core.api.addFiles(self.pid, urls)

        for p in pack_names.itervalues():
            self.m.core.api.addPackage(p.name, p.urls, pack.password)

        if not result:
            self.log.info(_("No links decrypted"))

        if plugin:
            plugin.clean()
        self.progress.done += len(urls)

    result.extend(plugin_result)

# clear the progress
self.progress = None

# generated packages
packs = {}
# urls without package
urls = []

# merge urls and packages
for p in result:
    if isinstance(p, Package):
        if p.name in packs:
            packs[p.name].urls.extend(p.urls)
        else:
            if not p.name:
                urls.extend(p.links)
            else:
                packs[p.name] = p
    else:
        urls.append(p)

urls = uniqify(urls)

return urls, packs.values()

def getLinks(self):
    f = lambda url: "http://" + re.sub(r'(\w{7})s\.', r'\1.', url)
    return uniqify(map(f, re.findall(self.LINK_PATTERN, self.html)))