def _copyChunks(self):
    """
    Merge all chunk files into the first chunk file and clean up.

    Chunks 1..n-1 are appended onto chunk 0 (seeking past any overlap),
    each chunk file is deleted after copying, then the merged file is
    moved to its final filename and the info file is removed.

    Raises Exception when a chunk is shorter than its recorded range.
    """
    init = fs_encode(self.info.getChunkName(0))  #: initial chunk name

    if self.info.getCount() > 1:
        with open(init, "rb+") as fo:  #: first chunkfile
            for i in xrange(1, self.info.getCount()):
                # seek to beginning of chunk, to get rid of overlapping chunks
                fo.seek(self.info.getChunkRange(i - 1)[1] + 1)
                fname = fs_encode("%s.chunk%d" % (self.filename, i))
                with open(fname, "rb") as fi:
                    buf = 32 * 1024
                    while True:  #: copy in chunks, consumes less memory
                        data = fi.read(buf)
                        if not data:
                            break
                        fo.write(data)
                if fo.tell() < self.info.getChunkRange(i)[1]:
                    # BUGFIX: original called undefined `reshutil.move(init)`;
                    # per the surrounding comments the intent is to delete.
                    os.remove(init)
                    self.info.remove()  #: there are probably invalid chunks
                    raise Exception("Downloaded content was smaller than expected. Try to reduce download connections.")
                os.remove(fname)  #: remove chunk

    if self.nameDisposition and self.disposition:
        self.filename = fs_join(os.path.dirname(self.filename), self.nameDisposition)

    shutil.move(init, fs_encode(self.filename))
    self.info.remove()  #: remove info file
def _copyChunks(self):
    """
    Merge all chunk files into the first chunk file and clean up.

    Chunks 1..n-1 are appended onto chunk 0 (seeking past any overlap),
    each chunk file is deleted after copying, then the merged file is
    moved to its final filename and the info file is removed.

    Raises Exception when a chunk is shorter than its recorded range.
    """
    init = fs_encode(self.info.getChunkName(0))  #: initial chunk name

    if self.info.getCount() > 1:
        with open(init, "rb+") as fo:  #: first chunkfile
            for i in xrange(1, self.info.getCount()):
                # seek to beginning of chunk, to get rid of overlapping chunks
                fo.seek(self.info.getChunkRange(i - 1)[1] + 1)
                fname = fs_encode("%s.chunk%d" % (self.filename, i))
                with open(fname, "rb") as fi:
                    buf = 32 * 1024
                    while True:  #: copy in chunks, consumes less memory
                        data = fi.read(buf)
                        if not data:
                            break
                        fo.write(data)
                if fo.tell() < self.info.getChunkRange(i)[1]:
                    # BUGFIX: original called undefined `reshutil.move(init)`;
                    # per the surrounding comments the intent is to delete.
                    os.remove(init)
                    self.info.remove()  #: there are probably invalid chunks
                    raise Exception("Downloaded content was smaller than expected. Try to reduce download connections.")
                os.remove(fname)  #: remove chunk

    if self.nameDisposition and self.disposition:
        self.filename = fs_join(os.path.dirname(self.filename), self.nameDisposition)

    shutil.move(init, fs_encode(self.filename))
    self.info.remove()  #: remove info file
def packageFinished(self, pypack):
    """
    Hook run when a package finishes: parse any downloaded checksum-list
    files (extensions in self.formats) and verify the files they mention.
    """
    download_folder = fs_join(self.config.get("general", "download_folder"), pypack.folder, "")

    for link in pypack.getChildren().itervalues():
        file_type = splitext(link['name'])[1][1:].lower()

        # only process files whose extension marks them as a checksum list
        if file_type not in self.formats:
            continue

        hash_file = fs_encode(fs_join(download_folder, link['name']))
        if not isfile(hash_file):
            self.logWarning(_("File not found"), link['name'])
            continue

        with open(hash_file) as f:
            text = f.read()

        # each match yields a NAME/HASH pair from the checksum file
        for m in re.finditer(self.regexps.get(file_type, self.regexps['default']), text):
            data = m.groupdict()
            self.logDebug(link['name'], data)

            local_file = fs_encode(fs_join(download_folder, data['NAME']))
            # algorithm name may differ from the file extension (self.methods)
            algorithm = self.methods.get(file_type, file_type)
            checksum = computeChecksum(local_file, algorithm)
            if checksum == data['HASH']:
                self.logInfo(_('File integrity of "%s" verified by %s checksum (%s)') % (data['NAME'], algorithm, checksum))
            else:
                self.logWarning(_("%s checksum for file %s does not match (%s != %s)") % (algorithm, data['NAME'], checksum, data['HASH']))
def packageFinished(self, pypack):
    """
    Verify checksums for a finished package: scan each downloaded
    checksum-list file and compare the listed hashes against the files.
    """
    dl_dir = fs_join(self.config.get("general", "download_folder"), pypack.folder, "")

    for link in pypack.getChildren().itervalues():
        ext = splitext(link['name'])[1][1:].lower()
        if ext not in self.formats:
            continue

        hash_path = fs_encode(fs_join(dl_dir, link['name']))
        if not isfile(hash_path):
            self.logWarning(_("File not found"), link['name'])
            continue

        with open(hash_path) as fp:
            content = fp.read()

        pattern = self.regexps.get(ext, self.regexps['default'])
        for match in re.finditer(pattern, content):
            info = match.groupdict()
            self.logDebug(link['name'], info)

            target = fs_encode(fs_join(dl_dir, info['NAME']))
            algo = self.methods.get(ext, ext)
            digest = computeChecksum(target, algo)

            if digest == info['HASH']:
                self.logInfo(_('File integrity of "%s" verified by %s checksum (%s)') % (info['NAME'], algo, digest))
            else:
                self.logWarning(_("%s checksum for file %s does not match (%s != %s)") % (algo, info['NAME'], digest, info['HASH']))
def get_download(path):
    """Serve a file from the configured download folder."""
    #@TODO some files can not be downloaded
    decoded = unquote(path).decode("utf8")
    root = PYLOAD.getConfigValue("general", "download_folder")

    # crude path-traversal guard: strip ".." sequences
    safe = decoded.replace("..", "")

    return static_file(fs_encode(safe), fs_encode(root))
def get_download(path):
    """Serve a file from the configured download folder."""
    #@TODO some files can not be downloaded
    decoded = urllib.unquote(path).decode("utf8")
    root = PYLOAD.getConfigValue("general", "download_folder")

    # crude path-traversal guard: strip ".." sequences
    safe = decoded.replace("..", "")

    return bottle.static_file(fs_encode(safe), fs_encode(root))
def scan(self, pyfile, thread):
    """
    Run the configured external virus scanner on the last download and
    delete or quarantine the file if the scanner flags it.
    """
    file = fs_encode(pyfile.plugin.lastDownload)
    filename = os.path.basename(pyfile.plugin.lastDownload)
    cmdfile = fs_encode(self.getConfig('cmdfile'))
    cmdargs = fs_encode(self.getConfig('cmdargs').strip())

    # nothing to do without both a downloaded file and a scanner binary
    if not os.path.isfile(file) or not os.path.isfile(cmdfile):
        return

    thread.addActive(pyfile)
    pyfile.setCustomStatus(_("virus scanning"))
    pyfile.setProgress(0)

    # NOTE(review): this outer `try` has no matching except/finally in the
    # visible source -- the block appears truncated; confirm against upstream.
    try:
        p = subprocess.Popen([cmdfile, cmdargs, file], bufsize=-1, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

        out, err = map(str.strip, p.communicate())

        if out:
            self.logInfo(filename, out)

        if err:
            self.logWarning(filename, err)
            if not self.getConfig('ignore-err'):
                self.logDebug("Delete/Quarantine task is aborted")
                return

        # non-zero exit code is treated as "infected" -- presumably the
        # convention of the configured scanner; TODO confirm
        if p.returncode:
            pyfile.error = _("infected file")
            action = self.getConfig('action')
            try:
                if action == "Delete":
                    if not self.getConfig('deltotrash'):
                        os.remove(file)
                    elif self.trashable:
                        send2trash.send2trash(file)
                    else:
                        self.logWarning(_("Unable to move file to trash, move to quarantine instead"))
                        pyfile.setCustomStatus(_("file moving"))
                        shutil.move(file, self.getConfig('quardir'))
                elif action == "Quarantine":
                    pyfile.setCustomStatus(_("file moving"))
                    shutil.move(file, self.getConfig('quardir'))
            except (IOError, shutil.Error), e:
                self.logError(filename, action + " action failed!", e)
        elif not out and not err:
            self.logDebug(filename, "No infected file found")
def scan(self, pyfile, thread):
    """
    Run the configured external virus scanner on the last download and
    delete or quarantine the file if the scanner flags it.
    """
    file = fs_encode(pyfile.plugin.lastDownload)
    filename = os.path.basename(pyfile.plugin.lastDownload)
    cmdfile = fs_encode(self.getConfig('cmdfile'))
    cmdargs = fs_encode(self.getConfig('cmdargs').strip())

    # nothing to do without both a downloaded file and a scanner binary
    if not os.path.isfile(file) or not os.path.isfile(cmdfile):
        return

    thread.addActive(pyfile)
    pyfile.setCustomStatus(_("virus scanning"))
    pyfile.setProgress(0)

    # NOTE(review): this outer `try` has no matching except/finally in the
    # visible source -- the block appears truncated; confirm against upstream.
    try:
        p = subprocess.Popen([cmdfile, cmdargs, file], bufsize=-1, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

        out, err = map(str.strip, p.communicate())

        if out:
            self.logInfo(filename, out)

        if err:
            self.logWarning(filename, err)
            if not self.getConfig('ignore-err'):
                self.logDebug("Delete/Quarantine task is aborted")
                return

        # non-zero exit code is treated as "infected" -- presumably the
        # convention of the configured scanner; TODO confirm
        if p.returncode:
            pyfile.error = _("infected file")
            action = self.getConfig('action')
            try:
                if action == "Delete":
                    if not self.getConfig('deltotrash'):
                        os.remove(file)
                    elif self.trashable:
                        send2trash.send2trash(file)
                    else:
                        self.logWarning(_("Unable to move file to trash, move to quarantine instead"))
                        pyfile.setCustomStatus(_("file moving"))
                        shutil.move(file, self.getConfig('quardir'))
                elif action == "Quarantine":
                    pyfile.setCustomStatus(_("file moving"))
                    shutil.move(file, self.getConfig('quardir'))
            except (IOError, shutil.Error), e:
                self.logError(filename, action + " action failed!", e)
        elif not out and not err:
            self.logDebug(filename, "No infected file found")
def load(name):
    """
    Load chunk metadata previously written by save() from "<name>.chunks".

    Returns a populated ChunkInfo. Raises IOError when the metadata file
    is missing and WrongFormat when the header or a chunk record does not
    match the expected "name:"/"size:"/"range:" layout.
    """
    fs_name = fs_encode("%s.chunks" % name)
    if not exists(fs_name):
        raise IOError()
    fh = codecs.open(fs_name, "r", "utf_8")
    name = fh.readline()[:-1]  # "name:<value>\n" -> strip trailing newline
    size = fh.readline()[:-1]  # "size:<value>\n"
    if name.startswith("name:") and size.startswith("size:"):
        name = name[5:]
        size = size[5:]
    else:
        fh.close()
        raise WrongFormat()
    ci = ChunkInfo(name)
    ci.loaded = True
    # NOTE(review): size is still a string here -- presumably setSize
    # converts it; confirm
    ci.setSize(size)
    while True:
        if not fh.readline():  #: skip line
            break
        name = fh.readline()[1:-1]   # "\tname:<chunk file>" -> drop tab + newline
        range = fh.readline()[1:-1]  # "\trange:<start>-<end>"
        if name.startswith("name:") and range.startswith("range:"):
            name = name[5:]
            range = range[6:].split("-")
        else:
            raise WrongFormat()

        ci.addChunk(name, (long(range[0]), long(range[1])))
    fh.close()
    return ci
def extract(self, password=None):
    """
    Extract the archive, then refresh self.files from a listing.

    Raises PasswordError / CRCError / ArchiveError on failure.
    """
    mode = "x" if self.fullpath else "e"

    proc = self.call_cmd(mode, fs_encode(self.filename), self.out, password=password)

    renice(proc.pid, self.renice)

    # communicate and retrieve stderr
    self._progress(proc)

    err = proc.stderr.read().strip()
    if err:
        if self.re_wrongpwd.search(err):
            raise PasswordError
        if self.re_wrongcrc.search(err):
            raise CRCError(err)
        # anything else on stderr is a generic archive error
        raise ArchiveError(err)

    if proc.returncode:
        raise ArchiveError(_("Process return code: %d") % proc.returncode)

    self.files = self.list(password)
def list(self, password=None):
    """Return the archive members as target paths under self.out."""
    mode = "vb" if self.fullpath else "lb"

    proc = self.call_cmd(mode, "-v", fs_encode(self.filename), password=password)
    out, err = proc.communicate()

    if "Cannot open" in err:
        raise ArchiveError(_("Cannot open file"))

    if err.strip():  #: only log error at this point
        self.manager.logError(err.strip())

    entries = set()
    if not self.fullpath and self.VERSION.startswith('5'):
        # NOTE: Unrar 5 always list full path
        for line in fs_decode(out).splitlines():
            candidate = fs_join(self.out, os.path.basename(line.strip()))
            if os.path.isfile(candidate):
                entries.add(fs_join(self.out, os.path.basename(candidate)))
    else:
        for line in fs_decode(out).splitlines():
            entries.add(fs_join(self.out, line.strip()))

    return list(entries)
def downloads():
    """
    Render the downloads page: list folders (one level deep) and files
    found in the configured download directory.
    """
    root = PYLOAD.getConfigValue("general", "download_folder")

    if not isdir(root):
        return base([_('Download directory not found.')])

    data = {'folder': [], 'files': []}

    items = listdir(fs_encode(root))

    for item in sorted([fs_decode(x) for x in items]):
        if isdir(fs_join(root, item)):
            folder = {'name': item, 'path': item, 'files': []}
            files = listdir(fs_join(root, item))
            for file in sorted([fs_decode(x) for x in files]):
                try:
                    if isfile(fs_join(root, item, file)):
                        folder['files'].append(file)
                except Exception:
                    # ignore entries that cannot be stat'd (e.g. bad encoding)
                    pass

            data['folder'].append(folder)
        elif isfile(join(root, item)):
            data['files'].append(item)

    return render_to_response('downloads.html', {'files': data}, [pre_processor])
def downloads():
    """
    Render the downloads page: list folders (one level deep) and files
    found in the configured download directory.
    """
    root = PYLOAD.getConfigValue("general", "download_folder")
    if not os.path.isdir(root):
        return base([_('Download directory not found.')])

    data = {'folder': [], 'files': []}

    for entry in sorted(fs_decode(x) for x in os.listdir(fs_encode(root))):
        if os.path.isdir(fs_join(root, entry)):
            folder = {'name': entry, 'path': entry, 'files': []}
            for fname in sorted(fs_decode(x) for x in os.listdir(fs_join(root, entry))):
                try:
                    if os.path.isfile(fs_join(root, entry, fname)):
                        folder['files'].append(fname)
                except Exception:
                    # ignore entries that cannot be stat'd
                    pass
            data['folder'].append(folder)
        elif os.path.isfile(os.path.join(root, entry)):
            data['files'].append(entry)

    return render_to_response('downloads.html', {'files': data}, [pre_processor])
def decrypt(self, pyfile):
    """
    Decrypt a local CCF container by uploading it to the jdownloader
    DLC-conversion service and queueing the resulting DLC file.
    """
    fs_filename = fs_encode(pyfile.url.strip())
    opener = urllib2.build_opener(MultipartPostHandler.MultipartPostHandler)

    # upload the CCF; the service answers with DLC content
    dlc_content = opener.open('http://service.jdownloader.net/dlcrypt/getDLC.php',
                              {'src': "ccf",
                               'filename': "test.ccf",
                               'upload': open(fs_filename, "rb")}).read()

    download_folder = self.config.get("general", "download_folder")

    dlc_file = fs_join(download_folder, "tmp_%s.dlc" % pyfile.name)

    try:
        # extract the base64-encoded DLC payload from the response
        dlc = re.search(r'<dlc>(.+)</dlc>', dlc_content, re.S).group(1).decode('base64')
    except AttributeError:
        self.fail(_("Container is corrupted"))

    with open(dlc_file, "w") as tempdlc:
        tempdlc.write(dlc)

    # hand the temporary DLC file back for further decryption
    self.urls = [dlc_file]
def decryptFile(self, key): """ Decrypts the file at lastDownload` """ # upper 64 bit of counter start n = self.b64_decode(key)[16:24] # convert counter to long and shift bytes k, iv, meta_mac = self.getCipherKey(key) ctr = Crypto.Util.Counter.new( 128, initial_value=long(n.encode("hex"), 16) << 64) cipher = Crypto.Cipher.AES.new(k, Crypto.Cipher.AES.MODE_CTR, counter=ctr) self.pyfile.setStatus("decrypting") self.pyfile.setProgress(0) file_crypted = fs_encode(self.lastDownload) file_decrypted = file_crypted.rsplit(self.FILE_SUFFIX)[0] try: f = open(file_crypted, "rb") df = open(file_decrypted, "wb") except IOError, e: self.fail(e)
def verify(self):
    """
    Test the zip archive: raise CRCError on a corrupt member, otherwise
    raise PasswordError (archive tested clean -- presumably treated as a
    password check by the caller; confirm intent).
    """
    with zipfile.ZipFile(fs_encode(self.filename), 'r', allowZip64=True) as z:
        corrupt = z.testzip()
        if not corrupt:
            raise PasswordError
        raise CRCError(corrupt)
def repair(self):
    """Attempt an archive repair; return True when it apparently succeeded."""
    proc = self.call_cmd("rc", fs_encode(self.filename))

    # communicate and retrieve stderr
    self._progress(proc)
    err = proc.stderr.read().strip()

    # success = nothing on stderr and a zero exit status
    return not (err or proc.returncode)
def check(self, password):
    """Probe the archive listing for password/CRC problems."""
    proc = self.call_cmd("l", "-slt", fs_encode(self.filename))
    out, err = proc.communicate()

    # check if output matches the 'wrong password' regexp
    if self.re_wrongpwd.search(out):
        raise PasswordError

    if self.re_wrongcrc.search(out):
        raise CRCError(_("Header protected"))
def verify(self, password):
    """Test the archive with the given password; raise on pwd/CRC errors."""
    proc = self.call_cmd("t", "-v", fs_encode(self.filename), password=password)

    self._progress(proc)
    stderr_text = proc.stderr.read().strip()

    if self.re_wrongpwd.search(stderr_text):
        raise PasswordError

    if self.re_wrongcrc.search(stderr_text):
        raise CRCError(stderr_text)
def save(self):
    """Persist the chunk layout to "<name>.chunks" (utf-8, load()-compatible)."""
    meta_path = fs_encode("%s.chunks" % self.name)
    out = codecs.open(meta_path, "w", "utf_8")
    # header: overall name and size
    out.write("name:%s\n" % self.name)
    out.write("size:%s\n" % self.size)
    # one "#<idx>:" record per chunk with its file name and byte range
    for idx, chunk in enumerate(self.chunks):
        out.write("#%d:\n" % idx)
        out.write("\tname:%s\n" % chunk[0])
        out.write("\trange:%i-%i\n" % chunk[1])
    out.close()
def periodical(self):
    """
    Periodic hot-folder scan: import links from the watch file and from
    any files dropped into the configured folder, archiving processed
    inputs under "<folder>/finished".
    """
    folder = fs_encode(self.getConfig('folder'))
    file = fs_encode(self.getConfig('file'))

    try:
        if not os.path.isdir(os.path.join(folder, "finished")):
            os.makedirs(os.path.join(folder, "finished"))

        if self.getConfig('watch_file'):
            # "a+" so the watch file is created if missing
            with open(file, "a+") as f:
                f.seek(0)
                content = f.read().strip()

            if content:
                # truncate the watch file after reading it
                f = open(file, "wb")
                f.close()

                name = "%s_%s.txt" % (file, time.strftime("%H-%M-%S_%d%b%Y"))

                with open(fs_join(folder, "finished", name), "wb") as f:
                    f.write(content)

                self.core.api.addPackage(f.name, [f.name], 1)

        for f in os.listdir(folder):
            path = os.path.join(folder, f)

            # skip directories and backup/hidden/temp entries
            if not os.path.isfile(path) or f.endswith("~") or f.startswith("#") or f.startswith("."):
                continue

            newpath = os.path.join(folder, "finished", f if self.getConfig('keep') else "tmp_" + f)
            # NOTE(review): `move` is presumably shutil.move imported at
            # module level -- confirm
            move(path, newpath)

            self.logInfo(_("Added %s from HotFolder") % f)
            self.core.api.addPackage(f, [newpath], 1)

    except (IOError, OSError), e:
        self.logError(e)
def reloadPasswords(self): try: passwords = [] file = fs_encode(self.getConfig('passwordfile')) with open(file) as f: for pw in f.read().splitlines(): passwords.append(pw) except IOError, e: self.logError(e)
def addPassword(self, password): """ Adds a password to saved list""" try: self.passwords = uniqify([password] + self.passwords) file = fs_encode(self.getConfig('passwordfile')) with open(file, "wb") as f: for pw in self.passwords: f.write(pw + '\n') except IOError, e: self.logError(e)
def checkFile(self, rules={}):
    """
    Post-download sanity check: validate the captcha result, ensure a
    file was actually downloaded, then scan it against error patterns
    and retry when a problem is detected.
    """
    # NOTE(review): `rules={}` is a mutable default that is mutated below,
    # so added patterns persist across calls (and leak into callers'
    # dicts) -- confirm whether this accumulation is intended.
    if self.cTask and not self.lastDownload:
        self.invalidCaptcha()
        self.retry(10, reason=_("Wrong captcha"))

    elif not self.lastDownload or not os.path.exists(fs_encode(self.lastDownload)):
        self.lastDownload = ""
        self.error(self.pyfile.error or _("No file downloaded"))

    else:
        # cheap built-in checks first: empty file / bare HTTP error code
        errmsg = self.checkDownload({'Empty file': re.compile(r'\A\s*\Z'),
                                     'Html error': re.compile(r'\A(?:\s*<.+>)?((?:[\w\s]*(?:[Ee]rror|ERROR)\s*\:?)?\s*\d{3})(?:\Z|\s+)')})
        if not errmsg:
            # augment the rule set with generic and plugin-specific patterns
            for r, p in [('Html file', re.compile(r'\A\s*<!DOCTYPE html')),
                         ('Request error', re.compile(r'([Aa]n error occured while processing your request)'))]:
                if r not in rules:
                    rules[r] = p

            for r, a in [('Error', "ERROR_PATTERN"),
                         ('Premium only', "PREMIUM_ONLY_PATTERN"),
                         ('Wait error', "WAIT_PATTERN")]:
                if r not in rules and hasattr(self, a):
                    rules[r] = getattr(self, a)

            errmsg = self.checkDownload(rules)

        if not errmsg:
            return

        errmsg = errmsg.strip().capitalize()

        # append the matched group for context, if any
        try:
            errmsg += " | " + self.lastCheck.group(1).strip()
        except Exception:
            pass

        self.logWarning("Check result: " + errmsg, "Waiting 1 minute and retry")
        self.retry(3, 60, errmsg)
def downloadPreparing(self, pyfile):
    """Set a random (from file) or fixed user-agent on the request."""
    use_random = self.getConfig('uar')
    ua_file = fs_encode(self.getConfig('uaf'))

    if use_random and os.path.isfile(ua_file):
        with open(ua_file) as fp:
            uas = random.choice(fp.read().splitlines())
    else:
        uas = self.getConfig('uas')

    if uas:
        self.logDebug("Use custom user-agent string: " + uas)
        pyfile.plugin.req.http.c.setopt(pycurl.USERAGENT, uas.encode('utf-8'))
def verify(self, password):
    """Probe the archive for password or CRC problems."""
    # 7z can't distinguish crc and pw error in test
    proc = self.call_cmd("l", "-slt", fs_encode(self.filename))
    out, err = proc.communicate()

    # wrong-password hint may land on stdout or stderr
    if self.re_wrongpwd.search(out) or self.re_wrongpwd.search(err):
        raise PasswordError

    if self.re_wrongcrc.search(err):
        raise CRCError(err)
def periodical(self):
    """
    Periodic hot-folder scan: import links from the watch file and from
    any files dropped into the configured folder, archiving processed
    inputs under "<folder>/finished".
    """
    folder = fs_encode(self.getConfig('folder'))
    file = fs_encode(self.getConfig('file'))

    try:
        if not os.path.isdir(os.path.join(folder, "finished")):
            os.makedirs(os.path.join(folder, "finished"))

        if self.getConfig('watch_file'):
            # "a+" so the watch file is created if missing
            with open(file, "a+") as f:
                f.seek(0)
                content = f.read().strip()

            if content:
                # truncate the watch file after reading it
                f = open(file, "wb")
                f.close()

                name = "%s_%s.txt" % (file, time.strftime("%H-%M-%S_%d%b%Y"))

                with open(fs_join(folder, "finished", name), "wb") as f:
                    f.write(content)

                self.core.api.addPackage(f.name, [f.name], 1)

        for f in os.listdir(folder):
            path = os.path.join(folder, f)

            # skip directories and backup/hidden/temp entries
            if not os.path.isfile(path) or f.endswith("~") or f.startswith("#") or f.startswith("."):
                continue

            newpath = os.path.join(folder, "finished", f if self.getConfig('keep') else "tmp_" + f)
            shutil.move(path, newpath)

            self.logInfo(_("Added %s from HotFolder") % f)
            self.core.api.addPackage(f, [newpath], 1)

    except (IOError, OSError), e:
        self.logError(e)
def getHandle(self):
    """
    Returns a Curl handle ready to use for perform/multiperform.
    Configures resume offsets and HTTP byte ranges for this chunk, or
    returns None when the chunk is already complete.
    """
    self.setRequestContext(self.p.url, self.p.get, self.p.post, self.p.referer, self.p.cj)

    self.c.setopt(pycurl.WRITEFUNCTION, self.writeBody)
    self.c.setopt(pycurl.HEADERFUNCTION, self.writeHeader)

    # request all bytes, since some servers in russia seems to have a defect arihmetic unit
    fs_name = fs_encode(self.p.info.getChunkName(self.id))
    if self.resume:
        self.fp = open(fs_name, "ab")
        self.arrived = self.fp.tell()
        if not self.arrived:
            # NOTE(review): `stat` is presumably os.stat imported at module
            # level -- confirm
            self.arrived = stat(fs_name).st_size

        if self.range:
            # do nothing if chunk already finished
            if self.arrived + self.range[0] >= self.range[1]:
                return None

            if self.id == len(self.p.info.chunks) - 1:  #: as last chunk dont set end range, so we get everything
                range = "%i-" % (self.arrived + self.range[0])
            else:
                range = "%i-%i" % (self.arrived + self.range[0], min(self.range[1] + 1, self.p.size - 1))

            self.log.debug("Chunked resume with range %s" % range)
            self.c.setopt(pycurl.RANGE, range)
        else:
            self.log.debug("Resume File from %i" % self.arrived)
            self.c.setopt(pycurl.RESUME_FROM, self.arrived)
    else:
        if self.range:
            if self.id == len(self.p.info.chunks) - 1:  #: see above
                range = "%i-" % self.range[0]
            else:
                range = "%i-%i" % (self.range[0], min(self.range[1] + 1, self.p.size - 1))

            self.log.debug("Chunked with range %s" % range)
            self.c.setopt(pycurl.RANGE, range)

        self.fp = open(fs_name, "wb")

    return self.c
def check(self, password):
    """
    Scan the listing output for wrong-password/CRC hints and for
    password-protected members (marked with '*').
    """
    proc = self.call_cmd("l", "-v", fs_encode(self.filename), password=password)
    out, err = proc.communicate()

    if self.re_wrongpwd.search(err):
        raise PasswordError

    if self.re_wrongcrc.search(err):
        raise CRCError(err)

    # output only used to check if passworded files are present
    if any(attr[0].startswith("*") for attr in self.re_filelist.findall(out)):
        raise PasswordError
def extract(self, password=None): try: with zipfile.ZipFile(fs_encode(self.filename), 'r', allowZip64=True) as z: z.setpassword(password) badfile = z.testzip() if badfile: raise CRCError(badfile) else: z.extractall(self.out) except (zipfile.BadZipfile, zipfile.LargeZipFile), e: raise ArchiveError(e)
def decrypt(self, pyfile):
    """
    Decrypt an RSDF container: derive the CFB IV by ECB-encrypting the
    static IV with the static key, then read the container file.
    """
    KEY = binascii.unhexlify(self.KEY)
    IV = binascii.unhexlify(self.IV)

    iv = AES.new(KEY, AES.MODE_ECB).encrypt(IV)
    cipher = AES.new(KEY, AES.MODE_CFB, iv)

    try:
        fs_filename = fs_encode(pyfile.url.strip())
        with open(fs_filename, 'r') as rsdf:
            data = rsdf.read()
    except IOError, e:
        self.fail(e)
    # NOTE(review): `cipher` is never applied to `data` in the visible
    # source -- the decryption step appears to be missing; confirm upstream.
def decrypt(self, pyfile):
    """
    Decrypt an RSDF container: derive the CFB IV by ECB-encrypting the
    static IV with the static key, then read the container file.
    """
    KEY = binascii.unhexlify(self.KEY)
    IV = binascii.unhexlify(self.IV)

    iv = Crypto.Cipher.AES.new(KEY, Crypto.Cipher.AES.MODE_ECB).encrypt(IV)
    cipher = Crypto.Cipher.AES.new(KEY, Crypto.Cipher.AES.MODE_CFB, iv)

    try:
        fs_filename = fs_encode(pyfile.url.strip())
        with open(fs_filename, 'r') as rsdf:
            data = rsdf.read()
    except IOError, e:
        self.fail(e)
    # NOTE(review): `cipher` is never applied to `data` in the visible
    # source -- the decryption step appears to be missing; confirm upstream.
def decrypt(self, pyfile):
    """
    Parse a plain-text link list: blank lines and ";" comments are
    skipped, "[section]" lines start a new package, all other lines are
    links. Empty packages are dropped and the list is optionally flushed.
    """
    try:
        encoding = codecs.lookup(self.getConfig('encoding')).name
    except Exception:
        encoding = "utf-8"

    fs_filename = fs_encode(pyfile.url.strip())
    txt = codecs.open(fs_filename, 'r', encoding)

    curPack = "Parsed links from %s" % pyfile.name
    packages = {curPack: [],}

    for link in txt.readlines():
        link = link.strip()
        if not link:
            continue
        if link.startswith(";"):
            continue
        if link.startswith("[") and link.endswith("]"):
            # new package
            curPack = link[1:-1]
            packages[curPack] = []
            continue
        packages[curPack].append(link)
    txt.close()

    # empty packages fix
    # BUGFIX: popping while iterating `iteritems()` raises "RuntimeError:
    # dictionary changed size during iteration"; iterate a snapshot
    # (items() returns a list in Python 2) instead.
    for key, value in packages.items():
        if not value:
            packages.pop(key, None)

    if self.getConfig('flush'):
        try:
            txt = open(fs_filename, 'wb')
            txt.close()
        except IOError:
            self.logWarning(_("Failed to flush list"))

    for name, links in packages.iteritems():
        self.packages.append((name, links, name))
def checkFile(self, rules={}):
    """Handle PremiumTo-specific download errors, then defer to the parent check."""
    if self.checkDownload({'nopremium': "No premium account available"}):
        self.retry(60, 5 * 60, "No premium account available")

    err = ''
    if self.req.http.code == '420':
        # Custom error code send - fail
        path = fs_encode(self.lastDownload)
        with open(path, "rb") as fp:
            err = fp.read(256).strip()
        os.remove(path)

    if err:
        self.fail(err)

    return super(PremiumTo, self).checkFile(rules)
def checkFile(self, rules={}):
    """Handle PremiumTo-specific download errors, then defer to the parent check."""
    if self.checkDownload({'nopremium': "No premium account available"}):
        self.retry(60, 5 * 60, "No premium account available")

    err = ''
    if self.req.http.code == '420':
        # Custom error code send - fail
        path = fs_encode(self.lastDownload)
        with open(path, "rb") as fp:
            err = fp.read(256).strip()
        remove(path)

    if err:
        self.fail(err)

    return super(PremiumTo, self).checkFile(rules)
def callScript(self, script, *args): try: cmd_args = [fs_encode(str(x) if not isinstance(x, basestring) else x) for x in args] cmd = [script] + cmd_args self.logDebug("Executing: %s" % os.path.abspath(script), "Args: " + ' '.join(cmd_args)) p = subprocess.Popen(cmd, bufsize=-1) #@NOTE: output goes to pyload if self.getConfig('waitend'): p.communicate() except Exception, e: try: self.logError(_("Runtime error: %s") % os.path.abspath(script), e) except Exception: self.logError(_("Runtime error: %s") % os.path.abspath(script), _("Unknown error"))
def checkDownload(self, rules, api_size=0, max_size=50000, delete=True, read_size=0):
    """
    Checks the content of the last downloaded file; a regexp match is
    saved to `lastCheck`.

    :param rules: dict with names and rules to match (compiled regexp or strings)
    :param api_size: expected file size
    :param max_size: if the file is larger then it wont be checked
    :param delete: delete if matched
    :param read_size: amount of bytes to read from files larger then max_size
    :return: dictionary key of the first rule that matched, else None
    """
    lastDownload = fs_encode(self.lastDownload)
    if not os.path.exists(lastDownload):
        return None

    size = os.stat(lastDownload)
    size = size.st_size

    # a file at least as large as the API reported is assumed genuine
    if api_size and api_size <= size:
        return None
    # large files are skipped unless a partial read was requested
    elif size > max_size and not read_size:
        return None
    self.logDebug("Download Check triggered")

    with open(lastDownload, "rb") as f:
        content = f.read(read_size if read_size else -1)

    # produces encoding errors, better log to other file in the future?
    # self.logDebug("Content: %s" % content)
    for name, rule in rules.iteritems():
        if isinstance(rule, basestring):
            if rule in content:
                if delete:
                    os.remove(lastDownload)
                return name

        elif hasattr(rule, "search"):
            m = rule.search(content)
            if m:
                if delete:
                    os.remove(lastDownload)
                self.lastCheck = m
                return name
def decrypt(self, pyfile):
    """
    Parse a plain-text link list: blank lines and ";" comments are
    skipped, "[section]" lines start a new package, all other lines are
    links. Empty packages are dropped and the list is optionally flushed.
    """
    try:
        encoding = codecs.lookup(self.getConfig("encoding")).name
    except Exception:
        encoding = "utf-8"

    fs_filename = fs_encode(pyfile.url.strip())
    txt = codecs.open(fs_filename, "r", encoding)

    curPack = "Parsed links from %s" % pyfile.name
    packages = {curPack: []}

    for link in txt.readlines():
        link = link.strip()
        if not link:
            continue
        if link.startswith(";"):
            continue
        if link.startswith("[") and link.endswith("]"):
            # new package
            curPack = link[1:-1]
            packages[curPack] = []
            continue
        packages[curPack].append(link)
    txt.close()

    # empty packages fix
    # BUGFIX: popping while iterating `iteritems()` raises "RuntimeError:
    # dictionary changed size during iteration"; iterate a snapshot
    # (items() returns a list in Python 2) instead.
    for key, value in packages.items():
        if not value:
            packages.pop(key, None)

    if self.getConfig("flush"):
        try:
            txt = open(fs_filename, "wb")
            txt.close()
        except IOError:
            self.logWarning(_("Failed to flush list"))

    for name, links in packages.iteritems():
        self.packages.append((name, links, name))
def getHandle(self):
    """
    Returns a Curl handle ready to use for perform/multiperform.
    Configures resume offsets and HTTP byte ranges for this chunk, or
    returns None when the chunk is already complete.
    """
    self.setRequestContext(self.p.url, self.p.get, self.p.post, self.p.referer, self.p.cj)

    self.c.setopt(pycurl.WRITEFUNCTION, self.writeBody)
    self.c.setopt(pycurl.HEADERFUNCTION, self.writeHeader)

    # request all bytes, since some servers in russia seems to have a defect arihmetic unit
    fs_name = fs_encode(self.p.info.getChunkName(self.id))
    if self.resume:
        self.fp = open(fs_name, "ab")
        self.arrived = self.fp.tell()
        if not self.arrived:
            # fall back to the on-disk size if tell() reports 0
            self.arrived = os.stat(fs_name).st_size

        if self.range:
            # do nothing if chunk already finished
            if self.arrived + self.range[0] >= self.range[1]:
                return None

            if self.id == len(self.p.info.chunks) - 1:  #: as last chunk dont set end range, so we get everything
                range = "%i-" % (self.arrived + self.range[0])
            else:
                range = "%i-%i" % (self.arrived + self.range[0], min(self.range[1] + 1, self.p.size - 1))

            self.log.debug("Chunked resume with range %s" % range)
            self.c.setopt(pycurl.RANGE, range)
        else:
            self.log.debug("Resume File from %i" % self.arrived)
            self.c.setopt(pycurl.RESUME_FROM, self.arrived)
    else:
        if self.range:
            if self.id == len(self.p.info.chunks) - 1:  #: see above
                range = "%i-" % self.range[0]
            else:
                range = "%i-%i" % (self.range[0], min(self.range[1] + 1, self.p.size - 1))

            self.log.debug("Chunked with range %s" % range)
            self.c.setopt(pycurl.RANGE, range)

        self.fp = open(fs_name, "wb")

    return self.c
def list(self, password=None):
    """List the archive; return extraction target paths under self.out."""
    # NOTE(review): the original ternary had identical branches
    # ("l" if self.fullpath else "l"), so behavior does not depend on
    # self.fullpath here -- confirm upstream whether a different
    # fullpath command was intended.
    command = "l"

    proc = self.call_cmd(command, fs_encode(self.filename), password=password)
    out, err = proc.communicate()

    if "Can not open" in err:
        raise ArchiveError(_("Cannot open file"))

    if proc.returncode > 1:
        raise ArchiveError(_("Process return code: %d") % proc.returncode)

    entries = set()
    for groups in self.re_filelist.findall(out):
        entries.add(fs_join(self.out, groups[-1].strip()))

    return list(entries)
def decrypt(self, pyfile):
    """
    Decrypt a local CCF container by uploading it to the jdownloader
    DLC-conversion service and queueing the resulting DLC file.
    """
    fs_filename = fs_encode(pyfile.url.strip())
    opener = urllib2.build_opener(MultipartPostHandler.MultipartPostHandler)

    form = {'src': "ccf",
            'filename': "test.ccf",
            'upload': open(fs_filename, "rb")}
    dlc_content = opener.open('http://service.jdownloader.net/dlcrypt/getDLC.php', form).read()

    download_folder = self.config.get("general", "download_folder")
    dlc_file = fs_join(download_folder, "tmp_%s.dlc" % pyfile.name)

    try:
        dlc = re.search(r'<dlc>(.+)</dlc>', dlc_content, re.S).group(1).decode('base64')
    except AttributeError:
        self.fail(_("Container is corrupted"))

    with open(dlc_file, "w") as tempdlc:
        tempdlc.write(dlc)

    self.urls = [dlc_file]
def checkFile(self, rules={}):
    """
    Post-download sanity check: validate the captcha result, ensure a
    file was actually downloaded, then scan it against error patterns
    and retry when a problem is detected.
    """
    if self.cTask and not self.lastDownload:
        self.invalidCaptcha()
        self.retry(10, reason=_("Wrong captcha"))

    elif not self.lastDownload or not os.path.exists(fs_encode(self.lastDownload)):
        self.lastDownload = ""
        self.error(self.pyfile.error or _("No file downloaded"))

    else:
        # cheap built-in checks first: empty file / bare HTTP error code
        errmsg = self.checkDownload({'Empty file': re.compile(r'\A\s*\Z'),
                                     'Html error': re.compile(r'\A(?:\s*<.+>)?((?:[\w\s]*(?:[Ee]rror|ERROR)\s*\:?)?\s*\d{3})(?:\Z|\s+)')})
        if not errmsg:
            # BUGFIX: `rules` has a mutable default and was mutated in
            # place, so added patterns accumulated across calls and leaked
            # into callers' dicts. Work on a local copy instead.
            rules = dict(rules)

            for r, p in [('Html file', re.compile(r'\A\s*<!DOCTYPE html')),
                         ('Request error', re.compile(r'([Aa]n error occured while processing your request)'))]:
                if r not in rules:
                    rules[r] = p

            for r, a in [('Error', "ERROR_PATTERN"),
                         ('Premium only', "PREMIUM_ONLY_PATTERN"),
                         ('Wait error', "WAIT_PATTERN")]:
                if r not in rules and hasattr(self, a):
                    rules[r] = getattr(self, a)

            errmsg = self.checkDownload(rules)

        if not errmsg:
            return

        errmsg = errmsg.strip().capitalize()

        # append the matched group for context, if any
        try:
            errmsg += " | " + self.lastCheck.group(1).strip()
        except Exception:
            pass

        self.logWarning("Check result: " + errmsg, "Waiting 1 minute and retry")
        self.retry(3, 60, errmsg)
def decrypt(self, pyfile):
    """
    Decrypt a DLC container: fetch the session key from the DLC service,
    AES-CBC-decrypt the payload and build the package list.
    """
    fs_filename = fs_encode(pyfile.url.strip())
    with open(fs_filename) as dlc:
        data = dlc.read().strip()

    # restore base64 padding
    data += '=' * (-len(data) % 4)

    # the last 88 chars are the key token sent to the service
    dlc_key = data[-88:]
    dlc_data = data[:-88].decode('base64')
    dlc_content = self.load(self.API_URL % dlc_key)

    try:
        rc = re.search(r'<rc>(.+)</rc>', dlc_content, re.S).group(1).decode('base64')
    except AttributeError:
        self.fail(_("Container is corrupted"))

    # the service response, decrypted with the static key/IV, yields the
    # session key which doubles as the IV for the payload
    key = iv = Crypto.Cipher.AES.new(self.KEY, Crypto.Cipher.AES.MODE_CBC, self.IV).decrypt(rc)

    self.data = Crypto.Cipher.AES.new(key, Crypto.Cipher.AES.MODE_CBC, iv).decrypt(dlc_data).decode('base64')
    self.packages = [(name or pyfile.name, links, name or pyfile.name) \
                     for name, links in self.getPackages()]