def package_finished(self, pypack):
    """
    Verify the files of a finished package against checksum container
    files (e.g. .sfv/.md5) found in the package's download folder.

    :param pypack: finished pyload package object
    """
    dl_folder = fs_join(self.pyload.config.get("general", "download_folder"),
                        pypack.folder, "")

    for link in pypack.getChildren().values():
        file_type = os.path.splitext(link['name'])[1][1:].lower()

        #: Skip anything that is not a known checksum-container format
        if file_type not in self.formats:
            continue

        hash_file = encode(fs_join(dl_folder, link['name']))
        if not os.path.isfile(hash_file):
            self.log_warning(_("File not found"), link['name'])
            continue

        with open(hash_file) as f:
            text = f.read()

        for m in re.finditer(self.regexps.get(file_type, self.regexps['default']), text):
            data = m.groupdict()
            self.log_debug(link['name'], data)

            local_file = encode(fs_join(dl_folder, data['NAME']))
            algorithm = self.methods.get(file_type, file_type)
            checksum = compute_checksum(local_file, algorithm)

            #@NOTE: Was `checksum is data['HASH']` — identity comparison of
            #       two distinct strings is (virtually) always False; compare
            #       by value instead
            if checksum == data['HASH']:
                self.log_info(_('File integrity of "%s" verified by %s checksum (%s)')
                              % (data['NAME'], algorithm, checksum))
            else:
                self.log_warning(_("%s checksum for file %s does not match (%s != %s)")
                                 % (algorithm, data['NAME'], checksum, data['HASH']))
def retry(self, attemps=5, wait=1, msg=""):
    """
    Retries and begin again from the beginning

    :param attemps: number of maximum retries
    :param wait: time to wait in seconds before retry
    :param msg: message passed to fail if attemps value was reached
    """
    #: Retry counters are keyed by the caller's source line number
    frame = inspect.currentframe()
    try:
        caller_line = frame.f_back.f_lineno
    finally:
        #: Drop the frame reference explicitly to break the reference cycle
        del frame

    self.retries.setdefault(caller_line, 0)

    #: attemps <= 0 means unlimited retries
    if 0 < attemps <= self.retries[caller_line]:
        self.fail(msg or _("Max retries reached"))

    self.wait(wait, False)

    self.retries[caller_line] += 1
    raise Retry(encode(msg))  # @TODO: Remove `encode` in 0.4.10
def _process(self, thread): self.log_debug("Plugin version: " + self.__version__) self.log_debug("Plugin status: " + self.__status__) if self.__status__ is "broken": self.fail(_("Plugin is temporarily unavailable")) elif self.__status__ is "testing": self.log_warning(_("Plugin may be unstable")) self.thread = thread self._setup() # self.pyload.hookManager.downloadPreparing(self.pyfile) #@TODO: Recheck in 0.4.10 self.check_status() self.pyfile.setStatus("starting") try: self.process(self.pyfile) self.check_status() self.check_download() except Fail, e: #@TODO: Move to PluginThread in 0.4.10 if self.get_config('fallback', True) and self.premium: self.log_warning(_("Premium download failed"), e) self.restart(premium=False) else: raise Fail(encode(e))
def handle_free(self, pyfile):
    """
    Free (non-premium) download flow for uploading.com: detect download
    limits, perform the two-step AJAX link retrieval, then extract the
    final form URL into `self.link`.

    :param pyfile: pyload file object being processed
    """
    m = re.search('<h2>((Daily )?Download Limit)</h2>', self.data)
    if m is not None:
        pyfile.error = encode(m.group(1))
        self.log_warning(pyfile.error)
        #: Daily limit -> wait 6 hours, plain limit -> 15 minutes
        self.retry(6, (6 * 60 if m.group(2) else 15) * 60, pyfile.error)

    ajax_url = "http://uploading.com/files/get/?ajax"
    self.req.http.c.setopt(pycurl.HTTPHEADER, ["X-Requested-With: XMLHttpRequest"])
    self.req.http.lastURL = pyfile.url

    #: Step 1: request the wait time
    res = json.loads(self.load(ajax_url,
                               post={'action': 'second_page',
                                     'code': self.info['pattern']['ID']}))

    if 'answer' in res and 'wait_time' in res['answer']:
        wait_time = int(res['answer']['wait_time'])
        self.log_info(_("Waiting %d seconds") % wait_time)
        self.wait(wait_time)
    else:
        self.error(_("No AJAX/WAIT"))

    #: Step 2: request the download link
    res = json.loads(self.load(ajax_url,
                               post={'action': 'get_link',
                                     'code': self.info['pattern']['ID'],
                                     'pass': '******'}))

    if 'answer' in res and 'link' in res['answer']:
        url = res['answer']['link']
    else:
        self.error(_("No AJAX/URL"))

    #: The returned page contains the real file form action
    self.data = self.load(url)
    m = re.search(r'<form id="file_form" action="(.*?)"', self.data)
    if m is not None:
        url = m.group(1)
    else:
        self.error(_("No URL"))

    self.link = url
def check_filesize(self, file_size, size_tolerance=1024):
    """
    Checks the file size of the last downloaded file

    :param file_size: expected file size
    :param size_tolerance: size check tolerance
    """
    #: Nothing downloaded yet -> nothing to verify
    if not self.last_download:
        return

    actual_size = os.stat(encode(self.last_download)).st_size

    if actual_size < 1:
        self.fail(_("Empty file"))
    elif file_size > 0:
        delta = abs(file_size - actual_size)
        if delta > size_tolerance:
            self.fail(_("File size mismatch | Expected file size: %s | Downloaded file size: %s")
                      % (file_size, actual_size))
        elif delta != 0:
            self.log_warning(_("File size is not equal to expected size"))
def decrypt_image(self, data, input_type='jpg', output_type='textual', ocr=False, timeout=120):
    """
    Loads a captcha and decrypts it with ocr, plugin, user input

    :param data: image raw data
    :param get: get part for request
    :param post: post part for request
    :param cookies: True if cookies should be enabled
    :param input_type: Type of the Image
    :param output_type: 'textual' if text is written on the captcha\
    or 'positional' for captcha where the user have to click\
    on a specific region on the captcha
    :param ocr: if True, ocr is not used
    :return: result of decrypting
    """
    result = ""
    #: Pseudo-unique suffix derived from the current timestamp
    time_ref = ("%.2f" % time.time())[-6:].replace(".", "")

    #: Persist the raw image so OCR plugins / the UI can read it from disk
    with open(os.path.join("tmp", "captcha_image_%s_%s.%s" % (self.plugin.__name__, time_ref, input_type)), "wb") as tmp_img:
        tmp_img.write(encode(data))

    if ocr:
        if isinstance(ocr, basestring):
            #: `ocr` names a specific OCR plugin class to load
            OCR = self.pyload.pluginManager.loadClass("captcha", ocr)  #: Rename `captcha` to `ocr` in 0.4.10
            result = OCR(self.plugin).recognize(tmp_img.name)
        else:
            result = self.recognize(tmp_img.name)

    #: OCR disabled or produced nothing -> fall back to an interactive task
    if not result:
        captchaManager = self.pyload.captchaManager

        try:
            self.task = captchaManager.newTask(data, input_type, tmp_img.name, output_type)

            captchaManager.handleCaptcha(self.task)

            self.task.setWaiting(max(timeout, 50))  #@TODO: Move to `CaptchaManager` in 0.4.10
            #: Poll until the task is answered or times out
            while self.task.isWaiting():
                self.plugin.check_status()
                time.sleep(1)

        finally:
            captchaManager.removeTask(self.task)

        if self.task.error:
            self.fail(self.task.error)

        elif not self.task.result:
            self.plugin.retry_captcha(msg=_("No captcha result obtained in appropriate time"))

        result = self.task.result

    #: Keep the image on disk in debug mode for inspection
    if not self.pyload.debug:
        try:
            os.remove(tmp_img.name)

        except OSError, e:
            self.log_warning(_("Error removing `%s`") % tmp_img.name, e)
def abort(self, msg=""):
    """
    Abort and give msg
    """
    #: Store the reason on the pyfile before raising
    if msg:  # @TODO: Remove in 0.4.10
        self.pyfile.error = encode(msg)

    raise Abort
def download(self, url, get={}, post={}, ref=True, cookies=True,
             disposition=True, resume=None, chunks=None):
    """
    Downloads the content at url to download folder

    :param url:
    :param get:
    :param post:
    :param ref:
    :param cookies:
    :param disposition: if True and server provides content-disposition header\
    the filename will be changed if needed
    :return: The location where the file was saved
    """
    self.check_status()

    if self.pyload.debug:
        #: Dump all call arguments except self/url for troubleshooting
        self.log_debug("DOWNLOAD URL " + url,
                       *["%s=%s" % (key, val) for key, val in locals().items()
                         if key not in ("self", "url", "_[1]")])

    dl_url = self.fixurl(url)
    dl_basename = parse_name(self.pyfile.name)

    self.pyfile.name = dl_basename

    self.captcha.correct()

    if self.pyload.config.get("download", "skip_existing"):
        self.check_filedupe()

    self.pyfile.setStatus("downloading")

    dl_folder = self.pyload.config.get("general", "download_folder")
    dl_dirname = os.path.join(dl_folder, self.pyfile.package().folder)
    dl_filename = os.path.join(dl_dirname, dl_basename)

    dl_dir = encode(dl_dirname)
    dl_file = encode(dl_filename)  #@TODO: Move safe-filename check to HTTPDownload in 0.4.10

    #: Ensure the package's download directory exists
    if not exists(dl_dir):
        try:
            os.makedirs(dl_dir)

        except Exception, e:
            self.fail(e)
def max_mtime(self, path):
    """
    Return the newest modification time of any file below `path`,
    or 0 when the tree contains no files.
    """
    mtimes = [os.path.getmtime(fs_join(root, filename))
              for root, dirs, files in os.walk(encode(path), topdown=False)
              for filename in files]
    #: Prepend 0 so an empty tree yields 0 instead of raising
    return max([0] + mtimes)
def reload_passwords(self): try: passwords = [] file = encode(self.get_config('passwordfile')) with open(file) as f: for pw in f.read().splitlines(): passwords.append(pw) except IOError, e: self.log_error(e)
def fail(self, msg=""):
    """
    Fail and give msg
    """
    reason = msg.strip()

    if reason:
        self.pyfile.error = reason
    else:
        #: No explicit reason -> reuse the best message already available
        reason = (self.pyfile.error
                  or self.info.get("error")
                  or self.pyfile.getStatusName())

    raise Fail(encode(reason))  # @TODO: Remove `encode` in 0.4.10
def restart(self, msg="", premium=True):
    """
    Restart the plugin, optionally falling back from premium to free mode.
    """
    msg = msg or (_("Restart plugin") if premium
                  else _("Fallback to free processing"))

    if not premium:
        if self.premium:
            self.restart_free = True
        else:
            #: Already ran as free account -> nothing left to fall back to
            self.fail("%s | %s" % (msg, _("Url was already processed as free")))

    self.req.clearCookies()

    raise Retry(encode(msg))  # @TODO: Remove `encode` in 0.4.10
def periodical(self):
    """
    Poll the configured hot folder: import links from the watch file
    (if enabled) and add every dropped file as a new package, moving
    processed files into the `finished` subfolder.
    """
    folder = encode(self.get_config('folder'))
    file = encode(self.get_config('file'))

    try:
        if not os.path.isdir(os.path.join(folder, "finished")):
            os.makedirs(os.path.join(folder, "finished"))

        if self.get_config('watchfile'):
            #: "a+" creates the watch file if missing; seek(0) to read it all
            with open(file, "a+") as f:
                f.seek(0)
                content = f.read().strip()

            if content:
                #: Truncate the watch file now that its content is captured
                f = open(file, "wb")
                f.close()

                name = "%s_%s.txt" % (file, time.strftime("%H-%M-%S_%d%b%Y"))
                with open(fs_join(folder, "finished", name), "wb") as f:
                    f.write(content)

                #: `f.name` is still readable after the with-block closes it
                self.pyload.api.addPackage(f.name, [f.name], 1)

        for f in os.listdir(folder):
            path = os.path.join(folder, f)

            #: Skip subfolders, backups (~), comments (#) and hidden files (.)
            if not os.path.isfile(path) or f.endswith("~") or f.startswith("#") or f.startswith("."):
                continue

            newpath = os.path.join(folder, "finished", "tmp_" + f if self.get_config('delete') else f)
            shutil.move(path, newpath)

            self.log_info(_("Added %s from HotFolder") % f)
            self.pyload.api.addPackage(f, [newpath], 1)

    except (IOError, OSError), e:
        self.log_error(e, trace=True)
def add_password(self, password): """ Adds a password to saved list """ try: self.passwords = uniqify([password] + self.passwords) file = encode(self.get_config('passwordfile')) with open(file, "wb") as f: for pw in self.passwords: f.write(pw + '\n') except IOError, e: self.log_error(e)
def download_preparing(self, pyfile):
    """
    Apply the user-configured cURL options (connect timeout, max
    redirects, user-agent) to the request before the download starts.
    """
    timeout = self.get_config('connecttimeout')
    redirs = self.get_config('maxredirs')
    agent = self.get_config('useragent')

    if timeout:
        pyfile.plugin.req.http.c.setopt(pycurl.CONNECTTIMEOUT, timeout)

    if redirs:
        pyfile.plugin.req.http.c.setopt(pycurl.MAXREDIRS, redirs)

    if agent:
        self.log_debug("Use custom user-agent string `%s`" % agent)
        pyfile.plugin.req.http.c.setopt(pycurl.USERAGENT, encode(agent))
def _load2disk(self):
    """
    Loads container to disk if its stored remotely and overwrite url,
    or check existent on several places at disk
    """
    if self.pyfile.url.startswith("http"):
        #: Last path/query segment becomes the local file name
        self.pyfile.name = re.findall("([^\/=]+)", self.pyfile.url)[-1]
        content = self.load(self.pyfile.url)
        #: From here on the url points at the local copy
        self.pyfile.url = fs_join(self.pyload.config.get("general", "download_folder"), self.pyfile.name)
        try:
            with open(self.pyfile.url, "wb") as f:
                f.write(encode(content))

        except IOError, e:
            self.fail(e)
def check_file(self, rules, delete=False, read_size=1048576, file_size=0, size_tolerance=1024):
    """
    Checks the content of the last downloaded file, re match is saved to `last_check`

    :param rules: dict with names and rules to match (compiled regexp or strings)
    :param delete: delete if matched
    :param file_size: expected file size
    :param size_tolerance: size check tolerance
    :param read_size: amount of bytes to read from files
    :return: dictionary key of the first rule that matched
    """
    do_delete = False
    last_download = encode(self.last_download)  #@TODO: Recheck in 0.4.10

    if not self.last_download or not exists(last_download):
        self.fail(self.pyfile.error or _("No file downloaded"))

    try:
        self.check_filesize(file_size, size_tolerance)

        with open(last_download, "rb") as f:
            content = f.read(read_size)

        #: Produces encoding errors, better log to other file in the future?
        # self.log_debug("Content: %s" % content)
        for name, rule in rules.items():
            if isinstance(rule, basestring):
                #: Plain string rule: simple substring match
                if rule in content:
                    do_delete = True
                    return name
            elif hasattr(rule, "search"):
                #: Compiled regex rule: keep the match object for callers
                m = rule.search(content)
                if m is not None:
                    do_delete = True
                    self.last_check = m
                    return name
    finally:
        #: Deletion happens even when a rule returned early above
        if delete and do_delete:
            try:
                os.remove(last_download)

            except OSError, e:
                self.log_warning(_("Error removing `%s`") % last_download, e)

            else:
                self.log_info(_("File deleted: ") + self.last_download)
                self.last_download = ""  #: Recheck in 0.4.10
def call(self, script, args=[], lock=None):
    """
    Execute an external script with the given arguments.

    :param script: path to the executable/script
    :param args: extra arguments (encoded and stringified before use)
    :param lock: if True, block until the script finishes; defaults to
                 the plugin's 'lock' config value
    """
    if lock is None:
        lock = self.get_config('lock')

    try:
        script = os.path.abspath(script)
        #: `args` is rebound (not mutated), so the mutable default is safe here
        args = [script] + map(lambda arg: encode(arg) if isinstance(arg, basestring) else encode(str(arg)), args)

        self.log_info(_("EXECUTE [%s] %s") % (os.path.dirname(script), args))
        p = subprocess.Popen(args, bufsize=-1)  #@NOTE: output goes to pyload
        if lock:
            p.communicate()

    except Exception, e:
        self.log_error(_("Runtime error: %s") % script, e or _("Unknown error"))
def decrypt(self, pyfile):
    """
    Parse a plain text link list: one link per line, `;`-prefixed lines
    are comments, `[name]` lines start a new package.
    """
    try:
        encoding = codecs.lookup(self.get_config('encoding')).name
    except Exception:
        encoding = "utf-8"

    fs_filename = encode(pyfile.url.strip())
    txt = codecs.open(fs_filename, 'r', encoding)
    curPack = "Parsed links from %s" % pyfile.name
    packages = {curPack:[],}

    for link in txt.readlines():
        link = link.strip()
        if not link:
            continue

        #: Comment line
        if link.startswith(";"):
            continue

        if link.startswith("[") and link.endswith("]"):
            #: New package
            curPack = link[1:-1]
            packages[curPack] = []
            continue

        packages[curPack].append(link)
    txt.close()

    #: Empty packages fix
    for key, value in packages.items():
        if not value:
            packages.pop(key, None)

    #: Optionally empty the source file after parsing
    if self.get_config('flush'):
        try:
            txt = open(fs_filename, 'wb')
            txt.close()

        except IOError:
            self.log_warning(_("Failed to flush list"))

    for name, links in packages.items():
        self.packages.append((name, links, name))
def decrypt(self, pyfile):
    """
    Convert a local CCF container into a DLC container via the
    jdownloader web service and queue the resulting .dlc file.
    """
    fs_filename = encode(pyfile.url.strip())
    opener = urllib2.build_opener(MultipartPostHandler.MultipartPostHandler)

    #: Upload the CCF file; the service answers with DLC-wrapped content
    dlc_content = opener.open('http://service.jdownloader.net/dlcrypt/getDLC.php',
                              {'src'     : "ccf",
                               'filename': "test.ccf",
                               'upload'  : open(fs_filename, "rb")}).read()

    dl_folder = self.pyload.config.get("general", "download_folder")
    dlc_file = fs_join(dl_folder, "tmp_%s.dlc" % pyfile.name)

    try:
        dlc = re.search(r'<dlc>(.+)</dlc>', dlc_content, re.S).group(1).decode('base64')

    except AttributeError:
        #: re.search returned None -> no <dlc> tag in the response
        self.fail(_("Container is corrupted"))

    with open(dlc_file, "w") as tempdlc:
        tempdlc.write(dlc)

    self.links = [dlc_file]
def decrypt(self, pyfile):
    """
    Decrypt a local DLC container: the last 88 chars hold the service
    key, the service response yields the AES key/iv used to decode the
    payload into `self.data`.
    """
    fs_filename = encode(pyfile.url.strip())
    with open(fs_filename) as dlc:
        data = dlc.read().strip()

    #: Pad to a multiple of 4 so base64 decoding succeeds
    data += '=' * (-len(data) % 4)

    dlc_key = data[-88:]
    dlc_data = data[:-88].decode('base64')
    dlc_content = self.load(self.API_URL % dlc_key)

    try:
        rc = re.search(r'<rc>(.+)</rc>', dlc_content, re.S).group(1).decode('base64')

    except AttributeError:
        #: No <rc> tag in the service response
        self.fail(_("Container is corrupted"))

    #: The decrypted rc token serves as both AES key and iv
    key = iv = AES.new(self.KEY, AES.MODE_CBC, self.IV).decrypt(rc)

    self.data = AES.new(key, AES.MODE_CBC, iv).decrypt(dlc_data).decode('base64')
    self.packages = [(name or pyfile.name, links, name or pyfile.name) \
                     for name, links in self.get_packages()]
def decrypt_file(self, key): """ Decrypts the file at last_download` """ #: Upper 64 bit of counter start n = self.b64_decode(key)[16:24] #: Convert counter to long and shift bytes k, iv, meta_mac = self.get_cipher_key(key) ctr = Counter.new(128, initial_value=long(n.encode("hex"), 16) << 64) cipher = AES.new(k, AES.MODE_CTR, counter=ctr) self.pyfile.setStatus("decrypting") self.pyfile.setProgress(0) file_crypted = encode(self.last_download) file_decrypted = file_crypted.rsplit(self.FILE_SUFFIX)[0] try: f = open(file_crypted, "rb") df = open(file_decrypted, "wb") except IOError, e: self.fail(e)
def scan(self, pyfile, thread): avfile = encode(self.get_config('avfile')) avargs = encode(self.get_config('avargs').strip()) if not os.path.isfile(avfile): self.fail(_("Antivirus executable not found")) scanfolder = self.get_config('avtarget') is "folder" if scanfolder: dl_folder = self.pyload.config.get("general", "download_folder") package_folder = pyfile.package().folder if self.pyload.config.get("general", "folder_per_package") else "" target = fs_join(dl_folder, package_folder, pyfile.name) target_repr = "Folder: " + package_folder or dl_folder else: target = encode(pyfile.plugin.last_download) target_repr = "File: " + os.path.basename(pyfile.plugin.last_download) if not exists(target): return thread.addActive(pyfile) pyfile.setCustomStatus(_("virus scanning")) pyfile.setProgress(0) try: p = subprocess.Popen([avfile, avargs, target], bufsize=-1, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = map(str.strip, p.communicate()) if out: self.log_info(target_repr, out) if err: self.log_warning(target_repr, err) if not self.get_config('ignore-err'): self.log_debug("Delete/Quarantine task aborted due scan error") return if p.returncode: action = self.get_config('action') if scanfolder: if action is "Antivirus default": self.log_warning(_("Delete/Quarantine task skipped in folder scan mode")) return pyfile.error = _("Infected file") try: if action is "Delete": if not self.get_config('deltotrash'): os.remove(file) else: try: send2trash.send2trash(file) except NameError: self.log_warning(_("Send2Trash lib not found, moving to quarantine instead")) pyfile.setCustomStatus(_("file moving")) shutil.move(file, self.get_config('quardir')) except Exception, e: self.log_warning(_("Unable to move file to trash: %s, moving to quarantine instead") % e.message) pyfile.setCustomStatus(_("file moving")) shutil.move(file, self.get_config('quardir')) else: self.log_debug("Successfully moved file to trash") elif action is "Quarantine": pyfile.setCustomStatus(_("file moving")) 
shutil.move(file, self.get_config('quardir')) except (IOError, shutil.Error), e: self.log_error(target_repr, action + " action failed!", e)
def _update_plugins(self, updates): """ Check for plugin updates """ updated = [] updatelist, blacklist = self.parse_list(updates) url = updates[1] req = self.pyload.requestFactory.getRequest(self.classname) if blacklist: #@NOTE: Protect UpdateManager from self-removing type_plugins = [(plugin['type'], plugin['name']) for plugin in blacklist \ if plugin['name'] is not self.classname and plugin['type'] is not self.__type__] c = 1 l = len(type_plugins) for idx, plugin in enumerate(updatelist): if c > l: break name = plugin['name'] type = plugin['type'] for t, n in type_plugins: if n != name or t != type: continue updatelist.pop(idx) c += 1 break for t, n in self.remove_plugins(type_plugins): self.log_info(_("Removed blacklisted plugin: %(type)s %(name)s") % { 'type': t.upper(), 'name': n, }) for plugin in updatelist: name = plugin['name'] type = plugin['type'] version = plugin['version'] plugins = getattr(self.pyload.pluginManager, "%sPlugins" % type.rstrip('s')) #@TODO: Remove rstrip in 0.4.10 oldver = float(plugins[name]['v']) if name in plugins else None newver = float(version) if not oldver: msg = "New plugin: %(type)s %(name)s (v%(newver).2f)" elif newver > oldver: msg = "New version of plugin: %(type)s %(name)s (v%(oldver).2f -> v%(newver).2f)" else: continue self.log_info(_(msg) % {'type' : type.rstrip('s').upper(), #@TODO: Remove rstrip in 0.4.10 'name' : name, 'oldver': oldver, 'newver': newver}) try: content = self.load(url % plugin + ".py", decode=False, req=req) if req.code == 404: raise Exception(_("URL not found")) m = self._VERSION.search(content) if m and m.group(2) == version: with open(fs_join("userplugins", type, name + ".py"), "wb") as f: f.write(encode(content)) updated.append((type, name)) else: raise Exception(_("Version mismatch")) except Exception, e: self.log_error(_("Error updating plugin: %s %s") % (type.rstrip('s').upper(), name), e) #@TODO: Remove rstrip in 0.4.10
def download_finished(self, pyfile):
    """
    Compute checksum for the downloaded file and compare it with the hash provided by the hoster.
    pyfile.plugin.check_data should be a dictionary which can contain:
    a) if known, the exact filesize in bytes (e.g. 'size': 123456789)
    b) hexadecimal hash string with algorithm name as key (e.g. 'md5': "d76505d0869f9f928a17d42d66326307")
    """
    #: Pick the richest hash/size source the plugin offers
    if hasattr(pyfile.plugin, "check_data") and isinstance(pyfile.plugin.check_data, dict):
        data = pyfile.plugin.check_data.copy()

    elif hasattr(pyfile.plugin, "api_data") and isinstance(pyfile.plugin.api_data, dict):
        data = pyfile.plugin.api_data.copy()

    elif hasattr(pyfile.plugin, "info") and isinstance(pyfile.plugin.info, dict):
        data = pyfile.plugin.info.copy()
        data.pop('size', None)  #@NOTE: Don't check file size until a similary matcher will be implemented

    else:
        return

    self.log_debug(data)

    if not pyfile.plugin.last_download:
        self.check_failed(pyfile, None, "No file downloaded")

    local_file = encode(pyfile.plugin.last_download)
    # dl_folder = self.pyload.config.get("general", "download_folder")
    # local_file = encode(fs_join(dl_folder, pyfile.package().folder, pyfile.name))

    if not os.path.isfile(local_file):
        self.check_failed(pyfile, None, "File does not exist")

    #: Validate file size
    if "size" in data:
        api_size = int(data['size'])
        file_size = os.path.getsize(local_file)

        if api_size != file_size:
            self.log_warning(_("File %s has incorrect size: %d B (%d expected)") % (pyfile.name, file_size, api_size))
            self.check_failed(pyfile, local_file, "Incorrect file size")

        data.pop('size', None)

    #: Validate checksum
    if data and self.get_config('check_checksum'):
        if not 'md5' in data:
            #: Accept alternative key names for the hash value
            for type in ("checksum", "hashsum", "hash"):
                if type in data:
                    data['md5'] = data[type]  #@NOTE: What happens if it's not an md5 hash?
                    break

        for key in self.algorithms:
            if key in data:
                checksum = compute_checksum(local_file, key.replace("-", "").lower())
                if checksum:
                    if checksum == data[key].lower():
                        self.log_info(_('File integrity of "%s" verified by %s checksum (%s)') %
                                      (pyfile.name, key.upper(), checksum))
                        break
                    else:
                        self.log_warning(_("%s checksum for file %s does not match (%s != %s)") %
                                         (key.upper(), pyfile.name, checksum, data[key].lower()))
                        self.check_failed(pyfile, local_file, "Checksums do not match")
                else:
                    self.log_warning(_("Unsupported hashing algorithm"), key.upper())
        else:
            #: for-else: no supported algorithm key was found in `data`
            self.log_warning(_("Unable to validate checksum for file: ") + pyfile.name)
def extract(self, ids, thread=None):  #@TODO: Use pypack, not pid to improve method usability
    """
    Extract archives of the given package ids: for each package, match
    downloaded files against the available extractor plugins, extract
    them (recursively if configured) and dispatch the related events.

    :param ids: list of package ids to process
    :param thread: worker thread used to track the active pyfile
    :return: False when `ids` is empty
    """
    if not ids:
        return False

    processed = []
    extracted = []
    failed = []

    #: Split a comma/semicolon/pipe separated config string into a list
    toList = lambda string: string.replace(' ', '').replace(',', '|').replace(';', '|').split('|')

    destination = self.get_config('destination')
    subfolder = self.get_config('subfolder')
    fullpath = self.get_config('fullpath')
    overwrite = self.get_config('overwrite')
    priority = self.get_config('priority')
    recursive = self.get_config('recursive')
    keepbroken = self.get_config('keepbroken')

    extensions = [x.lstrip('.').lower() for x in toList(self.get_config('extensions'))]
    excludefiles = toList(self.get_config('excludefiles'))

    if extensions:
        self.log_debug("Use for extensions: %s" % "|.".join(extensions))

    #: Reload from txt file
    self.reload_passwords()

    dl_folder = self.pyload.config.get("general", "download_folder")

    #: Iterate packages -> extractors -> targets
    for pid in ids:
        pypack = self.pyload.files.getPackage(pid)

        if not pypack:
            self.queue.remove(pid)
            continue

        self.log_info(_("Check package: %s") % pypack.name)

        #: Determine output folder
        out = fs_join(dl_folder, pypack.folder, destination, "")  #: Force trailing slash

        if subfolder:
            out = fs_join(out, pypack.folder)

        if not exists(out):
            os.makedirs(out)

        matched = False
        success = True
        files_ids = dict((pylink['name'], ((fs_join(dl_folder, pypack.folder, pylink['name'])), pylink['id'], out)) for pylink \
                    in sorted(pypack.getChildren().values(), key=lambda k: k['name'])).values()  #: Remove duplicates

        #: Check as long there are unseen files
        while files_ids:
            new_files_ids = []

            if extensions:
                #: Keep only files whose name ends with a configured extension
                files_ids = [(fname, fid, fout) for fname, fid, fout in files_ids \
                             if filter(lambda ext: fname.lower().endswith(ext), extensions)]

            for Extractor in self.extractors:
                targets = Extractor.get_targets(files_ids)
                if targets:
                    self.log_debug("Targets for %s: %s" % (Extractor.__name__, targets))
                    matched = True

                for fname, fid, fout in targets:
                    name = os.path.basename(fname)

                    if not exists(fname):
                        self.log_debug(name, "File not found")
                        continue

                    self.log_info(name, _("Extract to: %s") % fout)
                    try:
                        pyfile = self.pyload.files.getFile(fid)
                        archive = Extractor(self,
                                            fname,
                                            fout,
                                            fullpath,
                                            overwrite,
                                            excludefiles,
                                            priority,
                                            keepbroken,
                                            fid)

                        thread.addActive(pyfile)
                        archive.init()

                        try:
                            new_files = self._extract(pyfile, archive, pypack.password)

                        finally:
                            pyfile.setProgress(100)
                            thread.finishFile(pyfile)

                    except Exception, e:
                        self.log_error(name, e)
                        success = False
                        continue

                    #: Remove processed file and related multiparts from list
                    files_ids = [(fname, fid, fout) for fname, fid, fout in files_ids \
                                 if fname not in archive.items()]
                    self.log_debug("Extracted files: %s" % new_files)

                    for file in new_files:
                        self.set_permissions(file)

                    for filename in new_files:
                        file = encode(fs_join(os.path.dirname(archive.filename), filename))
                        if not exists(file):
                            self.log_debug("New file %s does not exists" % filename)
                            continue

                        if recursive and os.path.isfile(file):
                            new_files_ids.append((filename, fid, os.path.dirname(filename)))  #: Append as new target

                    self.manager.dispatchEvent("archive_extracted", pyfile, archive)

            files_ids = new_files_ids  #: Also check extracted files

        if matched:
            if success:
                extracted.append(pid)
                self.manager.dispatchEvent("package_extracted", pypack)

            else:
                failed.append(pid)
                self.manager.dispatchEvent("package_extract_failed", pypack)

                self.failed.add(pid)
        else:
            self.log_info(_("No files found to extract"))

        #@NOTE: Precedence: `not matched or (not success and subfolder)`
        if not matched or not success and subfolder:
            try:
                os.rmdir(out)

            except OSError:
                pass

        self.queue.remove(pid)
self.log_debug("Password was wrong") else: raise PasswordError pyfile.setProgress(100) pyfile.setStatus("processing") delfiles = archive.items() self.log_debug("Would delete: " + ", ".join(delfiles)) if self.get_config('delete'): self.log_info(_("Deleting %s files") % len(delfiles)) deltotrash = self.get_config('deltotrash') for f in delfiles: file = encode(f) if not exists(file): continue if not deltotrash: os.remove(file) else: try: send2trash.send2trash(file) except NameError: self.log_warning(_("Unable to move %s to trash") % os.path.basename(f), _("Send2Trash lib not found")) except Exception, e:
try: old_file = fs_join(dl_dirname, newname) new_file = fs_join(dl_dirname, safename) os.rename(old_file, new_file) except OSError, e: self.log_warning(_("Error renaming `%s` to `%s`") % (newname, safename), e) safename = newname self.log_info(_("`%s` saved as `%s`") % (self.pyfile.name, safename)) self.pyfile.name = safename dl_filename = os.path.join(dl_dirname, safename) dl_file = encode(dl_filename) self.set_permissions(dl_file) self.last_download = dl_filename return dl_filename def check_filesize(self, file_size, size_tolerance=1024): """ Checks the file size of the last downloaded file :param file_size: expected file size :param size_tolerance: size check tolerance """
def parse_fileInfo(klass, url="", html=""):
    """
    Query `klass.get_info` and return its result as a
    (name, size, status, url) tuple with the name encoded.
    """
    info = klass.get_info(url, html)
    name, size, status, file_url = (info[k] for k in ("name", "size", "status", "url"))
    return encode(name), size, status, file_url
def skip(self, msg=""):
    """
    Skip and give msg
    """
    #: Fall back to the pyfile's error text or plugin name when no reason given
    raise Skip(encode(msg or self.pyfile.error or self.pyfile.pluginname))  # @TODO: Remove `encode` in 0.4.10