def package_finished(self, pypack):
    """
    Verify the files of a finished package against any checksum files
    (.sfv/.md5/...) the package contains.

    :param pypack: finished package whose children are scanned
    """
    dl_folder = fs_join(self.pyload.config.get("general", "download_folder"),
                        pypack.folder, "")

    for link in pypack.getChildren().values():
        file_type = os.path.splitext(link['name'])[1][1:].lower()

        if file_type not in self.formats:
            continue

        hash_file = encode(fs_join(dl_folder, link['name']))
        if not os.path.isfile(hash_file):
            self.log_warning(_("File not found"), link['name'])
            continue

        with open(hash_file) as f:
            text = f.read()

        for m in re.finditer(self.regexps.get(file_type, self.regexps['default']), text):
            data = m.groupdict()
            self.log_debug(link['name'], data)

            local_file = encode(fs_join(dl_folder, data['NAME']))
            algorithm = self.methods.get(file_type, file_type)
            checksum = compute_checksum(local_file, algorithm)

            #: Fixed: original compared with `is` (object identity), which
            #: is not string equality and made verification unreliable;
            #: also compare case-insensitively since checksum files may
            #: contain uppercase hex digits
            if checksum == data['HASH'].lower():
                self.log_info(_('File integrity of "%s" verified by %s checksum (%s)') %
                              (data['NAME'], algorithm, checksum))
            else:
                self.log_warning(_("%s checksum for file %s does not match (%s != %s)") %
                                 (algorithm, data['NAME'], checksum, data['HASH']))
def download_finished(self, pyfile):
    """
    Notify external scripts that a single download has completed.

    :param pyfile: the finished download
    """
    root = self.pyload.config.get("general", "download_folder")

    if self.pyload.config.get("general", "folder_per_package"):
        dl_folder = fs_join(root, pyfile.package().folder)
    else:
        dl_folder = root

    location = fs_join(dl_folder, pyfile.name)
    self._call("download_finished",
               [pyfile.id, pyfile.name, location, pyfile.pluginname, pyfile.url])
def package_finished(self, pack): files = {} fid_dict = {} for fid, data in pack.getChildren().items(): if re.search("\.\d{3}$", data['name']): if data['name'][:-4] not in files: files[data['name'][:-4]] = [] files[data['name'][:-4]].append(data['name']) files[data['name'][:-4]].sort() fid_dict[data['name']] = fid dl_folder = self.pyload.config.get("general", "download_folder") if self.pyload.config.get("general", "folder_per_package"): dl_folder = fs_join(dl_folder, pack.folder) for name, file_list in files.items(): self.log_info(_("Starting merging of"), name) with open(fs_join(dl_folder, name), "wb") as final_file: for splitted_file in file_list: self.log_debug("Merging part", splitted_file) pyfile = self.pyload.files.getFile(fid_dict[splitted_file]) pyfile.setStatus("processing") try: with open(fs_join(dl_folder, splitted_file), "rb") as s_file: size_written = 0 s_file_size = int(os.path.getsize(os.path.join(dl_folder, splitted_file))) while True: f_buffer = s_file.read(self.BUFFER_SIZE) if f_buffer: final_file.write(f_buffer) size_written += self.BUFFER_SIZE pyfile.setProgress((size_written * 100) / s_file_size) else: break self.log_debug("Finished merging part", splitted_file) except Exception, e: self.log_error(e, trace=True) finally: pyfile.setProgress(100) pyfile.setStatus("finished") pyfile.release()
def check_filedupe(self):
    """
    Checks if same file was/is downloaded within same package

    :param starting: indicates that the current download is going to start
    :raises Skip:
    """
    pack = self.pyfile.package()

    #: Skip when another queued/active file with the same name sits in the
    #: same package folder
    for other in self.pyload.files.cache.values():
        if other is self.pyfile:
            continue
        if other.name == self.pyfile.name and other.package().folder == pack.folder:
            if other.status in (0, 5, 7, 12):  #: (finished, waiting, starting, downloading)
                self.skip(other.pluginname)

    dl_folder = self.pyload.config.get("general", "download_folder")
    package_folder = pack.folder if self.pyload.config.get("general", "folder_per_package") else ""
    dl_location = fs_join(dl_folder, package_folder, self.pyfile.name)

    if not exists(dl_location):
        return

    dupes = self.pyload.db.findDuplicates(self.pyfile.id, package_folder, self.pyfile.name)
    if dupes:
        self.skip(dupes[0])

    if os.stat(dl_location).st_size >= self.pyfile.size:
        self.skip(_("File exists"))
def package_extracted(self, pypack):
    """
    Notify external scripts that a package has been extracted.

    :param pypack: the extracted package
    """
    root = self.pyload.config.get("general", "download_folder")
    if self.pyload.config.get("general", "folder_per_package"):
        dl_folder = fs_join(root, pypack.folder)
    else:
        dl_folder = root

    self._call("package_extracted", [pypack.id, pypack.name, dl_folder])
def max_mtime(self, path):
    """
    Return the newest modification time of any file below *path*,
    clamped at 0 (which is also the result for an empty tree).
    """
    mtimes = [os.path.getmtime(fs_join(root, name))
              for root, dirs, files in os.walk(encode(path), topdown=False)
              for name in files]
    #: Clamp at 0 just like the original `max(0, 0, *gen)` form did
    return max([0] + mtimes)
def package_deleted(self, pid):
    """
    Notify external scripts that a package was deleted.

    :param pid: id of the deleted package
    """
    pdata = self.pyload.api.getPackageInfo(pid)

    root = self.pyload.config.get("general", "download_folder")
    if self.pyload.config.get("general", "folder_per_package"):
        dl_folder = fs_join(root, pdata.folder)
    else:
        dl_folder = root

    self._call("package_deleted", [pdata.pid, pdata.name, dl_folder, pdata.password])
def items(self):
    """
    Return the extracted archive file plus every sibling multipart volume
    belonging to the same rar set.
    """
    folder, name = os.path.split(self.filename)
    canonical = re.sub(self.re_multipart, ".rar", name)

    #: Actually extracted file
    files = [self.filename]

    #: eventually Multipart Files
    for entry in os.listdir(folder):
        if not self.ismultipart(entry):
            continue
        if re.sub(self.re_multipart, ".rar", entry) == canonical:
            files.append(fs_join(folder, os.path.basename(entry)))

    return files
def decrypt(self, pyfile):
    """
    Turn a Youtube channel or playlist url into one package per playlist.

    For a channel url the configured related playlists (uploads, likes, ...)
    are added as extra packages; duplicate videos are filtered from the
    "uploads" pseudo-playlist via the checkDups flag.
    """
    m = re.match(self.__pattern__, pyfile.url)
    m_id = m.group('ID')
    m_type = m.group('TYPE')

    if m_type == "user":
        self.log_debug("Url recognized as Channel")
        user = m_id
        channel = self.get_channel(user)

        if channel:
            playlists = self.get_playlists(channel['id'])
            self.log_debug("%s playlist\s found on channel \"%s\"" % (len(playlists), channel['title']))

            #: Map related-playlist name -> full playlist data
            relatedplaylist = dict((p_name, self.get_playlist(p_id))
                                   for p_name, p_id in channel['relatedPlaylists'].items())
            self.log_debug("Channel's related playlists found = %s" % relatedplaylist.keys())

            relatedplaylist['uploads']['title'] = "Unplaylisted videos"
            relatedplaylist['uploads']['checkDups'] = True  #: checkDups flag

            #: Only include related playlists the user enabled in config
            for p_name, p_data in relatedplaylist.items():
                if self.get_config(p_name):
                    p_data['title'] += " of " + user
                    playlists.append(p_data)
        else:
            playlists = []
    else:
        self.log_debug("Url recognized as Playlist")
        playlists = [self.get_playlist(m_id)]

    if not playlists:
        self.fail(_("No playlist available"))

    addedvideos = []
    urlize = lambda x: "https://www.youtube.com/watch?v=" + x

    for p in playlists:
        p_name = p['title']
        p_videos = self.get_videos_id(p['id'])
        p_folder = fs_join(self.pyload.config.get("general", "download_folder"),
                           p['channelTitle'], p_name)
        self.log_debug("%s video\s found on playlist \"%s\"" % (len(p_videos), p_name))

        if not p_videos:
            continue
        elif "checkDups" in p:
            #: Skip videos already queued by a previous playlist
            p_urls = [urlize(v_id) for v_id in p_videos if v_id not in addedvideos]
            self.log_debug("%s video\s available on playlist \"%s\" after duplicates cleanup" % (len(p_urls), p_name))
        else:
            p_urls = map(urlize, p_videos)

        self.packages.append((p_name, p_urls, p_folder))  #: Folder is NOT recognized by pyload 0.4.9!
        addedvideos.extend(p_videos)
def list(self, password=None):
    """
    List archive contents via the unrar binary ("vb"/"lb" command).

    :param password: optional archive password
    :return: list of (deduplicated) output paths
    """
    p = self.call_cmd("vb" if self.fullpath else "lb",
                      "-v", self.target, password=password)
    out, err = p.communicate()

    if "Cannot open" in err:
        raise ArchiveError(_("Cannot open file"))

    if err.strip():  #: Only log error at this point
        self.log_error(err.strip())

    result = set()
    if not self.fullpath and self.VERSION.startswith('5'):
        #@NOTE: Unrar 5 always list full path
        for line in decode(out).splitlines():
            candidate = fs_join(self.out, os.path.basename(line.strip()))
            if os.path.isfile(candidate):
                result.add(fs_join(self.out, os.path.basename(candidate)))
    else:
        for line in decode(out).splitlines():
            result.add(fs_join(self.out, line.strip()))

    return list(result)
def _load2disk(self): """ Loads container to disk if its stored remotely and overwrite url, or check existent on several places at disk """ if self.pyfile.url.startswith("http"): self.pyfile.name = re.findall("([^\/=]+)", self.pyfile.url)[-1] content = self.load(self.pyfile.url) self.pyfile.url = fs_join(self.pyload.config.get("general", "download_folder"), self.pyfile.name) try: with open(self.pyfile.url, "wb") as f: f.write(encode(content)) except IOError, e: self.fail(e)
def list(self, password=None):
    """
    List archive contents via the 7z "l" command.

    :param password: optional archive password
    :return: list of output paths
    """
    #: NOTE: both branches of the original fullpath conditional chose "l",
    #: so the switch is collapsed here (behavior unchanged)
    p = self.call_cmd("l", self.target, password=password)
    out, err = p.communicate()

    if "Can not open" in err:
        raise ArchiveError(_("Cannot open file"))

    if p.returncode > 1:
        raise ArchiveError(_("Process return code: %d") % p.returncode)

    result = set()
    for groups in self.re_filelist.findall(out):
        result.add(fs_join(self.out, groups[-1].strip()))

    return list(result)
def decrypt(self, pyfile):
    """
    Build one package per playlist found at the given url (single
    playlist, or every playlist of a channel).
    """
    item_id = self.info['pattern']['ID']
    link_type = self.info['pattern']['TYPE']

    if link_type == "playlist":
        self.log_debug("Url recognized as Playlist")
        info = self.get_playlist_info(item_id)
        playlists = [(item_id,) + info] if info else None
    else:
        self.log_debug("Url recognized as Channel")
        playlists = self.get_playlists(item_id)
        self.log_debug("%s playlist\s found on channel \"%s\"" % (len(playlists), item_id))

    if not playlists:
        self.fail(_("No playlist available"))

    for pl_id, pl_name, pl_owner in playlists:
        videos = self.get_videos(pl_id)
        folder = fs_join(self.pyload.config.get("general", "download_folder"),
                         pl_owner, pl_name)
        self.log_debug("%s video\s found on playlist \"%s\"" % (len(videos), pl_name))
        self.packages.append((pl_name, videos, folder))  #@NOTE: Folder is NOT recognized by pyload 0.4.9!
def decrypt(self, pyfile):
    """
    Convert a local CCF container to DLC via the jdownloader web service
    and queue the resulting .dlc file.
    """
    fs_filename = encode(pyfile.url.strip())
    opener = urllib2.build_opener(MultipartPostHandler.MultipartPostHandler)

    #: Fixed: the original leaked the uploaded file handle; close it once
    #: the request has been read
    upload = open(fs_filename, "rb")
    try:
        dlc_content = opener.open('http://service.jdownloader.net/dlcrypt/getDLC.php',
                                  {'src'     : "ccf",
                                   'filename': "test.ccf",
                                   'upload'  : upload}).read()
    finally:
        upload.close()

    dl_folder = self.pyload.config.get("general", "download_folder")
    dlc_file = fs_join(dl_folder, "tmp_%s.dlc" % pyfile.name)

    try:
        dlc = re.search(r'<dlc>(.+)</dlc>', dlc_content, re.S).group(1).decode('base64')
    except AttributeError:
        self.fail(_("Container is corrupted"))

    with open(dlc_file, "w") as tempdlc:
        tempdlc.write(dlc)

    self.links = [dlc_file]
def periodical(self):
    """
    Poll the configured hot folder: import the watched link file (if
    enabled) and any dropped files as new packages, then move processed
    files into <folder>/finished.
    """
    folder = encode(self.get_config('folder'))
    file = encode(self.get_config('file'))

    try:
        if not os.path.isdir(os.path.join(folder, "finished")):
            os.makedirs(os.path.join(folder, "finished"))

        if self.get_config('watchfile'):
            #: "a+" creates the watch file if missing; seek back to read it
            with open(file, "a+") as f:
                f.seek(0)
                content = f.read().strip()

            if content:
                #: Truncate the watch file now that its content is captured
                f = open(file, "wb")
                f.close()

                #: Archive the captured links under finished/ and queue them
                name = "%s_%s.txt" % (file, time.strftime("%H-%M-%S_%d%b%Y"))
                with open(fs_join(folder, "finished", name), "wb") as f:
                    f.write(content)

                self.pyload.api.addPackage(f.name, [f.name], 1)

        #: Import every regular file dropped into the hot folder, skipping
        #: editor backups/hidden files
        for f in os.listdir(folder):
            path = os.path.join(folder, f)

            if not os.path.isfile(path) or f.endswith("~") or f.startswith("#") or f.startswith("."):
                continue

            #: "tmp_" prefix marks files that should be deleted after import
            newpath = os.path.join(folder, "finished",
                                   "tmp_" + f if self.get_config('delete') else f)
            shutil.move(path, newpath)

            self.log_info(_("Added %s from HotFolder") % f)
            self.pyload.api.addPackage(f, [newpath], 1)

    except (IOError, OSError), e:
        self.log_error(e, trace=True)
def remove_plugins(self, type_plugins): """ Delete plugins from disk """ if not type_plugins: return removed = set() self.log_debug("Requested deletion of plugins: %s" % type_plugins) for type, name in type_plugins: rootplugins = os.path.join(pypath, "module", "plugins") for dir in ("userplugins", rootplugins): py_filename = fs_join(dir, type, name + ".py") pyc_filename = py_filename + "c" if type is "hook": try: self.manager.deactivateHook(name) except Exception, e: self.log_debug(e, trace=True) for filename in (py_filename, pyc_filename): if not exists(filename): continue try: os.remove(filename) except OSError, e: self.log_warning(_("Error removing `%s`") % filename, e) else: id = (type, name) removed.add(id)
def scan(self, pyfile, thread): avfile = encode(self.get_config('avfile')) avargs = encode(self.get_config('avargs').strip()) if not os.path.isfile(avfile): self.fail(_("Antivirus executable not found")) scanfolder = self.get_config('avtarget') is "folder" if scanfolder: dl_folder = self.pyload.config.get("general", "download_folder") package_folder = pyfile.package().folder if self.pyload.config.get("general", "folder_per_package") else "" target = fs_join(dl_folder, package_folder, pyfile.name) target_repr = "Folder: " + package_folder or dl_folder else: target = encode(pyfile.plugin.last_download) target_repr = "File: " + os.path.basename(pyfile.plugin.last_download) if not exists(target): return thread.addActive(pyfile) pyfile.setCustomStatus(_("virus scanning")) pyfile.setProgress(0) try: p = subprocess.Popen([avfile, avargs, target], bufsize=-1, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = map(str.strip, p.communicate()) if out: self.log_info(target_repr, out) if err: self.log_warning(target_repr, err) if not self.get_config('ignore-err'): self.log_debug("Delete/Quarantine task aborted due scan error") return if p.returncode: action = self.get_config('action') if scanfolder: if action is "Antivirus default": self.log_warning(_("Delete/Quarantine task skipped in folder scan mode")) return pyfile.error = _("Infected file") try: if action is "Delete": if not self.get_config('deltotrash'): os.remove(file) else: try: send2trash.send2trash(file) except NameError: self.log_warning(_("Send2Trash lib not found, moving to quarantine instead")) pyfile.setCustomStatus(_("file moving")) shutil.move(file, self.get_config('quardir')) except Exception, e: self.log_warning(_("Unable to move file to trash: %s, moving to quarantine instead") % e.message) pyfile.setCustomStatus(_("file moving")) shutil.move(file, self.get_config('quardir')) else: self.log_debug("Successfully moved file to trash") elif action is "Quarantine": pyfile.setCustomStatus(_("file moving")) 
shutil.move(file, self.get_config('quardir')) except (IOError, shutil.Error), e: self.log_error(target_repr, action + " action failed!", e)
def extract(self, ids, thread=None):  #@TODO: Use pypack, not pid to improve method usability
    """
    Extract every archive found in the given packages, recursing into
    newly extracted archives when the `recursive` option is enabled.

    :param ids: package ids to process
    :param thread: worker thread used for active-file bookkeeping
    :return: False when called with no ids
    """
    if not ids:
        return False

    processed = []
    extracted = []
    failed = []

    #: Split a config string on space/comma/semicolon
    toList = lambda string: string.replace(' ', '').replace(',', '|').replace(';', '|').split('|')

    destination = self.get_config('destination')
    subfolder = self.get_config('subfolder')
    fullpath = self.get_config('fullpath')
    overwrite = self.get_config('overwrite')
    priority = self.get_config('priority')
    recursive = self.get_config('recursive')
    keepbroken = self.get_config('keepbroken')

    extensions = [x.lstrip('.').lower() for x in toList(self.get_config('extensions'))]
    excludefiles = toList(self.get_config('excludefiles'))

    if extensions:
        self.log_debug("Use for extensions: %s" % "|.".join(extensions))

    #: Reload from txt file
    self.reload_passwords()

    dl_folder = self.pyload.config.get("general", "download_folder")

    #: Iterate packages -> extractors -> targets
    for pid in ids:
        pypack = self.pyload.files.getPackage(pid)

        if not pypack:
            self.queue.remove(pid)
            continue

        self.log_info(_("Check package: %s") % pypack.name)

        #: Determine output folder
        out = fs_join(dl_folder, pypack.folder, destination, "")  #: Force trailing slash

        if subfolder:
            out = fs_join(out, pypack.folder)

        if not exists(out):
            os.makedirs(out)

        matched = False
        success = True

        #: (fname, fid, out) triples keyed by name to drop duplicates
        files_ids = dict((pylink['name'], ((fs_join(dl_folder, pypack.folder, pylink['name'])), pylink['id'], out)) for pylink \
                    in sorted(pypack.getChildren().values(), key=lambda k: k['name'])).values()  #: Remove duplicates

        #: Check as long there are unseen files
        while files_ids:
            new_files_ids = []

            if extensions:
                #: Keep only files whose name ends in a configured extension
                files_ids = [(fname, fid, fout) for fname, fid, fout in files_ids \
                             if filter(lambda ext: fname.lower().endswith(ext), extensions)]

            for Extractor in self.extractors:
                targets = Extractor.get_targets(files_ids)
                if targets:
                    self.log_debug("Targets for %s: %s" % (Extractor.__name__, targets))
                    matched = True

                for fname, fid, fout in targets:
                    name = os.path.basename(fname)

                    if not exists(fname):
                        self.log_debug(name, "File not found")
                        continue

                    self.log_info(name, _("Extract to: %s") % fout)
                    try:
                        pyfile = self.pyload.files.getFile(fid)
                        archive = Extractor(self, fname, fout, fullpath, overwrite,
                                            excludefiles, priority, keepbroken, fid)

                        thread.addActive(pyfile)
                        archive.init()

                        try:
                            new_files = self._extract(pyfile, archive, pypack.password)
                        finally:
                            pyfile.setProgress(100)
                            thread.finishFile(pyfile)

                    except Exception, e:
                        self.log_error(name, e)
                        success = False
                        continue

                    #: Remove processed file and related multiparts from list
                    files_ids = [(fname, fid, fout) for fname, fid, fout in files_ids \
                                 if fname not in archive.items()]

                    self.log_debug("Extracted files: %s" % new_files)

                    for file in new_files:
                        self.set_permissions(file)

                    for filename in new_files:
                        file = encode(fs_join(os.path.dirname(archive.filename), filename))
                        if not exists(file):
                            self.log_debug("New file %s does not exists" % filename)
                            continue

                        if recursive and os.path.isfile(file):
                            new_files_ids.append((filename, fid, os.path.dirname(filename)))  #: Append as new target

                    self.manager.dispatchEvent("archive_extracted", pyfile, archive)

            files_ids = new_files_ids  #: Also check extracted files

        if matched:
            if success:
                extracted.append(pid)
                self.manager.dispatchEvent("package_extracted", pypack)
            else:
                failed.append(pid)
                self.manager.dispatchEvent("package_extract_failed", pypack)
                self.failed.add(pid)
        else:
            self.log_info(_("No files found to extract"))

        if not matched or not success and subfolder:
            try:
                os.rmdir(out)
            except OSError:
                pass

        self.queue.remove(pid)
def do_download(self, url):
    """
    Resolve an xdcc:// url via an IRC handshake and download the
    announced pack over DCC.

    :param url: xdcc://server[:port]/#channel/bot/#pack url
    :return: path of the downloaded file
    """
    self.pyfile.setStatus("waiting")  #: Real link

    m = re.match(r'xdcc://(.*?)/#?(.*?)/(.*?)/#?(\d+)/?', url)
    server = m.group(1)
    chan = m.group(2)
    bot = m.group(3)
    pack = m.group(4)
    nick = self.get_config('nick')
    ident = self.get_config('ident')
    real = self.get_config('realname')

    temp = server.split(':')
    ln = len(temp)
    if ln == 2:
        host, port = temp
    elif ln == 1:
        host, port = temp[0], 6667
    else:
        self.fail(_("Invalid hostname for IRC Server: %s") % server)

    #######################
    #: CONNECT TO IRC AND IDLE FOR REAL LINK
    dl_time = time.time()

    sock = socket.socket()
    sock.connect((host, int(port)))

    if nick == "pyload":
        nick = "pyload-%d" % (time.time() % 1000)  #: last 3 digits

    sock.send("NICK %s\r\n" % nick)
    sock.send("USER %s %s bla :%s\r\n" % (ident, host, real))
    self.wait(3)
    sock.send("JOIN #%s\r\n" % chan)
    sock.send("PRIVMSG %s :xdcc send #%s\r\n" % (bot, pack))

    #: IRC recv loop
    readbuffer = ""
    done = False
    retry = None
    m = None
    while True:
        if done:  #: Done is set if we got our real link
            break

        if retry:
            if time.time() > retry:
                retry = None
                dl_time = time.time()
                sock.send("PRIVMSG %s :xdcc send #%s\r\n" % (bot, pack))
        else:
            if (dl_time + self.timeout) < time.time():  #@TODO: add in config
                sock.send("QUIT :byebye\r\n")
                sock.close()
                self.fail(_("XDCC Bot did not answer"))

        fdset = select([sock], [], [], 0)
        if sock not in fdset[0]:
            continue

        readbuffer += sock.recv(1024)
        temp = readbuffer.split("\n")
        readbuffer = temp.pop()

        for line in temp:
            # if self.pyload.debug:
            #     self.log_debug("*> " + decode(line))
            line = line.rstrip()
            first = line.split()

            if first[0] == "PING":
                sock.send("PONG %s\r\n" % first[1])

            if first[0] == "ERROR":
                self.fail(_("IRC-Error: %s") % line)

            msg = line.split(None, 3)
            if len(msg) != 4:
                continue

            msg = {
                'origin': msg[0][1:],
                'action': msg[1],
                'target': msg[2],
                'text'  : msg[3][1:]
            }

            #: Fixed: the original compared strings with `is` (identity),
            #: which is not string equality, so these CTCP replies could
            #: silently never fire
            if nick == msg['target'][0:len(nick)] and "PRIVMSG" == msg['action']:
                if msg['text'] == "\x01VERSION\x01":
                    self.log_debug("Sending CTCP VERSION")
                    sock.send("NOTICE %s :%s\r\n" % (msg['origin'], "pyLoad! IRC Interface"))
                elif msg['text'] == "\x01TIME\x01":
                    self.log_debug("Sending CTCP TIME")
                    sock.send("NOTICE %s :%d\r\n" % (msg['origin'], time.time()))
                elif msg['text'] == "\x01LAG\x01":
                    pass  #: don't know how to answer

            #: Fixed: `is` -> `==` for the bot/nick match as well
            if not (bot == msg['origin'][0:len(bot)]
                    and nick == msg['target'][0:len(nick)]
                    and msg['action'] in ("PRIVMSG", "NOTICE")):
                continue

            if self.pyload.debug:
                self.log_debug(msg['origin'], msg['text'])

            if "You already requested that pack" in msg['text']:
                retry = time.time() + 300

            if "you must be on a known channel to request a pack" in msg['text']:
                self.fail(_("Invalid channel"))

            m = re.match('\x01DCC SEND (.*?) (\d+) (\d+)(?: (\d+))?\x01', msg['text'])
            if m is not None:
                done = True

    #: Get connection data
    ip = socket.inet_ntoa(struct.pack('L', socket.ntohl(int(m.group(2)))))
    port = int(m.group(3))
    packname = m.group(1)

    if len(m.groups()) > 3:
        self.req.filesize = int(m.group(4))

    self.pyfile.name = packname

    dl_folder = self.pyload.config.get("general", "download_folder")
    filename = fs_join(dl_folder, packname)

    self.log_info(_("Downloading %s from %s:%d") % (packname, ip, port))

    self.pyfile.setStatus("downloading")

    newname = self.req.download(ip, port, filename, sock, self.pyfile.setProgress)
    #: Fixed: `is not` -> `!=` (string comparison)
    if newname and newname != filename:
        self.log_info(_("%(name)s saved as %(newname)s") % {'name': self.pyfile.name, 'newname': newname})
        filename = newname

    #: kill IRC socket
    #: sock.send("QUIT :byebye\r\n")
    sock.close()

    self.last_download = filename
    return self.last_download
def _update_plugins(self, updates): """ Check for plugin updates """ updated = [] updatelist, blacklist = self.parse_list(updates) url = updates[1] req = self.pyload.requestFactory.getRequest(self.classname) if blacklist: #@NOTE: Protect UpdateManager from self-removing type_plugins = [(plugin['type'], plugin['name']) for plugin in blacklist \ if plugin['name'] is not self.classname and plugin['type'] is not self.__type__] c = 1 l = len(type_plugins) for idx, plugin in enumerate(updatelist): if c > l: break name = plugin['name'] type = plugin['type'] for t, n in type_plugins: if n != name or t != type: continue updatelist.pop(idx) c += 1 break for t, n in self.remove_plugins(type_plugins): self.log_info(_("Removed blacklisted plugin: %(type)s %(name)s") % { 'type': t.upper(), 'name': n, }) for plugin in updatelist: name = plugin['name'] type = plugin['type'] version = plugin['version'] plugins = getattr(self.pyload.pluginManager, "%sPlugins" % type.rstrip('s')) #@TODO: Remove rstrip in 0.4.10 oldver = float(plugins[name]['v']) if name in plugins else None newver = float(version) if not oldver: msg = "New plugin: %(type)s %(name)s (v%(newver).2f)" elif newver > oldver: msg = "New version of plugin: %(type)s %(name)s (v%(oldver).2f -> v%(newver).2f)" else: continue self.log_info(_(msg) % {'type' : type.rstrip('s').upper(), #@TODO: Remove rstrip in 0.4.10 'name' : name, 'oldver': oldver, 'newver': newver}) try: content = self.load(url % plugin + ".py", decode=False, req=req) if req.code == 404: raise Exception(_("URL not found")) m = self._VERSION.search(content) if m and m.group(2) == version: with open(fs_join("userplugins", type, name + ".py"), "wb") as f: f.write(encode(content)) updated.append((type, name)) else: raise Exception(_("Version mismatch")) except Exception, e: self.log_error(_("Error updating plugin: %s %s") % (type.rstrip('s').upper(), name), e) #@TODO: Remove rstrip in 0.4.10
# NOTE(review): fragment of a larger download routine — the enclosing
# function header and the body of the final `try:` lie outside this view,
# so this span is documented in place rather than rewritten.
try:
    newname = self.req.httpDownload(dl_url, dl_file, get=get, post=post, ref=ref,
                                    cookies=cookies, chunks=chunks, resume=resume,
                                    progressNotify=self.pyfile.setProgress,
                                    disposition=disposition)
except BadHeader, e:
    #: Propagate HTTP status to the request object before re-raising
    self.req.code = e.code
    raise BadHeader(e)
finally:
    self.pyfile.size = self.req.size

#: Gone/not-found responses: drop the partial file and report empty name
if self.req.code in (404, 410):
    bad_file = fs_join(dl_dirname, newname)
    try:
        os.remove(bad_file)
    except OSError, e:
        self.log_debug(_("Error removing `%s`") % bad_file, e)
    else:
        return ""  #@TODO: Recheck in 0.4.10

#: Honor Content-Disposition: sanitize the server-supplied filename
if disposition and newname:
    safename = parse_name(newname.split(' filename*=')[0])
    if safename != newname:
        try:
# NOTE(review): headerless fragment — the `def` line of the enclosing
# container-loader method lies outside this view; only comments added.
if self.pyfile.url.startswith("http"):
    #: Remote container: download it into the download folder and point
    #: pyfile.url at the local copy
    self.pyfile.name = re.findall("([^\/=]+)", self.pyfile.url)[-1]
    content = self.load(self.pyfile.url)
    self.pyfile.url = fs_join(self.pyload.config.get("general", "download_folder"), self.pyfile.name)
    try:
        with open(self.pyfile.url, "wb") as f:
            f.write(encode(content))
    except IOError, e:
        self.fail(e)
else:
    #: Local container: accept the path as-is, or relative to pypath
    self.pyfile.name = os.path.basename(self.pyfile.url)
    if not exists(self.pyfile.url):
        if exists(fs_join(pypath, self.pyfile.url)):
            self.pyfile.url = fs_join(pypath, self.pyfile.url)
        else:
            self.fail(_("File not exists"))
    else:
        self.data = self.pyfile.url  #@NOTE: ???

def delete_tmp(self):
    """
    Remove the temporary local container copy; only files created by the
    loader itself (marked by the "tmp_" name prefix) are deleted.
    """
    if not self.pyfile.name.startswith("tmp_"):
        return
    try:
        os.remove(self.pyfile.url)
    except OSError, e:
        self.log_warning(_("Error removing `%s`") % self.pyfile.url, e)