def _log(self, level, plugintype, pluginname, messages):
    """
    Forward a log record to pyLoad's logger at the requested level.

    :param level: name of the logger method to use (e.g. "debug", "error")
    :param plugintype: category of the plugin, upper-cased in the output
    :param pluginname: name of the emitting plugin
    :param messages: iterable of message parts; falsy parts are dropped,
                     the rest are decoded, stripped and joined with " | "
    """
    logger = getattr(self.pyload.log, level)
    text = u" | ".join(decode(part).strip() for part in messages if part)
    logger("%(plugintype)s %(pluginname)s[%(id)s]: %(msg)s"
           % {"plugintype": plugintype.upper(),
              "pluginname": pluginname,
              "id": self.pyfile.id,
              "msg": text})
def parse_domains(self, list):
    """
    Extract the registrable domain part from each url in `list`.

    :param list: iterable of url strings (the name shadows the builtin
                 `list`; kept unchanged for backward compatibility with
                 callers using keyword arguments)
    :return: result of self.replace_domains() applied to the de-duplicated,
             lower-cased domains

    Fix: removed a stray duplicate of the regex raw-string literal that sat
    in the body as a dead, no-op expression.
    """
    regexp = re.compile(
        r'^(?:https?://)?(?:www\.)?(?:\w+\.)*((?:[\d.]+|[\w\-^_]{3,63}(?:\.[a-zA-Z]{2,}){1,2})(?:\:\d+)?)',
        re.I | re.U)
    domains = [decode(domain).strip().lower()
               for url in list
               for domain in regexp.findall(url)]
    return self.replace_domains(uniqify(domains))
def decrypt_attr(self, data, key):
    """
    Decrypt a MEGA attribute block and parse the JSON object it contains.

    :param data: base64-encoded ciphertext of the attribute block
    :param key: file key from which the AES key is derived
    :return: dict parsed from the decrypted JSON payload
    """
    k, iv, meta_mac = self.get_cipher_key(key)
    cbc_cipher = AES.new(k, AES.MODE_CBC, "\0" * 16)
    raw = self.b64_decode(data)
    attr = decode(cbc_cipher.decrypt(raw))
    self.log_debug("Decrypted Attr: %s" % attr)
    if attr[:4] != "MEGA":
        self.fail(_("Decryption failed"))
    #: Data is padded, 0-bytes must be stripped — grab only the JSON object
    json_part = re.search(r'{.+?}', attr).group(0)
    return json.loads(json_part)
def list(self, password=None):
    """
    List the archive's members, mapped to paths under ``self.out``.

    :param password: optional archive password forwarded to the command
    :raise ArchiveError: when the archive file cannot be opened
    :return: list of unique member paths
    """
    command = "vb" if self.fullpath else "lb"
    proc = self.call_cmd(command, "-v", self.target, password=password)
    out, err = proc.communicate()

    if "Cannot open" in err:
        raise ArchiveError(_("Cannot open file"))

    if err.strip():  #: Only log error at this point
        self.log_error(err.strip())

    result = set()
    if not self.fullpath and self.VERSION.startswith('5'):
        #@NOTE: Unrar 5 always list full path
        for line in decode(out).splitlines():
            f = fs_join(self.out, os.path.basename(line.strip()))
            if os.path.isfile(f):
                result.add(fs_join(self.out, os.path.basename(f)))
    else:
        for line in decode(out).splitlines():
            result.add(fs_join(self.out, line.strip()))

    return list(result)
def parse_packages(self, startNode):
    """
    Collect one (name, links) tuple per <package> element under `startNode`.

    :param startNode: DOM node whose <package> descendants are read
    :return: list of (decoded package name, parsed links) tuples
    """
    packages = []
    for node in startNode.getElementsByTagName("package"):
        #: Package names are stored base64-encoded in the "name" attribute
        name = decode(node.getAttribute("name")).decode('base64')
        packages.append((name, self.parse_links(node)))
    return packages
def load(self, url, get={}, post={}, ref=True, cookies=True, just_header=False, decode=True, multipart=False, redirect=True, req=None): """ Load content at url and returns it :param url: :param get: :param post: :param ref: :param cookies: :param just_header: If True only the header will be retrieved and returned as dict :param decode: Wether to decode the output according to http header, should be True in most cases :return: Loaded content """ if self.pyload.debug: self.log_debug("LOAD URL " + url, *["%s=%s" % (key, val) for key, val in locals().items() if key not in ("self", "url", "_[1]")]) url = fixurl(url, unquote=True) #: Recheck in 0.4.10 if req is None: req = self.req or self.pyload.requestFactory.getRequest(self.classname) #@TODO: Move to network in 0.4.10 if isinstance(cookies, list): set_cookies(req.cj, cookies) #@TODO: Move to network in 0.4.10 if not redirect: req.http.c.setopt(pycurl.FOLLOWLOCATION, 0) elif type(redirect) is int: req.http.c.setopt(pycurl.MAXREDIRS, redirect) html = req.load(url, get, post, ref, bool(cookies), just_header, multipart, decode is True) #@TODO: Fix network multipart in 0.4.10 #@TODO: Move to network in 0.4.10 if not redirect: req.http.c.setopt(pycurl.FOLLOWLOCATION, 1) elif type(redirect) is int: maxredirs = self.get_config("maxredirs", default=5, plugin="UserAgentSwitcher") req.http.c.setopt(pycurl.MAXREDIRS, maxredirs) #@TODO: Move to network in 0.4.10 if decode: html = html_unescape(html) #@TODO: Move to network in 0.4.10 if isinstance(decode, basestring): html = utils.decode(html, decode) self.last_html = html if self.pyload.debug: frame = inspect.currentframe() try: framefile = fs_join("tmp", self.classname, "%s_line%s.dump.html" % (frame.f_back.f_code.co_name, frame.f_back.f_lineno)) if not exists(os.path.join("tmp", self.classname)): os.makedirs(os.path.join("tmp", self.classname)) with open(framefile, "wb") as f: f.write(encode(html)) except IOError, e: self.log_error(e) finally: