def check_for_update(self):
    """
    Check whether a new pyLoad version is available.

    :return: list of plugin-update lines from the server when pyLoad itself
             is up to date, otherwise ``None`` (also on connection failure).
    """
    try:
        if self.version == "None":  #: No updated known
            # BUGFIX: .splitlines() must be applied to the server *response*,
            # not to the URL string — the original parenthesization passed a
            # list of URL fragments into get_url().
            version_check = get_url(
                self.URL.format(self.pyload.api.get_server_version())
            ).splitlines()
            self.version = version_check[0]

            # Still no updates, plugins will be checked
            if self.version == "None":
                self.log_info(_("No Updates for pyLoad"))
                return version_check[1:]

        self.info['pyload'] = True
        self.log_info(
            _("*** New pyLoad Version {0} available ***").format(self.version))
        self.log_info(
            _("*** Get it here: https://github.com/pyload/pyload/releases ***"))

    except Exception:
        self.log_warning(_("Not able to connect server for updates"))

    return None  #: Nothing will be done
def process(self, pyfile):
    """Fetch the hoster page and dispatch to premium or free handling."""
    pyfile.url = replace_patterns(pyfile.url, self.FILE_URL_REPLACEMENTS)
    self.req.set_option("timeout", 120)

    # A 0.4.9 core bug makes self.load keep previous cookies even when the
    # cookies parameter overrides them; get_url sidesteps that. Revert to
    # self.load in 0.5, where the cookies bug is fixed.
    self.html = get_url(pyfile.url,
                        decode=not self.SH_BROKEN_ENCODING,
                        cookies=self.SH_COOKIES)

    if hasattr(self, 'PREMIUM_ONLY_PATTERN'):
        is_premium_only = re.search(self.PREMIUM_ONLY_PATTERN, self.html)
    else:
        is_premium_only = False

    # Premium-only pages usually do not show the file information
    if not is_premium_only:
        self.get_file_info()

    can_use_premium = self.premium and (not self.SH_CHECK_TRAFFIC
                                        or self.check_traffic_left())
    if can_use_premium:
        self.handle_premium()
    elif is_premium_only:
        self.fail(_("This link require a premium account"))
    else:
        # Required because of the get_url workaround above; drop in 0.5
        self.html = self.load(pyfile.url,
                              decode=not self.SH_BROKEN_ENCODING,
                              cookies=self.SH_COOKIES)
        self.handle_free()
def process(self, pyfile):
    """Dispatch a download: overridden hoster, direct link, premium, or free."""
    self.prepare()
    pyfile.url = replace_patterns(pyfile.url, self.FILE_URL_REPLACEMENTS)

    if not re.match(self.__pattern__, pyfile.url):
        # URL belongs to another hoster — only premium accounts may proceed
        if self.premium:
            self.handle_overriden()
        else:
            self.fail(_("Only premium users can download from other hosters with {0}").format(
                self.HOSTER_NAME))
        return

    # A 0.4.9 core bug makes self.load send cookies even with
    # cookies=False, so fetch via get_url to avoid cookies entirely.
    # Can be reverted in 0.5 as the cookies bug has been fixed there.
    try:
        self.html = get_url(pyfile.url, decode=True)
        self.file_info = self.get_file_info()
    except PluginParseError:
        self.file_info = None

    self.location = self.get_direct_download_link()

    if not self.file_info:
        # No parsed info: derive a name from the last path segment of the
        # best-known URL (direct link when present, otherwise the page URL)
        source_url = self.location or pyfile.url
        pyfile.name = webpurge.escape(
            unquote(urlparse(source_url).path.split("/")[-1]))

    if self.location:
        self.start_download(self.location)
    elif self.premium:
        self.handle_premium()
    else:
        self.handle_free()
def check_for_update(self):
    """
    Check whether a new pyLoad version is available.

    :return: list of plugin-update lines from the server when pyLoad itself
             is up to date, otherwise ``None`` (also on connection failure).
    """
    try:
        if self.version == "None":  #: No updated known
            # BUGFIX: .splitlines() belongs on get_url()'s response, not on
            # the URL string — the original parenthesization handed get_url
            # a list of URL fragments instead of the formatted URL.
            version_check = get_url(
                self.URL.format(self.pyload.api.get_server_version())
            ).splitlines()
            self.version = version_check[0]

            # Still no updates, plugins will be checked
            if self.version == "None":
                self.log_info(_("No Updates for pyLoad"))
                return version_check[1:]

        self.info['pyload'] = True
        self.log_info(
            _("*** New pyLoad Version {0} available ***").format(self.version))
        self.log_info(
            _("*** Get it here: https://github.com/pyload/pyload/releases ***"))

    except Exception:
        self.log_warning(_("Not able to connect server for updates"))

    return None  #: Nothing will be done
def get_info(urls):
    """Yield parsed file info for each URL using the plugin's cookie setup."""
    for url in urls:
        cookie_jar = CookieJar(plugin.__name__)
        if isinstance(plugin.SH_COOKIES, list):
            set_cookies(cookie_jar, plugin.SH_COOKIES)

        normalized_url = replace_patterns(url, plugin.FILE_URL_REPLACEMENTS)
        page = get_url(normalized_url,
                       decode=not plugin.SH_BROKEN_ENCODING,
                       cookies=cookie_jar)
        yield parseFileInfo(plugin, url, page)
def process(self, pyfile):
    """Fetch the hoster page and route to premium or free download handling."""
    pyfile.url = replace_patterns(pyfile.url, self.FILE_URL_REPLACEMENTS)
    self.req.set_option("timeout", 120)

    # Workaround for a 0.4.9 core bug: self.load keeps previous cookies
    # even when the cookies parameter overrides them, so fetch with
    # get_url instead. Can be reverted in 0.5 (bug fixed there).
    decode_html = not self.SH_BROKEN_ENCODING
    self.html = get_url(pyfile.url, decode=decode_html, cookies=self.SH_COOKIES)

    premium_only = (hasattr(self, 'PREMIUM_ONLY_PATTERN')
                    and re.search(self.PREMIUM_ONLY_PATTERN, self.html))

    if not premium_only:  #: Premium-only pages usually omit file information
        self.get_file_info()

    if self.premium and (not self.SH_CHECK_TRAFFIC or self.check_traffic_left()):
        self.handle_premium()
        return
    if premium_only:
        self.fail(_("This link require a premium account"))
        return

    # Needed because of the get_url workaround above; removable in 0.5
    self.html = self.load(pyfile.url, decode=decode_html, cookies=self.SH_COOKIES)
    self.handle_free()
def check_plugins(self, updates):
    """
    Check for plugin updates and install any newer versions.

    :param updates: server response lines — ``[url_template, schema, row, ...]``
                    where each row is a ``|``-separated record matching schema.
    """
    # plugins were already updated
    if self.info['plugins']:
        return None

    reloads = []

    vre = re.compile(r'__version__.*=.*("|\')([0-9.]+)')
    url = updates[0]
    schema = updates[1].split("|")
    updates = updates[2:]

    for plugin in updates:
        info = dict(zip(schema, plugin.split("|")))
        filename = info['name']
        prefix = info['type']
        version = info['version']

        if filename.endswith(".pyc"):
            name = filename[:filename.find("_")]
        else:
            name = filename.replace(".py", "")

        # TODO: obsolete in 1.0.0
        if prefix.endswith("s"):
            _type = prefix[:-1]
        else:
            _type = prefix

        plugins = getattr(self.pyload.pgm, "{0}Plugins".format(_type))

        # Skip when the installed version is already up to date
        if name in plugins:
            if float(plugins[name]['v']) >= float(version):
                continue

        if name in IGNORE or (_type, name) in IGNORE:
            continue

        # BUGFIX: version is a string; the {2:.2f} spec requires a number,
        # so convert explicitly or the format call raises ValueError.
        self.log_info(
            _("New version of {0}|{1} : {2:.2f}").format(
                _type, name, float(version)))

        try:
            content = get_url(url.format(info))
        except Exception as e:
            # BUGFIX: Exception has no .message attribute on Python 3;
            # pass the exception object itself.
            self.log_warning(
                _("Error when updating {0}").format(filename), e)
            continue

        # Verify the declared version inside the fetched source matches
        m = vre.search(content)
        if not m or m.group(2) != version:
            self.log_warning(
                _("Error when updating {0}").format(name),
                _("Version mismatch"))
            continue

        # NOTE(review): 'wb' mode requires a bytes-like object — assumes
        # get_url returns bytes here; confirm against its implementation.
        with io.open(os.path.join("userplugins", prefix, filename),
                     mode='wb') as fp:
            fp.write(content)

        self.updated = True
        reloads.append((prefix, name))

    self.reloaded = self.pyload.pgm.reload_plugins(reloads)
def check_plugins(self, updates):
    """
    Check for plugin updates and install any newer versions.

    :param updates: server response lines — ``[url_template, schema, row, ...]``
                    where each row is a ``|``-separated record matching schema.
    """
    # plugins were already updated
    if self.info['plugins']:
        return None

    reloads = []

    vre = re.compile(r'__version__.*=.*("|\')([0-9.]+)')
    url = updates[0]
    schema = updates[1].split("|")
    updates = updates[2:]

    for plugin in updates:
        info = dict(zip(schema, plugin.split("|")))
        filename = info['name']
        prefix = info['type']
        version = info['version']

        if filename.endswith(".pyc"):
            name = filename[:filename.find("_")]
        else:
            name = filename.replace(".py", "")

        # TODO: obsolete in 1.0.0
        if prefix.endswith("s"):
            _type = prefix[:-1]
        else:
            _type = prefix

        plugins = getattr(self.pyload.pgm, "{0}Plugins".format(_type))

        # Skip when the installed version is already up to date
        if name in plugins:
            if float(plugins[name]['v']) >= float(version):
                continue

        if name in IGNORE or (_type, name) in IGNORE:
            continue

        # BUGFIX: version is a string; the {2:.2f} spec requires a number,
        # so convert explicitly or the format call raises ValueError.
        self.log_info(_("New version of {0}|{1} : {2:.2f}").format(
            _type, name, float(version)))

        try:
            content = get_url(url.format(info))
        except Exception as e:
            # BUGFIX: Exception has no .message attribute on Python 3;
            # pass the exception object itself.
            self.log_warning(
                _("Error when updating {0}").format(filename), e)
            continue

        # Verify the declared version inside the fetched source matches
        m = vre.search(content)
        if not m or m.group(2) != version:
            self.log_warning(_("Error when updating {0}").format(name),
                             _("Version mismatch"))
            continue

        # NOTE(review): 'wb' mode requires a bytes-like object — assumes
        # get_url returns bytes here; confirm against its implementation.
        with io.open(os.path.join("userplugins", prefix, filename),
                     mode='wb') as fp:
            fp.write(content)

        self.updated = True
        reloads.append((prefix, name))

    self.reloaded = self.pyload.pgm.reload_plugins(reloads)