def __init__(self, parser, config=None, label=None, desc=None):
    """Create a config section bound to *parser*, optionally seeded from *config*."""
    super(ConfigSection, self).__init__()
    self.parser = parser
    # normalize optional metadata to plain strings
    self.label = to_str(label) if label is not None else ''
    self.desc = to_str(desc) if desc is not None else ''
    if config is not None:
        self.update(config)
def set(self, domain, name, value, path='/', expires=None, secure=False,
        tailmatch=False):
    """Store a cookie entry under *name*.

    Bug fix: the original assigned ``to_str(value)`` (a plain string) to
    ``self.__dict__[name]`` and then attempted item assignment on it, which
    raises ``TypeError`` — a fresh dict must be created first (as the sibling
    implementation of this method does, storing the value under ``'id'``).

    :param domain: cookie domain
    :param name: cookie name (used as the attribute key)
    :param value: cookie value
    :param path: cookie path, default '/'
    :param expires: absolute expiry timestamp; defaults to now + EXPIRE_TIME
    :param secure: whether the cookie is HTTPS-only
    :param tailmatch: whether domain tail-matching is enabled
    """
    cookie = {
        'id': to_str(value),
        'domain': to_str(domain),
        'tailmatch': 'TRUE' if tailmatch else 'FALSE',
        'path': to_str(path),
        'secure': 'TRUE' if secure else 'FALSE',
        'expires': int(expires or time.time() + self.EXPIRE_TIME),
    }
    self.__dict__[name] = cookie
def set(self, domain, name, value, path='/', expires=None, secure=False,
        tailmatch=False):
    """Register cookie *name* with the given attributes on this jar."""
    entry = dict()
    entry['id'] = to_str(value)
    entry['domain'] = to_str(domain)
    entry['tailmatch'] = 'TRUE' if tailmatch else 'FALSE'
    entry['path'] = to_str(path)
    entry['secure'] = 'TRUE' if secure else 'FALSE'
    # default expiry: EXPIRE_TIME seconds from now
    entry['expires'] = int(expires or time.time() + self.EXPIRE_TIME)
    self.__dict__[name] = entry
def result(self, server, challenge, key):
    """Fetch the second ReCaptcha challenge and decrypt its image.

    :param server: captcha server base url
    :param challenge: first-stage challenge token
    :param key: site key
    :return: tuple of (decrypted result, second challenge token)
    """
    # TODO: is the next statement required?
    self.file.plugin.load(
        "http://www.google.com/recaptcha/api/js/recaptcha.js")
    html = to_str(
        self.file.plugin.load("http://www.google.com/recaptcha/api/reload",
                              get={
                                  'c': challenge,
                                  'k': key,
                                  'reason': "i",
                                  'type': "image"
                              }))
    try:
        # fix: use a raw string — '\(' in a normal literal is an invalid
        # escape sequence (DeprecationWarning on Python 3.6+)
        challenge = re.search(r"\('(.+?)',", html).group(1)
    except (AttributeError, IndexError):
        self.fail(self._("ReCaptcha second challenge pattern not found"))
    self.pyload.log.debug("Second challenge: %s" % challenge)
    result = self.decrypt(urllib.parse.urljoin(server, "image"),
                          get={'c': challenge},
                          cookies=True,
                          input_type="jpg")
    return result, challenge
def handle_premium(self, pyfile):
    """Query the share-online API for premium download info and set the link."""
    self.api_data = dlinfo = {}
    html = to_str(self.load(
        "https://api.share-online.biz/account.php",
        get={
            'username': self.account.user,
            'password': self.account.get_login('password'),
            'act': "download",
            'lid': self.info['fileid']
        }))
    self.pyload.log.debug(html)
    # response body is one "key: value" pair per line; skip malformed lines
    for line in html.splitlines():
        parts = line.split(": ")
        if len(parts) == 2:
            dlinfo[parts[0].lower()] = parts[1]
    if dlinfo['status'] != "online":
        self.offline()
    else:
        pyfile.name = dlinfo['name']
        pyfile.size = int(dlinfo['size'])
        self.link = dlinfo['url']
        if self.link == "server_under_maintenance":
            self.temp_offline()
        else:
            self.multiDL = True
def init(self):
    """Open the database, migrating or recreating it when the stored schema
    version is older than ``DB_VERSION``.

    Bug fix: the version file was opened in binary mode (``'wb'``) while
    ``to_str`` yields ``str`` — writing str to a binary stream raises
    ``TypeError`` on Python 3. Text mode (``'w'``) is used instead, matching
    ``_check_version``.
    """
    version = self._check_version()
    self.conn = sqlite3.connect(self.DB_FILE)
    os.chmod(self.DB_FILE, 0o600)  # restrict db access to the owner
    self.c = self.conn.cursor()
    if version is not None and version < DB_VERSION:
        success = self._convert_db(version)
        # delete database
        if not success:
            self.c.close()
            self.conn.close()
            remove(self.VERSION_FILE)
            shutil.move(self.DB_FILE, self.DB_FILE + '.bak')
            self.pyload.log.warning(
                self._('Database was deleted due to incompatible version'))
            with io.open(self.VERSION_FILE, mode='w') as fp:
                fp.write(to_str(DB_VERSION))
            self.conn = sqlite3.connect(self.DB_FILE)
            os.chmod(self.DB_FILE, 0o600)
            self.c = self.conn.cursor()
    self._create_tables()
    self.conn.commit()
def bytesize(text, from_unit=None):
    """Parse a human-readable size string into bytes.

    Returns ``None`` when *text* does not look like a size at all.
    """
    DEFAULT_UNIT = "byte"
    match = _RE_SIZE.match(to_str(text))
    if match is None:
        return None
    rawsize = match.group("S")
    # normalize thousand/decimal separators depending on detected format
    if re.match(_RE_SIZEFORMAT1, rawsize):
        rawsize = rawsize.replace(",", "")
    elif re.match(_RE_SIZEFORMAT2, rawsize):
        rawsize = rawsize.replace(",", ".")
    # elif not re.match(_RE_SIZEFORMAT3, rawsize):
    #     return 0  #: Unknown format
    unit_name = from_unit if from_unit is not None else (
        match.group("U") or DEFAULT_UNIT)
    return convert.size(float(rawsize), unit_name[0].lower(), "byte")
def attributes(obj, ignore=None):
    """Return the members of *obj* converted to strings.

    Fix: the original returned a tuple when *ignore* was None but a lazy
    generator otherwise — an inconsistent interface; both branches now
    return a tuple.

    :param obj: iterable of attribute values
    :param ignore: single value or iterable of values to exclude
    :return: tuple of strings
    """
    if ignore is None:
        return tuple(map(to_str, obj))
    ignored = ignore if isiterable(ignore) else (ignore,)
    return tuple(to_str(x) for x in obj if x not in ignored)
def attributes(obj, ignore=None):
    """Return the members of *obj* converted to strings.

    Fix: the original returned a tuple in one branch and a generator in the
    other; both branches now consistently return a tuple.

    :param obj: iterable of attribute values
    :param ignore: single value or iterable of values to exclude
    :return: tuple of strings
    """
    if ignore is None:
        return tuple(map(to_str, obj))
    ignored = ignore if isiterable(ignore) else (ignore, )
    return tuple(to_str(x) for x in obj if x not in ignored)
def to_config_holder(section, config, values):
    """Build a ConfigHolder for *section* from its config spec and values.

    Fix: ``values.get(option, default)`` was evaluated twice per option;
    the lookup is now hoisted into a local.

    :param section: section identifier
    :param config: config spec with .label/.description/.explanation/.config
    :param values: mapping of option name -> current value
    :return: populated ConfigHolder
    """
    holder = ConfigHolder(section, config.label, config.description,
                          config.explanation)
    items = []
    for option, x in config.config.items():
        value = values.get(option, x.input.default_value)  # hoisted lookup
        items.append(
            ConfigItem(option, x.label, x.description, x.input,
                       to_str(value, value)))
    holder.items = items
    return holder
def to_info_data(self):
    """Build an AccountInfo snapshot including this account's config items.

    Fix: ``self.get_config(name)`` was called twice per option; the value is
    now fetched once and reused.

    :return: populated AccountInfo
    """
    info = AccountInfo(self.aid, self.__name__, self.loginname, self.owner,
                       self.valid, self.validuntil, self.trafficleft,
                       self.maxtraffic, self.premium, self.activated,
                       self.shared, self.options)
    items = []
    for name, item in self.config_data.items():
        value = self.get_config(name)  # hoisted: was computed twice
        items.append(
            ConfigItem(name, item.label, item.description, item.input,
                       to_str(value, value)))
    info.config = items
    return info
def _check_version(self): """Get db version.""" if not os.path.isfile(self.VERSION_FILE) or not os.path.getsize( self.VERSION_FILE): with io.open(self.VERSION_FILE, mode='w') as fp: fp.write(to_str(DB_VERSION)) with io.open(self.VERSION_FILE, mode='r') as fp: v = int(fp.read().strip()) return v
def handle_free(self, pyfile):
    """Run the free-download workflow: request page, solve captcha, decode link.

    Fix: the decoded link was rejected unless it started with plain
    ``"http://"``, so valid https links triggered the "Invalid url" error;
    both schemes are now accepted.
    """
    self.wait(3)
    self.data = to_str(
        self.load("%s/free/" % pyfile.url,
                  post={
                      'dl_free': "1",
                      'choice': "free"
                  }))
    self.check_errors()
    res = self.handle_captcha()
    # the captcha response is a base64-encoded download url
    self.link = to_str(base64.b64decode(res))
    if not self.link.startswith(("http://", "https://")):
        self.error(self._("Invalid url"))
    self.wait()
def convert_obj(dct):
    """Rebuild a datatype instance from a decoded JSON dict, or pass it through."""
    if '@class' in dct:
        cls = getattr(datatype, dct['@class'])
        del dct['@class']
        # convert keywords to str, <=2.6 does not accept unicode
        kwargs = dict((convert.to_str(k, k), v) for k, v in dct.items())
        return cls(**kwargs)
    if '@compact' in dct:
        compact = dct['@compact']
        cls = getattr(datatype, compact[0])
        return cls(*compact[1:])
    return dct
def get_config_value(self, section, option):
    """
    Retrieve config value.

    :param section: name of category, or plugin
    :param option: config option
    :rtype: str
    :return: config value as string
    """
    raw = self.pyload.config.get(section, option, self.primary_uid)
    return to_str(raw, raw)
def _log(self, level, *args, **kwargs):
    """Forward *args* to the given log level, joined by an optional separator."""
    sep = "{0}".format(kwargs['sep']) if "sep" in kwargs else " | "
    strings = [to_str(obj, obj) for obj in args]
    getattr(self.log, level)("{0}: {1}".format(self.__name__,
                                               sep.join(strings)))
def ishost(value):
    """Check whether *value* is a syntactically valid hostname."""
    MAX_HOSTNAME_LEN = 253
    try:
        # idna.encode returns a bytestring, so convert back to str
        value = to_str(idna.encode(value))
    except AttributeError:
        pass
    if value.endswith('.'):
        # tolerate a single trailing root dot
        value = value[:-1]
    if not value:
        return False
    if len(value) > MAX_HOSTNAME_LEN:
        return False
    return all(_RE_ISH.match(label) for label in value.split('.'))
def _log(self, level, *args, **kwargs):
    """Emit *args* at the named log level, joined by an optional separator."""
    sep = '{0}'.format(kwargs['sep']) if 'sep' in kwargs else ' | '
    parts = [to_str(obj) for obj in args]
    message = '{0}: {1}'.format(self.__name__, sep.join(parts))
    getattr(self.log, level)(message)
def _log(self, level, *args, **kwargs):
    """Log *args* at *level*, prefixed with the plugin name."""
    if "sep" in kwargs:
        sep = "{0}".format(kwargs['sep'])
    else:
        sep = " | "
    joined = sep.join(to_str(obj, obj) for obj in args)
    log_fn = getattr(self.log, level)
    log_fn("{0}: {1}".format(self.__name__, joined))
def _handle_failed(self, file, exc):
    """Mark *file* as failed, record the error and notify listeners."""
    log = self.pyload.log
    file.set_status('failed')
    log.warning(self._('Download failed: {0}').format(file.name))
    file.error = to_str(exc)
    if self.pyload.debug:
        # in debug mode also dump a full traceback and a debug report
        log.error(exc, exc_info=self.pyload.debug)
        self.debug_report(file)
    self.pyload.adm.download_failed(file)
    self.clean(file)
def is_host(value):
    """Return True when *value* looks like a valid hostname."""
    MAX_HOSTNAME_LEN = 253
    try:
        value = to_str(idna.encode(value))  # idna gives bytes; back to str
    except AttributeError:
        pass
    hostname = value[:-1] if value.endswith(".") else value
    if not hostname or len(hostname) > MAX_HOSTNAME_LEN:
        return False
    labels = hostname.split(".")
    return all(map(_RE_ISH.match, labels))
def upload_container(self, filename, data):
    """Uploads and adds a container file to pyLoad.

    Fix: the file is opened in binary mode ('wb') but ``to_str`` yields
    ``str`` — writing str to a binary stream raises TypeError on Python 3.
    Bytes payloads are now written untouched and text payloads are encoded.

    :param filename: name of the file
    :param data: file content (bytes or text)
    """
    storagedir = self.pyload.config.get('general', 'storage_folder')
    filename = 'tmp_{0}'.format(filename)
    filepath = os.path.join(storagedir, filename)
    if isinstance(data, bytes):
        content = data
    else:
        content = to_str(data).encode('utf-8')
    with io.open(filepath, mode='wb') as fp:
        fp.write(content)
    return self.add_package(fp.name, [fp.name])
def check_container(self, filename, data):
    """Checks online status of urls and a submitted container file.

    Fix: the file is opened in binary mode ('wb') but ``to_str`` yields
    ``str`` — writing str to a binary stream raises TypeError on Python 3.
    Bytes payloads are now written untouched and text payloads are encoded.

    :param filename: name of the file
    :param data: file content (bytes or text)
    :return: :class:`OnlineCheck`
    """
    storagedir = self.pyload.config.get('general', 'storage_folder')
    filename = 'tmp_{0}'.format(filename)
    filepath = os.path.join(storagedir, filename)
    if isinstance(data, bytes):
        content = data
    else:
        content = to_str(data).encode('utf-8')
    with io.open(filepath, mode='wb') as fp:
        fp.write(content)
    return self.check_links([fp.name])
def splitaddress(address):
    """Split an address into ``(host, port)``; port is None when absent or invalid."""
    try:
        address = to_str(idna.encode(address))
    except (AttributeError, idna.IDNAError):
        pass
    # with two or more colons assume bracketed IPv6 notation ("[...]:port")
    sep = ']:' if address.split(':', 2)[2:] else ':'
    parts = address.rsplit(sep, 1)
    if len(parts) == 2:
        addr, rawport = parts
        try:
            port = int(rawport)
        except ValueError:
            port = None
    else:
        addr, port = parts[0], None
    return addr, port
def bytesize(text, unit=None):
    """Parse a size string like ``'1,5 mb'`` into an integer byte count.

    Returns ``None`` when *text* cannot be parsed as a size.
    """
    DEFAULT_INPUTUNIT = 'byte'
    match = _RE_SIZE.match(to_str(text))
    if match is None:
        return None
    if unit is None:
        unit = match.group('U') or DEFAULT_INPUTUNIT
    # decimal comma -> decimal point
    value = float(match.group('S').replace(',', '.'))
    return int(convert.size(value, unit[0].lower(), 'byte'))
def splitaddress(address):
    """Break *address* into a ``(host, port)`` pair.

    The port is ``None`` when missing or not an integer.
    """
    try:
        address = to_str(idna.encode(address))
    except (AttributeError, idna.IDNAError):
        pass
    # ']:' separator when the address holds at least two colons (IPv6 form)
    sep = "]:" if address.count(":") >= 2 else ":"
    head, found, tail = address.rpartition(sep)
    if not found:
        return address, None
    try:
        return head, int(tail)
    except ValueError:
        return head, None
def _challenge_v1(self, key, secure_token):
    """Request a v1 ReCaptcha challenge and delegate to ``result``."""
    html = to_str(
        self.file.plugin.load(
            "http://www.google.com/recaptcha/api/challenge", get={'k': key}))
    challenge_m = re.search("challenge : '(.+?)',", html)
    server_m = re.search("server : '(.+?)',", html)
    try:
        challenge = challenge_m.group(1)
        server = server_m.group(1)
    except (AttributeError, IndexError):
        self.fail(self._("ReCaptcha challenge pattern not found"))
    self.pyload.log.debug("Challenge: %s" % challenge)
    return self.result(server, challenge, key)
def call_cmd(command, *args, **kwargs):
    """Run *command* via ``exec_cmd`` and return ``(returncode, stdout, stderr)``.

    When ``ignore_errors`` is true, failures are reported through the return
    values instead of raising.
    """
    ignore_errors = kwargs.pop('ignore_errors', False)
    try:
        sp = exec_cmd(command, *args, **kwargs)
    except Exception as exc:
        if not ignore_errors:
            raise
        return 1, '', to_str(exc).strip()
    stdoutdata, stderrdata = map(str.strip, sp.communicate())
    return sp.returncode, stdoutdata, stderrdata
def call_cmd(command, *args, **kwargs):
    """Execute *command*; optionally swallow errors and return them instead.

    :return: tuple ``(returncode, stdout, stderr)``
    """
    ignore_errors = kwargs.pop("ignore_errors", False)
    try:
        sp = exec_cmd(command, *args, **kwargs)
    except Exception as exc:
        if not ignore_errors:
            raise
        returncode = 1
        stdoutdata = ""
        stderrdata = to_str(exc).strip()
    else:
        returncode = sp.returncode
        stdoutdata, stderrdata = map(str.strip, sp.communicate())
    return returncode, stdoutdata, stderrdata
def write_header(self, buf):
    """Accumulate response header data and parse it once complete."""
    buf = to_str(buf)  # everything uses buf as string, so a conversion is needed
    self.header += buf
    # TODO: forward headers?, this is possibly unneeded,
    # when we just parse valid 200 headers as first chunk,
    # we will parse the headers
    if not self.range and self.header.endswith(os.linesep * 2):
        # NOTE(review): os.linesep*2 only equals '\r\n\r\n' on Windows —
        # confirm this matches the header terminator actually received
        self.parse_header()
    elif (not self.range and buf.startswith('150')
          and 'data connection' in buf):
        # ftp file size parsing
        size = re.search(r"(\d+) bytes", buf)
        if size is not None:
            self.p._size = int(size.group(1))
            self.p.chunk_support = True
        self.header_parsed = True
def write_header(self, buf):
    """Collect header bytes as text; trigger parsing when the header ends."""
    buf = to_str(
        buf)  # everything uses buf as string, so a conversion is needed
    self.header += buf
    # TODO: forward headers?, this is possibly unneeded,
    # when we just parse valid 200 headers as first chunk,
    # we will parse the headers
    header_done = self.header.endswith(os.linesep * 2)
    if not self.range and header_done:
        self.parse_header()
    elif (not self.range and buf.startswith('150')
          and 'data connection' in buf):
        # ftp file size parsing; the 150 status line may carry the size
        size = re.search(r"(\d+) bytes", buf)
        if size is not None:
            self.p._size = int(size.group(1))
            self.p.chunk_support = True
        self.header_parsed = True
def _handle_fail(self, file, exc):
    """Translate a download exception into a file status and notify listeners."""
    errmsg = to_str(exc)
    if errmsg == 'offline':
        file.set_status('offline')
        self.pyload.log.warning(
            self._('Download is offline: {0}').format(file.name))
    elif errmsg == 'temp. offline':
        file.set_status('temp. offline')
        self.pyload.log.warning(
            self._('Download is temporary offline: {0}').format(file.name))
    else:
        # any other message counts as a plain failure
        file.set_status('failed')
        self.pyload.log.warning(
            self._('Download failed: {0}').format(file.name))
    file.error = errmsg
    self.pyload.adm.download_failed(file)
    self.clean(file)
def handle_captcha(self):
    """Solve the ReCaptcha and post the result.

    Returns the server response, or implicitly ``None`` when the response
    was "0" (captcha rejected).
    """
    #self.captcha = ReCaptcha(self.pyfile)
    response, challenge = self.challenge(self.RECAPTCHA_KEY)
    wait_m = re.search(r'var wait=(\d+);', self.data)
    self.set_wait(int(wait_m.group(1)) if wait_m else 30)
    url = "%s/free/captcha/%d" % (self.file.url, int(time.time() * 1000))
    res = to_str(
        self.load(url,
                  post={
                      'dl_free': "1",
                      'recaptcha_challenge_field': challenge,
                      'recaptcha_response_field': response
                  }))
    if res != "0":
        #self.captcha.correct()
        return res
    # rejected captcha: fall through (returns None)
    #self.retry_captcha()
    pass
def get_account_info(self, force=False):
    """Retrieve account info's for an user, do **not** overwrite this method!

    just use it to retrieve info's in hoster plugins. See `load_account_info`

    :param name: username
    :param force: reloads cached account information
    """
    # refresh only when forced or the cached info is older than the threshold
    if force or self.timestamp + self.info_threshold * 60 < time.time():
        # make sure to login
        with self.get_account_request() as req:
            self.check_login(req)
            self.log_info(
                self._('Get Account Info for {0}').format(self.loginname))
            try:
                try:
                    infos = self.load_account_info(req)
                except TypeError:  # TODO: temporary
                    self.log_debug(
                        'Deprecated .load_account_info(...) signature, omit user argument'
                    )
                    infos = self.load_account_info(self.loginname, req)
            except Exception as exc:
                # report failures via the info dict instead of propagating
                infos = {'error': to_str(exc)}
                self.pyload.log.error(exc, exc_info=self.pyload.debug)
        self.restore_defaults()  # reset to initial state
        if isinstance(infos, dict):  # copy result from dict to class
            for k, v in infos.items():
                if hasattr(self, k):
                    setattr(self, k, v)
                else:
                    self.log_debug('Unknown attribute {0}={1}'.format(
                        k, v))
        self.log_debug('Account Info: {0}'.format(infos))
        self.timestamp = time.time()
        # notify listeners that fresh account data is available
        self.pyload.evm.fire('account:loaded', self.to_info_data())
def get_account_info(self, force=False):
    """Retrieve account info's for an user, do **not** overwrite this method!

    just use it to retrieve info's in hoster plugins. See `load_account_info`

    :param name: username
    :param force: reloads cached account information
    """
    # refresh only when forced or the cached info is older than the threshold
    if force or self.timestamp + self.info_threshold * 60 < time.time():
        # make sure to login
        with self.get_account_request() as req:
            self.check_login(req)
            self.log_info(
                self._('Get Account Info for {0}').format(self.loginname))
            try:
                try:
                    infos = self.load_account_info(req)
                except TypeError:  # TODO: temporary
                    self.log_debug(
                        'Deprecated .load_account_info(...) signature, omit user argument')
                    infos = self.load_account_info(self.loginname, req)
            except Exception as exc:
                # report failures via the info dict instead of propagating
                infos = {'error': to_str(exc)}
                self.pyload.log.error(exc, exc_info=self.pyload.debug)
        self.restore_defaults()  # reset to initial state
        if isinstance(infos, dict):  # copy result from dict to class
            for k, v in infos.items():
                if hasattr(self, k):
                    setattr(self, k, v)
                else:
                    self.log_debug(
                        'Unknown attribute {0}={1}'.format(k, v))
        self.log_debug('Account Info: {0}'.format(infos))
        self.timestamp = time.time()
        # notify listeners that fresh account data is available
        self.pyload.evm.fire('account:loaded', self.to_info_data())
def decode(path):
    """Decode *path* into ``str``, preferring ``os.fsdecode``."""
    try:
        result = os.fsdecode(path)
    except AttributeError:
        # fall back for environments/objects without fsdecode support
        result = to_str(path)
    return result
def url(obj):
    """Sanitize *obj* into a normalized url string.

    Fix: the original called ``.decode('unicode-escape')`` on the ``str``
    returned by ``to_str`` — ``str`` has no ``.decode`` on Python 3, raising
    AttributeError. The intended unicode-escape decoding is applied via an
    explicit bytes round-trip instead.
    """
    text = to_str(obj)
    if isinstance(text, bytes):
        text = text.decode('unicode-escape')
    else:
        text = text.encode('latin-1', 'backslashreplace').decode(
            'unicode-escape')
    url = urllib.parse.unquote(text)
    url = purge.text(url).lstrip('.').lower()
    url = _RE_URL.sub('/', url).rstrip('/')
    return url
def write_header(self, buf):
    """Writes header."""
    chunk = to_str(buf)
    self.header = self.header + chunk
def _setup_info(self, label, desc): self.label = '' if label is None else to_str(label) self.desc = '' if desc is None else to_str(desc)
def url(obj):
    """Sanitize *obj* into a normalized url string.

    Fix: ``str`` objects have no ``.decode`` on Python 3, so the original
    ``to_str(obj).decode("unicode-escape")`` raised AttributeError; the
    unicode-escape decoding is now applied through a bytes round-trip.
    """
    text = to_str(obj)
    if isinstance(text, bytes):
        text = text.decode("unicode-escape")
    else:
        text = text.encode("latin-1", "backslashreplace").decode(
            "unicode-escape")
    url = urllib.parse.unquote(text)
    url = purge.text(url).lstrip(".").lower()
    url = _RE_URL.sub("/", url).rstrip("/")
    return url