class DownloadRequest(with_metaclass(ABCMeta, Request)):
    """Abstract base class for a request that downloads a resource.

    Concrete subclasses implement :meth:`download` and may override the
    informational properties (``speed``, ``arrived``) with real values.
    """

    def __init__(self, bucket, request=None, logger=None):
        if logger is not None:
            self.log = logger
        else:
            # No logger supplied: route records into a do-nothing handler
            self.log = logging.getLogger('null')
            self.log.addHandler(logging.NullHandler())

        # Copies the context of the originating request, if one was given
        context = [{}] if not request else request.get_context()
        super(DownloadRequest, self).__init__(*context)

        self.__running = Event()
        self._name = None
        self._size = 0
        self.bucket = bucket  # bucket used for rate limiting

    @abstractmethod
    def download(self, uri, filename, *args, **kwargs):
        """Downloads the resource with additional options depending on implementation."""

    @property
    def running(self):
        """Whether a transfer is currently flagged as in progress."""
        return self.__running.is_set()

    @property
    def size(self):
        """Size in bytes."""
        return self._size

    @property
    def name(self):
        """Name of the resource if known."""
        return self._name

    @property
    def speed(self):
        """Download rate in bytes per second."""
        return 0

    @property
    def arrived(self):
        """Number of bytes already loaded."""
        return 0
# NOTE: a byte-identical duplicate of the ``DownloadRequest`` class defined
# immediately above was removed here.  The second definition served no purpose:
# it simply rebound the module-level name to an equivalent class object, so
# deleting it changes no behavior for any caller.
class RemoteBackend(Thread):
    """Base thread for a remote backend.

    Subclasses provide :meth:`setup`, :meth:`serve` and :meth:`shutdown`;
    this base class only supplies the thread lifecycle and error logging.
    """

    def __init__(self, manager):
        Thread.__init__(self)
        self.manager = manager
        self.pyload = manager.pyload
        self._ = manager.pyload._
        self.enabled = True
        self.__running = Event()

    @property
    def running(self):
        """True while :meth:`serve` is executing."""
        return self.__running.is_set()

    def run(self):
        self.__running.set()
        try:
            self.serve()
        except Exception as exc:
            self.pyload.log.error(
                self._("Remote backend error: {0}").format(str(exc)))
            if self.pyload.debug:
                print_exc()
        finally:
            # Always drop the running flag, even after a crash
            self.__running.clear()

    def setup(self, host, port):
        raise NotImplementedError

    def check_deps(self):
        """Report whether this backend's dependencies are available."""
        return True

    def serve(self):
        raise NotImplementedError

    def shutdown(self):
        raise NotImplementedError

    def quit(self):
        #: set flag and call shutdown message, so thread can react
        self.enabled = False
        self.shutdown()
class DownloadThread(PluginThread):
    """
    Thread for downloading files from 'real' hoster plugins.

    Jobs (file objects, or the sentinel string ``"quit"``) are pushed via
    :meth:`put`; :meth:`run` consumes them and dispatches every plugin
    exception to a dedicated ``_handle_*`` method.
    """

    # FIX: 'running' was listed in __slots__ while the class also defines a
    # ``running`` property — that combination raises
    # ``ValueError: 'running' in __slots__ conflicts with class variable``
    # at class-creation time, so the module could not even be imported.
    # NOTE(review): '_DownloadThread__running' is not slotted either; this
    # presumably relies on PluginThread providing an instance __dict__ —
    # confirm against the base class.
    __slots__ = ['active', 'queue']

    def __init__(self, manager):
        """
        Constructor.
        """
        PluginThread.__init__(self, manager)
        self.__running = Event()
        self.queue = Queue()  # job queue
        self.active = None

    @property
    def running(self):
        """True once the current job finished its plugin initialization."""
        return self.__running.is_set()

    def _handle_abort(self, file):
        # Log best-effort only: the file may already be partially torn down.
        try:
            self.__pyload.log.info(
                self._("Download aborted: {0}").format(file.name))
        except Exception:
            pass
        file.set_status("aborted")

    def _handle_reconnect(self, file):
        # Requeue the job and block until the reconnect has finished.
        self.queue.put(file)
        # file.req.clear_cookies()
        while self.__manager.reconnecting.isSet():
            time.sleep(0.5)

    def _handle_retry(self, file, reason):
        self.__pyload.log.info(
            self._("Download restarted: {0} | {1}").format(file.name, reason))
        self.queue.put(file)

    def _handle_notimplement(self, file):
        # The plugin is missing a required hook; mark the file failed.
        self.__pyload.log.error(
            self._("Plugin {0} is missing a function").format(file.pluginname))
        file.set_status("failed")
        file.error = "Plugin does not work"
        self.clean(file)

    def _handle_tempoffline(self, file):
        file.set_status("temp. offline")
        self.__pyload.log.warning(
            self._("Download is temporary offline: {0}").format(file.name))
        file.error = self._("Internal Server Error")
        if self.__pyload.debug:
            print_exc()
            self.debug_report(file)
        self.__pyload.adm.download_failed(file)
        self.clean(file)

    def _handle_failed(self, file, errmsg):
        file.set_status("failed")
        self.__pyload.log.warning(
            self._("Download failed: {0} | {1}").format(file.name, errmsg))
        file.error = errmsg
        if self.__pyload.debug:
            print_exc()
            self.debug_report(file)
        self.__pyload.adm.download_failed(file)
        self.clean(file)

    # TODO: activate former skipped downloads
    def _handle_fail(self, file, errmsg):
        # Map the plugin's Fail message onto a file status.
        if errmsg == "offline":
            file.set_status("offline")
            self.__pyload.log.warning(
                self._("Download is offline: {0}").format(file.name))
        elif errmsg == "temp. offline":
            file.set_status("temp. offline")
            self.__pyload.log.warning(
                self._("Download is temporary offline: {0}").format(file.name))
        else:
            file.set_status("failed")
            self.__pyload.log.warning(
                self._("Download failed: {0} | {1}").format(file.name, errmsg))
            file.error = errmsg
        self.__pyload.adm.download_failed(file)
        self.clean(file)

    def _handle_skip(self, file, errmsg):
        file.set_status("skipped")
        self.__pyload.log.info(
            self._("Download skipped: {0} due to {1}").format(
                file.name, errmsg))
        self.clean(file)
        self.__pyload.files.check_package_finished(file)
        self.active = None
        self.__pyload.files.save()

    def _handle_error(self, file, errmsg, errcode=None):
        """Handle a pycurl ``error``; retry on transient network failures."""
        self.__pyload.log.debug("pycurl exception {0}: {1}".format(
            errcode, errmsg))
        # 7/18/28/52/56: connect failure, partial file, timeout, empty
        # reply, connection reset — worth waiting a minute and retrying.
        if errcode in (7, 18, 28, 52, 56):
            self.__pyload.log.warning(
                self._("Couldn't connect to host or connection reset, "
                       "waiting 1 minute and retry"))
            wait = time.time() + 60
            file.wait_until = wait
            file.set_status("waiting")
            while time.time() < wait:
                time.sleep(0.5)
                if file.abort:
                    break
            if file.abort:
                self.__pyload.log.info(
                    self._("Download aborted: {0}").format(file.name))
                file.set_status("aborted")
                # do not clean, aborting function does this itself
                # self.clean(file)
            else:
                self.queue.put(file)
        else:
            file.set_status("failed")
            self.__pyload.log.error(
                self._("pycurl error {0}: {1}").format(errcode, errmsg))
            if self.__pyload.debug:
                print_exc()
                self.debug_report(file)
            self.__pyload.adm.download_failed(file)

    def _run(self, file):
        """Perform one download job; plugin exceptions propagate to run()."""
        file.init_plugin()
        # after initialization the thread is fully ready
        self.__running.set()
        # this file was deleted while queuing
        # TODO: what will happen with new thread manager?
        # if not file.has_plugin(): continue
        file.plugin.check_for_same_files(starting=True)
        self.__pyload.log.info(self._("Download starts: {0}".format(
            file.name)))
        # start download
        self.__pyload.adm.download_preparing(file)
        file.plugin.preprocessing(self)
        self.__pyload.log.info(
            self._("Download finished: {0}").format(file.name))
        self.__pyload.adm.download_finished(file)
        self.__pyload.files.check_package_finished(file)

    def _finalize(self, file):
        """Cleanup run after every job, successful or not."""
        self.__pyload.files.save()
        file.check_if_processed()
        # FIX: sys.exc_clear() exists only on Python 2; calling it
        # unconditionally raises AttributeError on Python 3 inside this
        # cleanup path. Guard it so both interpreters work.
        if hasattr(sys, 'exc_clear'):
            sys.exc_clear()
        # manager could still be waiting for it
        self.__running.set()
        # only done when job was not put back
        if self.queue.empty():
            self.__manager.done(self)

    def run(self):
        """
        Run method.
        """
        file = None
        while True:
            del file
            self.active = self.queue.get()
            file = self.active
            if self.active == "quit":
                # Sentinel: detach from the manager and end the thread.
                self.active = None
                self.__manager.discard(self)
                return True
            try:
                self._run(file)
            except NotImplementedError:
                self._handle_notimplement(file)
                continue
            except Abort:
                self._handle_abort(file)
                # abort cleans the file
                # self.clean(file)
                continue
            except Reconnect:
                self._handle_reconnect(file)
                continue
            except Retry as e:
                self._handle_retry(file, e.args[0])
                continue
            except Fail as e:
                self._handle_fail(file, e.args[0])
                continue
            except error as e:
                # pycurl errors carry either (code, msg) or just args.
                errcode = None
                errmsg = e.args
                if len(e.args) == 2:
                    errcode, errmsg = e.args
                self._handle_error(file, errmsg, errcode)
                self.clean(file)
                continue
            except Skip as e:
                self._handle_skip(file, str(e))
                continue
            except Exception as e:
                # HTTP 500 is treated as temporarily offline, anything
                # else as a hard failure.
                if isinstance(e, ResponseException) and e.code == 500:
                    self._handle_tempoffline(file)
                else:
                    self._handle_failed(file, str(e))
                continue
            finally:
                self._finalize(file)
            # file.plugin.req.clean()
            self.active = None
            file.finish_if_done()
            self.__pyload.files.save()

    def get_progress_info(self):
        """Return progress of the active job, or None when idle."""
        if not self.active:
            return None
        return self.active.get_progress_info()

    def put(self, job):
        """
        Assign a job to the thread.
        """
        self.queue.put(job)

    def clean(self, file):
        """
        Set thread inactive and release file.
        """
        file.release()

    def quit(self):
        """
        Stops the thread.
        """
        self.put("quit")
class DatabaseBackend(Thread):
    """Background thread that owns the sqlite connection.

    All database work is funneled through a job queue so that only this
    thread ever touches the connection/cursor.  Sub-backends register via
    :meth:`register_sub` and are reached through :meth:`__getattr__`.
    """

    # registered sub-backend classes, consulted by __getattr__
    subs = []

    # sqlite database file and the sidecar file holding the schema version
    DB_FILE = 'pyload.db'
    VERSION_FILE = 'db.version'

    def __init__(self, core):
        super(DatabaseBackend, self).__init__()
        self.setDaemon(True)
        self.pyload = core
        self._ = core._
        self.manager = None  # set later
        self.error = None  # TODO: Recheck...
        self.__running = Event()
        self.jobs = Queue()
        # NOTE(review): set_db is defined elsewhere in this module;
        # presumably it publishes this instance as the global db handle.
        set_db(self)

    @property
    def running(self):
        # Set once init() finished (successfully or not) inside run().
        return self.__running.is_set()

    def setup(self):
        """
        *MUST* be called before db can be used !.
        """
        # Start the thread and block until init() has completed.
        self.start()
        self.__running.wait()

    def init(self):
        """Main loop, which executes commands."""
        version = self._check_version()
        self.conn = sqlite3.connect(self.DB_FILE)
        # restrict db file to the owning user
        os.chmod(self.DB_FILE, 0o600)
        self.c = self.conn.cursor()
        if version is not None and version < DB_VERSION:
            success = self._convert_db(version)
            # delete database
            if not success:
                self.c.close()
                self.conn.close()
                remove(self.VERSION_FILE)
                # keep a backup of the incompatible database
                shutil.move(self.DB_FILE, self.DB_FILE + '.bak')
                self.pyload.log.warning(
                    self._('Database was deleted due to incompatible version'))
                # NOTE(review): file is opened in binary mode but to_str()
                # presumably returns text — verify this works on Python 3.
                with io.open(self.VERSION_FILE, mode='wb') as fp:
                    fp.write(to_str(DB_VERSION))
                self.conn = sqlite3.connect(self.DB_FILE)
                os.chmod(self.DB_FILE, 0o600)
                self.c = self.conn.cursor()
        self._create_tables()
        self.conn.commit()

    def run(self):
        try:
            self.init()
        except Exception as exc:
            # remembered and re-raised to callers by queue()
            self.error = exc
        finally:
            # unblock setup() even if initialization failed
            self.__running.set()

        while True:
            j = self.jobs.get()
            if j == 'quit':
                # sentinel from exit(): flush and close the connection
                self.c.close()
                self.conn.commit()
                self.conn.close()
                self.closing.set()
                break
            j.process_job()

    # TODO: Recheck...
    def exit(self):
        # Ask the worker loop to shut down and wait up to 1s for it.
        self.__running.clear()
        self.closing = Event()
        self.jobs.put('quit')
        self.closing.wait(1)

    def _check_version(self):
        """Get db version."""
        # Create the version file with the current version if missing/empty.
        if not os.path.isfile(self.VERSION_FILE) or not os.path.getsize(
                self.VERSION_FILE):
            with io.open(self.VERSION_FILE, mode='w') as fp:
                fp.write(to_str(DB_VERSION))
        with io.open(self.VERSION_FILE, mode='r') as fp:
            v = int(fp.read().strip())
        return v

    def _convert_db(self, v):
        """Run the converter for schema version *v*; False on failure."""
        # NOTE(review): dispatches to '_convertV<N>' but the only converter
        # below is named '_convert_v6' — the getattr will always fail and
        # fall through to False. Harmless today (the converter returns
        # False anyway) but worth confirming the intended naming.
        try:
            return getattr(self, '_convertV{0:d}'.format(v))()
        except Exception:
            return False

    # -- convert scripts start --

    def _convert_v6(self):
        # No migration from version 6; forces a database reset in init().
        return False

    # -- convert scripts end --

    def _create_tables(self):
        """Create tables for database."""
        self.c.execute(
            'CREATE TABLE IF NOT EXISTS "packages" ('
            '"pid" INTEGER PRIMARY KEY AUTOINCREMENT, '
            '"name" TEXT NOT NULL, '
            '"folder" TEXT DEFAULT "" NOT NULL, '
            '"site" TEXT DEFAULT "" NOT NULL, '
            '"comment" TEXT DEFAULT "" NOT NULL, '
            '"password" TEXT DEFAULT "" NOT NULL, '
            '"added" INTEGER DEFAULT 0 NOT NULL,'  # set by trigger
            '"status" INTEGER DEFAULT 0 NOT NULL,'
            '"tags" TEXT DEFAULT "" NOT NULL,'
            '"shared" INTEGER DEFAULT 0 NOT NULL,'
            '"packageorder" INTEGER DEFAULT -1 NOT NULL,'  # inc by trigger
            '"root" INTEGER DEFAULT -1 NOT NULL, '
            '"owner" INTEGER NOT NULL, '
            'FOREIGN KEY(owner) REFERENCES users(uid), '
            'CHECK (root != pid)'
            ')')

        # stamp 'added' and assign the next packageorder on insert
        self.c.execute('CREATE TRIGGER IF NOT EXISTS "insert_package" '
                       'AFTER INSERT ON "packages"'
                       'BEGIN '
                       'UPDATE packages SET added = strftime("%s", "now"), '
                       'packageorder = (SELECT max(p.packageorder) + 1 FROM '
                       'packages p WHERE p.root=new.root) '
                       'WHERE rowid = new.rowid;'
                       'END')

        # cascade-delete files and close the ordering gap on delete
        self.c.execute(
            'CREATE TRIGGER IF NOT EXISTS "delete_package" '
            'AFTER DELETE ON "packages"'
            'BEGIN '
            'DELETE FROM files WHERE package = old.pid;'
            'UPDATE packages SET packageorder=packageorder-1 '
            'WHERE packageorder > old.packageorder AND root=old.pid;'
            'END')
        self.c.execute('CREATE INDEX IF NOT EXISTS "package_index" ON '
                       'packages(root, owner)')
        self.c.execute(
            'CREATE INDEX IF NOT EXISTS "package_owner" ON packages(owner)')

        self.c.execute('CREATE TABLE IF NOT EXISTS "files" ('
                       '"fid" INTEGER PRIMARY KEY AUTOINCREMENT, '
                       '"name" TEXT NOT NULL, '
                       '"size" INTEGER DEFAULT 0 NOT NULL, '
                       '"status" INTEGER DEFAULT 0 NOT NULL, '
                       '"media" INTEGER DEFAULT 1 NOT NULL,'
                       '"added" INTEGER DEFAULT 0 NOT NULL,'
                       '"fileorder" INTEGER DEFAULT -1 NOT NULL, '
                       '"url" TEXT DEFAULT "" NOT NULL, '
                       '"plugin" TEXT DEFAULT "" NOT NULL, '
                       '"hash" TEXT DEFAULT "" NOT NULL, '
                       '"dlstatus" INTEGER DEFAULT 0 NOT NULL, '
                       '"error" TEXT DEFAULT "" NOT NULL, '
                       '"package" INTEGER NOT NULL, '
                       '"owner" INTEGER NOT NULL, '
                       'FOREIGN KEY(owner) REFERENCES users(uid), '
                       'FOREIGN KEY(package) REFERENCES packages(id)'
                       ')')
        self.c.execute(
            'CREATE INDEX IF NOT EXISTS "file_index" ON files(package, owner)')
        self.c.execute(
            'CREATE INDEX IF NOT EXISTS "file_owner" ON files(owner)')
        self.c.execute(
            'CREATE INDEX IF NOT EXISTS "file_plugin" ON files(plugin)')

        # stamp 'added' and assign the next fileorder within the package
        self.c.execute('CREATE TRIGGER IF NOT EXISTS "insert_file" '
                       'AFTER INSERT ON "files"'
                       'BEGIN '
                       'UPDATE files SET added = strftime("%s", "now"), '
                       'fileorder = (SELECT max(f.fileorder) + 1 FROM files f '
                       'WHERE f.package=new.package) '
                       'WHERE rowid = new.rowid;'
                       'END')

        # one collector blob per user, newest write wins
        self.c.execute('CREATE TABLE IF NOT EXISTS "collector" ('
                       '"owner" INTEGER NOT NULL, '
                       '"data" TEXT NOT NULL, '
                       'FOREIGN KEY(owner) REFERENCES users(uid), '
                       'PRIMARY KEY(owner) ON CONFLICT REPLACE'
                       ') ')

        # generic key/value storage per identifier
        self.c.execute('CREATE TABLE IF NOT EXISTS "storage" ('
                       '"identifier" TEXT NOT NULL, '
                       '"key" TEXT NOT NULL, '
                       '"value" TEXT DEFAULT "", '
                       'PRIMARY KEY (identifier, key) ON CONFLICT REPLACE'
                       ')')

        self.c.execute(
            'CREATE TABLE IF NOT EXISTS "users" ('
            '"uid" INTEGER PRIMARY KEY AUTOINCREMENT, '
            '"name" TEXT NOT NULL UNIQUE, '
            '"email" TEXT DEFAULT "" NOT NULL, '
            '"password" TEXT NOT NULL, '
            '"role" INTEGER DEFAULT 0 NOT NULL, '
            '"permission" INTEGER DEFAULT 0 NOT NULL, '
            '"folder" TEXT DEFAULT "" NOT NULL, '
            '"traffic" INTEGER DEFAULT -1 NOT NULL, '
            '"dllimit" INTEGER DEFAULT -1 NOT NULL, '
            '"dlquota" TEXT DEFAULT "" NOT NULL, '
            '"hddquota" INTEGER DEFAULT -1 NOT NULL, '
            '"template" TEXT DEFAULT "default" NOT NULL, '
            '"user" INTEGER DEFAULT -1 NOT NULL, '  # set by trigger to self
            'FOREIGN KEY(user) REFERENCES users(uid)'
            ')')
        self.c.execute(
            'CREATE INDEX IF NOT EXISTS "username_index" ON users(name)')

        # new users own themselves and default their folder to their name
        self.c.execute('CREATE TRIGGER IF NOT EXISTS "insert_user" AFTER '
                       'INSERT ON "users"'
                       'BEGIN '
                       'UPDATE users SET user = new.uid, folder=new.name '
                       'WHERE rowid = new.rowid;'
                       'END')

        self.c.execute('CREATE TABLE IF NOT EXISTS "settings" ('
                       '"plugin" TEXT NOT NULL, '
                       '"user" INTEGER DEFAULT -1 NOT NULL, '
                       '"config" TEXT NOT NULL, '
                       'FOREIGN KEY(user) REFERENCES users(uid), '
                       'PRIMARY KEY (plugin, user) ON CONFLICT REPLACE'
                       ')')

        self.c.execute('CREATE TABLE IF NOT EXISTS "accounts" ('
                       '"aid" INTEGER PRIMARY KEY AUTOINCREMENT, '
                       '"plugin" TEXT NOT NULL, '
                       '"loginname" TEXT NOT NULL, '
                       '"owner" INTEGER NOT NULL, '
                       '"activated" INTEGER NOT NULL DEFAULT 1, '
                       '"password" TEXT DEFAULT "", '
                       '"shared" INTEGER NOT NULL DEFAULT 0, '
                       '"options" TEXT DEFAULT "", '
                       'FOREIGN KEY(owner) REFERENCES users(uid)'
                       ')')
        self.c.execute('CREATE INDEX IF NOT EXISTS "accounts_login" ON '
                       'accounts(plugin, loginname)')

        self.c.execute('CREATE TABLE IF NOT EXISTS "stats" ('
                       '"id" INTEGER PRIMARY KEY AUTOINCREMENT, '
                       '"user" INTEGER NOT NULL, '
                       '"plugin" TEXT NOT NULL, '
                       '"time" INTEGER NOT NULL, '
                       '"premium" INTEGER DEFAULT 0 NOT NULL, '
                       '"amount" INTEGER DEFAULT 0 NOT NULL, '
                       'FOREIGN KEY(user) REFERENCES users(uid)'
                       ')')
        self.c.execute(
            'CREATE INDEX IF NOT EXISTS "stats_time" ON stats(user, time)')

        # try to lower ids
        self.c.execute('SELECT max(fid) FROM files')
        fid = self.c.fetchone()[0]
        fid = int(fid) if fid else 0
        self.c.execute('UPDATE SQLITE_SEQUENCE SET seq=? WHERE name=?',
                       (fid, 'files'))
        self.c.execute('SELECT max(pid) FROM packages')
        pid = self.c.fetchone()[0]
        pid = int(pid) if pid else 0
        self.c.execute('UPDATE SQLITE_SEQUENCE SET seq=? WHERE name=?',
                       (pid, 'packages'))

        self.c.execute('VACUUM')

    def create_cursor(self):
        """Return a new cursor on the shared connection."""
        return self.conn.cursor()

    # NOTE(review): these decorators are evaluated while the class body runs,
    # before the same-named methods below exist in the class namespace — they
    # must refer to module-level 'async'/'queue' helpers not visible in this
    # chunk; verify against the rest of the module.
    @async
    def commit(self):
        self.conn.commit()

    @queue
    def sync_save(self):
        self.conn.commit()

    @async
    def rollback(self):
        self.conn.rollback()

    # NOTE(review): 'async' became a reserved keyword in Python 3.7 — this
    # definition is a SyntaxError on modern interpreters and needs renaming.
    def async (self, f, *args, **kwargs):
        # Fire-and-forget: enqueue the job without waiting for its result.
        args = (self, ) + args
        job = DatabaseJob(f, *args, **kwargs)
        self.jobs.put(job)

    def queue(self, f, *args, **kwargs):
        """Enqueue *f* as a job and (when running) wait for its result."""
        # Raise previous error of initialization
        if isinstance(self.error, Exception):
            raise self.error
        args = (self, ) + args
        job = DatabaseJob(f, *args, **kwargs)
        self.jobs.put(job)
        # only wait when db is running
        if self.running:
            job.wait()
        return job.result

    @classmethod
    def register_sub(cls, klass):
        cls.subs.append(klass)

    @classmethod
    def unregister_sub(cls, klass):
        cls.subs.remove(klass)

    def __getattr__(self, attr):
        # Delegate unknown attributes to the registered sub-backends.
        for sub in DatabaseBackend.subs:
            if hasattr(sub, attr):
                return getattr(sub, attr)
        raise AttributeError(attr)