def __init__(self, cfgdir, tmpdir, debug=None, restore=False):
    """Initialize the core: logging, database, managers, request factory, API.

    :param cfgdir: configuration directory (becomes the working directory)
    :param tmpdir: temporary directory
    :param debug: explicit debug flag; when None, read from the 'log' config section
    :param restore: forwarded to _init_database (restore mode)
    """
    # internal lifecycle state
    self.__running = Event()
    self.__do_restart = False
    self.__do_exit = False
    # identity translation until a real gettext is installed
    self._ = lambda x: x
    self.cfgdir = fullpath(cfgdir)
    self.tmpdir = fullpath(tmpdir)
    # all relative paths below resolve against the config dir
    os.chdir(self.cfgdir)
    # if self.tmpdir not in sys.path:
    # sys.path.append(self.tmpdir)
    # if refresh:
    # cleanpy(PACKDIR)
    self.config = ConfigParser(self.DEFAULT_CONFIGNAME)
    # explicit argument wins over the configured value
    self.debug = self.config.get('log', 'debug') if debug is None else debug
    self.log = LoggerFactory(self, self.debug)
    # order matters: database before managers, managers before API
    self._init_database(restore)
    self._init_managers()
    self.request = self.req = RequestFactory(self)
    self._init_api()
    # make sure we shut down cleanly on interpreter exit
    atexit.register(self.exit)
def run(self):
    """Start the configured webserver; any startup failure is stored in self.error."""
    self.running = Event()
    self.running.set()

    global webinterface
    from pyload.webui import interface as webinterface

    # Fall back to plain HTTP when the SSL files are missing
    if self.https:
        if not os.path.exists(self.cert) or not os.path.exists(self.key):
            log.warning(_("SSL certificates not found"))
            self.https = False

    if webinterface.UNAVAILALBE:
        log.warning(_("WebUI built is not available"))
    # elif webinterface.APP_PATH == "app":
    #     log.info(_("Running webui in development mode"))

    prefer = None
    # These cases covers all settings
    if self.server == "threaded":
        prefer = "threaded"
    elif self.server == "fastcgi":
        prefer = "flup"
    elif self.server == "fallback":
        prefer = "wsgiref"

    server = self.select_server(prefer)
    try:
        self.start_server(server)
    except Exception as e:
        # BUG FIX: Exception.message was removed in Python 3 (PEP 352);
        # format the exception object itself instead.
        log.error(_("Failed starting webserver: {0}").format(e))
        self.error = e
def run(self):
    """Bring the webserver up; record any startup failure in self.error."""
    self.__running = Event()
    self.__running.set()

    # Drop back to plain HTTP when either SSL file is missing.
    if self.https and not (
            os.path.isfile(self.cert) and os.path.isfile(self.key)):
        log.warning(self._("SSL certificates not found"))
        self.https = False

    if iface.UNAVAILALBE:
        log.warning(self._("WebUI built is not available"))
    elif iface.APPDIR.endswith('app'):
        log.info(self._("Running webui in development mode"))

    # Map the configured server setting onto the preferred backend
    # (unknown settings yield None, i.e. no preference).
    prefer = {
        "threaded": "threaded",
        "fastcgi": "flup",
        "fallback": "wsgiref",
    }.get(self.server)

    server = self.select_server(prefer)
    try:
        self.start_server(server)
    except Exception as e:
        log.error(self._("Failed starting webserver: {0}").format(str(e)))
        self.error = e
def __init__(self, manager):
    """Create an idle download thread bound to *manager*."""
    super(DownloadThread, self).__init__(manager)
    self.__running = Event()  # internal run-state flag
    self.queue = Queue()      # pending jobs for this thread
    self.active = None        # nothing being processed yet
def __init__(self, manager):
    """Attach the backend to its manager; starts out enabled but not running."""
    Thread.__init__(self)
    core = manager.pyload
    self.manager = manager
    self.pyload = core
    self._ = core._
    self.enabled = True
    self.__running = Event()
def __init__(self, options): """ Override SocketServer.TCPServer.__init__ to set SSL enabled socket object to self.socket before server_bind and server_activate, if necessary """ # Removed dispatcher init here self._logger = logging.getLogger('pyload') self.request_queue_size = options.request_queue_size self.__ws_is_shut_down = Event() self.__ws_serving = False socketserver.BaseServer.__init__(self, (options.server_host, options.port), WebSocketRequestHandler) # Expose the options object to allow handler objects access it. We name # it with websocket_ prefix to avoid conflict. self.websocket_server_options = options self._create_sockets() self.server_bind() self.server_activate()
def __init__(self, fn, *args, **kwargs):
    """Wrap *fn* and its call arguments as a queueable database job."""
    # the call target and its arguments
    self.fn = fn
    self.args = args
    self.kwargs = kwargs
    # outcome slots, filled in when the job is processed
    self.result = None
    self.exception = False
    # signalled once the job has been executed
    self.done = Event()
def __init__(self, func, *args, **kwargs):
    """Wrap *func* and its call arguments as a queueable database job."""
    # the call target and its arguments
    self.func = func
    self.args = args
    self.kwgs = kwargs
    # outcome slots, filled in when the job runs
    self.result = None
    self.exception = False
    # signalled once the job has been executed
    self.done = Event()
def __init__(self, manager):
    """Create the thread in idle state; work arrives via the job queue."""
    PluginThread.__init__(self, manager)
    self.__running = Event()  # internal run-state flag
    self.queue = Queue()      # incoming jobs
    self.active = None        # nothing being worked on yet
def __init__(self, core):
    """Prepare the database thread and register it as the global backend."""
    Thread.__init__(self)
    self.daemon = True  # same effect as the legacy setDaemon(True)
    self.pyload = core
    self.manager = None  #: set later
    self.error = None
    self.running = Event()
    self.jobs = Queue()  # pending database jobs
    set_db(self)
def __init__(self, func, *args, **kwargs):
    """Wrap *func* and its arguments as a queueable database job."""
    self.func = func
    self.args = args
    self.kwgs = kwargs
    self.result = None
    self.exception = False
    self.done = Event()
    # args[0] is the DatabaseBackend instance, which carries the core
    self.pyload = self.args[0].pyload
def __init__(self, core):
    """Prepare the database thread and register it as the global backend."""
    super(DatabaseBackend, self).__init__()
    self.daemon = True  # equivalent to the legacy setDaemon(True)
    self.pyload = core
    self._ = core._
    self.manager = None  # wired up later by the core
    self.error = None  # TODO: Recheck...
    self.__running = Event()
    self.jobs = Queue()
    set_db(self)
def setup(self):
    """Initialise scheduling state: paused, with empty thread pools."""
    # won't start download when true
    self.pause = True
    # each thread is in exactly one of these pools
    self.free, self.downloading, self.decrypting = [], [], []
    # indicates when reconnect has occurred
    self.reconnecting = Event()
    self.lock = RWLock()
def run(self):
    """Start the configured webserver; any startup failure is stored in self.error."""
    self.running = Event()
    self.running.set()

    global webinterface
    from pyload.webui import interface as webinterface

    # Fall back to plain HTTP when the SSL files are missing
    if self.https:
        if not os.path.exists(self.cert) or not os.path.exists(self.key):
            log.warning(_("SSL certificates not found"))
            self.https = False

    if webinterface.UNAVAILALBE:
        log.warning(_("WebUI built is not available"))
    elif webinterface.APPDIR.endswith('app'):
        log.info(_("Running webui in development mode"))

    prefer = None
    # These cases covers all settings
    if self.server == "threaded":
        prefer = "threaded"
    elif self.server == "fastcgi":
        prefer = "flup"
    elif self.server == "fallback":
        prefer = "wsgiref"

    server = self.select_server(prefer)
    try:
        self.start_server(server)
    except Exception as e:
        # BUG FIX: Exception.message was removed in Python 3 (PEP 352);
        # format the exception object itself instead.
        log.error(_("Failed starting webserver: {0}").format(e))
        self.error = e
def __init__(self, cfgdir, tmpdir, debug=None, restore=False):
    """Set up the core: config, logging, database, managers and the API.

    :param cfgdir: configuration directory; the process chdirs into it
    :param tmpdir: temporary directory
    :param debug: explicit debug flag, or None to use the configured value
    :param restore: forwarded to _init_database
    """
    # lifecycle flags used by the main loop
    self.__running = Event()
    self.__do_restart = False
    self.__do_exit = False
    # placeholder translation function (identity)
    self._ = lambda x: x
    self.cfgdir = fullpath(cfgdir)
    self.tmpdir = fullpath(tmpdir)
    # relative paths now resolve against the config directory
    os.chdir(self.cfgdir)
    # if self.tmpdir not in sys.path:
    # sys.path.append(self.tmpdir)
    # if refresh:
    # cleanpy(PACKDIR)
    self.config = ConfigParser(self.DEFAULT_CONFIGNAME)
    # an explicit debug argument overrides the config setting
    self.debug = self.config.get(
        'log', 'debug') if debug is None else debug
    self.log = LoggerFactory(self, self.debug)
    # initialization order: database -> managers -> request factory -> API
    self._init_database(restore)
    self._init_managers()
    self.request = self.req = RequestFactory(self)
    self._init_api()
    # ensure a clean shutdown at interpreter exit
    atexit.register(self.exit)
def __init__(self, bucket, request=None, logger=None):
    """Create a download request, inheriting context from *request* if given."""
    # use the supplied logger, or a silent one as fallback
    if logger is not None:
        self.log = logger
    else:
        null_log = logging.getLogger('null')
        null_log.addHandler(logging.NullHandler())
        self.log = null_log
    # copy the cookie/option context from the source request, if any
    if request:
        context = request.get_context()
    else:
        context = [{}]
    super(DownloadRequest, self).__init__(*context)
    self.__running = Event()
    self._name = None
    self._size = 0
    # bucket used for rate limiting
    self.bucket = bucket
class DatabaseJob(object):
    """A single database call queued for the backend thread.

    Wraps a callable with its arguments; the worker thread invokes
    process_job() while the submitter may block on wait().
    """

    __slots__ = ['args', 'done', 'exception', 'fn', 'kwargs', 'result']

    def __init__(self, fn, *args, **kwargs):
        self.fn = fn
        self.args = args
        self.kwargs = kwargs
        self.result = None
        self.exception = False
        self.done = Event()

    def __repr__(self):
        trace = ""
        return "DataBase Job {0}:{1}\n{2}Result: {3}".format(
            self.fn.__name__, self.args[1:], trace, self.result)

    def process_job(self):
        """Run the wrapped call; store result or exception, then signal done."""
        try:
            self.result = self.fn(*self.args, **self.kwargs)
        except Exception as e:
            print_exc()
            # best-effort diagnostics; never let reporting break the worker
            try:
                print("Database Error @", self.fn.__name__,
                      self.args[1:], self.kwargs, str(e))
            except Exception:
                pass
            self.exception = e
        finally:
            self.done.set()

    def wait(self):
        """Block until the job has been processed."""
        self.done.wait()
def __init__(self, core):
    """Initialise manager state: paused, with empty thread pools."""
    self.pyload = core
    #: won't start download when true
    self.pause = True
    #: pools — each thread belongs to exactly one of them
    self.free, self.downloading, self.decrypting = [], [], []
    #: indicates when reconnect has occurred
    self.reconnecting = Event()
    self.lock = ReadWriteLock()
def __init__(self, manager):
    """Construct the thread and start it immediately."""
    PluginThread.__init__(self, manager)
    self.running = Event()
    self.queue = Queue()  #: job queue
    self.active = None
    # thread is launched straight from the constructor
    self.start()
class DatabaseJob(object):
    """One queued database operation, executed by the backend thread."""

    def __init__(self, func, *args, **kwargs):
        self.func = func
        self.args = args
        self.kwgs = kwargs
        self.result = None
        self.exception = False
        self.done = Event()
        # args[0] is the DatabaseBackend, which exposes the core
        self.pyload = self.args[0].pyload

    def __repr__(self):
        trace = ''
        return 'DataBase Job {0}:{1}{2}{3}Result: {4}'.format(
            self.func.__name__, self.args[1:], os.linesep, trace, self.result)

    def process_job(self):
        """Execute the wrapped call; log and record any failure, then signal done."""
        try:
            self.result = self.func(*self.args, **self.kwgs)
        except Exception as exc:
            self.pyload.log.error(exc, exc_info=self.pyload.debug)
            self.exception = exc
        finally:
            self.done.set()

    def wait(self):
        """Block until process_job() has finished."""
        self.done.wait()
class RemoteBackend(Thread):
    """Base class for remote-access backends driven by the manager.

    Subclasses implement setup(), serve() and shutdown().
    """

    def __init__(self, manager):
        Thread.__init__(self)
        core = manager.pyload
        self.manager = manager
        self.pyload = core
        self._ = core._
        self.enabled = True
        self.__running = Event()

    @property
    def running(self):
        """True while serve() is active."""
        return self.__running.is_set()

    def run(self):
        self.__running.set()
        try:
            self.serve()
        except Exception as e:
            self.pyload.log.error(
                self._("Remote backend error: {0}").format(str(e)))
            if self.pyload.debug:
                print_exc()
        finally:
            self.__running.clear()

    def setup(self, host, port):
        raise NotImplementedError

    def check_deps(self):
        return True

    def serve(self):
        raise NotImplementedError

    def shutdown(self):
        raise NotImplementedError

    def quit(self):
        # clear the flag first, then ask the serve loop to shut down
        self.enabled = False
        self.shutdown()
class RemoteBackend(Thread):
    """Base class for remote-access backends driven by the manager.

    Subclasses implement setup(), serve() and shutdown().
    """

    __slots__ = ['enabled', 'manager', 'pyload', 'running']

    def __init__(self, manager):
        Thread.__init__(self)
        self.manager = manager
        self.pyload = manager.pyload
        self.enabled = True
        self.running = Event()

    def run(self):
        self.running.set()
        try:
            self.serve()
        except Exception as e:
            # BUG FIX: Exception.message was removed in Python 3 (PEP 352);
            # format the exception object itself instead.
            self.pyload.log.error(
                _("Remote backend error: {0}").format(e))
            if self.pyload.debug:
                print_exc()
        finally:
            self.running.clear()

    def setup(self, host, port):
        raise NotImplementedError

    def check_deps(self):
        return True

    def serve(self):
        raise NotImplementedError

    def shutdown(self):
        raise NotImplementedError

    def stop(self):
        self.enabled = False  #: set flag and call shutdown message, so thread can react
        self.shutdown()
class DownloadRequest(with_metaclass(ABCMeta, Request)):
    """Abstract class for download request.

    Concrete subclasses implement download(); the size/name/speed/arrived
    properties here are base-class defaults that subclasses may override.
    """

    def __init__(self, bucket, request=None, logger=None):
        """Create the request, copying context from *request* when given.

        :param bucket: bucket used for rate limiting
        :param request: optional source request whose context is copied
        :param logger: optional logger; a silent one is used when omitted
        """
        # fall back to a no-op logger when none is supplied
        if logger is None:
            self.log = logging.getLogger('null')
            self.log.addHandler(logging.NullHandler())
        else:
            self.log = logger
        # Copies the context
        context = request.get_context() if request else [{}]
        super(DownloadRequest, self).__init__(*context)
        self.__running = Event()
        self._name = None
        self._size = 0
        # bucket used for rate limiting
        self.bucket = bucket

    @abstractmethod
    def download(self, uri, filename, *args, **kwargs):
        """Downloads the resource with additional options depending on
        implementation."""

    @property
    def running(self):
        # True while a download is marked in progress via the internal event
        return self.__running.is_set()

    @property
    def size(self):
        """Size in bytes."""
        return self._size

    @property
    def name(self):
        """Name of the resource if known."""
        return self._name

    @property
    def speed(self):
        """Download rate in bytes per second."""
        # base-class default; subclasses are expected to override
        return 0

    @property
    def arrived(self):
        """Number of bytes already loaded."""
        # base-class default; subclasses are expected to override
        return 0
def __init__(self, core):
    """Initialise manager state: paused, with empty thread pools."""
    self.pyload = core
    #: won't start download when true
    self.pause = True
    #: pools — each thread belongs to exactly one of them
    self.free, self.working, self.decrypter = [], [], []
    #: indicates when reconnect has occurred
    self.reconnecting = Event()
    self.lock = ReadWriteLock()
class DownloadThread(PluginThread):
    """
    Thread for downloading files from 'real' hoster plugins.

    Jobs (file objects) are fed through self.queue; the sentinel string
    "quit" terminates the thread.
    """

    # BUG FIX: 'running' was listed in __slots__ while a 'running' property
    # is also defined in the class body — that combination raises
    # "ValueError: 'running' in __slots__ conflicts with class variable"
    # at class-creation time. The property provides the attribute.
    __slots__ = ['active', 'queue']

    def __init__(self, manager):
        """
        Constructor.
        """
        PluginThread.__init__(self, manager)
        self.__running = Event()
        self.queue = Queue()  # job queue
        self.active = None

    @property
    def running(self):
        return self.__running.is_set()

    # NOTE(review): the name-mangled self.__pyload / self.__manager attributes
    # resolve to _DownloadThread__pyload / _DownloadThread__manager; confirm
    # that PluginThread actually provides them under these names.
    def _handle_abort(self, file):
        # logging is best-effort; the status change must happen regardless
        try:
            self.__pyload.log.info(
                self._("Download aborted: {0}").format(file.name))
        except Exception:
            pass
        file.set_status("aborted")

    def _handle_reconnect(self, file):
        # requeue the job, then wait for the reconnect to finish
        self.queue.put(file)
        # file.req.clear_cookies()
        while self.__manager.reconnecting.isSet():
            time.sleep(0.5)

    def _handle_retry(self, file, reason):
        self.__pyload.log.info(
            self._("Download restarted: {0} | {1}").format(file.name, reason))
        self.queue.put(file)

    def _handle_notimplement(self, file):
        self.__pyload.log.error(
            self._("Plugin {0} is missing a function").format(file.pluginname))
        file.set_status("failed")
        file.error = "Plugin does not work"
        self.clean(file)

    def _handle_tempoffline(self, file):
        file.set_status("temp. offline")
        self.__pyload.log.warning(
            self._("Download is temporary offline: {0}").format(file.name))
        file.error = self._("Internal Server Error")
        if self.__pyload.debug:
            print_exc()
            self.debug_report(file)
        self.__pyload.adm.download_failed(file)
        self.clean(file)

    def _handle_failed(self, file, errmsg):
        file.set_status("failed")
        self.__pyload.log.warning(
            self._("Download failed: {0} | {1}").format(file.name, errmsg))
        file.error = errmsg
        if self.__pyload.debug:
            print_exc()
            self.debug_report(file)
        self.__pyload.adm.download_failed(file)
        self.clean(file)

    # TODO: activate former skipped downloads
    def _handle_fail(self, file, errmsg):
        # the error message doubles as a status selector
        if errmsg == "offline":
            file.set_status("offline")
            self.__pyload.log.warning(
                self._("Download is offline: {0}").format(file.name))
        elif errmsg == "temp. offline":
            file.set_status("temp. offline")
            self.__pyload.log.warning(
                self._("Download is temporary offline: {0}").format(file.name))
        else:
            file.set_status("failed")
            self.__pyload.log.warning(
                self._("Download failed: {0} | {1}").format(file.name, errmsg))
            file.error = errmsg
        self.__pyload.adm.download_failed(file)
        self.clean(file)

    def _handle_skip(self, file, errmsg):
        file.set_status("skipped")
        self.__pyload.log.info(
            self._("Download skipped: {0} due to {1}").format(
                file.name, errmsg))
        self.clean(file)
        self.__pyload.files.check_package_finished(file)
        self.active = None
        self.__pyload.files.save()

    def _handle_error(self, file, errmsg, errcode=None):
        self.__pyload.log.debug("pycurl exception {0}: {1}".format(
            errcode, errmsg))
        # connection-level curl errors: wait a minute, then requeue
        if errcode in (7, 18, 28, 52, 56):
            self.__pyload.log.warning(
                self._("Couldn't connect to host or connection reset, "
                       "waiting 1 minute and retry"))
            wait = time.time() + 60
            file.wait_until = wait
            file.set_status("waiting")
            while time.time() < wait:
                time.sleep(0.5)
                if file.abort:
                    break
            if file.abort:
                self.__pyload.log.info(
                    self._("Download aborted: {0}").format(file.name))
                file.set_status("aborted")
                # do not clean, aborting function does this itself
                # self.clean(file)
            else:
                self.queue.put(file)
        else:
            file.set_status("failed")
            self.__pyload.log.error(
                self._("pycurl error {0}: {1}").format(errcode, errmsg))
            if self.__pyload.debug:
                print_exc()
                self.debug_report(file)
            self.__pyload.adm.download_failed(file)

    def _run(self, file):
        file.init_plugin()
        # after initialization the thread is fully ready
        self.__running.set()
        # this file was deleted while queuing
        # TODO: what will happen with new thread manager?
        # if not file.has_plugin(): continue
        file.plugin.check_for_same_files(starting=True)
        self.__pyload.log.info(self._("Download starts: {0}".format(
            file.name)))
        # start download
        self.__pyload.adm.download_preparing(file)
        file.plugin.preprocessing(self)
        self.__pyload.log.info(
            self._("Download finished: {0}").format(file.name))
        self.__pyload.adm.download_finished(file)
        self.__pyload.files.check_package_finished(file)

    def _finalize(self, file):
        self.__pyload.files.save()
        file.check_if_processed()
        # BUG FIX: sys.exc_clear() only exists on Python 2; guard it,
        # matching the modern variant of this class.
        if sys.version_info[0] < 3:
            sys.exc_clear()
        # manager could still be waiting for it
        self.__running.set()
        # only done when job was not put back
        if self.queue.empty():
            self.__manager.done(self)

    def run(self):
        """
        Run method.
        """
        file = None
        while True:
            del file
            self.active = self.queue.get()
            file = self.active
            if self.active == "quit":
                self.active = None
                self.__manager.discard(self)
                return True
            try:
                self._run(file)
            except NotImplementedError:
                self._handle_notimplement(file)
                continue
            except Abort:
                self._handle_abort(file)
                # abort cleans the file
                # self.clean(file)
                continue
            except Reconnect:
                self._handle_reconnect(file)
                continue
            except Retry as e:
                self._handle_retry(file, e.args[0])
                continue
            except Fail as e:
                self._handle_fail(file, e.args[0])
                continue
            except error as e:
                # curl errors carry (code, message) or just a message
                errcode = None
                errmsg = e.args
                if len(e.args) == 2:
                    errcode, errmsg = e.args
                self._handle_error(file, errmsg, errcode)
                self.clean(file)
                continue
            except Skip as e:
                self._handle_skip(file, str(e))
                continue
            except Exception as e:
                if isinstance(e, ResponseException) and e.code == 500:
                    self._handle_tempoffline(file)
                else:
                    self._handle_failed(file, str(e))
                continue
            finally:
                self._finalize(file)
            # file.plugin.req.clean()
            self.active = None
            file.finish_if_done()
            self.__pyload.files.save()

    def get_progress_info(self):
        if not self.active:
            return None
        return self.active.get_progress_info()

    def put(self, job):
        """
        Assign a job to the thread.
        """
        self.queue.put(job)

    def clean(self, file):
        """
        Set thread inactive and release file.
        """
        file.release()

    def quit(self):
        """
        Stops the thread.
        """
        self.put("quit")
class DownloadThread(PluginThread):
    """Thread for downloading files from 'real' hoster plugins.

    Jobs (file objects) arrive through self.queue; the sentinel string
    'quit' terminates the thread.
    """

    # NOTE(review): the mangled attribute _DownloadThread__running is not in
    # __slots__; this only works if PluginThread instances keep a __dict__ —
    # confirm the base class does not define __slots__ itself.
    __slots__ = ['active', 'queue']

    def __init__(self, manager):
        """Constructor."""
        self.__running = Event()
        self.queue = Queue()  # job queue
        self.active = None

    @property
    def running(self):
        # NOTE(review): returns the Event object itself (always truthy),
        # unlike sibling classes that return is_set() — confirm intended.
        return self.__running

    def _handle_abort(self, file):
        # mark the file aborted after logging
        self.pyload.log.info(
            self._('Download aborted: {0}').format(file.name))
        file.set_status('aborted')

    def _handle_reconnect(self, file):
        # requeue the job, then wait until the reconnect has finished
        self.queue.put(file)
        # file.req.clear_cookies()
        while self.manager.reconnecting.isSet():
            time.sleep(0.5)

    def _handle_retry(self, file, reason):
        # NOTE(review): 'reason' is passed as an extra positional argument to
        # log.info instead of into the format string — looks unintended.
        self.pyload.log.info(
            self._('Download restarted: {0}').format(file.name), reason)
        self.queue.put(file)

    def _handle_notimplement(self, file):
        self.pyload.log.error(
            self._('Plugin {0} is missing a function').format(file.pluginname))
        file.set_status('failed')
        file.error = 'Plugin does not work'
        self.clean(file)

    def _handle_tempoffline(self, file, exc):
        file.set_status('temp. offline')
        self.pyload.log.warning(
            self._('Download is temporary offline: {0}').format(file.name))
        file.error = to_str(exc)
        if self.pyload.debug:
            self.pyload.log.error(exc, exc_info=self.pyload.debug)
            self.debug_report(file)
        self.pyload.adm.download_failed(file)
        self.clean(file)

    def _handle_failed(self, file, exc):
        file.set_status('failed')
        self.pyload.log.warning(
            self._('Download failed: {0}').format(file.name))
        file.error = to_str(exc)
        if self.pyload.debug:
            self.pyload.log.error(exc, exc_info=self.pyload.debug)
            self.debug_report(file)
        self.pyload.adm.download_failed(file)
        self.clean(file)

    # TODO: activate former skipped downloads
    def _handle_fail(self, file, exc):
        # the error message doubles as a status selector
        errmsg = to_str(exc)
        if errmsg == 'offline':
            file.set_status('offline')
            self.pyload.log.warning(
                self._('Download is offline: {0}').format(file.name))
        elif errmsg == 'temp. offline':
            file.set_status('temp. offline')
            self.pyload.log.warning(
                self._('Download is temporary offline: {0}').format(file.name))
        else:
            file.set_status('failed')
            self.pyload.log.warning(
                self._('Download failed: {0}').format(file.name))
            file.error = errmsg
        self.pyload.adm.download_failed(file)
        self.clean(file)

    def _handle_skip(self, file, exc):
        file.set_status('skipped')
        self.pyload.log.info(
            self._('Download skipped: {0} due to {1}').format(
                file.name, exc))
        if self.pyload.debug:
            self.pyload.log.error(exc, exc_info=self.pyload.debug)
        self.clean(file)
        self.pyload.files.check_package_finished(file)
        self.active = None
        self.pyload.files.save()

    def _handle_error(self, file, exc):
        # curl errors carry (code, message) or just a message tuple
        errcode = None
        errmsg = exc.args
        if len(exc.args) == 2:
            errcode, errmsg = exc.args
        self.pyload.log.debug(
            'pycurl exception {0}: {1}'.format(errcode, errmsg))
        # connection-level curl errors: wait a minute, then requeue
        if errcode in (7, 18, 28, 52, 56):
            self.pyload.log.warning(
                self._(
                    "Couldn't connect to host or connection reset, "
                    'waiting 1 minute and retry'))
            wait = time.time() + 60
            file.wait_until = wait
            file.set_status('waiting')
            while time.time() < wait:
                time.sleep(0.5)
                if file.abort:
                    break
            if file.abort:
                self.pyload.log.info(
                    self._('Download aborted: {0}').format(file.name))
                file.set_status('aborted')
                # do not clean, aborting function does this itself
                # self.clean(file)
            else:
                self.queue.put(file)
        else:
            file.set_status('failed')
            self.pyload.log.error(
                self._('pycurl error {0}: {1}').format(errcode, errmsg))
            if self.pyload.debug:
                self.pyload.log.error(exc, exc_info=self.pyload.debug)
                self.debug_report(file)
            self.pyload.adm.download_failed(file)

    def _run(self, file):
        file.init_plugin()
        # after initialization the thread is fully ready
        self.__running.set()
        # this file was deleted while queuing
        # TODO: what will happen with new thread manager?
        # if not file.has_plugin(): continue
        file.plugin.check_for_same_files(starting=True)
        self.pyload.log.info(
            self._('Download starts: {0}'.format(file.name)))
        # start download
        self.pyload.adm.download_preparing(file)
        file.plugin.preprocessing(self)
        self.pyload.log.info(
            self._('Download finished: {0}').format(file.name))
        self.pyload.adm.download_finished(file)
        self.pyload.files.check_package_finished(file)

    def _finalize(self, file):
        self.pyload.files.save()
        file.check_if_processed()
        if sys.version_info[0] < 3:
            # not available in python 3
            sys.exc_clear()
        # manager could still be waiting for it
        self.__running.set()
        # only done when job was not put back
        if self.queue.empty():
            self.manager.done(self)

    def run(self):
        """Run method."""
        file = None
        while True:
            del file
            self.active = self.queue.get()
            file = self.active
            # sentinel: detach from the manager and terminate
            if self.active == 'quit':
                self.active = None
                self.manager.discard(self)
                return True
            try:
                self._run(file)
            except NotImplementedError:
                self._handle_notimplement(file)
                continue
            except Abort:
                self._handle_abort(file)
                # abort cleans the file
                # self.clean(file)
                continue
            except Reconnect:
                self._handle_reconnect(file)
                continue
            except Retry as exc:
                self._handle_retry(file, exc.args[0])
                continue
            except Fail as exc:
                self._handle_fail(file, exc.args[0])
                continue
            except pycurl.error as exc:
                self._handle_error(file, exc)
                self.clean(file)
                continue
            except Skip as exc:
                self._handle_skip(file, exc)
                continue
            except Exception as exc:
                if isinstance(exc, ResponseException) and exc.code == 500:
                    self._handle_tempoffline(file, exc)
                else:
                    self._handle_failed(file, exc)
                continue
            finally:
                self._finalize(file)
            # file.plugin.req.clean()
            self.active = None
            file.finish_if_done()
            self.pyload.files.save()

    def get_progress_info(self):
        if not self.active:
            return
        return self.active.get_progress_info()

    def put(self, job):
        """Assign a job to the thread."""
        self.queue.put(job)

    def clean(self, file):
        """Set thread inactive and release file."""
        file.release()

    def quit(self):
        """Stops the thread."""
        self.put('quit')
def exit(self):
    """Ask the worker loop to terminate and wait briefly for it to close."""
    self.__running.clear()
    # event the worker sets once it has finished closing down
    self.closing = Event()
    # sentinel understood by the job-processing loop
    self.jobs.put('quit')
    # bounded wait so shutdown can never hang indefinitely
    self.closing.wait(1)
class WebServer(Thread):
    """Daemon thread that selects and runs the configured webui server."""

    __slots__ = [
        'cert', 'debug', 'error', 'force_server', 'host', 'https', 'key',
        'port', 'server']

    def __init__(self, pycore=None, pysetup=None):
        """Read webserver settings from either the core or the setup context.

        :raises Exception: when neither pycore nor pysetup is provided
        """
        global core, setup
        Thread.__init__(self)
        if pycore:
            core = pycore
            config = pycore.config
        elif pysetup:
            setup = pysetup
            config = pysetup.config
        else:
            raise Exception("No config context provided")
        self.server = config.get('webui', 'server')
        # NOTE(review): section name 'aal' looks like a typo for 'ssl' —
        # confirm against the config schema before changing.
        self.https = config.get('aal', 'activated')
        self.cert = config.get('ssl', 'cert')
        self.key = config.get('ssl', 'key')
        self.host = config.get('webui', 'host')
        self.port = config.get('webui', 'port')
        self.debug = config.get('webui', 'debug')
        self.force_server = config.get('webui', 'force_server')
        self.error = None
        self.setDaemon(True)

    def run(self):
        """Start the webserver; any startup failure is stored in self.error."""
        self.running = Event()
        self.running.set()

        global webinterface
        from pyload.webui import interface as webinterface

        # fall back to plain HTTP when the SSL files are missing
        if self.https:
            if not os.path.exists(self.cert) or not os.path.exists(self.key):
                log.warning(_("SSL certificates not found"))
                self.https = False

        if webinterface.UNAVAILALBE:
            log.warning(_("WebUI built is not available"))
        # elif webinterface.APP_PATH == "app":
        #     log.info(_("Running webui in development mode"))

        prefer = None
        # These cases covers all settings
        if self.server == "threaded":
            prefer = "threaded"
        elif self.server == "fastcgi":
            prefer = "flup"
        elif self.server == "fallback":
            prefer = "wsgiref"

        server = self.select_server(prefer)
        try:
            self.start_server(server)
        except Exception as e:
            # BUG FIX: Exception.message was removed in Python 3 (PEP 352);
            # format the exception object itself instead.
            log.error(_("Failed starting webserver: {0}").format(e))
            self.error = e
        # if core:
        #     core.print_exc()

    def select_server(self, prefer=None):
        """
        Find a working server.

        :param prefer: server NAME to prefer when no force_server is set
        :return: a server adapter class, or the force_server name as a string
        """
        from pyload.webui.servers import all_server
        unavailable = []
        server = None
        for server in all_server:
            if self.force_server and self.force_server == server.NAME:
                break  #: Found server
            # When force_server is set, no further checks have to be made
            elif self.force_server:
                continue
            if prefer and prefer == server.NAME:
                break  #: found prefered server
            elif prefer:  #: prefer is similar to force, but force has precedence
                continue
            # Filter for server that offer ssl if needed
            if self.https and not server.SSL:
                continue
            try:
                if server.find():
                    break  #: Found a server
                else:
                    unavailable.append(server.NAME)
            except Exception as e:
                # BUG FIX: e.message does not exist on Python 3;
                # format the exception object itself.
                log.error(
                    _("Failed importing webserver: {0}").format(e))
        if unavailable:  #: Just log whats not available to have some debug information
            log.debug("Unavailable webserver: {0}".format(
                ", ".join(unavailable)))
        if not server and self.force_server:
            server = self.force_server  #: just return the name
        return server

    def start_server(self, server):
        """Instantiate (if needed) and run the chosen server adapter."""
        from pyload.webui.servers import ServerAdapter
        if issubclass(server, ServerAdapter):
            if self.https and not server.SSL:
                log.warning(
                    _("This server offers no SSL, please consider using threaded instead"))
            elif not self.https:
                self.cert = self.key = None  #: This implicitly disables SSL
            # there is no extra argument for the server adapter
            # TODO: check for openSSL ?
            # Now instantiate the serverAdapter
            server = server(self.host, self.port, self.key, self.cert,
                            6, self.debug)  #: todo, num_connections
            name = server.NAME
        else:  #: server is just a string
            name = server
        log.info(_("Starting {0} webserver: {1}:{2:d}").format(
            name, self.host, self.port))
        webinterface.run_server(host=self.host, port=self.port, server=server)

    # check if an error was raised for n seconds
    def check_error(self, n=1):
        """Poll self.error for up to *n* seconds; return it when set."""
        t = time.time() + n
        while time.time() < t:
            if self.error:
                return self.error
            time.sleep(0.1)
class DatabaseBackend(Thread): subs = [] DB_FILE = 'pyload.db' VERSION_FILE = 'db.version' def __init__(self, core): super(DatabaseBackend, self).__init__() self.setDaemon(True) self.pyload = core self._ = core._ self.manager = None # set later self.error = None # TODO: Recheck... self.__running = Event() self.jobs = Queue() set_db(self) @property def running(self): return self.__running.is_set() def setup(self): """ *MUST* be called before db can be used !. """ self.start() self.__running.wait() def init(self): """Main loop, which executes commands.""" version = self._check_version() self.conn = sqlite3.connect(self.DB_FILE) os.chmod(self.DB_FILE, 0o600) self.c = self.conn.cursor() if version is not None and version < DB_VERSION: success = self._convert_db(version) # delete database if not success: self.c.close() self.conn.close() remove(self.VERSION_FILE) shutil.move(self.DB_FILE, self.DB_FILE + '.bak') self.pyload.log.warning( self._('Database was deleted due to incompatible version')) with io.open(self.VERSION_FILE, mode='wb') as fp: fp.write(to_str(DB_VERSION)) self.conn = sqlite3.connect(self.DB_FILE) os.chmod(self.DB_FILE, 0o600) self.c = self.conn.cursor() self._create_tables() self.conn.commit() def run(self): try: self.init() except Exception as exc: self.error = exc finally: self.__running.set() while True: j = self.jobs.get() if j == 'quit': self.c.close() self.conn.commit() self.conn.close() self.closing.set() break j.process_job() # TODO: Recheck... 
def exit(self): self.__running.clear() self.closing = Event() self.jobs.put('quit') self.closing.wait(1) def _check_version(self): """Get db version.""" if not os.path.isfile(self.VERSION_FILE) or not os.path.getsize( self.VERSION_FILE): with io.open(self.VERSION_FILE, mode='w') as fp: fp.write(to_str(DB_VERSION)) with io.open(self.VERSION_FILE, mode='r') as fp: v = int(fp.read().strip()) return v def _convert_db(self, v): try: return getattr(self, '_convertV{0:d}'.format(v))() except Exception: return False # -- convert scripts start -- def _convert_v6(self): return False # -- convert scripts end -- def _create_tables(self): """Create tables for database.""" self.c.execute( 'CREATE TABLE IF NOT EXISTS "packages" (' '"pid" INTEGER PRIMARY KEY AUTOINCREMENT, ' '"name" TEXT NOT NULL, ' '"folder" TEXT DEFAULT "" NOT NULL, ' '"site" TEXT DEFAULT "" NOT NULL, ' '"comment" TEXT DEFAULT "" NOT NULL, ' '"password" TEXT DEFAULT "" NOT NULL, ' '"added" INTEGER DEFAULT 0 NOT NULL,' # set by trigger '"status" INTEGER DEFAULT 0 NOT NULL,' '"tags" TEXT DEFAULT "" NOT NULL,' '"shared" INTEGER DEFAULT 0 NOT NULL,' '"packageorder" INTEGER DEFAULT -1 NOT NULL,' # inc by trigger '"root" INTEGER DEFAULT -1 NOT NULL, ' '"owner" INTEGER NOT NULL, ' 'FOREIGN KEY(owner) REFERENCES users(uid), ' 'CHECK (root != pid)' ')') self.c.execute('CREATE TRIGGER IF NOT EXISTS "insert_package" ' 'AFTER INSERT ON "packages"' 'BEGIN ' 'UPDATE packages SET added = strftime("%s", "now"), ' 'packageorder = (SELECT max(p.packageorder) + 1 FROM ' 'packages p WHERE p.root=new.root) ' 'WHERE rowid = new.rowid;' 'END') self.c.execute( 'CREATE TRIGGER IF NOT EXISTS "delete_package" ' 'AFTER DELETE ON "packages"' 'BEGIN ' 'DELETE FROM files WHERE package = old.pid;' 'UPDATE packages SET packageorder=packageorder-1 ' 'WHERE packageorder > old.packageorder AND root=old.pid;' 'END') self.c.execute('CREATE INDEX IF NOT EXISTS "package_index" ON ' 'packages(root, owner)') self.c.execute( 'CREATE INDEX IF NOT EXISTS 
"package_owner" ON packages(owner)') self.c.execute('CREATE TABLE IF NOT EXISTS "files" (' '"fid" INTEGER PRIMARY KEY AUTOINCREMENT, ' '"name" TEXT NOT NULL, ' '"size" INTEGER DEFAULT 0 NOT NULL, ' '"status" INTEGER DEFAULT 0 NOT NULL, ' '"media" INTEGER DEFAULT 1 NOT NULL,' '"added" INTEGER DEFAULT 0 NOT NULL,' '"fileorder" INTEGER DEFAULT -1 NOT NULL, ' '"url" TEXT DEFAULT "" NOT NULL, ' '"plugin" TEXT DEFAULT "" NOT NULL, ' '"hash" TEXT DEFAULT "" NOT NULL, ' '"dlstatus" INTEGER DEFAULT 0 NOT NULL, ' '"error" TEXT DEFAULT "" NOT NULL, ' '"package" INTEGER NOT NULL, ' '"owner" INTEGER NOT NULL, ' 'FOREIGN KEY(owner) REFERENCES users(uid), ' 'FOREIGN KEY(package) REFERENCES packages(id)' ')') self.c.execute( 'CREATE INDEX IF NOT EXISTS "file_index" ON files(package, owner)') self.c.execute( 'CREATE INDEX IF NOT EXISTS "file_owner" ON files(owner)') self.c.execute( 'CREATE INDEX IF NOT EXISTS "file_plugin" ON files(plugin)') self.c.execute('CREATE TRIGGER IF NOT EXISTS "insert_file" ' 'AFTER INSERT ON "files"' 'BEGIN ' 'UPDATE files SET added = strftime("%s", "now"), ' 'fileorder = (SELECT max(f.fileorder) + 1 FROM files f ' 'WHERE f.package=new.package) ' 'WHERE rowid = new.rowid;' 'END') self.c.execute('CREATE TABLE IF NOT EXISTS "collector" (' '"owner" INTEGER NOT NULL, ' '"data" TEXT NOT NULL, ' 'FOREIGN KEY(owner) REFERENCES users(uid), ' 'PRIMARY KEY(owner) ON CONFLICT REPLACE' ') ') self.c.execute('CREATE TABLE IF NOT EXISTS "storage" (' '"identifier" TEXT NOT NULL, ' '"key" TEXT NOT NULL, ' '"value" TEXT DEFAULT "", ' 'PRIMARY KEY (identifier, key) ON CONFLICT REPLACE' ')') self.c.execute( 'CREATE TABLE IF NOT EXISTS "users" (' '"uid" INTEGER PRIMARY KEY AUTOINCREMENT, ' '"name" TEXT NOT NULL UNIQUE, ' '"email" TEXT DEFAULT "" NOT NULL, ' '"password" TEXT NOT NULL, ' '"role" INTEGER DEFAULT 0 NOT NULL, ' '"permission" INTEGER DEFAULT 0 NOT NULL, ' '"folder" TEXT DEFAULT "" NOT NULL, ' '"traffic" INTEGER DEFAULT -1 NOT NULL, ' '"dllimit" INTEGER DEFAULT -1 
NOT NULL, ' '"dlquota" TEXT DEFAULT "" NOT NULL, ' '"hddquota" INTEGER DEFAULT -1 NOT NULL, ' '"template" TEXT DEFAULT "default" NOT NULL, ' '"user" INTEGER DEFAULT -1 NOT NULL, ' # set by trigger to self 'FOREIGN KEY(user) REFERENCES users(uid)' ')') self.c.execute( 'CREATE INDEX IF NOT EXISTS "username_index" ON users(name)') self.c.execute('CREATE TRIGGER IF NOT EXISTS "insert_user" AFTER ' 'INSERT ON "users"' 'BEGIN ' 'UPDATE users SET user = new.uid, folder=new.name ' 'WHERE rowid = new.rowid;' 'END') self.c.execute('CREATE TABLE IF NOT EXISTS "settings" (' '"plugin" TEXT NOT NULL, ' '"user" INTEGER DEFAULT -1 NOT NULL, ' '"config" TEXT NOT NULL, ' 'FOREIGN KEY(user) REFERENCES users(uid), ' 'PRIMARY KEY (plugin, user) ON CONFLICT REPLACE' ')') self.c.execute('CREATE TABLE IF NOT EXISTS "accounts" (' '"aid" INTEGER PRIMARY KEY AUTOINCREMENT, ' '"plugin" TEXT NOT NULL, ' '"loginname" TEXT NOT NULL, ' '"owner" INTEGER NOT NULL, ' '"activated" INTEGER NOT NULL DEFAULT 1, ' '"password" TEXT DEFAULT "", ' '"shared" INTEGER NOT NULL DEFAULT 0, ' '"options" TEXT DEFAULT "", ' 'FOREIGN KEY(owner) REFERENCES users(uid)' ')') self.c.execute('CREATE INDEX IF NOT EXISTS "accounts_login" ON ' 'accounts(plugin, loginname)') self.c.execute('CREATE TABLE IF NOT EXISTS "stats" (' '"id" INTEGER PRIMARY KEY AUTOINCREMENT, ' '"user" INTEGER NOT NULL, ' '"plugin" TEXT NOT NULL, ' '"time" INTEGER NOT NULL, ' '"premium" INTEGER DEFAULT 0 NOT NULL, ' '"amount" INTEGER DEFAULT 0 NOT NULL, ' 'FOREIGN KEY(user) REFERENCES users(uid)' ')') self.c.execute( 'CREATE INDEX IF NOT EXISTS "stats_time" ON stats(user, time)') # try to lower ids self.c.execute('SELECT max(fid) FROM files') fid = self.c.fetchone()[0] fid = int(fid) if fid else 0 self.c.execute('UPDATE SQLITE_SEQUENCE SET seq=? WHERE name=?', (fid, 'files')) self.c.execute('SELECT max(pid) FROM packages') pid = self.c.fetchone()[0] pid = int(pid) if pid else 0 self.c.execute('UPDATE SQLITE_SEQUENCE SET seq=? 
WHERE name=?', (pid, 'packages')) self.c.execute('VACUUM') def create_cursor(self): return self.conn.cursor() @async def commit(self): self.conn.commit() @queue def sync_save(self): self.conn.commit() @async def rollback(self): self.conn.rollback() def async (self, f, *args, **kwargs): args = (self, ) + args job = DatabaseJob(f, *args, **kwargs) self.jobs.put(job) def queue(self, f, *args, **kwargs): # Raise previous error of initialization if isinstance(self.error, Exception): raise self.error args = (self, ) + args job = DatabaseJob(f, *args, **kwargs) self.jobs.put(job) # only wait when db is running if self.running: job.wait() return job.result @classmethod def register_sub(cls, klass): cls.subs.append(klass) @classmethod def unregister_sub(cls, klass): cls.subs.remove(klass) def __getattr__(self, attr): for sub in DatabaseBackend.subs: if hasattr(sub, attr): return getattr(sub, attr) raise AttributeError(attr)
class DownloadThread(PluginThread):
    """Thread for downloading files from 'real' hoster plugins."""

    __slots__ = ['active', 'queue', 'running']

    def __init__(self, manager):
        """Constructor."""
        PluginThread.__init__(self, manager)

        self.running = Event()
        self.queue = Queue()  #: job queue
        self.active = None

        self.start()

    def run(self):
        """Main loop: pull pyfiles off the queue and run their plugin until 'quit'."""
        pyfile = None

        while True:
            del pyfile
            self.active = self.queue.get()
            pyfile = self.active

            if self.active == "quit":
                self.active = None
                self.manager.stop(self)
                return True

            try:
                pyfile.init_plugin()

                # after initialization the thread is fully ready
                self.running.set()

                # this pyfile was deleted while queuing
                # TODO: what will happen with new thread manager?
                # if not pyfile.has_plugin(): continue

                pyfile.plugin.check_for_same_files(starting=True)
                # BUGFIX: format AFTER the gettext lookup, otherwise the
                # already-formatted string never matches a catalog entry
                self.pyload.log.info(
                    _("Download starts: {0}").format(pyfile.name))

                # start download
                self.pyload.adm.download_preparing(pyfile)
                pyfile.plugin.preprocessing(self)

                self.pyload.log.info(
                    _("Download finished: {0}").format(pyfile.name))
                self.pyload.adm.download_finished(pyfile)
                self.pyload.files.check_package_finished(pyfile)

            except NotImplementedError:
                self.pyload.log.error(
                    _("Plugin {0} is missing a function").format(pyfile.pluginname))
                pyfile.set_status("failed")
                pyfile.error = "Plugin does not work"
                self.clean(pyfile)
                continue

            except Abort:
                try:
                    self.pyload.log.info(
                        _("Download aborted: {0}").format(pyfile.name))
                except Exception:
                    pass
                pyfile.set_status("aborted")
                # abort cleans the file
                # self.clean(pyfile)
                continue

            except Reconnect:
                self.queue.put(pyfile)
                # pyfile.req.clear_cookies()
                while self.manager.reconnecting.is_set():
                    time.sleep(0.5)
                continue

            except Retry as e:
                reason = e.args[0]
                self.pyload.log.info(
                    _("Download restarted: {0} | {1}").format(pyfile.name, reason))
                self.queue.put(pyfile)
                continue

            except Fail as e:
                msg = e.args[0]
                # TODO: activate former skipped downloads
                if msg == "offline":
                    pyfile.set_status("offline")
                    self.pyload.log.warning(
                        _("Download is offline: {0}").format(pyfile.name))
                elif msg == "temp. offline":
                    pyfile.set_status("temp. offline")
                    self.pyload.log.warning(
                        _("Download is temporary offline: {0}").format(pyfile.name))
                else:
                    pyfile.set_status("failed")
                    self.pyload.log.warning(
                        _("Download failed: {0} | {1}").format(pyfile.name, msg))
                    pyfile.error = msg
                self.pyload.adm.download_failed(pyfile)
                self.clean(pyfile)
                continue

            except error as e:
                # pycurl error: (code, message) or just a message tuple
                if len(e.args) == 2:
                    code, msg = e.args
                else:
                    code = 0
                    msg = e.args
                self.pyload.log.debug(
                    "pycurl exception {0}: {1}".format(code, msg))
                if code in (7, 18, 28, 52, 56):
                    # transient network trouble: wait a minute, then retry
                    self.pyload.log.warning(
                        _("Couldn't connect to host or connection reset, waiting 1 minute and retry"))
                    wait = time.time() + 60
                    pyfile.wait_until = wait
                    pyfile.set_status("waiting")
                    while time.time() < wait:
                        time.sleep(0.5)
                        if pyfile.abort:
                            break
                    if pyfile.abort:
                        self.pyload.log.info(
                            _("Download aborted: {0}").format(pyfile.name))
                        pyfile.set_status("aborted")
                        # do not clean, aborting function does this itself
                        # self.clean(pyfile)
                    else:
                        self.queue.put(pyfile)
                    continue
                else:
                    pyfile.set_status("failed")
                    self.pyload.log.error(
                        _("pycurl error {0}: {1}").format(code, msg))
                    if self.pyload.debug:
                        print_exc()
                        self.write_debug_report(pyfile.plugin.__name__, pyfile)
                    self.pyload.adm.download_failed(pyfile)
                self.clean(pyfile)
                continue

            except Skip as e:
                pyfile.set_status("skipped")
                # BUGFIX: `.message` does not exist on Python 3 exceptions
                self.pyload.log.info(
                    _("Download skipped: {0} due to {1}").format(pyfile.name, str(e)))
                self.clean(pyfile)
                self.pyload.files.check_package_finished(pyfile)
                self.active = False
                self.pyload.files.save()
                continue

            except Exception as e:
                if isinstance(e, ResponseException) and e.code == 500:
                    pyfile.set_status("temp. offline")
                    self.pyload.log.warning(
                        _("Download is temporary offline: {0}").format(pyfile.name))
                    pyfile.error = _("Internal Server Error")
                else:
                    pyfile.set_status("failed")
                    # BUGFIX: `.message` does not exist on Python 3 exceptions
                    self.pyload.log.warning(
                        _("Download failed: {0} | {1}").format(pyfile.name, str(e)))
                    pyfile.error = str(e)
                if self.pyload.debug:
                    print_exc()
                    self.write_debug_report(pyfile.plugin.__name__, pyfile)
                self.pyload.adm.download_failed(pyfile)
                self.clean(pyfile)
                continue

            finally:
                self.pyload.files.save()
                pyfile.check_if_processed()
                # BUGFIX: sys.exc_clear() exists only on Python 2; guard it
                # so the finally block does not raise on Python 3
                if hasattr(sys, 'exc_clear'):
                    sys.exc_clear()

            # manager could still be waiting for it
            self.running.set()
            # only done when job was not put back
            if self.queue.empty():
                self.manager.done(self)

            # pyfile.plugin.req.clean()
            self.active = False
            pyfile.finish_if_done()
            self.pyload.files.save()

    def get_progress(self):
        """Progress info of the currently active pyfile, if any."""
        if self.active:
            return self.active.get_progress_info()

    def put(self, job):
        """Assign a job to the thread."""
        self.queue.put(job)

    def clean(self, pyfile):
        """Set thread inactive and release pyfile."""
        pyfile.release()

    def stop(self):
        """Stops the thread."""
        self.put("quit")
def __init__(self, manager): Thread.__init__(self) self.manager = manager self.pyload = manager.pyload self.enabled = True self.running = Event()
class Core(object):
    """Central pyLoad object: wires up config, logging, database, managers
    and the API, and drives the main scheduler loop."""

    DEFAULT_CONFIGNAME = 'config.ini'
    DEFAULT_LANGUAGE = 'english'
    DEFAULT_USERNAME = '******'
    DEFAULT_PASSWORD = '******'
    DEFAULT_STORAGENAME = 'downloads'

    @property
    def version(self):
        return __version__

    @property
    def version_info(self):
        return __version_info__

    @property
    def running(self):
        return self.__running.is_set()

    def __init__(self, cfgdir, tmpdir, debug=None, restore=False):
        self.__running = Event()
        self.__do_restart = False
        self.__do_exit = False
        self._ = lambda x: x  # identity until a gettext catalog is installed

        self.cfgdir = fullpath(cfgdir)
        self.tmpdir = fullpath(tmpdir)
        os.chdir(self.cfgdir)

        # if self.tmpdir not in sys.path:
        # sys.path.append(self.tmpdir)

        # if refresh:
        # cleanpy(PACKDIR)

        self.config = ConfigParser(self.DEFAULT_CONFIGNAME)
        self.debug = self.config.get('log', 'debug') if debug is None else debug
        self.log = LoggerFactory(self, self.debug)

        self._init_database(restore)
        self._init_managers()

        self.request = self.req = RequestFactory(self)

        self._init_api()

        atexit.register(self.exit)

    def _init_api(self):
        from pyload.api import Api
        self.api = Api(self)

    def _init_database(self, restore):
        """Open the backend db; seed default credentials on a new/restored db."""
        from pyload.core.database import DatabaseBackend
        from pyload.core.datatype import Permission, Role

        # TODO: Move inside DatabaseBackend
        newdb = not os.path.isfile(DatabaseBackend.DB_FILE)
        self.db = DatabaseBackend(self)
        self.db.setup()

        if restore or newdb:
            self.db.add_user(self.DEFAULT_USERNAME, self.DEFAULT_PASSWORD,
                             Role.Admin, Permission.All)
        if restore:
            self.log.warning(
                self._('Restored default login credentials `admin|pyload`'))

    def _init_managers(self):
        from pyload.core.manager import (AccountManager, AddonManager,
                                         EventManager, ExchangeManager,
                                         FileManager, InfoManager,
                                         PluginManager, TransferManager)

        self.scheduler = sched.scheduler(time.time, time.sleep)
        self.filemanager = self.files = FileManager(self)
        self.pluginmanager = self.pgm = PluginManager(self)
        self.exchangemanager = self.exm = ExchangeManager(self)
        self.eventmanager = self.evm = EventManager(self)
        self.accountmanager = self.acm = AccountManager(self)
        self.infomanager = self.iom = InfoManager(self)
        self.transfermanager = self.tsm = TransferManager(self)
        # TODO: Remove builtins.ADDONMANAGER
        builtins.ADDONMANAGER = self.addonmanager = self.adm = AddonManager(
            self)
        # self.remotemanager = self.rem = RemoteManager(self)
        # self.servermanager = self.svm = ServerManager(self)
        self.db.manager = self.files  # ugly?

    def _setup_permissions(self):
        """Optionally drop to the configured user/group (POSIX only)."""
        self.log.debug('Setup permissions...')

        if os.name == 'nt':
            return

        change_group = self.config.get('permission', 'change_group')
        change_user = self.config.get('permission', 'change_user')

        if change_group:
            try:
                group = self.config.get('permission', 'group')
                set_process_group(group)
            except Exception as exc:
                self.log.error(self._('Unable to change gid'))
                self.log.error(exc, exc_info=self.debug)

        if change_user:
            try:
                user = self.config.get('permission', 'user')
                set_process_user(user)
            except Exception as exc:
                self.log.error(self._('Unable to change uid'))
                self.log.error(exc, exc_info=self.debug)

    def set_language(self, lang):
        """Install the gettext catalog for *lang* (raises KeyError/IOError)."""
        domain = 'core'
        localedir = resource_filename(__package__, 'locale')
        languages = (locale.locale_alias[lang.lower()].split('_', 1)[0], )
        self._set_language(domain, localedir, languages)

    def _set_language(self, *args, **kwargs):
        trans = gettext.translation(*args, **kwargs)
        try:
            self._ = trans.ugettext  # Python 2
        except AttributeError:
            self._ = trans.gettext  # Python 3

    def _setup_language(self):
        self.log.debug('Setup language...')

        lang = self.config.get('general', 'language')
        if not lang:
            lc = locale.getlocale()[0] or locale.getdefaultlocale()[0]
            lang = lc.split('_', 1)[0] if lc else 'en'

        try:
            self.set_language(lang)
        # BUGFIX: an unknown language also raises KeyError (from the
        # locale.locale_alias lookup in set_language), which used to escape
        # this handler and abort startup
        except (IOError, KeyError) as exc:
            self.log.error(exc, exc_info=self.debug)
            self._set_language('core', fallback=True)

    # def _setup_niceness(self):
    # niceness = self.config.get('general', 'niceness')
    # renice(niceness=niceness)
    # ioniceness = int(self.config.get('general', 'ioniceness'))
    # ionice(niceness=ioniceness)

    def _setup_storage(self):
        """Create the download folder and report the free space on it."""
        self.log.debug('Setup storage...')

        storage_folder = self.config.get('general', 'storage_folder')
        if storage_folder is None:
            storage_folder = os.path.join(builtins.USERDIR,
                                          self.DEFAULT_STORAGENAME)
        # BUGFIX: format after the gettext lookup, not inside it
        self.log.info(self._('Storage: {0}').format(storage_folder))
        makedirs(storage_folder, exist_ok=True)

        avail_space = format.size(availspace(storage_folder))
        self.log.info(
            self._('Available storage space: {0}').format(avail_space))

    def _setup_network(self):
        self.log.debug('Setup network...')

        # TODO: Move to accountmanager
        self.log.info(self._('Activating accounts...'))
        self.acm.load_accounts()
        # self.scheduler.enter(0, 0, self.acm.load_accounts)
        self.adm.activate_addons()

    def run(self):
        """Start all subsystems, then loop servicing managers and the scheduler."""
        self.log.info('Welcome to pyLoad v{0}'.format(self.version))
        if self.debug:
            self.log.warning('*** DEBUG MODE ***')
        try:
            self.log.debug('Starting pyLoad...')
            self.evm.fire('pyload:starting')
            self.__running.set()

            self._setup_language()
            self._setup_permissions()

            self.log.info(self._('Config directory: {0}').format(self.cfgdir))
            self.log.info(self._('Cache directory: {0}').format(self.tmpdir))

            self._setup_storage()
            self._setup_network()
            # self._setup_niceness()

            # # some memory stats
            # from guppy import hpy
            # hp=hpy()
            # print(hp.heap())
            # import objgraph
            # objgraph.show_most_common_types(limit=30)
            # import memdebug
            # memdebug.start(8002)
            # from meliae import scanner
            # scanner.dump_all_objects(os.path.join(PACKDIR, 'objs.json'))

            self.log.debug('pyLoad is up and running')
            self.evm.fire('pyload:started')

            self.tsm.pause = False  # NOTE: Recheck...
            while True:
                self.__running.wait()
                self.tsm.work()
                self.iom.work()
                self.exm.work()
                if self.__do_restart:
                    raise Restart
                if self.__do_exit:
                    raise Exit
                self.scheduler.run()
                time.sleep(1)

        except Restart:
            self.restart()
        except (Exit, KeyboardInterrupt, SystemExit):
            self.exit()
        except Exception as exc:
            self.log.critical(exc, exc_info=True)
            self.exit()

    def _remove_loggers(self):
        # BUGFIX: removeHandler mutates self.log.handlers — iterate a copy,
        # otherwise every second handler is skipped and left open
        for handler in list(self.log.handlers):
            with closing(handler) as hdlr:
                self.log.removeHandler(hdlr)

    def restart(self):
        self.stop()
        self.log.info(self._('Restarting pyLoad...'))
        self.evm.fire('pyload:restarting')
        self.run()

    def exit(self):
        self.stop()
        self.log.info(self._('Exiting pyLoad...'))
        self.tsm.exit()
        self.db.exit()  # NOTE: Why here?
        self._remove_loggers()
        # if cleanup:
        # self.log.info(self._("Deleting temp files..."))
        # remove(self.tmpdir, ignore_errors=True)

    def stop(self):
        try:
            self.log.debug('Stopping pyLoad...')
            self.evm.fire('pyload:stopping')
            self.adm.deactivate_addons()
            self.api.stop_all_downloads()
        finally:
            self.files.sync_save()
            self.__running.clear()
            self.evm.fire('pyload:stopped')
class TransferManager(BaseManager):
    """Schedules and manages download and decrypter jobs."""

    def setup(self):
        # won't start download when true
        self.pause = True
        # each thread is in exactly one category
        self.free = []
        # a thread that in working must have a file as active attribute
        self.downloading = []
        # holds the decrypter threads
        self.decrypting = []
        # indicates when reconnect has occurred
        self.reconnecting = Event()
        self.lock = RWLock()

    @lock
    def done(self, thread):
        """Switch thread from working to free state."""
        # only download threads will be re-used
        if isinstance(thread, DownloadThread):
            # clean local var
            thread.active = None
            self.downloading.remove(thread)
            self.free.append(thread)
            thread.running.clear()
        elif isinstance(thread, DecrypterThread):
            self.decrypting.remove(thread)

    @lock
    def discard(self, thread):
        """Removes a thread from all lists."""
        if thread in self.free:
            self.free.remove(thread)
        elif thread in self.downloading:
            self.downloading.remove(thread)

    @lock
    def start_download_thread(self, info):
        """Use a free dl thread or create a new one."""
        if self.free:
            thread = self.free[0]
            del self.free[0]
        else:
            thread = DownloadThread(self)

        thread.put(self.pyload.files.get_file(info.fid))
        # NOTE(review): DownloadThread.__init__ already calls self.start();
        # a second start() on the same Thread raises RuntimeError — confirm
        # whether DownloadThread overrides start() elsewhere in the project.
        thread.start()

        # wait until it picked up the task
        thread.running.wait()
        self.downloading.append(thread)
        return thread

    @lock
    def start_decrypter_thread(self, info):
        """Start decrypting of entered data, all links in one package are
        accumulated to one thread."""
        self.pyload.files.set_download_status(
            info.fid, DownloadStatus.Decrypting)
        thread = DecrypterThread(
            self, [(info.download.url, info.download.plugin)],
            info.fid, info.package, info.owner)
        thread.start()
        self.decrypting.append(thread)
        return thread

    @lock(shared=True)
    def active_downloads(self, uid=None):
        """Retrieve pyfiles of running downloads."""
        return [x.active for x in self.downloading
                if uid is None or x.active.owner == uid]

    @lock(shared=True)
    def waiting_downloads(self):
        """All waiting downloads."""
        return [x.active for x in self.downloading
                if x.active.has_status('waiting')]

    @lock(shared=True)
    def get_progress_list(self, uid):
        """Progress of all running downloads."""
        # decrypter progress could be none
        return [
            x for x in [
                thd.get_progress_info()
                for thd in self.downloading + self.decrypting
                if uid is None or thd.owner == uid] if x is not None]

    def processing_ids(self):
        """Get a id list of all pyfiles processed."""
        return [x.fid for x in self.active_downloads(None)]

    @lock(shared=True)
    def exit(self):
        """End all threads."""
        self.pause = True
        for thread in self.downloading + self.free:
            thread.put('quit')

    def work(self):
        """Main routine that does the periodical work."""
        self.try_reconnect()

        if (availspace(self.pyload.config.get('general', 'storage_folder')) <
                self.pyload.config.get('general', 'min_storage_size') << 20):
            self.pyload.log.warning(
                self._('Not enough space left on device'))
            self.pause = True

        # if self.pause or not self.pyload.api.is_time_download():
        # return False
        if self.pause:
            return False

        # at least one thread want reconnect and we are supposed to wait
        if self.pyload.config.get(
                'reconnect', 'wait') and self.want_reconnect() > 1:
            return False

        self.assign_jobs()

        # TODO: clean free threads

    def assign_jobs(self):
        """Load jobs from db and try to assign them."""
        limit = self.pyload.config.get(
            'connection', 'max_transfers') - len(self.active_downloads())

        # check for waiting dl rule
        if limit <= 0:
            # increase limit if there are waiting downloads
            limit += min(
                len(self.waiting_downloads()),
                self.pyload.config.get('connection', 'wait') +
                self.pyload.config.get('connection', 'max_transfers') -
                len(self.active_downloads()))

        slots = self.get_remaining_plugin_slots()
        occ = tuple(plugin for plugin, v in slots.items() if v == 0)
        jobs = self.pyload.files.get_jobs(occ)

        # map plugin to list of jobs
        plugins = defaultdict(list)

        # BUGFIX: iterate a snapshot — deleting from `jobs` while iterating
        # jobs.items() raises RuntimeError on Python 3
        for uid, info in list(jobs.items()):
            # check the quota of each user and filter
            quota = self.pyload.api.calc_quota(uid)
            if -1 < quota < info.size:
                del jobs[uid]
                # BUGFIX: skip the filtered job; it used to be appended to
                # `plugins` anyway, so the quota filter had no effect
                continue
            plugins[info.download.plugin].append(info)

        for plugin, infos in plugins.items():
            # we know exactly the number of remaining jobs
            # or only can start one job if limit is not known
            to_schedule = slots[plugin] if plugin in slots else 1
            # start all chosen jobs
            for job in self.choose_jobs(infos, to_schedule):
                # if the job was started the limit will be reduced
                if self.start_job(job, limit):
                    limit -= 1

    def choose_jobs(self, jobs, k):
        """Make a fair choice of which k jobs to start."""
        # TODO: prefer admins, make a fairer choice?
        if k <= 0:
            return []
        if k >= len(jobs):
            return jobs
        return random.sample(jobs, k)

    def start_job(self, info, limit):
        """Start a download or decrypter thread with given file info."""
        plugin = self.pyload.pgm.find_type(info.download.plugin)
        # this plugin does not exits
        if plugin is None:
            self.pyload.log.error(
                self._("Plugin '{0}' does not exists").format(
                    info.download.plugin))
            self.pyload.files.set_download_status(
                info.fid, DownloadStatus.Failed)
            return False

        if plugin == 'hoster':
            # this job can't be started
            if limit <= 0:
                return False
            self.start_download_thread(info)
            return True
        elif plugin == 'crypter':
            # decrypter threads deliberately do not consume the transfer
            # limit: fall through to the falsy return below
            self.start_decrypter_thread(info)
        else:
            self.pyload.log.error(
                self._("Plugin type '{0}' "
                       "can't be used for downloading").format(plugin))
        return False

    @lock(shared=True)
    def try_reconnect(self):
        """Checks if reconnect needed."""
        if not self.pyload.config.get('reconnect', 'activated'):
            return False

        # only reconnect when all threads are ready
        if not (0 < self.want_reconnect() == len(self.downloading)):
            return False

        script = self.pyload.config.get('reconnect', 'script')
        if not os.path.isfile(script):
            self.pyload.config.set('reconnect', 'activated', False)
            self.pyload.log.warning(self._('Reconnect script not found!'))
            return

        self.reconnecting.set()
        self.pyload.log.info(self._('Starting reconnect'))

        # wait until all thread got the event
        while [x.active.plugin.waiting for x in self.downloading].count(True):
            time.sleep(0.25)

        old_ip = get_ip()

        self.pyload.evm.fire('reconnect:before', old_ip)
        self.pyload.log.debug('Old IP: {0}'.format(old_ip))

        try:
            # NOTE(review): shell=True on a config-supplied path — the
            # script location is admin-controlled config, but keep in mind
            # this executes arbitrary shell commands
            subprocess.call(
                self.pyload.config.get('reconnect', 'script'), shell=True)
        except Exception as exc:
            self.pyload.log.warning(
                self._('Failed executing reconnect script!'))
            self.pyload.log.error(exc, exc_info=self.pyload.debug)
            self.pyload.config.set('reconnect', 'activated', False)
            self.reconnecting.clear()
            return

        time.sleep(1)
        ip = get_ip()
        self.pyload.evm.fire('reconnect:after', ip)

        if not old_ip or old_ip == ip:
            self.pyload.log.warning(self._('Reconnect not successful'))
        else:
            self.pyload.log.info(
                self._('Reconnected, new IP: {0}').format(ip))

        self.reconnecting.clear()

    @lock(shared=True)
    def want_reconnect(self):
        """Number of downloads that are waiting for reconnect."""
        active = [
            x.active.has_plugin() and x.active.plugin.want_reconnect and
            x.active.plugin.waiting for x in self.downloading]
        return active.count(True)

    @lock(shared=True)
    def get_remaining_plugin_slots(self):
        """Dict of plugin names mapped to remaining dls."""
        occ = {}
        # decrypter are treated as occupied
        for thd in self.decrypting:
            if not thd.progress:
                continue
            occ[thd.progress.plugin] = 0

        # get all default dl limits
        for thd in self.downloading:
            if not thd.active.has_plugin():
                continue
            limit = thd.active.plugin.get_download_limit()
            # limit <= 0 means no limit
            occ[thd.active.pluginname] = limit if limit > 0 else float('inf')

        # subtract with running downloads
        for thd in self.downloading:
            if not thd.active.has_plugin():
                continue
            plugin = thd.active.pluginname
            if plugin in occ:
                occ[plugin] -= 1

        return occ
class WebServer(Thread):
    """Daemon thread that picks a WSGI server backend and runs the webui."""

    __slots__ = [
        'cert', 'debug', 'error', 'force_server', 'host', 'https', 'key',
        'port', 'server']

    def __init__(self, pycore=None, pysetup=None):
        global core, setup
        Thread.__init__(self)

        if pycore:
            core = pycore
            config = pycore.config
        elif pysetup:
            setup = pysetup
            config = pysetup.config
        else:
            raise Exception("No config context provided")

        self.server = config.get('webui', 'server')
        # NOTE(review): section 'aal' looks like a typo for 'ssl' — confirm
        # against the config schema before changing
        self.https = config.get('aal', 'activated')
        self.cert = config.get('ssl', 'cert')
        self.key = config.get('ssl', 'key')
        self.host = config.get('webui', 'host')
        self.port = config.get('webui', 'port')
        self.debug = config.get('webui', 'debug')
        self.force_server = config.get('webui', 'force_server')
        self.error = None

        self.setDaemon(True)

    def run(self):
        """Import the webui, validate SSL config, pick a backend and start it."""
        self.running = Event()
        self.running.set()

        global webinterface
        from pyload.webui import interface as webinterface

        if self.https:
            if not os.path.exists(self.cert) or not os.path.exists(self.key):
                log.warning(_("SSL certificates not found"))
                self.https = False

        # 'UNAVAILALBE' is the (misspelled) attribute actually exported by
        # the webui interface module; keep it in sync with that module
        if webinterface.UNAVAILALBE:
            log.warning(_("WebUI built is not available"))
        elif webinterface.APPDIR.endswith('app'):
            log.info(_("Running webui in development mode"))

        prefer = None

        # These cases covers all settings
        if self.server == "threaded":
            prefer = "threaded"
        elif self.server == "fastcgi":
            prefer = "flup"
        elif self.server == "fallback":
            prefer = "wsgiref"

        server = self.select_server(prefer)

        try:
            self.start_server(server)
        except Exception as e:
            # BUGFIX: Python 3 exceptions have no `.message`
            log.error(_("Failed starting webserver: {0}").format(str(e)))
            self.error = e
            # if core:
            # core.print_exc()

    def select_server(self, prefer=None):
        """Find a working server.

        Honors `force_server` first, then *prefer*, then SSL capability.
        Returns an adapter class, or the forced name as a plain string.
        """
        from pyload.webui.servers import all_server

        unavailable = []
        server = None
        for server in all_server:
            if self.force_server and self.force_server == server.NAME:
                break  #: Found server
            # When force_server is set, no further checks have to be made
            elif self.force_server:
                continue
            if prefer and prefer == server.NAME:
                break  #: found prefered server
            elif prefer:  #: prefer is similar to force, but force has precedence
                continue
            # Filter for server that offer ssl if needed
            if self.https and not server.SSL:
                continue
            try:
                if server.find():
                    break  #: Found a server
                else:
                    unavailable.append(server.NAME)
            except Exception as e:
                # BUGFIX: Python 3 exceptions have no `.message`
                log.error(
                    _("Failed importing webserver: {0}").format(str(e)))

        if unavailable:
            #: Just log whats not available to have some debug information
            log.debug("Unavailable webserver: {0}".format(
                ", ".join(unavailable)))

        if not server and self.force_server:
            server = self.force_server  #: just return the name

        return server

    def start_server(self, server):
        """Instantiate the selected adapter (or keep a plain name) and run it."""
        from pyload.webui.servers import ServerAdapter

        if issubclass(server, ServerAdapter):
            if self.https and not server.SSL:
                log.warning(
                    _("This server offers no SSL, please consider using threaded instead"))
            elif not self.https:
                self.cert = self.key = None  #: This implicitly disables SSL
                # there is no extra argument for the server adapter

            # TODO: check for openSSL ?

            # Now instantiate the serverAdapter
            server = server(self.host, self.port, self.key, self.cert,
                            6, self.debug)  #: todo, num_connections
            name = server.NAME
        else:  #: server is just a string
            name = server

        log.info(_("Starting {0} webserver: {1}:{2:d}").format(
            name, self.host, self.port))
        webinterface.run_server(host=self.host, port=self.port, server=server)

    # check if an error was raised for n seconds
    def check_error(self, n=1):
        """Poll `self.error` for up to *n* seconds; return it if one appeared."""
        t = time.time() + n
        while time.time() < t:
            if self.error:
                return self.error
            time.sleep(0.1)
def __init__(self, file): super(ShareonlineBiz, self).__init__(file) self.info = {} self.link = '' self._continue = Event()
class WebSocketServer(socketserver.ThreadingMixIn, http.server.HTTPServer):
    """HTTPServer specialized for WebSocket.

    Binds (possibly several, IPv4 + IPv6) listening sockets, optionally
    wrapped in TLS via the stdlib ``ssl`` module or pyOpenSSL, and serves
    them from a single ``select`` loop.
    """

    # Overrides SocketServer.ThreadingMixIn.daemon_threads
    daemon_threads = True
    # Overrides BaseHTTPServer.HTTPServer.allow_reuse_address
    allow_reuse_address = True

    def __init__(self, options):
        """Override SocketServer.TCPServer.__init__ to set SSL enabled
        socket object to self.socket before server_bind and
        server_activate, if necessary.
        """
        # Removed dispatcher init here
        self._logger = logging.getLogger('pyload')

        self.request_queue_size = options.request_queue_size
        self.__ws_is_shut_down = Event()
        self.__ws_serving = False

        socketserver.BaseServer.__init__(
            self, (options.server_host, options.port),
            WebSocketRequestHandler)

        # Expose the options object to allow handler objects access it. We
        # name it with websocket_ prefix to avoid conflict.
        self.websocket_server_options = options

        self._create_sockets()
        self.server_bind()
        self.server_activate()

    def _create_sockets(self):
        """Create one unwrapped-or-TLS socket per address family to listen on."""
        self.server_name, self.server_port = self.server_address
        self._sockets = []
        if not self.server_name:
            # On platforms that does not support IPv6, the first bind
            # fails. On platforms that supports IPv6
            # - If it binds both IPv4 and IPv6 on call with AF_INET6, the
            #   first bind succeeds and the second fails (we'll see
            #   'Address already in use' error).
            # - If it binds only IPv6 on call with AF_INET6, both call are
            #   expected to succeed to listen both protocol.
            addrinfo_array = [
                (socket.AF_INET6, socket.SOCK_STREAM, '', '', ''),
                (socket.AF_INET, socket.SOCK_STREAM, '', '', '')]
        else:
            addrinfo_array = socket.getaddrinfo(
                self.server_name, self.server_port,
                socket.AF_UNSPEC, socket.SOCK_STREAM, socket.IPPROTO_TCP)
        for addrinfo in addrinfo_array:
            family, socktype, proto, canonname, sockaddr = addrinfo
            try:
                socket_ = socket.socket(family, socktype)
            except Exception as e:
                # BUGFIX: Python 3 exceptions have no `.message` attribute
                self._logger.info(_("Skip by failure: {0}").format(e))
                continue
            server_options = self.websocket_server_options
            if server_options.use_tls:
                # For the case of _HAS_OPEN_SSL, we do wrapper setup after
                # accept.
                if server_options.tls_module == _TLS_BY_STANDARD_MODULE:
                    if server_options.tls_client_auth:
                        if server_options.tls_client_cert_optional:
                            client_cert_ = ssl.CERT_OPTIONAL
                        else:
                            client_cert_ = ssl.CERT_REQUIRED
                    else:
                        client_cert_ = ssl.CERT_NONE
                    # BUGFIX: `ssl.layer_socket` does not exist; the
                    # intended stdlib call is `ssl.wrap_socket` (NOTE:
                    # removed in Python 3.12 — migrate to ssl.SSLContext)
                    socket_ = ssl.wrap_socket(
                        socket_,
                        keyfile=server_options.private_key,
                        certfile=server_options.certificate,
                        ssl_version=ssl.PROTOCOL_SSLv23,
                        ca_certs=server_options.tls_client_ca,
                        cert_reqs=client_cert_,
                        do_handshake_on_connect=False)
            self._sockets.append((socket_, addrinfo))

    def server_bind(self):
        """Override SocketServer.TCPServer.server_bind to enable multiple
        sockets bind.
        """
        failed_sockets = []
        for socketinfo in self._sockets:
            socket_, addrinfo = socketinfo
            if self.allow_reuse_address:
                socket_.setsockopt(
                    socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            try:
                socket_.bind(self.server_address)
            except Exception as e:
                # BUGFIX: Python 3 exceptions have no `.message` attribute
                self._logger.info(_("Skip by failure: {0}").format(e))
                socket_.close()
                failed_sockets.append(socketinfo)
            if self.server_address[1] == 0:
                # The operating system assigns the actual port number for
                # port number 0. This case, the second and later sockets
                # should use the same port number. Also self.server_port is
                # rewritten because it is exported, and will be used by
                # external code.
                self.server_address = (
                    self.server_name, socket_.getsockname()[1])
                self.server_port = self.server_address[1]
                self._logger.info(
                    _('Port {0:d} is assigned').format(self.server_port))
        for socketinfo in failed_sockets:
            self._sockets.remove(socketinfo)

    def server_activate(self):
        """Override SocketServer.TCPServer.server_activate to enable
        multiple sockets listen.
        """
        failed_sockets = []
        for socketinfo in self._sockets:
            socket_, addrinfo = socketinfo
            self._logger.debug("Listen on: {0}".format(addrinfo))
            try:
                socket_.listen(self.request_queue_size)
            except Exception as e:
                # BUGFIX: Python 3 exceptions have no `.message` attribute
                self._logger.info(_("Skip by failure: {0}").format(e))
                socket_.close()
                failed_sockets.append(socketinfo)
        for socketinfo in failed_sockets:
            self._sockets.remove(socketinfo)
        if len(self._sockets) == 0:
            self._logger.critical(
                _('No sockets activated. Use info log level to see the reason')
            )

    def server_close(self):
        """Override SocketServer.TCPServer.server_close to enable multiple
        sockets close.
        """
        for socketinfo in self._sockets:
            socket_, addrinfo = socketinfo
            self._logger.info(_("Close on: {0}").format(addrinfo))
            socket_.close()

    def fileno(self):
        """Override SocketServer.TCPServer.fileno."""
        # Only the first socket can be reported here; callers that need
        # all sockets must use self._sockets directly
        self._logger.critical(_('Not supported: fileno'))
        return self._sockets[0][0].fileno()

    # NOTE: client_address is a tuple
    def handle_error(self, request, client_address):
        """Override SocketServer.handle_error."""
        self._logger.error(
            "Exception in processing request from: {0}\n{1}".format(
                client_address, util.get_stack_trace()))

    def get_request(self):
        """Override TCPServer.get_request to wrap the OpenSSL.SSL.Connection
        object with _StandaloneSSLConnection to provide makefile method.

        We cannot substitute OpenSSL.SSL.Connection.makefile since it's
        readonly attribute.
        """
        accepted_socket, client_address = self.socket.accept()

        server_options = self.websocket_server_options
        if server_options.use_tls:
            if server_options.tls_module == _TLS_BY_STANDARD_MODULE:
                try:
                    accepted_socket.do_handshake()
                except ssl.SSLError as e:
                    # BUGFIX: Python 3 exceptions have no `.message`
                    self._logger.debug("{0}".format(e))
                    raise

                # Print cipher in use. Handshake is done on accept.
                self._logger.debug("Cipher: {0}".format(
                    accepted_socket.cipher()))
                self._logger.debug("Client cert: {0}".format(
                    accepted_socket.getpeercert()))
            elif server_options.tls_module == _TLS_BY_PYOPENSSL:
                # We cannot print the cipher in use; pyOpenSSL does not
                # provide any method to fetch that
                ctx = OpenSSL.SSL.Context(OpenSSL.SSL.SSLv23_METHOD)
                ctx.use_privatekey_file(server_options.private_key)
                ctx.use_certificate_file(server_options.certificate)

                def default_callback(conn, cert, errnum, errdepth, ok):
                    return ok == 1

                # See the OpenSSL document for SSL_CTX_set_verify.
                if server_options.tls_client_auth:
                    verify_mode = OpenSSL.SSL.VERIFY_PEER
                    if not server_options.tls_client_cert_optional:
                        verify_mode |= OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT
                    ctx.set_verify(verify_mode, default_callback)
                    ctx.load_verify_locations(
                        server_options.tls_client_ca, None)
                else:
                    ctx.set_verify(OpenSSL.SSL.VERIFY_NONE, default_callback)
                accepted_socket = OpenSSL.SSL.Connection(
                    ctx, accepted_socket)
                accepted_socket.set_accept_state()

                # Convert SSL related error into socket.error so that
                # SocketServer ignores them and keeps running.
                #
                # TODO(tyoshino): Convert all kinds of errors.
                try:
                    accepted_socket.do_handshake()
                except OpenSSL.SSL.Error as e:
                    # Set errno part to 1 (SSL_ERROR_SSL) like the ssl
                    # module does.
                    self._logger.debug('{0!r}'.format(e))
                    raise socket.error(1, repr(e))
                cert = accepted_socket.get_peer_certificate()
                self._logger.debug("Client cert subject: {0}".format(
                    cert.get_subject().get_components()))
                accepted_socket = _StandaloneSSLConnection(accepted_socket)
            else:
                raise ValueError('No TLS support module is available')

        return accepted_socket, client_address

    def serve_forever(self, poll_interval=0.5):
        """Override SocketServer.BaseServer.serve_forever."""
        self.__ws_serving = True
        self.__ws_is_shut_down.clear()
        handle_request = self.handle_request
        if hasattr(self, '_handle_request_noblock'):
            handle_request = self._handle_request_noblock
        else:
            self._logger.warning(_('Fallback to blocking request handler'))
        try:
            while self.__ws_serving:
                r, w, e = select.select(
                    [socket_[0] for socket_ in self._sockets],
                    [], [], poll_interval)
                for socket_ in r:
                    self.socket = socket_
                    handle_request()
                self.socket = None
        finally:
            self.__ws_is_shut_down.set()

    def shutdown(self):
        """Override SocketServer.BaseServer.shutdown."""
        self.__ws_serving = False
        self.__ws_is_shut_down.wait()
def shutdown(self):
    """Ask the worker loop to stop and wait briefly for it to wind down.

    Clears the running flag so the loop exits on its next check, enqueues
    a sentinel "quit" job so a worker blocked on the job queue wakes up,
    then waits at most one second on the closing event.
    """
    self.running.clear()
    # Fresh event the worker is expected to set once it has finished
    # closing down; created here so stale signals cannot satisfy the wait
    self.closing = Event()
    # assumes self.jobs is a queue-like object consumed by the worker
    # loop, which treats "quit" as its stop sentinel — TODO confirm
    self.jobs.put("quit")
    # Bounded wait: do not hang forever if the worker never acknowledges
    self.closing.wait(1)
class Core(object):
    """Central pyLoad application object.

    Owns configuration, logging, the database, all managers and the API,
    and drives the main work loop in :meth:`run`. ``Restart``/``Exit`` are
    raised inside the loop and translated into :meth:`restart`/:meth:`exit`.
    """

    DEFAULT_CONFIGNAME = 'config.ini'
    DEFAULT_LANGUAGE = 'english'
    DEFAULT_USERNAME = '******'
    DEFAULT_PASSWORD = '******'
    DEFAULT_STORAGENAME = 'downloads'

    @property
    def version(self):
        return __version__

    @property
    def version_info(self):
        return __version_info__

    @property
    def running(self):
        return self.__running.is_set()

    def __init__(self, cfgdir, tmpdir, debug=None, restore=False):
        """Create the core rooted at *cfgdir*.

        :param cfgdir: configuration directory (becomes the process cwd).
        :param tmpdir: cache/temporary directory.
        :param debug: override for the configured `log.debug` flag.
        :param restore: reset login credentials to the defaults.
        """
        self.__running = Event()
        self.__do_restart = False
        self.__do_exit = False
        self._ = lambda x: x  #: identity until a gettext catalog is loaded

        self.cfgdir = fullpath(cfgdir)
        self.tmpdir = fullpath(tmpdir)
        os.chdir(self.cfgdir)

        self.config = ConfigParser(self.DEFAULT_CONFIGNAME)
        self.debug = self.config.get(
            'log', 'debug') if debug is None else debug
        self.log = LoggerFactory(self, self.debug)

        self._init_database(restore)
        self._init_managers()
        self.request = self.req = RequestFactory(self)
        self._init_api()

        atexit.register(self.exit)

    def _init_api(self):
        """Create the public API facade."""
        from pyload.api import Api
        self.api = Api(self)

    def _init_database(self, restore):
        """Open the database; (re)create default credentials when needed."""
        from pyload.core.database import DatabaseBackend
        from pyload.core.datatype import Permission, Role

        # TODO: Move inside DatabaseBackend
        newdb = not os.path.isfile(DatabaseBackend.DB_FILE)
        self.db = DatabaseBackend(self)
        self.db.setup()

        if restore or newdb:
            self.db.add_user(
                self.DEFAULT_USERNAME, self.DEFAULT_PASSWORD,
                Role.Admin, Permission.All)
        if restore:
            self.log.warning(
                self._('Restored default login credentials `admin|pyload`'))

    def _init_managers(self):
        """Instantiate all managers; short aliases mirror the long names."""
        from pyload.core.manager import (
            AccountManager, AddonManager, EventManager, ExchangeManager,
            FileManager, InfoManager, PluginManager, TransferManager)

        self.scheduler = sched.scheduler(time.time, time.sleep)
        self.filemanager = self.files = FileManager(self)
        self.pluginmanager = self.pgm = PluginManager(self)
        self.exchangemanager = self.exm = ExchangeManager(self)
        self.eventmanager = self.evm = EventManager(self)
        self.accountmanager = self.acm = AccountManager(self)
        self.infomanager = self.iom = InfoManager(self)
        self.transfermanager = self.tsm = TransferManager(self)
        # TODO: Remove builtins.ADDONMANAGER
        builtins.ADDONMANAGER = self.addonmanager = self.adm = AddonManager(
            self)
        self.db.manager = self.files  # ugly?

    def _setup_permissions(self):
        """Drop process group/user on POSIX systems when configured."""
        self.log.debug('Setup permissions...')

        if os.name == 'nt':
            return  # no gid/uid switching on Windows

        change_group = self.config.get('permission', 'change_group')
        change_user = self.config.get('permission', 'change_user')

        if change_group:
            try:
                group = self.config.get('permission', 'group')
                set_process_group(group)
            except Exception as exc:
                self.log.error(self._('Unable to change gid'))
                self.log.error(exc, exc_info=self.debug)

        if change_user:
            try:
                user = self.config.get('permission', 'user')
                set_process_user(user)
            except Exception as exc:
                self.log.error(self._('Unable to change uid'))
                self.log.error(exc, exc_info=self.debug)

    def set_language(self, lang):
        """Install the gettext catalog for *lang* (e.g. ``'en'``, ``'de'``)."""
        domain = 'core'
        localedir = resource_filename(__package__, 'locale')
        languages = (locale.locale_alias[lang.lower()].split('_', 1)[0],)
        self._set_language(domain, localedir, languages)

    def _set_language(self, *args, **kwargs):
        """Bind ``self._`` to the translation function of a catalog."""
        trans = gettext.translation(*args, **kwargs)
        try:
            self._ = trans.ugettext  # Python 2 catalogs
        except AttributeError:
            self._ = trans.gettext

    def _setup_language(self):
        """Pick the configured or system language, falling back to English."""
        self.log.debug('Setup language...')
        lang = self.config.get('general', 'language')
        if not lang:
            lc = locale.getlocale()[0] or locale.getdefaultlocale()[0]
            lang = lc.split('_', 1)[0] if lc else 'en'
        try:
            self.set_language(lang)
        except IOError as exc:
            self.log.error(exc, exc_info=self.debug)
            self._set_language('core', fallback=True)

    def _setup_storage(self):
        """Ensure the download folder exists and log the available space."""
        self.log.debug('Setup storage...')

        storage_folder = self.config.get('general', 'storage_folder')
        if storage_folder is None:
            storage_folder = os.path.join(
                builtins.USERDIR, self.DEFAULT_STORAGENAME)
        # BUGFIX: format outside the translation call; the original
        # translated the already-formatted string, so the catalog msgid
        # ('Storage: {0}') could never match
        self.log.info(self._('Storage: {0}').format(storage_folder))
        makedirs(storage_folder, exist_ok=True)

        avail_space = format.size(availspace(storage_folder))
        self.log.info(
            self._('Available storage space: {0}').format(avail_space))

    def _setup_network(self):
        """Activate accounts and addons before downloads start."""
        self.log.debug('Setup network...')

        # TODO: Move to accountmanager
        self.log.info(self._('Activating accounts...'))
        self.acm.load_accounts()
        self.adm.activate_addons()

    def run(self):
        """Start all subsystems, then loop until Restart/Exit is raised."""
        self.log.info('Welcome to pyLoad v{0}'.format(self.version))
        if self.debug:
            self.log.warning('*** DEBUG MODE ***')
        try:
            self.log.debug('Starting pyLoad...')
            self.evm.fire('pyload:starting')
            self.__running.set()

            self._setup_language()
            self._setup_permissions()

            self.log.info(self._('Config directory: {0}').format(self.cfgdir))
            self.log.info(self._('Cache directory: {0}').format(self.tmpdir))

            self._setup_storage()
            self._setup_network()

            self.log.debug('pyLoad is up and running')
            self.evm.fire('pyload:started')

            self.tsm.pause = False  # NOTE: Recheck...
            while True:
                self.__running.wait()
                self.tsm.work()
                self.iom.work()
                self.exm.work()
                if self.__do_restart:
                    raise Restart
                if self.__do_exit:
                    raise Exit
                self.scheduler.run()
                time.sleep(1)

        except Restart:
            self.restart()
        except (Exit, KeyboardInterrupt, SystemExit):
            self.exit()
        except Exception as exc:
            self.log.critical(exc, exc_info=True)
            self.exit()

    def _remove_loggers(self):
        """Close and detach every log handler."""
        # BUGFIX: iterate over a copy — removeHandler mutates
        # self.log.handlers while the original looped over it directly,
        # which skips every other handler
        for handler in list(self.log.handlers):
            with closing(handler) as hdlr:
                self.log.removeHandler(hdlr)

    def restart(self):
        self.stop()
        self.log.info(self._('Restarting pyLoad...'))
        self.evm.fire('pyload:restarting')
        self.run()

    def exit(self):
        self.stop()
        self.log.info(self._('Exiting pyLoad...'))
        self.tsm.exit()
        self.db.exit()  # NOTE: Why here?
        self._remove_loggers()

    def stop(self):
        """Deactivate addons and downloads; always sync files and clear flags."""
        try:
            self.log.debug('Stopping pyLoad...')
            self.evm.fire('pyload:stopping')
            self.adm.deactivate_addons()
            self.api.stop_all_downloads()
        finally:
            self.files.sync_save()
            self.__running.clear()
            self.evm.fire('pyload:stopped')