Example #1
    def _start_tasks(self):
        tasks = [
            Task('_xmlrpc', start_xmlrpc_server, self._xmlrpc_conn2),
            # This does not work nicely with the NTK lib (maybe related to the
            # multiprocessing lib). It must be executed as a separate process for now.
            #Task('_flask', start_flask_server),
            Task('_updater', start_plugins_update_scheduler,
                 self._updater_event, self._doing_backup),
            Task('_backup', start_backup_scheduler, self._doing_backup),
        ]
        # TODO: Make those work on Windows
        if not _is_windows:
            tasks.extend([
                Task('_htsql', start_htsql, self._htsql_port),
                Task('_server', start_server),
                Task('_rtc', start_rtc),
            ])

        manager = get_plugin_manager()
        for plugin_name in manager.installed_plugins_names:
            plugin = manager.get_plugin(plugin_name)
            if not hasattr(plugin, 'get_server_tasks'):
                continue

            # FIXME: Check that the plugin implements IPluginTask when
            # Stoq 1.11 is released
            for plugin_task in plugin.get_server_tasks():
                task_name = plugin_task.name
                name = _get_plugin_task_name(plugin_name, task_name)
                if self._manager.is_running(name):
                    continue

                kwargs = {}
                if plugin_task.handle_actions:
                    conn1, conn2 = multiprocessing.Pipe(True)
                    self._plugins_pipes[name] = conn1
                    kwargs['pipe_connection'] = conn2

                # Since Windows has no os.fork, multiprocessing will actually
                # run the process again and pass the required objects by
                # pickling them. For some reason, passing a plugin task will
                # break some places, since it will make some objects
                # like PluginManager be pickled/unpickled, and when unpickling
                # it will run its constructor again, but it should wait
                # to do that until we have configured the database.
                func = (plugin_name, task_name)

                tasks.append(Task(name, func, **kwargs))

        for task in tasks:
            if not self._manager.is_running(task.name):
                self._manager.run_task(task)

        # Close the default store because it is not functioning anymore since
        # the forked processes closed its "clone", then open a new one right
        # away or else Stoq will not be able to find this instance
        set_default_store(None)
        get_default_store()
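The comment above explains why the task is queued as a (plugin_name, task_name) tuple instead of the callable itself. A minimal sketch of how the spawned side could resolve that tuple back into a start callable, assuming plugin tasks expose a start method as in Example #5 (the helper name _resolve_plugin_task is hypothetical):

def _resolve_plugin_task(func):
    # Hypothetical helper: turn the pickled (plugin_name, task_name) tuple
    # back into the plugin task's start callable. This lookup should only
    # happen in the child process, after the database has been configured,
    # which is exactly the constraint described in the comment above.
    if callable(func):
        return func
    plugin_name, task_name = func
    manager = get_plugin_manager()
    plugin = manager.get_plugin(plugin_name)
    for plugin_task in plugin.get_server_tasks():
        if plugin_task.name == task_name:
            return plugin_task.start
    raise ValueError("Unknown task %r for plugin %r" % (task_name, plugin_name))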
Example #2
    def action_pause_tasks(self):
        logger.info("Pausing the tasks as requested...")

        if not self._paused:
            self._stop_tasks()
            # None will make the default store be closed
            set_default_store(None)
            self._paused = True

        return True, "Tasks paused successfully"
Example #3
def restore_database(user_hash, time=None):
    assert user_hash

    # If the database doesn't exist, get_default_store will fail
    try:
        default_store = get_default_store()
    except Exception:
        default_store = None

    if default_store is not None and db_settings.has_database():
        try:
            default_store.lock_database()
        except DatabaseError:
            raise TaskException(
                "Could not lock database. This means that there are other "
                "clients connected. Make sure to close every Stoq client "
                "before updating the database")
        except Exception:
            raise TaskException(
                "Database is empty or in a corrupted state. Fix or drop it "
                "before trying to proceed with the restore")
        else:
            default_store.unlock_database()

        # FIXME: Windows will not release the file for another process to
        # write to it. We should write our own TemporaryFile in Stoq
        # that handles all those cases for us and use it here
        with tempfile.NamedTemporaryFile(delete=False) as f:
            pass
        try:
            if not db_settings.dump_database(f.name):
                raise TaskException("Failed to dump the database")
            backup_name = db_settings.restore_database(f.name)
            logger.info("Created a backup of the current database state on %s",
                        backup_name)
        finally:
            os.unlink(f.name)

    tmp_path = tempfile.mkdtemp()
    try:
        restore_path = os.path.join(tmp_path, 'stoq')
        logger.info("restoring database to %s", restore_path)
        backup.restore(restore_path, user_hash, time=time)

        # None will make the default store be closed, which we need
        # to successfully restore the database
        set_default_store(None)
        db_settings.clean_database(db_settings.dbname, force=True)
        db_settings.execute_sql(os.path.join(restore_path, 'stoq.dump'),
                                lock_database=True)

        logger.info("Backup restore finished sucessfully")
    finally:
        # get_default_store will recreate it (since we closed it above)
        get_default_store()
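The FIXME above is about Windows keeping the open NamedTemporaryFile locked, which is why this version creates the file with delete=False, closes it immediately and unlinks it by hand. A minimal sketch of the kind of helper the FIXME asks for, assuming a plain close-then-delete context manager is enough (temporary_dump_file is a hypothetical name):

import contextlib
import os
import tempfile


@contextlib.contextmanager
def temporary_dump_file(suffix='.dump'):
    # Create the file already closed so another process (e.g. pg_dump on
    # Windows) can open and write to it, then remove it ourselves when done.
    fd, path = tempfile.mkstemp(suffix=suffix)
    os.close(fd)
    try:
        yield path
    finally:
        try:
            os.unlink(path)
        except OSError:
            pass

With such a helper, the dump/restore block above could run inside a single "with temporary_dump_file() as dump_name:" on both platforms.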
Example #4
def restore_database(user_hash, time=None):
    assert user_hash

    # If the database doesn't exist, get_default_store will fail
    try:
        default_store = get_default_store()
    except Exception:
        default_store = None

    if default_store is not None and db_settings.has_database():
        try:
            default_store.lock_database()
        except DatabaseError:
            raise TaskException(
                "Could not lock database. This means that there are other "
                "clients connected. Make sure to close every Stoq client "
                "before updating the database")
        except Exception:
            raise TaskException(
                "Database is empty or in a corrupted state. Fix or drop it "
                "before trying to proceed with the restore")
        else:
            default_store.unlock_database()

        with tempfile.NamedTemporaryFile() as f:
            if not db_settings.dump_database(f.name):
                raise TaskException("Failed to dump the database")
            backup_name = db_settings.restore_database(f.name)
            logger.info("Created a backup of the current database state on %s",
                        backup_name)

    tmp_path = tempfile.mkdtemp()
    try:
        # None will make the default store be closed, which we need
        # to successfully restore the database
        set_default_store(None)
        restore_path = os.path.join(tmp_path, 'stoq')

        backup.restore(restore_path, user_hash, time=time)

        db_settings.clean_database(db_settings.dbname, force=True)
        db_settings.execute_sql(os.path.join(restore_path, 'stoq.dump'),
                                lock_database=True)

        logger.info("Backup restore finished sucessfully")
    finally:
        # get_default_store will recreate it (since we closed it above)
        get_default_store()
        shutil.rmtree(tmp_path, ignore_errors=True)
Example #5
    def _start_tasks(self):
        tasks = [
            Task("_backup", start_backup_scheduler),
            Task("_server", start_server),
            Task("_rtc", start_rtc),
            Task("_xmlrpc", start_xmlrpc_server, self._xmlrpc_conn2),
            Task("_updater", start_plugins_update_scheduler, self._updater_event),
        ]

        manager = get_plugin_manager()
        for plugin_name in manager.installed_plugins_names:
            plugin = manager.get_plugin(plugin_name)
            if not hasattr(plugin, "get_server_tasks"):
                continue

            # FIXME: Check that the plugin implements IPluginTask when
            # Stoq 1.11 is released
            for plugin_task in plugin.get_server_tasks():
                task_name = plugin_task.name
                name = _get_plugin_task_name(plugin_name, task_name)
                if self._manager.is_running(name):
                    continue

                kwargs = {}
                if plugin_task.handle_actions:
                    conn1, conn2 = multiprocessing.Pipe(True)
                    self._plugins_pipes[name] = conn1
                    kwargs["pipe_connection"] = conn2

                tasks.append(Task(name, plugin_task.start, **kwargs))

        for task in tasks:
            if not self._manager.is_running(task.name):
                self._manager.run_task(task)

        # Close the default store because it is not functioning anymore since
        # the forked processes closed its "clone", then open a new one right
        # away or else Stoq will not be able to find this instance
        set_default_store(None)
        get_default_store()
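Both this example and Example #1 rely on just a few attributes of whatever get_server_tasks() returns. The class below is purely illustrative (not an actual Stoq interface) and only shows that assumed shape: a name, a handle_actions flag, and a start callable that accepts the pipe_connection keyword when handle_actions is True.

class ExamplePluginTask(object):
    # Illustrative only: the shape _start_tasks expects from
    # plugin.get_server_tasks().

    name = 'example'
    handle_actions = True

    def start(self, pipe_connection=None):
        # When handle_actions is True, the server passes the child end of a
        # multiprocessing.Pipe so the task can receive and answer actions.
        while pipe_connection is not None:
            action = pipe_connection.recv()
            pipe_connection.send('handled %s' % (action, ))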
Example #6
        def init():
            server = ServerProxy()
            running = yield server.check_running()
            if running:
                yield server.call('pause_tasks')
            # ServerProxy may have opened a store
            set_default_store(None)

            try:
                initialize_system(password=unicode(options.password),
                                  force=options.force, empty=options.empty)
            except ValueError as e:
                # Database server is missing pg_trgm
                if 'pg_trgm' in str(e):
                    api.asyncReturn(31)
                else:
                    raise

            if options.create_examples or options.demo:
                from stoqlib.importers.stoqlibexamples import create
                create(utilities=True)

            if options.register_station and not options.empty:
                self._register_station()

            if options.plugins:
                self._enable_plugins(unicode(options.plugins).split(','))

            if options.demo:
                self._enable_demo()

            config.flush()

            # The schema was upgraded. If it was running before,
            # restart it so it can load the new code
            if running:
                yield server.call('restart')
Example #7
 def _after_fork(self):
     # Close the default store so any call to get_default_store
     # will create a new one directly in the forked process
     set_default_store(None)
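A minimal sketch of where a hook like _after_fork would be called, assuming the task manager forks worker processes itself (run_task_in_child is a hypothetical helper, not Stoq API):

import os


def run_task_in_child(manager, func, *args):
    # Hypothetical: right after the fork, the child closes the parent's
    # default store via manager._after_fork() before doing any work, so
    # the next get_default_store() call creates a fresh connection.
    pid = os.fork()
    if pid == 0:
        manager._after_fork()
        func(*args)
        os._exit(0)
    return pid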
Example #8
    def cmd_init(self, options):
        """Creates and initializes a database"""
        # Create a database user before trying to connect
        if options.create_dbuser:
            if not options.username:
                raise SystemExit(
                    "This option requires a --username set")
            retval = self._create_dbuser(options.username)
            if retval != 0:
                return retval
        config = self._read_config(options, register_station=False,
                                   check_schema=False,
                                   load_plugins=False)

        from stoqlib.database.admin import initialize_system
        from stoqlib.database.runtime import set_default_store
        from stoqlib.database.settings import db_settings
        from stoqlib.net.server import ServerProxy
        if options.dbname:
            db_settings.dbname = options.dbname
        if options.address:
            db_settings.address = options.address
        if options.port:
            db_settings.port = options.port
        if options.username:
            db_settings.username = options.username
        if options.password:
            db_settings.password = options.password

        server = ServerProxy()
        running = server.check_running()
        if running:
            server.call('pause_tasks')
        # ServerProxy may have opened a store
        set_default_store(None)

        try:
            initialize_system(password=unicode(options.password),
                              force=options.force, empty=options.empty)
        except ValueError as e:
            # Database server is missing pg_trgm
            if 'pg_trgm' in str(e):
                return 31
            else:
                raise

        if options.create_examples or options.demo:
            from stoqlib.importers.stoqlibexamples import create
            create(utilities=True)

        if options.register_station and not options.empty:
            self._register_station()

        if options.pre_plugins:
            self._register_plugins(unicode(options.pre_plugins).split(','))

        if options.plugins:
            self._enable_plugins(unicode(options.plugins).split(','))

        if options.demo:
            self._enable_demo()

        config.flush()

        # The schema was upgraded. If it was running before,
        # restart it so it can load the new code
        if running:
            server.call('restart')

        return 0
Example #9
    def cmd_init(self, options):
        """Creates and initializes a database"""
        # Create a database user before trying to connect
        if options.create_dbuser:
            if not options.username:
                raise SystemExit("This option requires a --username set")
            retval = self._create_dbuser(options.username)
            if retval != 0:
                return retval
        config = self._read_config(options,
                                   register_station=False,
                                   check_schema=False,
                                   load_plugins=False)

        from stoqlib.database.admin import initialize_system
        from stoqlib.database.runtime import set_default_store
        from stoqlib.database.settings import db_settings
        from stoqlib.lib.pgpass import write_pg_pass
        from stoqlib.net.server import ServerProxy
        if options.dbname:
            db_settings.dbname = options.dbname
        if options.address:
            db_settings.address = options.address
        if options.port:
            db_settings.port = options.port
        if options.username:
            db_settings.username = options.username
        if options.password:
            db_settings.password = options.password
            # A password was sent via the command line. Make sure we can run
            # psql by setting up pgpass
            write_pg_pass(db_settings.dbname, db_settings.address,
                          db_settings.port, db_settings.username,
                          db_settings.password)

        server = ServerProxy()
        running = server.check_running()
        if running:
            server.call('pause_tasks')
        # ServerProxy may have opened a store
        set_default_store(None)

        try:
            initialize_system(password='',
                              force=options.force,
                              empty=options.empty)
        except ValueError as e:
            # Database server is missing pg_trgm
            if 'pg_trgm' in str(e):
                return 31
            else:
                raise

        if options.create_examples or options.demo:
            from stoqlib.importers.stoqlibexamples import create
            create(utilities=True)

        if options.register_station and not options.empty:
            self._register_station()

        if options.pre_plugins:
            self._register_plugins(str(options.pre_plugins).split(','))

        if options.plugins:
            self._enable_plugins(str(options.plugins).split(','))

        if options.demo:
            self._enable_demo()

        config.flush()

        # The schema was upgraded. If it was running before,
        # restart it so it can load the new code
        if running:
            server.call('restart')

        return 0
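The write_pg_pass call above exists so that psql and pg_dump can authenticate without prompting for a password. For reference, each entry in PostgreSQL's ~/.pgpass file is one line of the form below, and on Unix libpq ignores the file unless its permissions are 0600:

hostname:port:database:username:password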
Example #10
    def _start_tasks(self):
        tasks = [
            Task('_xmlrpc', start_xmlrpc_server, self._xmlrpc_conn2),
            # This does not work nicely with the NTK lib (maybe related to the
            # multiprocessing lib). It must be executed as a separate process for now.
            #Task('_flask', start_flask_server),
            Task('_backup', start_backup_scheduler, self._doing_backup),
        ]
        # TODO: Make those work on Windows
        if not _is_windows:
            tasks.extend([
                Task('_htsql', start_htsql, self._htsql_port),
                Task('_server', start_server),
            ])

        store = get_default_store()
        is_link = store.is_link_server()

        manager = get_plugin_manager()
        for plugin_name in manager.installed_plugins_names:
            plugin = manager.get_plugin(plugin_name)
            if not hasattr(plugin, 'get_server_tasks'):
                continue

            # FIXME: Check that the plugin implements IPluginTask when
            # Stoq 1.11 is released
            for plugin_task in plugin.get_server_tasks():
                link_only = getattr(plugin_task, 'link_only', False)
                if is_link != link_only:
                    continue

                task_name = plugin_task.name
                name = _get_plugin_task_name(plugin_name, task_name)
                if self._manager.is_running(name):
                    continue

                kwargs = {}
                if plugin_task.handle_actions:
                    conn1, conn2 = multiprocessing.Pipe(True)
                    self._plugins_pipes[name] = conn1
                    kwargs['pipe_connection'] = conn2

                # Since Windows has no os.fork, multiprocessing will actually
                # run the process again and pass the required objects by
                # pickling them. For some reason, passing a plugin task will
                # break some places, since it will make some objects
                # like PluginManager be pickled/unpickled, and when unpickling
                # it will run its constructor again, but it should wait
                # to do that until we have configured the database.
                func = (plugin_name, task_name)

                tasks.append(Task(name, func, **kwargs))

        for task in tasks:
            if not self._manager.is_running(task.name):
                self._manager.run_task(task)

        # Close the default store because it is not functioning anymore since
        # the forked processes closed its "clone", then open a new one right
        # away or else Stoq will not be able to find this instance
        set_default_store(None)
        get_default_store()
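The is_link != link_only check above means that tasks marked link_only only start on a link server (when store.is_link_server() is True), while regular tasks are skipped there. An illustrative task opting into that behaviour, using the same assumed shape as the sketch after Example #5:

class LinkOnlyExampleTask(object):
    # Illustrative only: link_only defaults to False via the getattr() call
    # in the loop above, so only tasks that set it explicitly are started
    # on link servers.
    name = 'link_sync_example'
    link_only = True
    handle_actions = False

    def start(self):
        pass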