def run_matrix_bot(options):
    """Start the Matrix bot publisher.

    Enables Laniakea's verbose logging first when requested via the
    command-line options, then runs the publisher (blocking).
    """
    if options.verbose:
        from laniakea.logging import set_verbose
        set_verbose(True)

    publisher = MatrixPublisher()
    publisher.run()
def run_matrix_bot(options):
    """Run the Matrix bot inside a dedicated asyncio event loop.

    A fresh event loop is created and installed, the publisher coroutine
    is run to completion, and on exit (normal or not) the publisher is
    stopped and the loop's async generators are shut down before the
    loop is closed.
    """
    if options.verbose:
        from laniakea.logging import set_verbose
        set_verbose(True)

    event_loop = asyncio.new_event_loop()
    asyncio.set_event_loop(event_loop)

    publisher = MatrixPublisher()
    try:
        event_loop.run_until_complete(publisher.run())
    finally:
        # always clean up, even if run() raised or was interrupted
        publisher.stop()
        event_loop.run_until_complete(event_loop.shutdown_asyncgens())
        event_loop.close()
def check_verbose(options):
    """Enable Laniakea's verbose logging if requested on the command line."""
    if not options.verbose:
        return
    from laniakea.logging import set_verbose
    set_verbose(True)
def check_verbose(options):
    """Enable debug-level output when verbose mode was requested.

    Configures the stdlib logger for DEBUG output and also switches
    Laniakea's own logging into verbose mode.
    """
    if not options.verbose:
        return
    log.basicConfig(level=log.DEBUG, format="[%(levelname)s] %(message)s")
    from laniakea.logging import set_verbose
    set_verbose(True)
def run_server(options):
    """Run the Lighthouse server.

    Spawns daemon worker processes for event-stream publishing, event-stream
    receiving and job handling, registers termination signal handlers,
    notifies systemd of readiness, then supervises the workers.  If any
    worker dies, all workers are torn down and the process exits with the
    dead worker's exit code.  Otherwise this function blocks forever.
    """
    import systemd.daemon

    from laniakea.localconfig import LocalConfig

    if options.config_fname:
        # load an explicitly requested configuration file
        LocalConfig(options.config_fname)
    if options.verbose:
        from laniakea.logging import set_verbose
        set_verbose(True)

    # LocalConfig is presumably a singleton configured above — TODO confirm
    lconf = LocalConfig()

    # TODO: Disable server features requiring the database if Lighthouse is
    # configured as relay, making it only forward requests to other instances.

    # event stream plumbing: a single publisher process fed by a queue
    # shared with all receiver/job processes
    pub_queue = None
    publish_endpoints = lconf.lighthouse.endpoints_publish
    if publish_endpoints:
        log.info('Creating event stream publisher.')
        pub_queue = Queue()
        spub = Process(target=run_events_publisher_server,
                       args=(publish_endpoints, pub_queue),
                       name='EventsPublisher',
                       daemon=True)
        spub.start()
        server_processes.append(spub)

    # spawn processes that handle event stream submissions
    log.info('Creating event stream receivers ({}).'.format(
        len(lconf.lighthouse.endpoints_submit)))
    for i, submit_endpoint in enumerate(lconf.lighthouse.endpoints_submit):
        p = Process(target=run_events_receiver_server,
                    args=(submit_endpoint, pub_queue),
                    name='EventsServer-{}'.format(i),
                    daemon=True)
        p.start()
        server_processes.append(p)

    # spawn processes to serve job requests
    log.info('Creating job handlers.')
    for i, jobs_endpoint in enumerate(lconf.lighthouse.endpoints_jobs):
        p = Process(target=run_jobs_server,
                    args=(jobs_endpoint, pub_queue),
                    name='JobsServer-{}'.format(i),
                    daemon=True)
        p.start()
        server_processes.append(p)

    # set up termination signal handler
    signal.signal(signal.SIGQUIT, term_signal_handler)
    signal.signal(signal.SIGTERM, term_signal_handler)
    signal.signal(signal.SIGINT, term_signal_handler)

    # signal readiness
    log.info('Ready.')
    systemd.daemon.notify('READY=1')

    # wait for processes to terminate (possibly forever); each join() uses a
    # 20s timeout so we periodically re-check liveness of every worker
    while True:
        for p in server_processes:
            p.join(20)
            if not p.is_alive():
                log.info('Server worker process has died, shutting down.')
                # one of our workers must have failed, shut down
                for pr in server_processes:
                    pr.terminate()
                    pr.join(10)
                    # forcefully kill in case terminate() did not finish the
                    # worker within the join timeout (no-op if already dead)
                    pr.kill()
                # propagate the dead worker's exit code to our caller
                sys.exit(p.exitcode)
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.

import os
import sys

import pytest

from laniakea import LocalConfig
from laniakea.logging import set_verbose
from laniakea.utils import random_string
from laniakea.db import LkModule

# unconditionally enable verbose mode
set_verbose(True)


@pytest.fixture(scope='session')
def samplesdir():
    """Return the location of the static test data the tests may use."""
    from . import source_root

    path = os.path.join(source_root, 'tests', 'test_data')
    if os.path.isdir(path):
        return path
    raise Exception('Unable to find test samples directory in {}'.format(path))