def setUp(self):
    """Flush the Redis test database before each test.

    Safety: verifies the open connection really points at the TEST
    database before calling ``flushdb()``, so a misconfiguration can
    never wipe the default (real) database.
    """
    # Ensure that we are on the right DB before flushing.
    # Explicit raises instead of `assert`: asserts are stripped under
    # `python -O`, which would remove the only guard against flushing
    # the wrong database.
    current_db_id = self.connection.connection_pool.connection_kwargs['db']
    if current_db_id == DEFAULT_CONNECTION_SETTINGS['db']:
        raise RuntimeError('Refusing to flush: connected to the default (non-test) database')
    if current_db_id != TEST_CONNECTION_SETTINGS['db']:
        raise RuntimeError('Refusing to flush: not connected to the test database')
    self.connection.flushdb()
    # make jobs models use the test database
    BaseJobsModel.use_database(self.database)
Queue as LimpydQueue, Error as LimpydError, ) from limpyd_jobs.utils import compute_delayed_until from limpyd_jobs.workers import Worker as LimpydWorker, logger from core import get_main_limpyd_database from core.ghpool import ApiError from core.models import GithubUser from . import JobRegistry logger.addHandler(settings.WORKERS_LOGGER_CONFIG['handler']) BaseJobsModel.use_database(get_main_limpyd_database()) NAMESPACE = 'gim' class Queue(LimpydQueue): namespace = NAMESPACE class Error(LimpydError): namespace = NAMESPACE # store the result of the githubapi request and response in case of error # they are set by json.dumps gh_request = fields.InstanceHashField() gh_response = fields.InstanceHashField()
from gim.core.ghpool import ApiError
from gim.core.models import GithubUser

from . import JobRegistry

# Route worker log output through the handler configured in settings.
logger.addHandler(settings.WORKERS_LOGGER_CONFIG["handler"])

# The thread-local container MUST live at module level. The original code
# created a fresh `local()` inside the function, so the `hasattr` check was
# always False and a new PipelineDatabase was constructed on every call —
# the per-thread cache never actually cached anything.
_jobs_thread_data = local()


def get_jobs_limpyd_database():
    """Return this thread's PipelineDatabase for job models.

    Lazily creates one PipelineDatabase per thread (from
    ``settings.WORKERS_REDIS_CONFIG``) and reuses it on subsequent calls
    made by the same thread.
    """
    if not hasattr(_jobs_thread_data, "main_limpyd_database"):
        _jobs_thread_data.main_limpyd_database = PipelineDatabase(**settings.WORKERS_REDIS_CONFIG)
    return _jobs_thread_data.main_limpyd_database


# All job models persist into the (thread-local) jobs database.
BaseJobsModel.use_database(get_jobs_limpyd_database())

# Redis key namespace shared by every queue/error model in this module.
NAMESPACE = "gim"


class Queue(LimpydQueue):
    """Project-namespaced variant of the limpyd-jobs Queue."""
    namespace = NAMESPACE


class Error(LimpydError):
    """Project-namespaced variant of the limpyd-jobs Error model."""
    namespace = NAMESPACE

    # store the result of the githubapi request and response in case of error
    # they are set by json.dumps
    gh_request = fields.InstanceHashField()
    gh_response = fields.InstanceHashField()