def setUp(self):
    """Create an isolated in-memory pipeline app for each test.

    Replaces the module-level ``tasks.app`` with a sqlite-backed test
    instance so the Celery tasks under test use it; the original app is
    kept in ``self._app`` so tearDown can restore it.
    """
    unittest.TestCase.setUp(self)
    # Normalize to an absolute path — consistent with the other test
    # setUps in this project, which use os.path.abspath; avoids leaking
    # '..' components into anything derived from proj_home.
    self.proj_home = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '../..'))
    self._app = tasks.app  # saved so tearDown can undo the monkey-patch
    self.app = app.ADSMasterPipelineCelery('test', local_config={
        'SQLALCHEMY_URL': 'sqlite:///',  # in-memory database
        'SQLALCHEMY_ECHO': False,
        'SOLR_URLS': ['http://foo.bar.com/solr/v1'],
    })
    tasks.app = self.app  # monkey-patch the app object
    Base.metadata.bind = self.app._session.get_bind()
    Base.metadata.create_all()
def setUp(self):
    """Build a fresh in-memory (sqlite) pipeline app and create its schema."""
    unittest.TestCase.setUp(self)
    proj_home = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '../..'))
    # Both the records DB and the metrics DB run against throwaway sqlite.
    test_config = {
        'SQLALCHEMY_URL': 'sqlite:///',
        'METRICS_SQLALCHEMY_URL': 'sqlite:///',
        'SQLALCHEMY_ECHO': False,
        'PROJ_HOME': proj_home,
        'TEST_DIR': os.path.join(proj_home, 'adsmp/tests'),
    }
    self.app = app.ADSMasterPipelineCelery('test', local_config=test_config)
    Base.metadata.bind = self.app._session.get_bind()
    Base.metadata.create_all()
def setUp(self):
    """Create an in-memory pipeline app (metrics disabled) for each test.

    Monkey-patches the module-level ``tasks.app`` with the test instance;
    the original is kept in ``self._app`` so tearDown can restore it.
    """
    unittest.TestCase.setUp(self)
    # Normalize to an absolute path — consistent with the other test
    # setUps in this project, which use os.path.abspath.
    self.proj_home = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '../..'))
    self._app = tasks.app  # saved so tearDown can undo the monkey-patch
    self.app = app.ADSMasterPipelineCelery('test', local_config={
        'SQLALCHEMY_URL': 'sqlite:///',  # in-memory database
        'SQLALCHEMY_ECHO': False,
        'SOLR_URLS': ['http://foo.bar.com/solr/v1'],
        'METRICS_SQLALCHEMY_URL': None,  # metrics DB disabled in this suite
        'LINKS_RESOLVER_UPDATE_URL': 'http://localhost:8080/update',
        'ADS_API_TOKEN': 'api_token',
    })
    tasks.app = self.app  # monkey-patch the app object
    Base.metadata.bind = self.app._session.get_bind()
    Base.metadata.create_all()
def setUp(self):
    """Build a pipeline app on sqlite plus a local postgres metrics DB,
    then create both schemas."""
    unittest.TestCase.setUp(self)
    # NOTE(review): `config` is never read below — presumably kept for
    # load_config()'s side effects; confirm before removing.
    config = load_config()
    proj_home = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '../..'))
    local_config = {
        'SQLALCHEMY_URL': 'sqlite:///',
        'METRICS_SQLALCHEMY_URL': 'postgresql://[email protected]:15678/test',
        'SQLALCHEMY_ECHO': True,
        'PROJ_HOME': proj_home,
        'TEST_DIR': os.path.join(proj_home, 'adsmp/tests'),
    }
    self.app = app.ADSMasterPipelineCelery('test', local_config=local_config)
    # Records schema on the sqlite engine, metrics schema on postgres.
    Base.metadata.bind = self.app._session.get_bind()
    Base.metadata.create_all()
    MetricsBase.metadata.bind = self.app._metrics_engine
    MetricsBase.metadata.create_all()
from __future__ import absolute_import, unicode_literals
from past.builtins import basestring
import os
import adsputils
from adsmp import app as app_module
from adsmp import solr_updater
from kombu import Queue
from adsmsg.msg import Msg

# ============================= INITIALIZATION ==================================== #

# Project root (one level above this module); passed to the app so it can
# locate its configuration relative to the checkout.
proj_home = os.path.realpath(os.path.join(os.path.dirname(__file__), '../'))
# `local_config` may be injected into this module's globals (e.g. by tests)
# before import; default to an empty override.
app = app_module.ADSMasterPipelineCelery('master-pipeline', proj_home=proj_home,
                                         local_config=globals().get('local_config', {}))
logger = app.logger

# Declare the queues this worker consumes; routing keys mirror queue names.
app.conf.CELERY_QUEUES = (
    Queue('update-record', app.exchange, routing_key='update-record'),
    Queue('index-records', app.exchange, routing_key='index-records'),
    Queue('rebuild-index', app.exchange, routing_key='rebuild-index'),
    Queue('delete-records', app.exchange, routing_key='delete-records'),
)

# ============================= TASKS ============================================= #

@app.task(queue='update-record')
def task_update_record(msg):
    """Receives payload to update the record.

    @param msg: protobuff that contains at minimum
from __future__ import absolute_import, unicode_literals
import adsputils
from adsmp import app as app_module
from adsmp import solr_updater
from kombu import Queue
import math
from adsmsg import MetricsRecord, NonBibRecord

# ============================= INITIALIZATION ==================================== #

app = app_module.ADSMasterPipelineCelery('master-pipeline')
logger = app.logger

# Declare the queues this worker consumes.
app.conf.CELERY_QUEUES = (
    Queue('update-record', app.exchange, routing_key='update-record'),
    # NOTE(review): routing_key 'route-record' does not match the queue name
    # 'index-records' (the sibling tasks module binds it as 'index-records')
    # — confirm whether this is intentional.
    Queue('index-records', app.exchange, routing_key='route-record'),
    Queue('delete-records', app.exchange, routing_key='delete-records'),
)

# ============================= TASKS ============================================= #

@app.task(queue='update-record')
def task_update_record(msg):
    """Receives payload to update the record.

    @param msg: protobuff that contains at minimum
        - bibcode
        - and specific payload
    """
    logger.debug('Updating record: %s', msg)