class WhenTestingVersionResource(unittest.TestCase):
    """Exercises the GET handler of VersionResource."""

    def setUp(self):
        # Mock request/response pair plus a real resource under test.
        self.req = MagicMock()
        self.resp = MagicMock()
        self.resource = VersionResource()

    def test_should_return_200_on_get(self):
        self.resource.on_get(self.req, self.resp)
        self.assertEqual(falcon.HTTP_200, self.resp.status)

    def test_should_return_version_json(self):
        self.resource.on_get(self.req, self.resp)
        body = jsonutils.loads(self.resp.body)
        self.assertIn("v1", body)
        self.assertEqual("current", body["v1"])
def start_up():
    """Build the correlation WSGI application and launch helper processes.

    Wires the version and publish endpoints into a falcon API, starts a
    syslog server plus its I/O loop in a child process, configures the
    celery beat schedule, starts a celery worker process, and returns the
    WSGI application object.

    :returns: the falcon.API WSGI application
    """
    try:
        config.init_config()
    except config.cfg.ConfigFilesNotFoundError as ex:
        # BaseException.message is deprecated (and removed in Python 3);
        # passing the exception object itself logs the same information
        # portably. logger.exception also appends the active traceback.
        _LOG.exception(ex)

    conf = config.get_config()

    application = api = falcon.API()

    api.add_route('/', VersionResource())

    # http correlation endpoint
    api.add_route('/v1/tenant/{tenant_id}/publish', PublishMessageResource())

    # syslog correlation endpoint: bind on all interfaces, standard-ish
    # syslog test port 5140, then run the I/O loop in its own process.
    server = SyslogServer(("0.0.0.0", 5140), syslog.MessageHandler(conf))
    server.start()
    syslog_server_proc = Process(target=start_io)
    syslog_server_proc.start()
    _LOG.info('Syslog server started as process: {}'.format(
        syslog_server_proc.pid))

    celery.conf.CELERYBEAT_SCHEDULE = {
        'worker_stats': {
            'task': 'stats.publish',
            'schedule': timedelta(seconds=publish_stats.WORKER_STATUS_INTERVAL)
        },
    }

    #include blank argument to celery in order for beat to start correctly
    celery_proc = Process(target=celery.worker_main, args=[['', '--beat']])
    celery_proc.start()
    _LOG.info('Celery started as process: {}'.format(celery_proc.pid))

    return application
def start_up():
    """
    This persona hosts resources from a few different APIs in order to
    facilitate the bootstrap buildout of a brand new meniscus grid. This
    persona effectively allows the new worker to pair with itself.
    """
    # Datastore adapter/session manager
    datastore = datasource_handler(COORDINATOR_DB)

    # Resources: pairing plus worker registration/status endpoints.
    route_map = [
        # Pairing Routing
        ('/', VersionResource()),
        ('/v1/pairing/configure', PairingConfigurationResource()),
        # Worker Registration Routing
        ('/v1/pairing', WorkerRegistrationResource(datastore)),
        ('/v1/worker/{worker_id}/status', WorkerStatusResource(datastore)),
    ]

    # Routing
    application = api = falcon.API()
    for uri_template, resource in route_map:
        api.add_route(uri_template, resource)

    return application
class WhenTestingVersionResource(unittest.TestCase):
    """Unit tests for VersionResource.on_get."""

    def setUp(self):
        self.req = MagicMock()
        self.resp = MagicMock()
        self.resource = VersionResource()

    def test_should_return_200_on_get(self):
        # A GET must always report success.
        self.resource.on_get(self.req, self.resp)
        self.assertEqual(falcon.HTTP_200, self.resp.status)

    def test_should_return_version_json(self):
        # The body must advertise v1 as the current API version.
        self.resource.on_get(self.req, self.resp)
        versions = jsonutils.loads(self.resp.body)
        self.assertTrue('v1' in versions)
        self.assertEqual('current', versions['v1'])
def start_up():
    """Assemble and return the pairing WSGI application.

    :returns: the falcon.API WSGI application
    """
    # Routing
    application = api = falcon.API()

    # Resources
    api.add_route('/', VersionResource())
    api.add_route('/v1/pairing/configure', PairingConfigurationResource())

    return application
def start_up():
    """Build the correlation WSGI application and launch helper processes.

    Wires the version and publish endpoints into a falcon API, starts the
    ZeroMQ reception server, the celery worker (with beat), and the
    ElasticSearch bulk flusher, each in its own child process, then
    returns the WSGI application object.

    :returns: the falcon.API WSGI application
    """
    try:
        config.init_config()
    except config.cfg.ConfigFilesNotFoundError as ex:
        # BaseException.message is deprecated (and removed in Python 3);
        # passing the exception object itself logs the same information
        # portably. logger.exception also appends the active traceback.
        _LOG.exception(ex)

    application = api = falcon.API()

    api.add_route('/', VersionResource())

    # http correlation endpoint
    api.add_route('/v1/tenant/{tenant_id}/publish', PublishMessageResource())

    # syslog correlation endpoint
    server = receiver.new_correlation_input_server()
    server_proc = Process(target=server.start)
    server_proc.start()
    _LOG.info(
        'ZeroMQ reception server started as process: {}'.format(
            server_proc.pid)
    )

    celery.conf.CELERYBEAT_SCHEDULE = {
        'worker_stats': {
            'task': 'stats.publish',
            'schedule': timedelta(seconds=publish_stats.WORKER_STATUS_INTERVAL)
        },
    }

    #include blank argument to celery in order for beat to start correctly
    celery_proc = Process(target=celery.worker_main, args=[['', '--beat']])
    celery_proc.start()
    _LOG.info(
        'Celery started as process: {}'.format(celery_proc.pid)
    )

    es_flusher = ElasticSearchStreamBulker()
    flush_proc = Process(target=es_flusher.start)
    flush_proc.start()
    _LOG.info(
        'ElasticSearchStreamBulker started as process: {}'.format(
            flush_proc.pid)
    )

    return application
def start_up():
    """Assemble the WSGI app and launch the celery worker with beat.

    Registers the version endpoint, installs the hdfs flush and worker
    stats beat schedules, then forks the celery worker.

    :returns: the falcon.API WSGI application
    """
    application = api = falcon.API()
    api.add_route('/', VersionResource())

    celery.conf.CELERYBEAT_SCHEDULE = {
        'hdfs': {
            'task': 'hdfs.send',
            'schedule': timedelta(seconds=hdfs.FREQUENCY)
        },
        'worker_stats': {
            'task': 'stats.publish',
            'schedule': timedelta(seconds=publish_stats.WORKER_STATUS_INTERVAL)
        },
    }

    #include blank argument to celery in order for beat to start correctly
    worker_proc = Process(target=celery.worker_main, args=[['', '--beat']])
    worker_proc.start()

    return application
def start_up():
    """Assemble and return the coordinator WSGI application.

    Builds coordinator (worker registration/status) and tenant resources
    against the coordinator datastore and mounts them on a falcon API.

    :returns: the falcon.API WSGI application
    """
    #Datastore adapter/session manager
    datastore = datasource_handler(COORDINATOR_DB)

    # Create API
    application = api = falcon.API()

    # Common Routing
    api.add_route('/', VersionResource())

    # Coordinator Routing
    api.add_route('/v1/pairing', WorkerRegistrationResource(datastore))
    api.add_route('/v1/worker/{worker_id}/status',
                  WorkerStatusResource(datastore))
    api.add_route('/v1/status', WorkersStatusResource(datastore))

    # Tenant Routing
    api.add_route('/v1/tenant', TenantResource(datastore))
    api.add_route('/v1/tenant/{tenant_id}', UserResource(datastore))
    api.add_route('/v1/tenant/{tenant_id}/producers',
                  EventProducersResource(datastore))
    api.add_route('/v1/tenant/{tenant_id}/producers/{event_producer_id}',
                  EventProducerResource(datastore))
    api.add_route('/v1/tenant/{tenant_id}/token', TokenResource(datastore))

    return application
def start_up():
    """Assemble the tenant WSGI app and launch the celery beat worker.

    :returns: the falcon.API WSGI application
    """
    #Datastore adapter/session manager
    datastore = datasource_handler(COORDINATOR_DB)

    # Create API
    application = api = falcon.API()

    # Version Routing
    api.add_route('/', VersionResource())

    # Tenant Routing
    tenant_routes = [
        ('/v1/tenant', TenantResource(datastore)),
        ('/v1/tenant/{tenant_id}', UserResource(datastore)),
        ('/v1/tenant/{tenant_id}/producers',
         EventProducersResource(datastore)),
        ('/v1/tenant/{tenant_id}/producers/{event_producer_id}',
         EventProducerResource(datastore)),
        ('/v1/tenant/{tenant_id}/token', TokenResource(datastore)),
    ]
    for uri_template, resource in tenant_routes:
        api.add_route(uri_template, resource)

    celery.conf.CELERYBEAT_SCHEDULE = {
        'worker_stats': {
            'task': 'stats.publish',
            'schedule': timedelta(seconds=publish_stats.WORKER_STATUS_INTERVAL)
        },
    }

    #include blank argument to celery in order for beat to start correctly
    worker_proc = Process(target=celery.worker_main, args=[['', '--beat']])
    worker_proc.start()
    _LOG.info('Celery started as process: {}'.format(worker_proc.pid))

    return application
def start_up():
    """Assemble the combined WSGI app and launch the celery worker.

    :returns: the falcon.API WSGI application
    """
    # Create API
    application = api = falcon.API()

    # Common Routing
    api.add_route('/', VersionResource())
    api.add_route('/v1/worker/{hostname}/status', WorkerStatusResource())
    api.add_route('/v1/status', WorkersStatusResource())

    # Tenant Routing
    api.add_route('/v1/tenant', TenantResource())
    api.add_route('/v1/tenant/{tenant_id}', UserResource())
    api.add_route('/v1/tenant/{tenant_id}/producers', EventProducersResource())
    api.add_route('/v1/tenant/{tenant_id}/producers/{event_producer_id}',
                  EventProducerResource())
    api.add_route('/v1/tenant/{tenant_id}/token', TokenResource())

    # Run the celery worker as a child process (no beat in this variant).
    worker_proc = Process(target=celery.worker_main)
    worker_proc.start()
    _LOG.info('Celery started as process: {}'.format(worker_proc.pid))

    return application
def setUp(self):
    # Fresh mock request/response pair and a real resource for each test,
    # so assertions on one test's response cannot leak into another.
    self.req = MagicMock()
    self.resp = MagicMock()
    self.resource = VersionResource()
from meniscus.api.tenant.resources import EventProducerResource
from meniscus.api.tenant.resources import EventProducersResource
from meniscus.api.tenant.resources import UserResource
from meniscus.api.tenant.resources import TenantResource
from meniscus.api.tenant.resources import TokenResource
from meniscus.api.version.resources import VersionResource
from meniscus.data.datastore import COORDINATOR_DB, datasource_handler
from meniscus import env
from meniscus.openstack.common import log

# Module-level setup: configure logging once at import time.
log.setup('meniscus')
_LOG = env.get_logger(__name__)

#Common Resource(s)
versions = VersionResource()

#Coordinator Resources
# NOTE(review): WorkerRegistrationResource, WorkersStatusResource and
# WorkerStatusResource are used below but not imported in this view --
# presumably imported earlier in the file; confirm.
db_handler = datasource_handler(COORDINATOR_DB)
worker_registration = WorkerRegistrationResource(db_handler)
workers_status = WorkersStatusResource(db_handler)
worker_status = WorkerStatusResource(db_handler)

#Tenant Resources
# All tenant resources share the coordinator datastore session handler.
tenant = TenantResource(db_handler)
user = UserResource(db_handler)
event_producers = EventProducersResource(db_handler)
event_producer = EventProducerResource(db_handler)
token = TokenResource(db_handler)

# Create API