@defer.inlineCallbacks
def start(container, starttype, *args, **kwargs):
    log.info('EPU Provisioner starting, startup type "%s"' % starttype)

    conf = ioninit.config(__name__)

    proc = [{'name': 'provisioner',
             'module': __name__,
             'class': ProvisionerService.__name__,
             'spawnargs': {'query_period': conf.getValue('query_period'),
                           'store': get_provisioner_store(conf),
                           'site_drivers': get_site_drivers(conf.getValue('sites')),
                           'context_client': get_context_client(conf)}}]

    app_supv_desc = ProcessDesc(name='Provisioner app supervisor',
                                module=app_supervisor.__name__,
                                spawnargs={'spawn-procs': proc})
    supv_id = yield app_supv_desc.spawn()
    res = (supv_id.full, [app_supv_desc])
    defer.returnValue(res)
@defer.inlineCallbacks
def start(container, starttype, *args, **kwargs):
    log.info('EPU Work Producer starting, startup type "%s"' % starttype)

    config_name = __name__
    if "ION_CONFIGURATION_SECTION" in os.environ:
        config_name = os.environ["ION_CONFIGURATION_SECTION"]
    conf = ioninit.config(config_name)

    spawnargs = {'queue_name_work': conf['queue_name_work'],
                 'listen_port': conf['listen_port'],
                 'servicename': conf['servicename']}

    proc = [{'name': 'epu_work_producer',
             'module': __name__,
             'class': EPUWorkProducer.__name__,
             'spawnargs': spawnargs}]

    app_supv_desc = ProcessDesc(name='EPU Work Producer app supervisor',
                                module=app_supervisor.__name__,
                                spawnargs={'spawn-procs': proc})
    supv_id = yield app_supv_desc.spawn()
    res = (supv_id.full, [app_supv_desc])
    defer.returnValue(res)
@defer.inlineCallbacks
def start(container, starttype, *args, **kwargs):
    log.info('EPU Controller List service starting, startup type "%s"' % starttype)

    conf = ioninit.config(__name__)
    controller_list_path = conf.getValue('controller_list_path', None)

    proc = [{'name': 'epu_controller_list',
             'module': __name__,
             'class': EPUControllerListService.__name__,
             'spawnargs': {'controller_list_path': controller_list_path}}]

    app_supv_desc = ProcessDesc(name='EPU Controller List supervisor',
                                module=app_supervisor.__name__,
                                spawnargs={'spawn-procs': proc})
    supv_id = yield app_supv_desc.spawn()
    res = (supv_id.full, [app_supv_desc])
    defer.returnValue(res)
@defer.inlineCallbacks
def start(container, starttype, *args, **kwargs):
    log.info('EPU DTRS starting, startup type "%s"' % starttype)

    conf = ioninit.config(__name__)
    dt_dir = conf.getValue('registry_dir', '/opt/dt-data/dt')

    # Required services.
    proc = [{'name': 'dtrs',
             'module': __name__,
             'class': DeployableTypeRegistryService.__name__,
             'spawnargs': {'registry_dir': dt_dir}}]

    app_supv_desc = ProcessDesc(name='DTRS app supervisor',
                                module=app_supervisor.__name__,
                                spawnargs={'spawn-procs': proc})
    supv_id = yield app_supv_desc.spawn()
    res = (supv_id.full, [app_supv_desc])
    defer.returnValue(res)
@defer.inlineCallbacks
def start(container, starttype, *args, **kwargs):
    log.info('EPU Queuestat starting, startup type "%s"' % starttype)

    conf = ioninit.config(__name__)

    proc = [{'name': 'queuestat',
             'module': __name__,
             'class': QueueStatService.__name__,
             'spawnargs': {'interval_seconds': conf.getValue('interval_seconds')}}]

    app_supv_desc = ProcessDesc(name='Queuestat app supervisor',
                                module=app_supervisor.__name__,
                                spawnargs={'spawn-procs': proc})
    supv_id = yield app_supv_desc.spawn()
    res = (supv_id.full, [app_supv_desc])
    defer.returnValue(res)
@defer.inlineCallbacks
def start(container, starttype, *args, **kwargs):
    log.info('EPU Worker starting, startup type "%s"' % starttype)

    conf = ioninit.config(__name__)
    spawnargs = {'queue_name_work': conf['queue_name_work']}

    # Required services.
    proc = [{'name': 'epu_worker',
             'module': __name__,
             'class': EPUWorkerService.__name__,
             'spawnargs': spawnargs}]

    app_supv_desc = ProcessDesc(name='EPU Worker app supervisor',
                                module=app_supervisor.__name__,
                                spawnargs={'spawn-procs': proc})
    supv_id = yield app_supv_desc.spawn()
    res = (supv_id.full, [app_supv_desc])
    defer.returnValue(res)
def start(container, starttype, *args, **kwargs):
    log.info('EPU Controller starting, startup type "%s"' % starttype)

    config_name = __name__
    if "ION_CONFIGURATION_SECTION" in os.environ:
        config_name = os.environ["ION_CONFIGURATION_SECTION"]
    conf = ioninit.config(config_name)

    # Required configurations for app-based launch
    spawnargs = {'queue_name_work': conf.getValue('queue_name_work'),
                 'servicename': conf['servicename'],
                 'engine_class': conf.getValue('engine_class'),
                 'engine_conf': conf.getValue('engine_conf')}

    use_cassandra = conf.getValue('cassandra', True)
    if use_cassandra:
        try:
            spawnargs['cassandra'] = cassandra.get_config()
        except cassandra.CassandraConfigurationError as e:
            log.error("Problem loading Cassandra config: %s", e)
            raise
#!/usr/bin/env python

"""
@file res/scripts/bootstrap-dx.py
@author Paul Hubbard
@brief main module for bootstrapping data exchange
"""

import logging

from twisted.internet import defer

from ion.core import ioninit
from ion.core import bootstrap

CONF = ioninit.config('startup.bootstrap-dx')

# Static definition of message queues
ion_messaging = ioninit.get_config('messaging_cfg', CONF)

# Static definition of service names
dx_services = ioninit.get_config('services_cfg', CONF)


@defer.inlineCallbacks
def start():
    """
    Main function of bootstrap. Starts DX system with static config
    """
    logging.info("ION/DX bootstrapping now...")
    startsvcs = []
    startsvcs.extend(dx_services)
def test_config_file(self):
    cfg = ioninit.config('ion.services.dm.util.url_manipulation')

    ldir = cfg.getValue('local_dir', None)
    self.failUnless(len(ldir) > 0)

    cname = cfg.getValue('cache_hostname', None)
    self.failUnless(len(cname) > 0)
def setUp(self):
    cfg = ioninit.config('ion.services.dm.util.url_manipulation')
    self.prefix = cfg.getValue('local_dir', None)
def _init_config():
    global CONF
    if CONF is None:
        CONF = ioninit.config(CONF_NAME)
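# Usage sketch (illustrative, not part of the original module): _init_config()
# lazily loads the module configuration on first use. Callers invoke it before
# reading the module-level CONF, for example through a hypothetical accessor
# like this one (the name get_value is an assumption):
def get_value(key, default=None):
    _init_config()
    return CONF.getValue(key, default)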
import logging

from twisted.internet import defer

from ion.core import ioninit
from ion.core import bootstrap

CONF = ioninit.config('startup.bootstrap1')

# Static definition of message queues
ion_messaging = ioninit.get_config('messaging_cfg', CONF)

# Static definition of service names
ion_core_services = ioninit.get_config('coreservices_cfg', CONF)
ion_services = ioninit.get_config('services_cfg', CONF)


@defer.inlineCallbacks
def main():
    """Main function of bootstrap. Starts system with static config
    """
    logging.info("ION SYSTEM bootstrapping now...")
    startsvcs = []
    #startsvcs.extend(ion_core_services)
    startsvcs.extend(ion_services)
    yield bootstrap.bootstrap(ion_messaging, startsvcs)

main()
@file ion/services/dm/util/url_manipulation.py
@brief DM routines for manipulating URLs
@author Paul Hubbard
@date 6/4/10
"""

import re
import string
import urlparse
import logging
logging = logging.getLogger(__name__)
import os.path

from ion.core import ioninit

# Read our configuration from the ion.config file.
config = ioninit.config(__name__)

LOCAL_DIR = config.getValue('local_dir', '../../dap_server/data/')
CACHE_HOSTNAME = config.getValue('cache_hostname', 'localhost')
CACHE_PORTNUM = config.getValue('cache_port', '80')


def rewrite_url(dsUrl, newHostname=CACHE_HOSTNAME):
    """
    @brief Given a DAP URL, presume that we host it locally and rewrite it
    to reflect same. Changes hostname to localhost, removes any port,
    rewrites path to be just root. Used by the cache front end, to change
    canonical URLs into local-only URLs.
    @param dsUrl Original URL to rewrite
    @param newHostname Default is localhost, TCP name of server
    @retval String with rewritten URL.
    @todo add CACHE_PORTNUM
    """
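    # Sketch only: the original function body is not included in this excerpt.
    # A minimal implementation of what the docstring describes (swap in the
    # cache hostname, drop any port, collapse the path to a root-level
    # filename) could look like the following; the real body may differ.
    parts = urlparse.urlsplit(dsUrl)
    filename = os.path.basename(parts.path)
    return urlparse.urlunsplit((parts.scheme, newHostname,
                                '/' + filename, parts.query, ''))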
import logging
logging = logging.getLogger(__name__)

from ion.core.base_process import ProtocolFactory
from ion.services.dm.util.eoi_data_stream_producer import CoordinatorClient
from ion.services.base_service import BaseService
from twisted.internet import defer, protocol, reactor
from twisted.protocols.basic import LineReceiver

import base64

# Read configuration file to find TCP server port
from ion.core import ioninit
config = ioninit.config(__name__)
PROXY_PORT = int(config.getValue('proxy_port', '8100'))


class DAPProxyProtocol(LineReceiver):
    """
    Super, super simple HTTP proxy. Goal: Take requests from DAP clients such
    as matlab and netcdf, convert them into OOI messages to the preservation
    service coordinator, and return whatever it gets from same as http.

    Initially written using the twisted proxy classes, but in the end what we
    need here is quite different from what they provide, so it's much simpler
    to just implement it as a most-basic protocol.

    @see http://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol
    @see The DAP protocol spec at http://www.opendap.org/pdf/ESE-RFC-004v1.1.pdf
#!/usr/bin/env python

import logging

from twisted.internet import defer

from ion.core import ioninit
from ion.core import bootstrap

CONF = ioninit.config('startup.web')

import gviz_api

from ion.services.dm.presentation import web_service

# Static definition of message queues
ion_messaging = ioninit.get_config('messaging_cfg', CONF)

# Static definition of service names
web_services = ioninit.get_config('services_cfg', CONF)

page_template = """
<html>
<script src="http://www.google.com/jsapi" type="text/javascript"></script>
<script>
  google.load('visualization', '1', {packages:['table']});
  google.setOnLoadCallback(drawTable);

  function drawTable() {
    %(jscode)s
    var jscode_table = new google.visualization.Table(document.getElementById('table_div_jscode'));
    jscode_table.draw(jscode_data, {showRowNumber: true});
@test ion.services.sa.proxy Test of refactored proxy+fetcher+controller
@see http://bytes.com/topic/python/answers/22918-proxy-authentication-using-urllib2
@brief Designed to be an integration test, exercises DX via users' http proxy.
Does not use OOI messaging.
"""

from twisted.internet import defer
from twisted.internet import threads
from ion.test.iontest import IonTestCase
from twisted.trial import unittest

import logging
logging = logging.getLogger(__name__)

from ion.core import ioninit
config = ioninit.config('ion.services.sa.proxy')
PROXY_PORT = int(config.getValue('proxy_port', '8100'))

import ion.util.procutils as pu

from pydap.client import open_url
from pydap.util import socks
import pydap.lib

import httplib2
import urllib2

TEST_URL = 'http://amoeba.ucsd.edu:8001/coads.nc'


class PydapIntegrationTest(IonTestCase):
    """
    High-fidelity integration test - use pydap's full DAP client to exercise
import random
import logging

from twisted.internet import defer

from ion.core.base_process import BaseProcess, ProcessDesc
from ion.core import ioninit
from ion.core import bootstrap
from ion.services.dm.distribution import pubsub_service
from ion.resources.dm_resource_descriptions import PubSubTopicResource, SubscriptionResource
from ion.services.dm.distribution.consumers import example_consumer, forwarding_consumer, latest_consumer, logging_consumer

CONF = ioninit.config('startup.pubsub')

# Static definition of message queues
ion_messaging = ioninit.get_config('messaging_cfg', CONF)

# Static definition of service names
dm_services = ioninit.get_config('services_cfg', CONF)

#dm_services = Config(CONF.getValue('services_cfg')).getObject()
#ion_messaging = Config(CONF.getValue('messaging_cfg')).getObject()


@defer.inlineCallbacks
def create_producers(proc, n=1):
# Python Capability Container start script.
# Starts empty container with system name set.

import logging

from twisted.internet import defer

from ion.core import ioninit
from ion.core import bootstrap
from ion.util.config import Config

# Use the bootstrap configuration entries from the standard bootstrap
CONF = ioninit.config('ion.core.bootstrap')

# Config files with lists of processes to start
agent_procs = ioninit.get_config('ccagent_cfg', CONF)
svc_procs = ioninit.get_config('services_cfg', CONF)


@defer.inlineCallbacks
def main():
    """
    Initializes container
    """
    logging.info("ION CONTAINER initializing...")

    processes = []
    # Disabling ccagent
    #processes.extend(agent_procs)
    processes.extend(svc_procs)
# Starts container with Java Services.

import logging

from twisted.internet import defer

from ion.agents.instrumentagents.simulators.sim_SBE49 import Simulator
from ion.agents.instrumentagents.instrument_agent import InstrumentAgentClient
from ion.core import ioninit
from ion.core import bootstrap
from ion.util.config import Config
from ion.resources.sa_resource_descriptions import InstrumentResource, DataProductResource
from ion.services.sa.instrument_registry import InstrumentRegistryClient
from ion.services.sa.data_product_registry import DataProductRegistryClient

# Use the bootstrap configuration entries from the standard bootstrap
CONF = ioninit.config("ion.core.bootstrap")

# Config files with lists of processes to start
agent_procs = ioninit.get_config("ccagent_cfg", CONF)

demo_procs = [
    {"name": "agent_registry", "module": "ion.services.coi.agent_registry", "class": "ResourceRegistryService"},
    {"name": "instrument_registry", "module": "ion.services.sa.instrument_registry", "class": ""},
    {"name": "data_product_registry", "module": "ion.services.sa.data_product_registry", "class": ""},
    {"name": "instrument_management", "module": "ion.services.sa.instrument_management", "class": ""},
    {"name": "service_registry", "module": "ion.services.coi.service_registry", "class": ""},
]

INSTRUMENT_ID = "123"


@defer.inlineCallbacks
from twisted.internet import defer

import inspect

from ion.core.base_process import BaseProcess
from ion.core.base_process import ProtocolFactory
from ion.services.base_service import BaseService, BaseServiceClient
from ion.data.datastore import registry
from ion.data import dataobject
from ion.resources import coi_resource_descriptions

from ion.core import ioninit
CONF = ioninit.config(__name__)


class AgentRegistryService(registry.BaseRegistryService):
    """
    Agent registry service interface
    @todo an agent is a resource and should also be living in the resource registry
    """

    # Declaration of service
    declare = BaseService.service_declare(name='agent_registry',
                                          version='0.1.0',
                                          dependencies=[])

    op_clear_registry = registry.BaseRegistryService.base_clear_registry

    op_register_agent_definition = registry.BaseRegistryService.base_register_resource
    """
    Service operation: Register an agent definition with the registry.