Example #1
    def _start_event_dispatch(self):
        """
        Registers the event listener by using a URL that is composed from
        CFG.server.oms.host, CFG.server.oms.port, and CFG.server.oms.path.

        NOTE: the same listener URL will be registered by multiple RSN platform
        drivers. See other related notes in this file.

        @see https://jira.oceanobservatories.org/tasks/browse/OOIION-1287
        @see https://jira.oceanobservatories.org/tasks/browse/OOIION-968
        """

        # gateway host and port to compose URL:
        host = CFG.get_safe('server.oms.host', "localhost")
        port = CFG.get_safe('server.oms.port', "5000")
        path = CFG.get_safe('server.oms.path', "/ion-service/oms_event")

        self.listener_url = "http://%s:%s%s" % (host, port, path)
        self._register_event_listener(self.listener_url)

        # start OMSDeviceStatusEvent listener to notify the agent about those:
        def event_received(evt, *args, **kwargs):
            log.debug('%r: OmsEventListener received: %s', self._platform_id, evt)
            self._send_event(ExternalEventDriverEvent(evt))

        self._event_listener = self._create_event_subscriber(
            event_type   = 'OMSDeviceStatusEvent',
            origin       = self._platform_id,
            origin_type  = 'OMS Platform',
            callback     = event_received)

        log.debug("%r: started OMSDeviceStatusEvent listener", self._platform_id)

        return "OK"
Example #2
def setting_up_smtp_client():
    """
    Sets up the smtp client
    """

    #------------------------------------------------------------------------------------
    # the default smtp server
    #------------------------------------------------------------------------------------
    #    smtp_client = None
    smtp_host = CFG.get_safe('server.smtp.host')
    smtp_port = CFG.get_safe('server.smtp.port', 25)
    #    smtp_sender = CFG.get_safe('server.smtp.sender')
    #    smtp_password = CFG.get_safe('server.smtp.password')

    if CFG.get_safe('system.smtp',False): #Default is False - use the fake_smtp
        log.debug('Using the real SMTP library to send email notifications! host = %s', smtp_host)

        #        smtp_client = smtplib.SMTP(smtp_host)
        #        smtp_client.ehlo()
        #        smtp_client.starttls()
        #        smtp_client.login(smtp_sender, smtp_password)

        smtp_client = smtplib.SMTP(smtp_host, smtp_port)
        log.debug("In setting up smtp client using the smtp client: %s", smtp_client)
        log.debug("Message received after ehlo exchange: %s", str(smtp_client.ehlo()))
#        smtp_client.login(smtp_sender, smtp_password)
    else:
        log.debug('Using a fake SMTP library to simulate email notifications!')

        smtp_client = fake_smtplib.SMTP(smtp_host)

    return smtp_client
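fake_smtplib is an OOI-specific test helper, not a standard-library module. As a rough illustration of what such a stand-in can look like (this stub is an assumption, not the real module), a message-capturing fake might be:

class FakeSMTP(object):
    # Illustrative stand-in for fake_smtplib.SMTP: records messages instead of sending.
    def __init__(self, host, port=25):
        self.host = host
        self.port = port
        self.sent = []

    def ehlo(self):
        return (250, 'fake ehlo')

    def sendmail(self, from_addr, to_addrs, msg):
        self.sent.append((from_addr, to_addrs, msg))

    def quit(self):
        pass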
Example #3
    def setUp(self):
        super(DiscoveryIntTest, self).setUp()

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.discovery = DiscoveryServiceClient()
        self.catalog   = CatalogManagementServiceClient()
        self.ims       = IndexManagementServiceClient()
        self.rr        = ResourceRegistryServiceClient()

        if use_es:
            self.es_host   = CFG.get_safe('server.elasticsearch.host', 'localhost')
            self.es_port   = CFG.get_safe('server.elasticsearch.port', '9200')
            CFG.server.elasticsearch.shards         = 1
            CFG.server.elasticsearch.replicas       = 0
            CFG.server.elasticsearch.river_shards   = 1
            CFG.server.elasticsearch.river_replicas = 0
            self.es = ep.ElasticSearch(
                host=self.es_host,
                port=self.es_port,
                timeout=10,
                verbose=True
            )
            op = DotDict(CFG)
            op.op = 'clean_bootstrap'
            self.container.spawn_process('index_bootstrap','ion.processes.bootstrap.index_bootstrap','IndexBootStrap', op)
Example #4
    def on_start(self):
        self.data_source_subscriber = EventSubscriber(
            event_type=OT.ResourceModifiedEvent,
            origin_type=RT.DataSource,
            callback=self._register_data_source)
        self.provider_subscriber = EventSubscriber(
            event_type=OT.ResourceModifiedEvent,
            origin_type=RT.ExternalDataProvider,
            callback=self._register_provider)
        self.data_source_subscriber.start()
        self.provider_subscriber.start()

        self.rr = self.container.resource_registry

        self.using_eoi_services = CFG.get_safe('eoi.meta.use_eoi_services',
                                               False)
        self.server = CFG.get_safe(
            'eoi.importer_service.server', "localhost") + ":" + str(
                CFG.get_safe('eoi.importer_service.port', 8844))

        log.info("Using geoservices=" + str(self.using_eoi_services))
        if not self.using_eoi_services:
            log.warn("not using geoservices...")

        self.importer_service_available = self.check_for_importer_service()
        if not self.importer_service_available:
            log.warn("not using importer service...")
Example #5
    def on_init(self):
        log.info("Ingestion starting")
        self.exchange_name = "ingestion_process"

        plugin_cls = CFG.get_safe(CONFIG_KEY + ".plugin")
        self.plugin = named_any(plugin_cls)(self)
        log.info("Started ingestion plugin '%s'", plugin_cls)

        self.persistence_formats = {}
        self.persistence_objects = {}
        self.default_persistence_format = CFG.get_safe(CONFIG_KEY + ".persist.persistence_format")
        self._require_persistence_layer(self.default_persistence_format)

        self.stream_sub = StreamSubscriber(process=self, exchange_name=self.exchange_name,
                                           callback=self.process_package)
        streams = CFG.get_safe(CONFIG_KEY + ".stream_subscriptions") or []
        for stream in streams:
            if isinstance(stream, list):
                stream = StreamRoute(exchange_point=stream[0], routing_key=stream[1])

            log.info("Ingestion subscribed to stream '%s'", stream)
            self.stream_sub.add_stream_subscription(stream)

        self.plugin.on_init()

        self.stream_sub.start()
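The subscriptions above come from configuration under CONFIG_KEY, where each entry is either a plain stream name or an [exchange_point, routing_key] pair. A sketch of the expected shape (the plugin path and stream values below are invented for illustration):

ingestion_config = {
    'plugin': 'myproject.ingestion_plugin.MyPlugin',   # hypothetical dotted path resolved by named_any
    'persist': {'persistence_format': 'hdf5'},         # example format name
    'stream_subscriptions': [
        'some_stream',                                 # plain entry, subscribed as-is
        ['science_data', 'ctd.parsed.*'],              # becomes StreamRoute(exchange_point, routing_key)
    ],
}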
Example #8
    def do_auto_preload(self):
        """ Load configured preload scenarios and remembers in directory which ones
        have been loaded. """
        log.info("Executing auto preload")
        preload_entries = self.process.container.directory.lookup("/System/Preload")
        preload_changed = False
        if preload_entries is None:
            preload_entries = dict(scenarios={})
            preload_changed = True

        # "HACK" Preload core to get app specifics in
        if "core" not in preload_entries["scenarios"]:
            preload_entries["scenarios"]["core"] = self._get_dir_entry("core")
            self.do_preload_core()
            preload_changed = True

        preload_scenarios = CFG.get_safe("scion.preload.scenarios") or []
        preload_scope = CFG.get_safe("scion.preload.scope")
        for scenario_info in preload_scenarios:
            scope, scenario = scenario_info
            if scope == "all" or scope == preload_scope:
                if scenario not in preload_entries["scenarios"]:
                    preload_entries["scenarios"][scenario] = self._get_dir_entry(scenario)
                    changed = self.do_preload_master(scenario)
                    preload_changed = preload_changed or changed

        if preload_changed:
            self.process.container.directory.register("/System", "Preload", scenarios=preload_entries["scenarios"])
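scion.preload.scenarios is read as a list of [scope, scenario] pairs; an entry runs when its scope is "all" or equals scion.preload.scope, and each scenario is recorded in the directory so it is only loaded once. A sketch of the config shape (values invented):

preload_config = {
    'scion': {'preload': {
        'scope': 'dev',                    # this deployment's scope
        'scenarios': [
            ['all', 'base'],               # runs everywhere
            ['dev', 'demo_data'],          # runs only when scope == 'dev'
        ],
    }},
}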
Example #9
def setting_up_smtp_client():
    '''
    Sets up the smtp client
    '''

    #------------------------------------------------------------------------------------
    # the default smtp server
    #------------------------------------------------------------------------------------

    ION_SMTP_SERVER = 'mail.oceanobservatories.org'

    smtp_host = CFG.get_safe('server.smtp.host', ION_SMTP_SERVER)
    smtp_port = CFG.get_safe('server.smtp.port', 25)
    smtp_sender = CFG.get_safe('server.smtp.sender')
    smtp_password = CFG.get_safe('server.smtp.password')

    if CFG.get_safe('system.smtp',False): #Default is False - use the fake_smtp
        log.debug('Using the real SMTP library to send email notifications!')

        smtp_client = smtplib.SMTP(smtp_host)
        smtp_client.ehlo()
        smtp_client.starttls()
        smtp_client.login(smtp_sender, smtp_password)

    else:
        log.debug('Using a fake SMTP library to simulate email notifications!')

        smtp_client = fake_smtplib.SMTP(smtp_host)

    return smtp_client
Example #10
    def _start_event_dispatch(self):
        """
        Registers the event listener by using a URL that is composed from
        CFG.server.oms.host, CFG.server.oms.port, and CFG.server.oms.path.

        NOTE: the same listener URL will be registered by multiple RSN platform
        drivers. See other related notes in this file.

        @see https://jira.oceanobservatories.org/tasks/browse/OOIION-1287
        @see https://jira.oceanobservatories.org/tasks/browse/OOIION-968
        """

        # gateway host and port to compose URL:
        host = CFG.get_safe('server.oms.host', "localhost")
        port = CFG.get_safe('server.oms.port', "5000")
        path = CFG.get_safe('server.oms.path', "/ion-service/oms_event")

        # The above are defined in pyon.cfg.
        # Override the local host for debugging inside the VM:
        host = "10.208.79.19"

        self.listener_url = "http://%s:%s%s" % (host, port, path)
        self._register_event_listener(self.listener_url)

        return "OK"
Example #11
    def test_pydap(self):
        if not CFG.get_safe('bootstrap.use_pydap',False):
            raise unittest.SkipTest('PyDAP is off (bootstrap.use_pydap)')
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_extended_parsed()

        stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

        tdom, sdom = time_series_domain()

        dp = DataProduct(name='example')
        dp.spatial_domain = sdom.dump()
        dp.temporal_domain = tdom.dump()

        data_product_id = self.data_product_management.create_data_product(dp, stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product, data_product_id)
        
        self.data_product_management.activate_data_product_persistence(data_product_id)
        self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)

        dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
        monitor = DatasetMonitor(dataset_id)
        self.addCleanup(monitor.stop)

        rdt = ph.get_rdt(stream_def_id)
        ph.fill_rdt(rdt,10)
        ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(monitor.event.wait(10))


        gevent.sleep(1) # Yield to other greenlets, had an issue with connectivity

        pydap_host = CFG.get_safe('server.pydap.host','localhost')
        pydap_port = CFG.get_safe('server.pydap.port',8001)
        url = 'http://%s:%s/%s' %(pydap_host, pydap_port, dataset_id)

        ds = open_url(url)
        np.testing.assert_array_equal(ds['time'][:], np.arange(10))
        untested = []
        for k,v in rdt.iteritems():
            if k==rdt.temporal_parameter:
                continue
            context = rdt.context(k)
            if isinstance(context.param_type, QuantityType):
                np.testing.assert_array_equal(ds[k][k][:][0], rdt[k])
            elif isinstance(context.param_type, ArrayType):
                values = np.empty(rdt[k].shape, dtype='O')
                for i,obj in enumerate(rdt[k]):
                    values[i] = str(obj)
                np.testing.assert_array_equal(ds[k][k][:][0], values)
            elif isinstance(context.param_type, ConstantType):
                np.testing.assert_array_equal(ds[k][k][:][0], rdt[k])
            elif isinstance(context.param_type, CategoryType):
                np.testing.assert_array_equal(ds[k][k][:][0], rdt[k])
            else:
                untested.append('%s (%s)' % (k,context.param_type))
        if untested:
            raise AssertionError('Untested parameters: %s' % untested)
Example #12
    def on_start(self):  # pragma: no cover
        super(DiscoveryService,self).on_start()

        self.use_es = CFG.get_safe('system.elasticsearch',False)

        self.elasticsearch_host = CFG.get_safe('server.elasticsearch.host','localhost')
        self.elasticsearch_port = CFG.get_safe('server.elasticsearch.port','9200')

        self.ep = EventPublisher(event_type = 'SearchBufferExceededEvent')
        self.heuristic_cutoff = 4
Example #14
    def on_start(self):  # pragma: no cover
        super(DiscoveryService, self).on_start()

        self.use_es = CFG.get_safe("system.elasticsearch", False)

        self.elasticsearch_host = CFG.get_safe("server.elasticsearch.host", "localhost")
        self.elasticsearch_port = CFG.get_safe("server.elasticsearch.port", "9200")

        self.ep = EventPublisher(event_type="SearchBufferExceededEvent")
        self.heuristic_cutoff = 4
Example #15
    def es_cleanup():
        es_host = CFG.get_safe('server.elasticsearch.host', 'localhost')
        es_port = CFG.get_safe('server.elasticsearch.port', '9200')
        es = ep.ElasticSearch(host=es_host, port=es_port, timeout=10)
        indexes = STD_INDEXES.keys()
        indexes.append('%s_resources_index' % get_sys_name().lower())
        indexes.append('%s_events_index' % get_sys_name().lower())

        for index in indexes:
            IndexManagementService._es_call(es.river_couchdb_delete, index)
            IndexManagementService._es_call(es.index_delete, index)
Example #16
    def es_cleanup():
        es_host = CFG.get_safe("server.elasticsearch.host", "localhost")
        es_port = CFG.get_safe("server.elasticsearch.port", "9200")
        es = ep.ElasticSearch(host=es_host, port=es_port, timeout=10)
        indexes = STD_INDEXES.keys()
        indexes.append("%s_resources_index" % get_sys_name().lower())
        indexes.append("%s_events_index" % get_sys_name().lower())

        for index in indexes:
            IndexManagementService._es_call(es.river_couchdb_delete, index)
            IndexManagementService._es_call(es.index_delete, index)
Example #17
    def test_ccov_visualization(self):
        '''
        Tests ComplexCoverage aggregation of array types, proper splitting of
        coverages, PyDAP access, and the visualization output.
        '''
        if not CFG.get_safe('bootstrap.use_pydap',False):
            raise unittest.SkipTest('PyDAP is off (bootstrap.use_pydap)')

        data_product_id, stream_def_id = self.make_array_data_product()

        # Make a granule with an array type, give it a few values
        # Send it to google_dt transform, verify output

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = np.arange(2208988800, 2208988810)
        rdt['temp_sample'] = np.arange(10*4).reshape(10,4)
        rdt['cond_sample'] = np.arange(10*4).reshape(10,4)

        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        self.ph.publish_rdt_to_data_product(data_product_id, rdt, connection_id='abc1', connection_index='1')
        self.assertTrue(dataset_monitor.event.wait(10))
        dataset_monitor.event.clear()


        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = np.arange(2208988810, 2208988820)
        rdt['temp_sample'] = np.arange(10*4).reshape(10,4)
        rdt['cond_sample'] = np.arange(10*4).reshape(10,4)
        self.ph.publish_rdt_to_data_product(data_product_id, rdt, connection_id='abc2', connection_index='1')
        self.assertTrue(dataset_monitor.event.wait(10))
        dataset_monitor.event.clear()

        qstring = '{"stride_time": 1, "parameters": [], "query_type": "highcharts_data", "start_time": 0, "use_direct_access": 0, "end_time": 19}'
        graph = self.visualization.get_visualization_data(data_product_id, qstring)
        self.assertIn('temp_sample[3]', graph)

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)

        np.testing.assert_array_equal(rdt['temp_sample'][0], np.arange(4))

        pydap_host = CFG.get_safe('server.pydap.host','localhost')
        pydap_port = CFG.get_safe('server.pydap.port',8001)
        url = 'http://%s:%s/%s' %(pydap_host, pydap_port, dataset_id)

        ds = open_url(url)

        temp_sample, time = ds['temp_sample']
        temp_values, dim = temp_sample[0]
        np.testing.assert_array_equal(temp_values, np.array(['0.0,1.0,2.0,3.0']))
Example #18
 def setUp(self):
     self._start_container()
     self.container.start_rel_from_url('res/deploy/r2deploy.yml')
     path = CFG.get_safe('server.pydap.data_path', "RESOURCE:ext/pydap")
     ext_path = FileSystem.get_extended_url(path)
     self.cov,self.filename = _make_coverage(ext_path)
     self.nt = 5
     self.cov.insert_timesteps(self.nt) 
     self.time_data = [i+1 for i in range(self.nt)]
     self.cov.set_parameter_values("time", value=self.time_data)
     host = CFG.get_safe('container.pydap_gateway.web_server.host', 'localhost')
     port = CFG.get_safe('container.pydap_gateway.web_server.port', '8001')
     # use '/' rather than os.sep when composing a URL
     self.request_url = "http://" + host + ":" + str(port) + "/" + os.path.basename(self.filename)
Example #19
def process_query():
    ''' 
    Processes a query from the user
    '''
    elasticsearch_host = CFG.get_safe('server.elasticsearch.host','localhost')
    elasticsearch_port = CFG.get_safe('server.elasticsearch.port','9200')
    try:
        import requests
        data = request.stream.read()
        # pass thru
        r = requests.post('http://%s:%s/_search' % (elasticsearch_host, elasticsearch_port), data=data, headers={'content-type':'application/json'})
        return r.content
    
    except Exception as e:
        return build_error_page(traceback.format_exc())
Example #20
    def on_init(self):

        # clients
        self.resource_registry = self.container.resource_registry
        self.event_publisher = EventPublisher(OT.NotificationSentEvent)

        # SMTP client configurations
        self.smtp_from = CFG.get_safe('server.smtp.from', '*****@*****.**')
        self.smtp_host = CFG.get_safe('server.smtp.host', 'localhost')
        self.smtp_port = CFG.get_safe('server.smtp.port', 25)

        # Jinja2 template environment
        self.jinja_env = Environment(loader=FileSystemLoader('res/templates'), trim_blocks=True, lstrip_blocks=True)

        super(NotificationWorker, self).on_init()
Example #21
    def on_start(self):  # pragma: no cover
        super(DiscoveryService,self).on_start()

        self.use_es = CFG.get_safe('system.elasticsearch', False)

        self.elasticsearch_host = CFG.get_safe('server.elasticsearch.host','localhost')
        self.elasticsearch_port = CFG.get_safe('server.elasticsearch.port','9200')

        self.ep = EventPublisher(event_type = 'SearchBufferExceededEvent')
        self.heuristic_cutoff = 4

        self.cfg_datastore = CFG.get_safe('container.datastore.default_server', "couchdb")
        self.ds_discovery = None
        if self.cfg_datastore != "couchdb":
            self.ds_discovery = DatastoreDiscovery(self)
Example #22
    def on_init(self):

        self.http_server = None
        self.server_hostname = self.CFG.get_safe(CFG_PREFIX + '.web_server.hostname', DEFAULT_WEB_SERVER_HOSTNAME)
        self.server_port = self.CFG.get_safe(CFG_PREFIX + '.web_server.port', DEFAULT_WEB_SERVER_PORT)
        self.url_prefix = self.CFG.get_safe(CFG_PREFIX + '.url_prefix') or ""

        self.web_server_enabled = True
        self.logging = None
        self.interaction_observer = None
        self.plugin = None
        app.secret_key = self.__class__.__name__   # Enables sessions (for mscweb)

        #retain a pointer to this object for use in ProcessRPC calls
        global adminui_instance
        adminui_instance = self

        #Start the gevent web server unless disabled
        if self.web_server_enabled:
            self.start_service(self.server_hostname, self.server_port)

            plugin_cls = CFG.get_safe(CFG_PREFIX + '.plugin')
            if plugin_cls:
                cls = named_any(plugin_cls)
                self.plugin = cls(app, self)
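named_any resolves a dotted path from configuration into the object it names. A minimal sketch of that resolution (pyon's actual implementation is more forgiving about edge cases):

import importlib

def named_any(dotted_name):
    # Resolve "package.module.ClassName" to the class object it names.
    module_name, attr = dotted_name.rsplit('.', 1)
    return getattr(importlib.import_module(module_name), attr)

# e.g. named_any('collections.OrderedDict') returns the OrderedDict class.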
Example #23
def send_email(event, msg_recipient, smtp_client, rr_client):
    """
    A common method to send email with formatting

    @param event              Event
    @param msg_recipient        str
    @param smtp_client          fake or real smtp client object

    """

    #log.debug("Got type of event to notify on: %s", event.type_)

    #------------------------------------------------------------------------------------
    # the 'from' email address for notification emails
    #------------------------------------------------------------------------------------

    ION_NOTIFICATION_EMAIL_ADDRESS = '*****@*****.**'
    smtp_sender = CFG.get_safe('server.smtp.sender', ION_NOTIFICATION_EMAIL_ADDRESS)

    msg = convert_events_to_email_message([event], rr_client)
    msg['From'] = smtp_sender
    msg['To'] = msg_recipient
    log.debug("UNS sending email from %s to %s for event type: %s", smtp_sender,msg_recipient, event.type_)
    #log.debug("UNS using the smtp client: %s", smtp_client)

    try:
        smtp_client.sendmail(smtp_sender, [msg_recipient], msg.as_string())
    except Exception:  # can be due to a broken connection; reconnect and retry
        smtp_client = setting_up_smtp_client()
        log.debug("Connect again...message received after ehlo exchange: %s", str(smtp_client.ehlo()))
        smtp_client.sendmail(smtp_sender, [msg_recipient], msg.as_string())
Example #24
    def _gen_service_spec(self, service_name):
        output = {
            "swagger": "2.0",
            "info": {},
            "host": CFG.get_safe("system.web_ui_url", "").rstrip("/").split("//", 1)[1],
            "basePath": CFG.get_safe(UISG_CFG_PREFIX + ".url_prefix"),
            "schemes": ["http"],
        }
        output["info"].update(self.config.get("info", {}))
        if self.config.get("externalDocs", {}):
            output["externalDocs"] = self.config["externalDocs"]

        output["tags"] = self._gen_tags(service_name)
        output["paths"] = self._gen_paths(service_name)

        return output
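The "host" entry above strips the scheme from system.web_ui_url; a worked example of that expression (URL value invented):

web_ui_url = "http://ui.example.org:4000/"        # illustrative config value
host = web_ui_url.rstrip("/").split("//", 1)[1]   # note: an empty URL would make [1] raise IndexError
assert host == "ui.example.org:4000"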
Example #25
    def on_init(self):

        #get the fields from the header that need to be logged
        self.header_fields = CFG.get_safe(
            'container.messaging.conversation_log.header_fields', [])

        # Time in between event persists
        self.persist_interval = 1.0

        # Holds received events FIFO
        self.conv_queue = Queue()

        # Temporarily holds list of events to persist while datastore operation not yet completed
        self.convs_to_persist = None

        # bookkeeping for timeout greenlet
        self._persist_greenlet = None
        self._terminate_persist = Event(
        )  # when set, exits the timeout greenlet

        # The event subscriber
        self.conv_sub = None

        # The conv repository
        self.conv_repository = None
Example #26
 def _get_cov(self, name, nt):
     path = CFG.get_safe('server.pydap.data_path', "RESOURCE:ext/pydap")
     ext_path = FileSystem.get_extended_url(path)
     cov,filename = _make_coverage(ext_path, "the_cov")
     cov.insert_timesteps(nt) 
     cov.set_parameter_values("time", value=nt)
     return cov, filename
Example #27
 def on_start_streaming(self, streaming_args):
     log.info('Orb_DataAgentPlugin..on_start_streaming: args %s' % str(streaming_args))
     self.streaming_args = streaming_args
     script_path = os.path.dirname(__file__) + "/orbstart.py"   # On production, run from inside an egg
     data_dir = Container.instance.file_system.get("TEMP/orb_data")
     cmd_args = ['orb_reap', script_path, streaming_args['orb_name'],
                 streaming_args['select'], "--datadir", data_dir]
     if 'reject' in streaming_args:
         cmd_args.append('--reject')
         cmd_args.append(streaming_args['reject'])
     if 'after' in streaming_args:
         cmd_args.append('--after')
         cmd_args.append(streaming_args['after'])
     if 'timeout' in streaming_args:
         cmd_args.append('--timeout')
         cmd_args.append(streaming_args['timeout'])
     if 'qsize' in streaming_args:
         cmd_args.append('--qsize')
         cmd_args.append(streaming_args['qsize'])
     log.info('Orb reap args: ' + str(cmd_args))
     self.data_dir = data_dir + '/%s/' % (streaming_args['select'].replace('/', '-'))
     if os.path.exists(self.data_dir):
         shutil.rmtree(self.data_dir)
     antelope_path = CFG.get_safe("scion.antelope.path", "/opt/antelope/5.6")
     cmd_args = [str(arg) for arg in cmd_args]
     self.proc = subprocess.Popen(cmd_args, executable=antelope_path+'/bin/python')
     log.info('Orb reap process started, %i' % self.proc.pid)
Example #28
    def _load_system_actors(self):
        """
        Retrieve system and webauth actors and headers for later use.
        """

        # Retrieve and store system actor and headers.
        system_actor, _ = self.container.resource_registry.find_resources(
            RT.ActorIdentity,
            name=CFG.system.system_actor,
            id_only=False)
        self.system_actor = system_actor[0] if system_actor else None
        self.system_actor_id = system_actor[0]._id if system_actor \
            else 'anonymous'
        self.system_actor_headers = {
            'ion-actor-id': self.system_actor_id,
            'ion-actor-roles': {'ION': ['ION_MANAGER', 'ORG_MANAGER']},
            'expiry':'0'
        }

        # Retrieve and store webauth actor and headers.
        webauth_actor, _ = self.container.resource_registry.find_resources(
            RT.ActorIdentity,
            name=CFG.get_safe("system.web_authentication_actor",
                              "web_authentication"), id_only=False)
        self.webauth_actor = webauth_actor[0] if webauth_actor else None
        self.webauth_actor_id = webauth_actor[0]._id if webauth_actor \
            else 'anonymous'
        self.webauth_actor_headers = {
            'ion-actor-id': self.webauth_actor_id,
            'ion-actor-roles': {'ION': ['ION_MANAGER', 'ORG_MANAGER']},
            'expiry':'0'
        }
Example #29
    def install_terminate_handler():
        import gevent
        from pyon.core.bootstrap import CFG
        shutdown_timeout = int(CFG.get_safe("container.timeout.shutdown") or 0)
        terminate_gl = None

        def cancel_func():
            if terminate_gl:
                terminate_gl.kill()

        if shutdown_timeout > 0:
            pid = os.getpid()

            def terminate_unresponsive():
                print >> sys.stderr, "ERROR: Container shutdown seems unresponsive. Timeout elapsed (%s sec) -- TERMINATE process (%s)" % (
                    shutdown_timeout, pid)
                os.kill(pid, signal.SIGKILL)

            terminate_gl = gevent.spawn_later(shutdown_timeout,
                                              terminate_unresponsive)
            terminate_gl._glname = "Container terminate timeout"
            log.info(
                "Container termination timeout set to %s sec (process %s)",
                shutdown_timeout, pid)
        return cancel_func
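Intended usage, sketched from the names above: arm the watchdog when shutdown begins and disarm it via the returned cancel function once shutdown completes in time:

cancel = install_terminate_handler()    # arm the SIGKILL watchdog
try:
    pass  # ... normal container shutdown sequence runs here ...
finally:
    cancel()                            # shutdown finished in time; disarm the timer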
Example #30
 def on_start_streaming(self, streaming_args):
     log.info('Orb_DataAgentPlugin..on_start_streaming: args %s' %
              str(streaming_args))
     self.streaming_args = streaming_args
     script_path = os.path.dirname(
         __file__) + "/orbstart.py"  # On production, run from inside an egg
     data_dir = Container.instance.file_system.get("TEMP/orb_data")
     cmd_args = [
         'orb_reap', script_path, streaming_args['orb_name'],
         streaming_args['select'], "--datadir", data_dir
     ]
     if 'reject' in streaming_args:
         cmd_args.append('--reject')
         cmd_args.append(streaming_args['reject'])
     if 'after' in streaming_args:
         cmd_args.append('--after')
         cmd_args.append(streaming_args['after'])
     if 'timeout' in streaming_args:
         cmd_args.append('--timeout')
         cmd_args.append(streaming_args['timeout'])
     if 'qsize' in streaming_args:
         cmd_args.append('--qsize')
         cmd_args.append(streaming_args['qsize'])
     log.info('Orb reap args: ' + str(cmd_args))
     self.data_dir = data_dir + '/%s/' % (streaming_args['select'].replace(
         '/', '-'))
     if os.path.exists(self.data_dir):
         shutil.rmtree(self.data_dir)
     antelope_path = CFG.get_safe("scion.antelope.path",
                                  "/opt/antelope/5.6")
     cmd_args = [str(arg) for arg in cmd_args]
     self.proc = subprocess.Popen(cmd_args,
                                  executable=antelope_path + '/bin/python')
     log.info('Orb reap process started, %i' % self.proc.pid)
Example #31
    def on_initial_bootstrap(self, process, config, **kwargs):

        if os.environ.get('PYCC_MODE'):
            # This environment is an ion integration test
            log.info('PYCC_MODE: skipping qc_post_processor launch')
            return
        if self.process_exists(process, 'qc_post_processor'):
            # Short circuit the bootstrap to make sure not more than one is ever started
            return

        self.scheduler_service = SchedulerServiceProcessClient(process=process)
        self.process_dispatcher = ProcessDispatcherServiceProcessClient(process=process)
        self.run_interval = CFG.get_safe('service.qc_processing.run_interval', 24)

        interval_key = uuid4().hex # Unique identifier for this process

        config = DotDict()
        config.process.interval_key = interval_key

        process_definition = ProcessDefinition(name='qc_post_processor',
            executable={'module':'ion.processes.data.transforms.qc_post_processing', 'class':'QCPostProcessing'})
        process_definition_id = self.process_dispatcher.create_process_definition(process_definition)

        process_id = self.process_dispatcher.create_process(process_definition_id)
        self.process_dispatcher.schedule_process(process_definition_id, process_id=process_id, configuration=config)


        timer_id = self.scheduler_service.create_interval_timer(start_time=str(time.time()),
                end_time='-1', #Run FOREVER
                interval=3600*self.run_interval,
                event_origin=interval_key)
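run_interval is expressed in hours, so the timer fires every 3600*run_interval seconds; with the default of 24:

run_interval = 24                 # service.qc_processing.run_interval default (hours)
interval = 3600 * run_interval    # seconds between timer events
assert interval == 86400          # i.e. the QC post-processor runs once a day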
Example #32
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        # Instantiate a process to represent the test
        process = TransformWorkerTestProcess()

        self.dataset_management_client = DatasetManagementServiceClient(
            node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceProcessClient(
            node=self.container.node, process=process)

        self.time_dom, self.spatial_dom = time_series_domain()

        self.ph = ParameterHelper(self.dataset_management_client,
                                  self.addCleanup)

        self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)
Example #33
def process_index():
    try:
        from pyon.public import CFG
        from pyon.core.bootstrap import get_sys_name
        default_ds_server = CFG.get_safe("container.datastore.default_server", "postgresql")

        fragments = [
            "<h1>SciON Admin UI</h1>",
            "<p><ul>",
            "<li><a href='/restypes'><b>Browse Resource Registry and Resource Objects</b></a>",
            "<ul>",
            "<li>Org/Users: <a href='/list/Org'>Org</a>, <a href='/list/UserRole'>UserRole</a>, <a href='/list/ActorIdentity'>ActorIdentity</a></li>",
            "<li>Computing: <a href='/list/Process'>Process</a>, <a href='/list/ProcessDefinition'>ProcessDefinition</a>, <a href='/list/Service'>Service</a>, <a href='/list/ServiceDefinition'>ServiceDefinition</a>, <a href='/list/CapabilityContainer'>CapabilityContainer</a></li>",
            "<li>Messaging: <a href='/list/ExchangeSpace'>ExchangeSpace</a>, <a href='/list/ExchangePoint'>ExchangePoint</a>, <a href='/list/ExchangeName'>ExchangeName</a>, <a href='/list/ExchangeBroker'>ExchangeBroker</a></li>",
            "<li>Governance: <a href='/list/Commitment'>Commitment</a>, <a href='/list/Negotiation'>Negotiation</a>, <a href='/list/Policy'>Policy</a></li>",
            _get_resmenu_extension(),
            "</ul></li>",
            "<li><a href='/events'><b>Browse Events</b></a></li>",
            "<li><a href='/viewobj'><b>View Objects</b></a></li>",
            "<li><a href='/viewstate'><b>View Process State</b></a></li>",
            "<li><a href='/dir'><b>Browse SciON Directory</b></a></li>",
            #"<li><a href='/mscweb'><b>Show system messages (MSCWeb)</b></a>",
            #"<ul>",
            #"<li><a href='/mscaction/stop'>Stop system message recording</a></li>",
            #"</ul></li>",
            "<li><a href='http://localhost:4000'><b>Application Web UI (if running)</b></a></li>",
            "<li><a href='http://" + CFG.get_safe("server.amqp.host") + ":15672/'><b>RabbitMQ Management UI (if running)</b></a></li>",
            "<li><a href='http://localhost:9001/'><b>Supervisord UI (if running)</b></a></li>",
            "</ul></p>",
            "<h2>System and Container Properties</h2>",
            "<p><table>",
            "<tr><th>Property</th><th>Value</th></tr>",
            "<tr><td>system.name (sysname)</td><td>%s</td></tr>" % get_sys_name(),
            "<tr><td>Message Broker</td><td>%s</td></tr>" % "%s:%s" % (CFG.server.amqp.host, CFG.server.amqp.port),
            "<tr><td>Database</td><td>%s</td></tr>" % "%s:%s" % (CFG.get_safe("server.%s.host" % default_ds_server), CFG.get_safe("server.%s.port" % default_ds_server)),
            "<tr><td>Container ID</td><td>%s</td></tr>" % Container.instance.id,
            "<tr><td>Read-Only</td><td>%s</td></tr>" % is_read_only(),
            "</table></p>",

            ]
        content = "\n".join(fragments)
        return build_page(content)

    except Exception as e:
        return build_error_page(traceback.format_exc())
Example #34
    def do_work(container):
        """
        Performs initial startup actions with the container as requested in arguments.
        Then remains in container shell or infinite wait until container stops.
        Returns when container should stop. Raises an exception if anything failed.
        """
        if opts.proc:
            # Run a one-off process (with the -x argument)
            mod, proc = opts.proc.rsplit('.', 1)
            print "pycc: Starting process %s" % opts.proc
            container.spawn_process(proc,
                                    mod,
                                    proc,
                                    config={'process': {
                                        'type': 'immediate'
                                    }})
            # And end
            return

        if opts.rel:
            # Start a rel file
            start_ok = container.start_rel_from_url(opts.rel)
            if not start_ok:
                raise Exception("Cannot start deploy file '%s'" % opts.rel)

        if opts.mx:
            from pyon.public import CFG
            port = CFG.get_safe('container.flask_webapp.port', 8080)
            container.spawn_process("ContainerUI", "ion.core.containerui",
                                    "ContainerUI")
            print "pycc: Container UI started ... listening on http://localhost:%s" % port

        if opts.signalparent:
            import os
            import signal
            print 'pycc: Signal parent pid %d that pycc pid %d service start process is complete...' % (
                os.getppid(), os.getpid())
            os.kill(os.getppid(), signal.SIGUSR1)

            def is_parent_gone():
                while os.getppid() != 1:
                    gevent.sleep(1)
                print 'pycc: Now I am an orphan ... notifying serve_forever to stop'
                os.kill(os.getpid(), signal.SIGINT)

            import gevent
            ipg = gevent.spawn(is_parent_gone)

            container.gl_parent_watch = ipg

        if not opts.noshell and not opts.daemon:
            # Keep container running while there is an interactive shell
            from pyon.container.shell_api import get_shell_api
            setup_ipython_shell(get_shell_api(container))
        elif not opts.nomanhole:
            from pyon.container.shell_api import get_shell_api
            setup_ipython_embed(get_shell_api(container))
        else:
            container.serve_forever()
Example #35
    def setUp(self):

        self.username = CFG.get_safe('eoi.geoserver.user_name', 'admin')
        self.PASSWORD = CFG.get_safe('eoi.geoserver.password', 'geoserver')
        self.gs_host = CFG.get_safe('eoi.geoserver.server', 'http://localhost:8080')
        self.gs_rest_url = ''.join([self.gs_host, '/geoserver/rest'])
        self.gs_ows_url = ''.join([self.gs_host, '/geoserver/ows'])
        IMPORTER_SERVICE_SERVER = CFG.get_safe('eoi.importer_service.server', 'http://localhost')
        IMPORTER_SERVICE_PORT = str(CFG.get_safe('eoi.importer_service.port', 8844))
        self.importer_service_url = ''.join([IMPORTER_SERVICE_SERVER, ':', IMPORTER_SERVICE_PORT])

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.dataset_management = DatasetManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        self.resource_registry = self.container.resource_registry
Example #36
    def on_start(self):  # pragma: no cover
        super(DiscoveryService,self).on_start()

        cfg_datastore = CFG.get_safe('container.datastore.default_server')
        if cfg_datastore != "postgresql":
            raise Exception("Discovery service does not support datastores other than postgresql")

        self.ds_discovery = DatastoreDiscovery(self)
Example #37
    def on_start(self):
        super(DiscoveryService, self).on_start()

        cfg_datastore = CFG.get_safe('container.datastore.default_server')
        if cfg_datastore != "postgresql":
            raise Exception("Discovery service does not support datastores other than postgresql")

        self.ds_discovery = DatastoreDiscovery(self)
Example #38
    def get_data_url(self, data_product_id=''):
        # The unique pointer to this set of data
        ret = IonObject(OT.ComputedStringValue)
        ret.value  = ""

        erddap_host = CFG.get_safe('server.erddap.host','localhost')
        erddap_port = CFG.get_safe('server.erddap.port','8080')
        try:
            dataset_id = self._get_dataset_id(data_product_id)
            ret.value  = "".join(["http://", erddap_host, ":", str(erddap_port), "/erddap/griddap/", str(dataset_id), "_0.html"])
            ret.status = ComputedValueAvailability.PROVIDED
            log.debug("get_data_url: data_url: %s", ret.value)
        except NotFound:
            ret.status = ComputedValueAvailability.NOTAVAILABLE
            ret.reason = "Dataset for this Data Product could not be located"

        return ret
Example #39
    def on_init(self):

        # clients
        self.resource_registry = self.container.resource_registry
        self.event_publisher = EventPublisher(OT.NotificationSentEvent)

        # SMTP client configurations
        self.smtp_from = CFG.get_safe('server.smtp.from',
                                      '*****@*****.**')
        self.smtp_host = CFG.get_safe('server.smtp.host', 'localhost')
        self.smtp_port = CFG.get_safe('server.smtp.port', 25)

        # Jinja2 template environment
        self.jinja_env = Environment(loader=FileSystemLoader('res/templates'),
                                     trim_blocks=True,
                                     lstrip_blocks=True)

        super(NotificationWorker, self).on_init()
Example #40
def process_index():
    try:
        from pyon.public import CFG
        from pyon.core.bootstrap import get_sys_name
        default_ds_server = CFG.get_safe("container.datastore.default_server", "postgresql")

        attrs = dict(listurl=_link("/list"), base=_link(""))

        fragments = [
            "<h1>SciON Admin UI</h1>",
            "<p><ul>",
            "<li><a href='%s'><b>Browse Resource Registry and Resource Objects</b></a>" % _link("/restypes"),
            "<ul>",
            "<li>Org/Users: <a href='%(listurl)s/Org'>Org</a>, <a href='%(listurl)s/UserRole'>UserRole</a>, <a href='%(listurl)s/ActorIdentity'>ActorIdentity</a></li>" % attrs,
            "<li>Computing: <a href='%(listurl)s/Process'>Process</a>, <a href='%(listurl)s/ProcessDefinition'>ProcessDefinition</a>, <a href='%(listurl)s/Service'>Service</a>, <a href='%(listurl)s/ServiceDefinition'>ServiceDefinition</a>, <a href='%(listurl)s/CapabilityContainer'>CapabilityContainer</a></li>" % attrs,
            "<li>Messaging: <a href='%(listurl)s/ExchangeSpace'>ExchangeSpace</a>, <a href='%(listurl)s/ExchangePoint'>ExchangePoint</a>, <a href='%(listurl)s/ExchangeName'>ExchangeName</a>, <a href='%(listurl)s/ExchangeBroker'>ExchangeBroker</a></li>" % attrs,
            "<li>Governance: <a href='%(listurl)s/Commitment'>Commitment</a>, <a href='%(listurl)s/Negotiation'>Negotiation</a>, <a href='%(listurl)s/Policy'>Policy</a></li>" % attrs,
            _get_resmenu_extension(),
            "</ul></li>",
            "<li><a href='%s'><b>System Commands</b></a></li>" % _link("/syscmds"),
            "<li><a href='%s'><b>Browse Events</b></a></li>" % _link("/events"),
            "<li><a href='%s'><b>Browse SciON Directory</b></a></li>" % _link("/dir"),
            "<li><a href='%s'><b>View Objects</b></a></li>" % _link("/viewobj"),
            "<li><a href='%s'><b>View Process State</b></a></li>" % _link("/viewstate"),
            "<li><a href='http://localhost:4000'><b>Application Web UI (if running)</b></a></li>",
            "<li><a href='http://" + CFG.get_safe("server.amqp.host") + ":15672/'><b>RabbitMQ Management UI (if running)</b></a></li>",
            "<li><a href='http://localhost:9001/'><b>Supervisord UI (if running)</b></a></li>",
            "</ul></p>",
            "<h2>System and Container Properties</h2>",
            "<p><table>",
            "<tr><th>Property</th><th>Value</th></tr>",
            "<tr><td>Sysname (system.name)</td><td>%s</td></tr>" % get_sys_name(),
            "<tr><td>Message Broker</td><td>%s</td></tr>" % "%s:%s" % (CFG.server.amqp.host, CFG.server.amqp.port),
            "<tr><td>Database</td><td>%s</td></tr>" % "%s:%s" % (CFG.get_safe("server.%s.host" % default_ds_server), CFG.get_safe("server.%s.port" % default_ds_server)),
            "<tr><td>Container ID</td><td>%s</td></tr>" % Container.instance.id,
            "<tr><td>Read-Only</td><td>%s</td></tr>" % is_read_only(),
            "</table></p>",

            ]
        content = "\n".join(fragments)
        return build_page(content)

    except Exception as e:
        return build_error_page(traceback.format_exc())
Example #41
def _get_resmenu_extension():
    resmenu_ext = CFG.get_safe(CFG_PREFIX + '.menu.extensions')
    if not resmenu_ext:
        return ""
    ext_str = ""
    for ext in resmenu_ext:
        if isinstance(ext, basestring):
            ext = ext.split(",")
        ext_str += "<li>%s: %s</li>\n" % (ext[0], ", ".join("<a href='/list/%s'>%s</a>" % (rex, rex) for rex in ext[1:]))
    return ext_str
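Each extension entry is either a comma-separated string or a list whose first element is a label and whose remaining elements are resource type names. A sketch with invented entries:

menu_extensions = [
    "EOI,DataSource,ExternalDataProvider",                   # string form: "Label,Type1,Type2"
    ["Instruments", "InstrumentDevice", "PlatformDevice"],   # list form: [Label, Type1, Type2]
]
# Both render as: <li>Label: <a href='/list/Type1'>Type1</a>, ...</li>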
Example #43
    def do_preload_master(self, master):
        if CFG.get_safe("scion.preload.enabled", False) is not True:
            return False
        log.info("############## PRELOAD SCION RESOURCES (%s) ##############", master)

        skip_steps = CFG.get_safe("skipsteps")
        if skip_steps:
            skip_steps = skip_steps.split(",")
        self.initialize_preloader(self.process, {})

        if os.path.isdir("res/preload/{}".format(master)):
            self.preload_master("res/preload/{}/actions.yml".format(master), skip_steps=skip_steps)
        elif os.path.exists("res/preload/{}".format(master)):
            self.preload_master("res/preload/{}".format(master), skip_steps=skip_steps)
        else:
            raise BadRequest("Cannot find preload master")

        log.info("############## END PRELOAD ##############")
        return True
Example #44
 def _es_call(es, *args, **kwargs):
     res = AsyncResult()
     def async_call(es, *args, **kwargs):
         res.set(es(*args,**kwargs))
     spawn(async_call,es,*args,**kwargs)
     try:
         retval = res.get(timeout=CFG.get_safe('server.elasticsearch.timeout', 10))
     except Timeout:
         raise exceptions.Timeout("Call to ElasticSearch timed out.")
     return retval
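Stripped of the ION specifics, the pattern above runs a blocking call in a greenlet and bounds the wait. A self-contained sketch of the same shape using gevent directly:

import gevent
from gevent.event import AsyncResult

def call_with_timeout(func, timeout, *args, **kwargs):
    # Run the blocking call in a greenlet; AsyncResult.get raises
    # gevent.Timeout if the result does not arrive within the deadline.
    res = AsyncResult()
    gevent.spawn(lambda: res.set(func(*args, **kwargs)))
    return res.get(timeout=timeout)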
Example #45
    def request_password_reset(self, username=''):
        actor_id = self.find_actor_identity_by_username(username)
        actor = self.rr.read(actor_id)

        validity = CFG.get_safe(CFG_PREFIX + '.password_token_validity', 60*60)
        actor.passwd_reset_token = self._create_token(actor_id=actor_id, validity=validity,
                                                      token_type=TokenTypeEnum.ACTOR_RESET_PASSWD)
        self.rr.update(actor)

        return actor.passwd_reset_token.token_string
Example #46
    def _gen_service_spec(self, service_name):
        output = {
            "swagger":
            "2.0",
            "info": {},
            "host":
            CFG.get_safe("system.web_ui_url", "").rstrip("/").split("//",
                                                                    1)[1],
            "basePath":
            CFG.get_safe(UISG_CFG_PREFIX + ".url_prefix"),
            "schemes": ["http"],
        }
        output["info"].update(self.config.get("info", {}))
        if self.config.get("externalDocs", {}):
            output["externalDocs"] = self.config["externalDocs"]

        output["tags"] = self._gen_tags(service_name)
        output["paths"] = self._gen_paths(service_name)

        return output
Example #47
    def setUp(self):

        self.username = CFG.get_safe('eoi.geoserver.user_name', 'admin')
        self.PASSWORD = CFG.get_safe('eoi.geoserver.password', 'geoserver')
        self.gs_host = CFG.get_safe('eoi.geoserver.server',
                                    'http://localhost:8080')
        self.gs_rest_url = ''.join([self.gs_host, '/geoserver/rest'])
        self.gs_ows_url = ''.join([self.gs_host, '/geoserver/ows'])
        IMPORTER_SERVICE_SERVER = CFG.get_safe('eoi.importer_service.server',
                                               'http://localhost')
        IMPORTER_SERVICE_PORT = str(
            CFG.get_safe('eoi.importer_service.port', 8844))
        self.importer_service_url = ''.join(
            [IMPORTER_SERVICE_SERVER, ':', IMPORTER_SERVICE_PORT])

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.dataset_management = DatasetManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        self.resource_registry = self.container.resource_registry
Example #48
def setting_up_smtp_client():
    '''
    Sets up the smtp client
    '''

    #------------------------------------------------------------------------------------
    # the default smtp server
    #------------------------------------------------------------------------------------
    smtp_client = None
    smtp_host = CFG.get_safe('server.smtp.host')
    smtp_port = CFG.get_safe('server.smtp.port', 25)
    smtp_sender = CFG.get_safe('server.smtp.sender')
    smtp_password = CFG.get_safe('server.smtp.password')

    if CFG.get_safe('system.smtp',
                    False):  #Default is False - use the fake_smtp
        log.debug(
            'Using the real SMTP library to send email notifications! host = %s',
            smtp_host)

        #        smtp_client = smtplib.SMTP(smtp_host)
        #        smtp_client.ehlo()
        #        smtp_client.starttls()
        #        smtp_client.login(smtp_sender, smtp_password)

        smtp_client = smtplib.SMTP(smtp_host, smtp_port)
        log.debug("In setting up smtp client using the smtp client: %s",
                  smtp_client)
        log.debug("Message received after ehlo exchange: %s",
                  str(smtp_client.ehlo()))


#        smtp_client.login(smtp_sender, smtp_password)
    else:
        log.debug('Using a fake SMTP library to simulate email notifications!')

        smtp_client = fake_smtplib.SMTP(smtp_host)

    return smtp_client
Example #49
    def request_password_reset(self, username=''):
        actor_id = self.find_actor_identity_by_username(username)
        actor = self.rr.read(actor_id)

        validity = CFG.get_safe(CFG_PREFIX + '.password_token_validity',
                                60 * 60)
        actor.passwd_reset_token = self._create_token(
            actor_id=actor_id,
            validity=validity,
            token_type=TokenTypeEnum.ACTOR_RESET_PASSWD)
        self.rr.update(actor)

        return actor.passwd_reset_token.token_string
Example #50
    def on_start(self):

        load_from_file = CFG.get("load_from_file", None)
        if load_from_file:
            if os.path.exists(load_from_file):
                self.load_from_files(self.directory_path, [load_from_file])
            else:
                print "LoadConfiguration: Error couldn't find the file path\n"
        else:
            # load from all files
            self.load_object_model_interface()
            self.load_service_interface()
            self.load_resource_configuration()
Example #51
    def __init__(self, notification_request, user_id):

        super(EmailEventProcessor, self).__init__(notification_request,
                                                  user_id)

        smtp_host = CFG.get_safe('server.smtp.host', ION_SMTP_SERVER)
        smtp_port = CFG.get_safe('server.smtp.port', 25)
        self.smtp_sender = CFG.get_safe('server.smtp.sender')
        smtp_password = CFG.get_safe('server.smtp.password')

        log.info('smtp_host: %s' % str(smtp_host))
        log.info('smtp_port: %s' % str(smtp_port))

        if CFG.get_safe('system.smtp',
                        False):  #Default is False - use the fake_smtp
            log.warning(
                'Using the real SMTP library to send email notifications!')

            #@todo - for now hard wire for gmail account
            #msg_sender = '*****@*****.**'
            #gmail_pwd = 'ooici777'

            self.smtp_client = smtplib.SMTP(smtp_host)
            self.smtp_client.ehlo()
            self.smtp_client.starttls()
            self.smtp_client.login(self.smtp_sender, smtp_password)

            log.warning("Using smpt host: %s" % smtp_host)
        else:
            # Keep this as a warning
            log.warning(
                'Using a fake SMTP library to simulate email notifications!')

            #@todo - what about port etc??? What is the correct interface to fake?
            self.smtp_client = fake_smtplib.SMTP(smtp_host)

        log.debug("UserEventProcessor.__init__(): email for user %s " %
                  self.user_id)
Example #52
    def test_pydap_handlers(self):
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
        stream_def_id = self.create_stream_definition('ctd', parameter_dictionary_id=pdict_id)
        data_product_id = self.create_data_product('ctd', stream_def_id=stream_def_id)
        self.activate_data_product(data_product_id)

        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)


        rdt = self.ph.get_rdt(stream_def_id)
        rdt['time'] = np.arange(20)
        rdt['temp'] = np.arange(20)
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        self.ph.publish_rdt_to_data_product(data_product_id,rdt)
        dataset_monitor.event.wait(10)

        from pydap.client import open_url
        pydap_host = CFG.get_safe('server.pydap.host','localhost')
        pydap_port = CFG.get_safe('server.pydap.port',8001)
        url = 'http://%s:%s/%s' %(pydap_host, pydap_port, dataset_id)
        ds = open_url(url)
        ds['temp']['temp'][:]
Example #53
    def _cmd_start_rel(self, command):
        log.debug("Start rel")
        rel = command["rel_def"]

        max_proc_replicas = int(CFG.get_safe("container.process.max_replicas", 0))

        for rel_app_cfg in rel["apps"]:
            app_name = rel_app_cfg["name"]
            log.debug("app definition in rel: %s", str(rel_app_cfg))

            # Decide where process should go
            target_engine = self._determine_target_engine(rel_app_cfg)
            log.debug("Dispatch app %s to engine %s", app_name, target_engine)

            if 'processapp' in rel_app_cfg:
                name, module, cls = rel_app_cfg["processapp"]

                rel_cfg = None
                if 'config' in rel_app_cfg:
                    rel_cfg = deepcopy(rel_app_cfg["config"])

                if 'replicas' in rel_app_cfg:
                    proc_replicas = int(rel_app_cfg["replicas"])
                    if max_proc_replicas > 0:
                        if proc_replicas > max_proc_replicas:
                            log.info("Limiting number of proc replicas to %s from %s", max_proc_replicas, proc_replicas)
                        proc_replicas = min(proc_replicas, max_proc_replicas)
                    if proc_replicas < 1 or proc_replicas > 100:
                        log.warn("Invalid number of process replicas: %s", proc_replicas)
                        proc_replicas = 1
                    for i in xrange(proc_replicas):
                        cont_info = self._determine_target_container(rel_app_cfg, target_engine)
                        container_name = self._get_cc_agent_name(cont_info)
                        proc_name = "%s.%s" % (name, i) if i else name
                        action_res = self._add_spawn_process_action(cc_agent=container_name, proc_name=proc_name,
                                                                    module=module, cls=cls, config=rel_cfg)
                        proc_id = action_res.wait()
                        self._slot_process(cont_info, proc_id, dict(proc_name=proc_name, state=ProcessStateEnum.RUNNING))
                else:
                    cont_info = self._determine_target_container(rel_app_cfg, target_engine)
                    container_name = self._get_cc_agent_name(cont_info)
                    action_res = self._add_spawn_process_action(cc_agent=container_name, proc_name=name,
                                                                module=module, cls=cls, config=rel_cfg)
                    proc_id = action_res.wait()
                    self._slot_process(cont_info, proc_id, dict(proc_name=name, state=ProcessStateEnum.RUNNING))

            else:
                log.warn("App file not supported")
Example #54
    def test_dashi(self):

        import dashi

        dashi_conn = dashi.DashiConnection(
            "something",
            self._haa_dashi_uri,
            self._haa_dashi_exchange,
            sysname=CFG.get_safe("dashi.sysname"))

        status = dashi_conn.call(self._haa_dashi_name, "status")
        assert status in ('PENDING', 'READY', 'STEADY')

        new_policy = {'preserve_n': 0}
        dashi_conn.call(self._haa_dashi_name,
                        "reconfigure_policy",
                        new_policy=new_policy)
Example #55
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.dataset_management_client = DatasetManagementServiceClient(
            node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)

        self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)
Example #56
    def setUp(self):
        self.dashi = None
        self._start_container()
        from pyon.public import CFG

        self.container_client = ContainerAgentClient(node=self.container.node,
                                                     name=self.container.name)
        self.container = self.container_client._get_container_instance()

        app = dict(name="process_dispatcher",
                   processapp=("process_dispatcher",
                               "ion.services.cei.process_dispatcher_service",
                               "ProcessDispatcherService"))
        self.container.start_app(app, config=pd_config)

        self.rr_cli = self.container.resource_registry

        self.pd_cli = ProcessDispatcherServiceClient(node=self.container.node)

        self.process_definition = ProcessDefinition(name='test_process')
        self.process_definition.executable = {
            'module': 'ion.services.cei.test.test_process_dispatcher',
            'class': 'TestProcess'
        }
        self.process_definition_id = self.pd_cli.create_process_definition(
            self.process_definition)

        self._eea_pids = []
        self._eea_pid_to_resource_id = {}
        self._eea_pid_to_persistence_dir = {}
        self._tmpdirs = []

        self.dashi = get_dashi(
            uuid.uuid4().hex,
            pd_config['processdispatcher']['dashi_uri'],
            pd_config['processdispatcher']['dashi_exchange'],
            sysname=CFG.get_safe("dashi.sysname"))

        #send a fake node_state message to PD's dashi binding.
        self.node1_id = uuid.uuid4().hex
        self._send_node_state("engine1", self.node1_id)
        self._initial_eea_pid = self._start_eeagent(self.node1_id)

        self.waiter = ProcessStateWaiter()