def __init__(self, host=None, port=None, datastore_name='prototype', options="", profile=DataStore.DS_PROFILE.BASIC):
    """
    Connect to a CouchDB server and prepare this datastore wrapper.

    @param host            CouchDB server host; falls back to CFG.server.couchdb.host
    @param port            CouchDB server port; falls back to CFG.server.couchdb.port
    @param datastore_name  The scoped name of the datastore
    @param options         Unused here; kept for interface compatibility
    @param profile         Datastore specialization (views), defaults to BASIC
    """
    log.debug('__init__(host=%s, port=%s, datastore_name=%s, options=%s)', host, port, datastore_name, options)

    self.host = host or CFG.server.couchdb.host
    self.port = port or CFG.server.couchdb.port
    # The scoped name of the datastore
    self.datastore_name = datastore_name

    # Build "user:pass@" auth prefix if credentials are configured;
    # missing config keys raise AttributeError and we fall back to anonymous.
    self.auth_str = ""
    try:
        if CFG.server.couchdb.username and CFG.server.couchdb.password:
            self.auth_str = "%s:%s@" % (CFG.server.couchdb.username, CFG.server.couchdb.password)
            log.debug("Using username:password authentication to connect to datastore")
    except AttributeError:
        log.error("CouchDB username:password not configured correctly. Trying anonymous...")

    connection_str = "http://%s%s:%s" % (self.auth_str, self.host, self.port)
    # Log a credential-free URL only: never emit the password into the log
    # (fixes the previous TODO which logged the full connection string).
    log.info('Connecting to CouchDB server: http://%s:%s', self.host, self.port)
    self.server = couchdb.Server(connection_str)

    # Datastore specialization (views)
    self.profile = profile

    # serializers
    self._io_serializer = IonObjectSerializer()
    # TODO: Not nice to have this class depend on ION objects
    self._io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())

    # Cache of opened datastores, keyed by name
    self._datastore_cache = {}
def _construct_stream_and_publisher(self, stream_name, stream_config):
    """
    Build and cache everything needed to publish on one named stream:
    its deserialized StreamDefinition, parameter dictionary, stream id,
    route and publisher.
    """
    if log.isEnabledFor(logging.TRACE):  # pragma: no cover
        log.trace("%r: _construct_stream_and_publisher: "
                  "stream_name:%r, stream_config:\n%s",
                  self._platform_id, stream_name,
                  self._pp.pformat(stream_config))

    decoder = IonObjectDeserializer(obj_registry=get_obj_registry())

    # should not happen: PlatformAgent._validate_configuration validates this.
    if 'stream_def_dict' not in stream_config:
        log.error("'stream_def_dict' key not in configuration for stream %r" % stream_name)
        return

    # Deserialize the stream definition and remember it by stream name.
    stream_def_dict = stream_config['stream_def_dict']
    stream_def_dict['type_'] = 'StreamDefinition'
    self._stream_defs[stream_name] = decoder.deserialize(stream_def_dict)

    parameter_dictionary = stream_def_dict['parameter_dictionary']
    log.debug("%r: got parameter_dictionary from stream_def_dict", self._platform_id)

    stream_id = stream_config['stream_id']
    self._data_streams[stream_name] = stream_id
    self._param_dicts[stream_name] = ParameterDictionary.load(parameter_dictionary)

    # Route + publisher for this stream.
    route = StreamRoute(exchange_point=stream_config['exchange_point'],
                        routing_key=stream_config['routing_key'])
    self._data_publishers[stream_name] = self._create_publisher(stream_id, route)

    log.debug("%r: created publisher for stream_name=%r",
              self._platform_id, stream_name)
def __init__(self, *args, **kwargs):
    """Initialize replay state: an Ion object deserializer plus control events."""
    super(ReplayProcess, self).__init__(*args, **kwargs)
    registry = get_obj_registry()
    self.deserializer = IonObjectDeserializer(obj_registry=registry)
    # Events gating the replay lifecycle: publish gate, play/pause, end-of-stream.
    self.publishing = Event()
    self.play = Event()
    self.end = Event()
def __init__(self, container, datastore_name=""):
    """Hold a container reference and set up the Ion object codec pair."""
    self.container = container
    self.datastore_name = datastore_name

    # Object serialization/deserialization
    self._io_serializer = IonObjectSerializer()
    self._io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
def __init__(self, datastore_name='prototype'):
    """Set up an in-memory dict-of-dicts simulation of a datastore server."""
    self.datastore_name = datastore_name
    log.debug('Creating in-memory dict of dicts that will simulate data stores')
    # Top-level mapping: datastore name -> datastore contents.
    self.root = {}

    # serializers
    self._io_serializer = IonObjectSerializer()
    self._io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
def test_stream_ingestion_worker(self):
    """
    End-to-end check of the stream ingestion worker: publish one granule on a
    stream and verify the ingested coverage's 'raw' parameter round-trips back
    into granules containing the published values.
    """
    self.start_ingestion_worker()

    # Build a parameter dictionary and dataset for the test stream;
    # register cleanups so each is deleted after the test.
    context_ids, time_ctxt = self._create_param_contexts()
    pdict_id = self.dataset_management_client.create_parameter_dictionary(name='stream_ingestion_pdict', parameter_context_ids=context_ids, temporal_context='ingestion_timestamp')
    self.addCleanup(self.dataset_management_client.delete_parameter_dictionary, pdict_id)

    dataset_id = self.dataset_management_client.create_dataset(name='fake_dataset', description='fake_dataset', stream_id=self.stream_id, spatial_domain=self.spatial_dom.dump(), temporal_domain=self.time_dom.dump(), parameter_dictionary_id=pdict_id)
    self.addCleanup(self.dataset_management_client.delete_dataset, dataset_id)

    self.cov = self._create_coverage(dataset_id=dataset_id, parameter_dict_id=pdict_id, time_dom=self.time_dom, spatial_dom=self.spatial_dom)
    self.addCleanup(self.cov.close)

    # Publish one granule with known values for each parameter.
    rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
    rdt['conductivity'] = 1
    rdt['pressure'] = 2
    rdt['salinity'] = 3

    self.start_listener(dataset_id)
    self.publisher.publish(rdt.to_granule())

    # Wait (up to 30 s) for the listener to signal the dataset was modified.
    self.data_modified = Event()
    self.data_modified.wait(30)

    cov = self.get_coverage(dataset_id)
    self.assertIsNotNone(cov.get_parameter_values('raw'))

    # The 'raw' values are serialized granules; deserialize and verify that
    # each one carries the values published above.
    deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
    granule = retrieve_stream(dataset_id)
    rdt_complex = RecordDictionaryTool.load_from_granule(granule)
    rdt_complex['raw'] = [deserializer.deserialize(i) for i in rdt_complex['raw']]
    for gran in rdt_complex['raw']:
        rdt_new = RecordDictionaryTool.load_from_granule(gran)
        self.assertIn(1, rdt_new['conductivity'])
        self.assertIn(2, rdt_new['pressure'])
        self.assertIn(3, rdt_new['salinity'])

    cov.close()
def test_config(self): """ test_initialize Test agent initialize command. This causes creation of driver process and transition to inactive. """ # We start in uninitialized state. # In this state there is no driver process. state = self._ia_client.get_agent_state() self.assertEqual(state, ResourceAgentState.UNINITIALIZED) # Ping the agent. retval = self._ia_client.ping_agent() log.info(retval) # Initialize the agent. # The agent is spawned with a driver config, but you can pass one in # optinally with the initialize command. This validates the driver # config, launches a driver process and connects to it via messaging. # If successful, we switch to the inactive state. cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE) retval = self._ia_client.execute_agent(cmd) state = self._ia_client.get_agent_state() self.assertEqual(state, ResourceAgentState.INACTIVE) # Ping the driver proc. retval = self._ia_client.ping_resource() log.info(retval) decoder = IonObjectDeserializer(obj_registry=get_obj_registry()) # Grab the alarms defined in the config. retval = decoder.deserialize(self._ia_client.get_agent(['alarms'])['alarms']) """ {'status': None, 'stream_name': 'parsed', 'name': 'test_sim_warning', 'upper_bound': 5.0, 'expr': 'x<5.0', 'upper_rel_op': '<', 'lower_rel_op': None, 'type_': 'IntervalAlarmDef', 'value_id': 'temp', 'lower_bound': None, 'message': 'Temperature is above test range of 5.0.', 'current_val': None, 'type': 1} """ self.assertEqual(retval[0].type_, 'IntervalAlarmDef') self.assertEqual(retval[0].upper_bound, 5.0) self.assertEqual(retval[0].expr, 'x<5.0') # Reset the agent. This causes the driver messaging to be stopped, # the driver process to end and switches us back to uninitialized. cmd = AgentCommand(command=ResourceAgentEvent.RESET) retval = self._ia_client.execute_agent(cmd) state = self._ia_client.get_agent_state() self.assertEqual(state, ResourceAgentState.UNINITIALIZED)
def _results_from_response(self, response, id_only):
    """
    Extract results from a search response dict.

    @param response  Search response; hits are expected under response['hits']['hits']
    @param id_only   If True, return only the '_id' of each hit (as str);
                     otherwise return the deserialized hit objects
    @retval          List of ids or deserialized objects; [] when the response
                     is malformed or contains no hits
    """
    deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())

    # Guard against malformed responses. 'in' replaces the deprecated
    # dict.has_key (removed in Python 3).
    if 'hits' not in response or 'hits' not in response['hits']:
        return []

    hits = response['hits']['hits']
    if not hits:
        return []

    if len(hits) >= SEARCH_BUFFER_SIZE:
        log.warning("Query results exceeded search buffer limitations")
        self.raise_search_buffer_exceeded()

    if id_only:
        return [str(i['_id']) for i in hits]

    return map(deserializer.deserialize, hits)
def __init__(self, datastore_name=None, config=None, scope=None, profile=None):
    """
    @param datastore_name Name of datastore within server. May be scoped to sysname
    @param config A server config dict with connection params
    @param scope Prefix for the datastore name (e.g. sysname) to separate multiple systems
    @param profile Datastore profile; defaults to BASIC
    """
    # Fall back to the system Postgres config and BASIC profile when not given.
    PostgresDataStore.__init__(
        self,
        datastore_name=datastore_name,
        config=config or CFG.get_safe("server.postgresql"),
        profile=profile or DataStore.DS_PROFILE.BASIC,
        scope=scope)

    # IonObject Serializers
    self._io_serializer = IonObjectSerializer()
    self._io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
def _construct_streams(self, stream_info):
    """
    Register the agent's streams from the given stream_info mapping.

    For each (stream_name, config) pair, resolve a stream definition either
    from an embedded 'stream_def_dict' (deserialized into a StreamDefinition)
    or from a 'stream_definition_ref' id, then record the stream's fields and
    a default publish rate of 0 on the agent.
    """
    decoder = IonObjectDeserializer(obj_registry=get_obj_registry())
    for (stream_name, config) in stream_info.iteritems():
        try:
            # 'in' replaces the deprecated dict.has_key.
            if 'stream_def_dict' in config:
                stream_def_dict = config['stream_def_dict']
                stream_def_dict['type_'] = 'StreamDefinition'
                stream_def_obj = decoder.deserialize(stream_def_dict)
                self._stream_defs[stream_name] = stream_def_obj
                rdt = RecordDictionaryTool(stream_definition=stream_def_obj)
            else:
                stream_def = config['stream_definition_ref']
                self._stream_defs[stream_name] = stream_def
                rdt = RecordDictionaryTool(stream_definition_id=stream_def)
            self._agent.aparam_streams[stream_name] = rdt.fields
            self._agent.aparam_pubrate[stream_name] = 0
        except Exception as e:
            # A failed stream must not abort construction of the others;
            # log and continue. (Separator added: the old concatenation ran
            # the agent name straight into 'error constructing'.)
            errmsg = 'Instrument agent %s ' % self._agent._proc_name
            errmsg += 'error constructing stream %s. ' % stream_name
            errmsg += str(e)
            log.error(errmsg)

    self._agent.aparam_set_pubrate = self.aparam_set_pubrate
def __init__(self):
    """Initialize the base interceptor and the Ion object codec pair."""
    Interceptor.__init__(self)
    self._io_serializer = IonObjectSerializer()
    self._io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
def __init__(self, id_factory):
    """Store the id factory and build the Ion object encoder/decoder pair."""
    self.encoder = IonObjectSerializer()
    self.decoder = IonObjectDeserializer(obj_registry=get_obj_registry())
    # Callable/factory used to mint new object ids.
    self.id_factory = id_factory
def test_perf(self):
    """
    Benchmark Ion object serialization against json, simplejson and msgpack
    for several object shapes (deeply nested, wide, long string, dicts of Ion
    objects with/without unicode and with validation), logging timings and
    payload sizes via time_it/log.
    """
    _io_serializer = IonObjectSerializer()
    _io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())

    def time_serialize(test_obj, name="?", has_ion=False):
        # Time one round trip through each codec and log payload sizes.
        with time_it(name + ", serialize"):
            os = _io_serializer.serialize(test_obj)

        with time_it(name + ", deserialize"):
            os2 = _io_deserializer.deserialize(os)

        count_objs(os)

        # Plain codecs below can't handle Ion objects directly, so feed them
        # the already-serialized form instead.
        if has_ion:
            test_obj = os

        with time_it(name + ", json.dumps"):
            oj = json.dumps(test_obj)

        with time_it(name + ", json.loads"):
            o2 = json.loads(oj)
        log.info(" len(json): %s", len(oj))

        with time_it(name + ", simplejson.dumps"):
            oj = simplejson.dumps(test_obj)

        with time_it(name + ", simplejson.loads"):
            o2 = simplejson.loads(oj)
        log.info(" len(simplejson): %s", len(oj))

        with time_it(name + ", msgpack.packb"):
            o1 = msgpack.packb(test_obj)

        with time_it(name + ", msgpack.unpackb"):
            o2 = msgpack.unpackb(o1, use_list=1)
        log.info(" len(msgpack): %s", len(o1))

        # pickle/cPickle timings disabled; kept for reference.
        # with time_it(name + ", pickle.dumps"):
        #     op = pickle.dumps(test_obj)
        #
        # with time_it(name + ", pickle.loads"):
        #     o2 = pickle.loads(op)
        # log.info(" len(pickle): %s", len(op))
        #
        # with time_it(name + ", cPickle.dumps"):
        #     op = cPickle.dumps(test_obj)
        #
        # with time_it(name + ", cPickle.loads"):
        #     o2 = cPickle.loads(op)
        # log.info(" len(cPickle): %s", len(op))

        log.info("----------------")

    # Large nested
    with time_it("highly nested dict/list, create"):
        test_obj = create_test_object(4, 4, do_list=False, uvals=True, ukeys=True)

    time_serialize(test_obj, "highly nested dict/list")

    # Nested
    with time_it("nested dict/list, create"):
        test_obj = create_test_object(3, 40, do_list=True, uvals=False, ukeys=False)

    time_serialize(test_obj, "nested dict/list")

    # Large string
    #value = ''.join(random.choice(allowed_chars) for x in xrange(1460000))
    value = ''.join(random.choice(allowed_chars) for x in xrange(500000))

    time_serialize(value, "long string")

    # ION
    with time_it("create ion"):
        test_obj1 = create_test_object(2, 200, do_ion=True, do_list=False, do_dict=True, obj_validate=False)

    count_objs(test_obj1)
    time_serialize(test_obj1, "dict of ion nested", has_ion=True)

    # Time a full encode/decode pass through the messaging interceptor.
    from pyon.core.interceptor.interceptor import Invocation
    from pyon.core.interceptor.encode import EncodeInterceptor
    encode = EncodeInterceptor()
    invocation = Invocation()
    invocation.message = test_obj1

    with time_it("ion object, encode"):
        encode.outgoing(invocation)

    with time_it("ion object, decode"):
        encode.incoming(invocation)
    count_objs(invocation.message)

    # ION
    with time_it("create ion unicode"):
        test_obj1 = create_test_object(2, 200, do_ion=True, do_list=False, do_dict=True, obj_validate=False, uvals=True, ukeys=True)

    count_objs(test_obj1)
    time_serialize(test_obj1, "dict of ion nested unicode", has_ion=True)

    # Create objects with validation on
    # (NOTE: "calidated" typo below is in a runtime label; left unchanged.)
    with time_it("create ion calidated"):
        test_obj1 = create_test_object(2, 200, do_ion=True, do_list=False, do_dict=True, obj_validate=True)

    count_objs(test_obj1)
    time_serialize(test_obj1, "dict of ion nested validated", has_ion=True)
@author David Stuebe @author Don Brittain @author Tim Giguere @brief https://confluence.oceanobservatories.org/display/CIDev/R2+Construction+Data+Model ''' import yaml from pyon.core.object import ion_serializer, IonObjectDeserializer from pyon.core.registry import IonObjectRegistry from interface.objects import Taxonomy from pyon.util.log import log # Create an IonObjectDeserializer used in the prototype loads method... ior = IonObjectRegistry() ion_deserializer = IonObjectDeserializer(obj_registry=ior) class TaxyTool(object): """ @brief Wraps up a Taxonomy (IONObject) in a class which uses that information Definition of the Taxonomy Ion Resource: Taxonomy: !Extends_InformationResource map: {} The map is a dictionary which contains handles as keys and name sets as values. A name set is a set of objects which can be hashed for inverse lookup and should be serializable for transport and persistence In practice they are strings for nicknames and Taxonomy Description objects for complex definitions