def create_test_object(depth=3, breadth=10, do_dict=True, do_list=True, do_ion=False,
                       uvals=False, ukeys=False, restype="Resource", obj_validate=None):
    """Create a nested test object graph of dicts/lists/IonObjects.

    @param depth        nesting depth of generated collections
    @param breadth      approximate number of entries per collection level
    @param do_dict      include dict collections
    @param do_list      include list collections
    @param do_ion       include IonObject instances
    @param uvals        sprinkle unicode characters into leaf values
    @param ukeys        sprinkle unicode characters into dict keys
    @param restype      resource type name for generated IonObjects
    @param obj_validate if not None, temporarily override the object
                        registry's validate_setattr flag during creation
    @retval             the generated top-level dict
    """
    def get_value(min_len=5, max_len=10, uni=False):
        # Slice a random substring out of the shared value pool
        rand_pos = random.randint(0, POOL_SIZE - max_len)
        key = value_pool[rand_pos:rand_pos + random.randint(min_len, max_len)]
        if uni and random.random() > 0.5:
            key += u'\u20ac'   # euro sign to force a unicode value
        return key

    def get_key():
        return get_value(uni=ukeys)

    def create_test_col(level=0, ot=dict, no_ion=False):
        if level == 0:
            return get_value(0, 15, uvals)
        # Kinds of children generated per iteration. Guard against 0 so the
        # breadth division below cannot raise ZeroDivisionError when both
        # do_dict and do_list are False.
        num_kinds = 1 if do_ion and no_ion else (1 if do_dict else 0) + (1 if do_list else 0)
        num_kinds = max(num_kinds, 1)
        if ot == dict:
            res_dict = {}
            for i in xrange(breadth / num_kinds):
                if do_ion and not no_ion:
                    key = get_key()
                    res_obj = IonObject(restype, name="TestObject %s.%s" % (level, key))
                    res_obj.addl = create_test_col(level - 1, dict, no_ion=True)
                    res_dict[key] = res_obj
                else:
                    if do_dict:
                        res_dict[get_key()] = create_test_col(level - 1, dict)
                    if do_list:
                        res_dict[get_key()] = create_test_col(level - 1, list)
            return res_dict
        elif ot == list:
            res_list = []
            for i in xrange(breadth / num_kinds):
                if do_ion and not no_ion:
                    res_obj = IonObject(restype, name="TestObject %s.%s" % (level, random.randint(1000, 9999)))
                    res_obj.addl = create_test_col(level - 1, dict, no_ion=True)
                    res_list.append(res_obj)
                else:
                    if do_dict:
                        res_list.append(create_test_col(level - 1, dict))
                    if do_list:
                        res_list.append(create_test_col(level - 1, list))
            return res_list
        elif ot == "IonObject":
            res_obj = IonObject(restype, name="TestObject %s.%s" % (level, random.randint(1000, 9999)))
            return res_obj

    if obj_validate is not None:
        from pyon.core.bootstrap import get_obj_registry
        old_validate = get_obj_registry().validate_setattr
        get_obj_registry().validate_setattr = obj_validate
        try:
            test_obj = create_test_col(depth, dict)
        finally:
            # Always restore the registry flag, even if creation raises,
            # so one failing test cannot poison subsequent tests.
            get_obj_registry().validate_setattr = old_validate
    else:
        test_obj = create_test_col(depth, dict)

    return test_obj
def __init__(self, container, datastore_name=""):
    """Bind this instance to a container and prepare ION object codecs."""
    self.container = container
    self.datastore_name = datastore_name

    # Codec pair used to convert IonObjects to/from plain dicts
    self._io_serializer = IonObjectSerializer()
    self._io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
def _construct_stream_and_publisher(self, stream_name, stream_config):
    """Register stream definition, parameter dict and publisher for one stream.

    @param stream_name   logical name of the stream
    @param stream_config config dict; must contain 'stream_def_dict',
                         'routing_key', 'stream_id' and 'exchange_point'
    """
    if log.isEnabledFor(logging.TRACE):  # pragma: no cover
        log.trace("%r: _construct_stream_and_publisher: " "stream_name:%r, stream_config:\n%s", self._platform_id, stream_name, self._pp.pformat(stream_config))

    decoder = IonObjectDeserializer(obj_registry=get_obj_registry())

    if 'stream_def_dict' not in stream_config:
        # should not happen: PlatformAgent._validate_configuration validates this.
        log.error("'stream_def_dict' key not in configuration for stream %r" % stream_name)
        return

    # Rehydrate the serialized StreamDefinition and cache it by stream name
    stream_def_dict = stream_config['stream_def_dict']
    stream_def_dict['type_'] = 'StreamDefinition'
    stream_def_obj = decoder.deserialize(stream_def_dict)
    self._stream_defs[stream_name] = stream_def_obj

    routing_key = stream_config['routing_key']
    stream_id = stream_config['stream_id']
    exchange_point = stream_config['exchange_point']
    # Parameter dictionary comes from the stream definition, not the config
    parameter_dictionary = stream_def_dict['parameter_dictionary']
    log.debug("%r: got parameter_dictionary from stream_def_dict", self._platform_id)

    self._data_streams[stream_name] = stream_id
    self._param_dicts[stream_name] = ParameterDictionary.load(parameter_dictionary)
    stream_route = StreamRoute(exchange_point=exchange_point, routing_key=routing_key)
    publisher = self._create_publisher(stream_id, stream_route)
    self._data_publishers[stream_name] = publisher
    log.debug("%r: created publisher for stream_name=%r", self._platform_id, stream_name)
def test_persisted_version(self):
    """Verify persisted_version handling across a simulated type upgrade."""
    serializer = IonObjectSerializer()

    # Create and serialize an initial (V1) SampleResource
    sample = IonObject('SampleResource', {'num': 9, 'other_field': 'test value'})
    sample_dict = serializer.serialize(sample, True)
    self.assertEquals(sample_dict['persisted_version'], 1)
    # The simulated previous version must not carry the new attribute
    self.assertEquals('new_attribute' in sample_dict, False)

    # Simulate a version increment of the type that adds new_attribute
    sample_dict['type_'] = 'SampleResource_V2'

    # Read the previous-version data back after the increment
    deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
    sample = deserializer.deserialize(sample_dict)

    # new_attribute appears, initialized with its default value
    self.assertEquals(sample.new_attribute['key'], 'value')
    # Pre-existing attributes survive and retain their values
    self.assertEquals(sample.num, 9)
    self.assertEquals(sample.other_field, 'test value')
    # Reading alone does not update the stored persisted_version
    self.assertEquals(sample_dict['persisted_version'], 1)

    # An update (re-serialization) advances the version
    sample_dict = serializer.serialize(sample, True)
    self.assertEquals(sample_dict['persisted_version'], 2)
def test_complex_version(self):
    """Verify attribute migration for a complex event across a version bump."""
    serializer = IonObjectSerializer()
    event = IonObject('SampleComplexEvent', {'num': 9, 'other_field': 'test value'})
    event_dict = serializer.serialize(event, True)
    self.assertEquals(event_dict['persisted_version'], 1)

    # Pretend the stored data belongs to the V2 type
    event_dict['type_'] = 'SampleComplexEvent_V2'
    # V1 data carries 'resource' but not the V2-only 'new_resource'
    self.assertEquals('resource' in event_dict, True)
    self.assertEquals('new_resource' in event_dict, False)

    # Read the previous-version data through the deserializer
    deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
    event = deserializer.deserialize(event_dict)

    # The added attribute is initialized from its defaults
    self.assertEquals(event.new_resource.new_attribute['key'], 'value')
    # Old attributes survive with their values
    self.assertEquals(event.num, 9)
    self.assertEquals(event.other_field, 'test value')
    # Reading does not bump the stored version
    self.assertEquals(event_dict['persisted_version'], 1)

    # Re-serializing (create/update) bumps the version
    event_dict = serializer.serialize(event, True)
    self.assertEquals(event_dict['persisted_version'], 2)
def __init__(self, host=None, port=None, datastore_name='prototype', options="", profile=DataStore.DS_PROFILE.BASIC):
    """Connect to a CouchDB server and prepare ION object codecs.

    @param host           CouchDB host; falls back to CFG.server.couchdb.host
    @param port           CouchDB port; falls back to CFG.server.couchdb.port
    @param datastore_name scoped name of the datastore
    @param options        unused; kept for interface compatibility
    @param profile        datastore specialization (views)
    """
    log.debug('__init__(host=%s, port=%s, datastore_name=%s, options=%s)', host, port, datastore_name, options)
    self.host = host or CFG.server.couchdb.host
    self.port = port or CFG.server.couchdb.port
    # The scoped name of the datastore
    self.datastore_name = datastore_name
    self.auth_str = ""
    try:
        if CFG.server.couchdb.username and CFG.server.couchdb.password:
            self.auth_str = "%s:%s@" % (CFG.server.couchdb.username, CFG.server.couchdb.password)
            log.debug("Using username:password authentication to connect to datastore")
    except AttributeError:
        log.error("CouchDB username:password not configured correctly. Trying anonymous...")

    connection_str = "http://%s%s:%s" % (self.auth_str, self.host, self.port)
    # Log the server location WITHOUT credentials (fixes the noted security
    # risk of emitting the password into the log).
    log.info('Connecting to CouchDB server: http://%s:%s', self.host, self.port)
    self.server = couchdb.Server(connection_str)

    # Datastore specialization (views)
    self.profile = profile

    # serializers
    self._io_serializer = IonObjectSerializer()
    # TODO: Not nice to have this class depend on ION objects
    self._io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())

    self._datastore_cache = {}
def __init__(self, *args, **kwargs):
    """Initialize replay process state and its control events."""
    super(ReplayProcess, self).__init__(*args, **kwargs)
    # Deserializer for IonObjects recovered from retrieved data
    self.deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
    # Events controlling the publish / play / shutdown cycle
    self.publishing = Event()
    self.play = Event()
    self.end = Event()
def __init__(self, container, datastore_name=""):
    """Remember the owning container and set up object (de)serialization."""
    self.container = container
    self.datastore_name = datastore_name

    # Object serialization/deserialization helpers
    self._io_serializer = IonObjectSerializer()
    self._io_deserializer = IonObjectDeserializer(
        obj_registry=get_obj_registry())
def __init__(self, datastore_name='prototype'):
    """In-memory datastore: all data lives in nested dicts under self.root."""
    self.datastore_name = datastore_name
    log.debug('Creating in-memory dict of dicts that will simulate data stores')
    # Root mapping: datastore name -> {doc_id: document}
    self.root = {}

    # IonObject codecs
    self._io_serializer = IonObjectSerializer()
    self._io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
def test_stream_ingestion_worker(self):
    """Publish one granule and verify it is ingested and retrievable as 'raw'."""
    self.start_ingestion_worker()

    # Parameter dictionary keyed on ingestion_timestamp
    context_ids, time_ctxt = self._create_param_contexts()
    pdict_id = self.dataset_management_client.create_parameter_dictionary(
        name='stream_ingestion_pdict',
        parameter_context_ids=context_ids,
        temporal_context='ingestion_timestamp')
    self.addCleanup(
        self.dataset_management_client.delete_parameter_dictionary, pdict_id)

    dataset_id = self.dataset_management_client.create_dataset(
        name='fake_dataset',
        description='fake_dataset',
        stream_id=self.stream_id,
        spatial_domain=self.spatial_dom.dump(),
        temporal_domain=self.time_dom.dump(),
        parameter_dictionary_id=pdict_id)
    self.addCleanup(self.dataset_management_client.delete_dataset, dataset_id)

    self.cov = self._create_coverage(dataset_id=dataset_id,
                                     parameter_dict_id=pdict_id,
                                     time_dom=self.time_dom,
                                     spatial_dom=self.spatial_dom)
    self.addCleanup(self.cov.close)

    # Publish a single record dictionary on the stream
    rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
    rdt['conductivity'] = 1
    rdt['pressure'] = 2
    rdt['salinity'] = 3

    self.start_listener(dataset_id)
    self.publisher.publish(rdt.to_granule())

    self.data_modified = Event()
    self.data_modified.wait(30)

    cov = self.get_coverage(dataset_id)
    self.assertIsNotNone(cov.get_parameter_values('raw'))

    # 'raw' holds serialized granules; decode each and check the values
    deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
    granule = retrieve_stream(dataset_id)
    rdt_complex = RecordDictionaryTool.load_from_granule(granule)
    rdt_complex['raw'] = [
        deserializer.deserialize(raw_gran) for raw_gran in rdt_complex['raw']
    ]
    for raw_gran in rdt_complex['raw']:
        decoded = RecordDictionaryTool.load_from_granule(raw_gran)
        self.assertIn(1, decoded['conductivity'])
        self.assertIn(2, decoded['pressure'])
        self.assertIn(3, decoded['salinity'])

    cov.close()
def __init__(self, datastore_name=None, profile=None, config=None, scope=None, **kwargs):
    """Couchbase-backed datastore with ION object (de)serialization."""
    log.debug('__init__(datastore_name=%s, profile=%s, config=%s)', datastore_name, profile, config)
    CouchbaseDataStore.__init__(self,
                                datastore_name=datastore_name,
                                config=config or CFG.get_safe("server.couchdb"),
                                profile=profile or DataStore.DS_PROFILE.BASIC,
                                scope=scope)

    # IonObject Serializers
    self._io_serializer = IonObjectSerializer()
    self._io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
def type_defs(ob=None): """Returns object type definitions for object name(s) @param ob name or list of names of object """ from pyon.core.bootstrap import get_obj_registry if ob is not None: print "Type definition for: %s\n" % ob if not getattr(ob, '__iter__', False): ob = (ob,) for o in ob: print get_obj_registry().type_by_name[o] else: print "List of defined objects" print "-----------------------" tnames = sorted(get_obj_registry().type_by_name.keys()) print pprint_list(tnames, -1, 1, 2) print "\nType type_defs('name') or type_defs(['name1','name2']) for definition"
def type_defs(ob=None): """Returns object type definitions for object name(s) @param ob name or list of names of object """ from pyon.core.bootstrap import get_obj_registry if ob is not None: print "Type definition for: %s\n" % ob if not getattr(ob, '__iter__', False): ob = (ob, ) for o in ob: print get_obj_registry().type_by_name[o] else: print "List of defined objects" print "-----------------------" tnames = sorted(get_obj_registry().type_by_name.keys()) print pprint_list(tnames, -1, 1, 2) print "\nType type_defs('name') or type_defs(['name1','name2']) for definition"
def decode_ion(obj): """msgpack object hook to decode granule (numpy) types and IonObjects. This works for nested IonObjects as well""" # NOTE: Just matching on dict with "type_" is a bit weak if "type_" in obj: if "__noion__" in obj: # INTERNAL: Allow dicts to mask as IonObject without being decoded (with all defaults set and validated) obj.pop("__noion__") return obj global obj_registry if obj_registry is None: obj_registry = get_obj_registry() ion_obj = obj_registry.new(obj["type_"]) for k, v in obj.iteritems(): # unicode translate to utf8 # Note: This is not recursive within dicts/list or any other types if isinstance(v, unicode): v = v.encode('utf8') if k != "type_": setattr(ion_obj, k, v) return ion_obj if 't' not in obj: return obj objt = obj['t'] if objt == EncodeTypes.LIST: return list(obj['o']) elif objt == EncodeTypes.NPARRAY: return np.array(obj['o'], dtype=np.dtype(obj['d'])) elif objt == EncodeTypes.COMPLEX: return complex(obj['o'][0], obj['o'][1]) elif objt == EncodeTypes.DTYPE: return np.dtype(obj['o']) elif objt == EncodeTypes.SLICE: return slice(obj['o'][0], obj['o'][1], obj['o'][2]) elif objt == EncodeTypes.SET: return set(obj['o']) elif objt == EncodeTypes.NPVAL: dt = np.dtype(obj['d']) return dt.type(obj['o']) return obj
def test_config(self):
    """
    test_initialize
    Test agent initialize command. This causes creation of
    driver process and transition to inactive.
    """
    # Agent starts uninitialized: no driver process exists yet.
    state = self._ia_client.get_agent_state()
    self.assertEqual(state, ResourceAgentState.UNINITIALIZED)

    # Ping the agent.
    retval = self._ia_client.ping_agent()
    log.info(retval)

    # INITIALIZE validates the driver config, launches the driver process
    # and connects to it via messaging; on success the agent goes inactive.
    cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
    retval = self._ia_client.execute_agent(cmd)
    state = self._ia_client.get_agent_state()
    self.assertEqual(state, ResourceAgentState.INACTIVE)

    # Ping the driver proc.
    retval = self._ia_client.ping_resource()
    log.info(retval)

    decoder = IonObjectDeserializer(obj_registry=get_obj_registry())

    # Grab and decode the alarms defined in the config.
    # Expected shape (example):
    #   {'status': None, 'stream_name': 'parsed', 'name': 'test_sim_warning',
    #    'upper_bound': 5.0, 'expr': 'x<5.0', 'upper_rel_op': '<',
    #    'lower_rel_op': None, 'type_': 'IntervalAlarmDef', 'value_id': 'temp',
    #    'lower_bound': None,
    #    'message': 'Temperature is above test range of 5.0.',
    #    'current_val': None, 'type': 1}
    retval = decoder.deserialize(
        self._ia_client.get_agent(['alarms'])['alarms'])
    self.assertEqual(retval[0].type_, 'IntervalAlarmDef')
    self.assertEqual(retval[0].upper_bound, 5.0)
    self.assertEqual(retval[0].expr, 'x<5.0')

    # RESET stops driver messaging, ends the driver process and
    # returns the agent to uninitialized.
    cmd = AgentCommand(command=ResourceAgentEvent.RESET)
    retval = self._ia_client.execute_agent(cmd)
    state = self._ia_client.get_agent_state()
    self.assertEqual(state, ResourceAgentState.UNINITIALIZED)
def test_config(self):
    """
    test_initialize
    Test agent initialize command. This causes creation of
    driver process and transition to inactive.
    """
    # No driver process exists in the uninitialized state.
    self.assertEqual(self._ia_client.get_agent_state(),
                     ResourceAgentState.UNINITIALIZED)

    # Ping the agent.
    log.info(self._ia_client.ping_agent())

    # Initialize: validates the driver config (a config may also be passed
    # with the command), launches a driver process and connects to it.
    # On success the agent transitions to inactive.
    retval = self._ia_client.execute_agent(
        AgentCommand(command=ResourceAgentEvent.INITIALIZE))
    self.assertEqual(self._ia_client.get_agent_state(),
                     ResourceAgentState.INACTIVE)

    # Ping the driver proc.
    log.info(self._ia_client.ping_resource())

    # Decode the alarms defined in the config; each entry resembles:
    #   {'type_': 'IntervalAlarmDef', 'expr': 'x<5.0', 'upper_bound': 5.0,
    #    'stream_name': 'parsed', 'value_id': 'temp', ...}
    decoder = IonObjectDeserializer(obj_registry=get_obj_registry())
    retval = decoder.deserialize(self._ia_client.get_agent(['alarms'])['alarms'])
    self.assertEqual(retval[0].type_, 'IntervalAlarmDef')
    self.assertEqual(retval[0].upper_bound, 5.0)
    self.assertEqual(retval[0].expr, 'x<5.0')

    # Reset: stops driver messaging, ends the driver process and returns
    # the agent to uninitialized.
    retval = self._ia_client.execute_agent(
        AgentCommand(command=ResourceAgentEvent.RESET))
    self.assertEqual(self._ia_client.get_agent_state(),
                     ResourceAgentState.UNINITIALIZED)
def test_complex_version_del_attrib(self):
    """Verify nested-attribute replacement when a contained type changes."""
    serializer = IonObjectSerializer()

    # Unknown fields at construction time must be rejected
    with self.assertRaises(AttributeError):
        IonObject('SampleComplexEvent_V2', {'num': 9, 'other_field': 'test value','more_new_resource': {'key':'value'}})

    event = IonObject('SampleComplexEvent_V2', {'num': 9, 'other_field': 'test value','new_resource': {'num': 9, 'other_field': 'test value','new_attribute':{'key':'value'}}})
    # Create simulated saved data
    event_dict = serializer.serialize(event, True)
    self.assertEquals(event_dict['persisted_version'], 2)

    # Pretend the saved data now belongs to the V3 type
    event_dict['type_'] = 'SampleComplexEvent_V3'
    # The saved data carries new_resource, persisted at schema version 2
    self.assertEquals('new_resource' in event_dict, True)
    self.assertEquals(event_dict['new_resource']['persisted_version'], 2)

    # Read the data through the deserializer; V3 uses a new type for new_resource
    deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
    event = deserializer.deserialize(event_dict)

    # new_resource still exists...
    self.assertTrue('new_resource' in event)
    # ...but since its type changed, new_attribute is gone
    self.assertFalse('new_attribute' in event.new_resource)
    # The replacement type's another_new_attribute holds its default data
    self.assertEquals(event.new_resource.another_new_attribute['key'], 'new_value')
    # On read, the schema version of new_resource replaces the old persisted_version
    self.assertEquals(event.new_resource.persisted_version, 3)
    # Old attribute values of new_resource were discarded and re-defaulted
    self.assertNotEquals(event.new_resource.num, 9)
    self.assertEquals(event.new_resource.num, 0)
    # The enclosing event's own attributes are untouched
    self.assertEquals(event.num, 9)
    self.assertEquals(event.other_field, 'test value')
    # Reading alone does not bump the event's version
    self.assertEquals(event.persisted_version, 2)

    # Create/update bumps the version on the event and the subsumed object
    event_dict = serializer.serialize(event, True)
    self.assertEquals(event_dict['persisted_version'], 3)
    self.assertEquals(event_dict['new_resource']['persisted_version'], 3)
def test_attribute_version(self):
    """Verify nested-attribute migration when only its version (not type) changes."""
    serializer = IonObjectSerializer()

    # Unknown fields at construction time must be rejected
    with self.assertRaises(AttributeError):
        IonObject('SampleComplexEvent_V2', {'num': 9, 'other_field': 'test value','more_new_resource': {'key':'value'}})

    event = IonObject('SampleComplexEvent_V2', {'num': 9, 'other_field': 'test value','new_resource': {'num': 9, 'other_field': 'test value','new_attribute':{'key':'value'}}})
    event_dict = serializer.serialize(event, True)
    self.assertEquals(event_dict['persisted_version'], 2)

    # Saved data has new_resource, typed SampleResource_V2
    self.assertEquals('new_resource' in event_dict, True)
    self.assertEquals(event_dict['new_resource']['type_'], "SampleResource_V2")

    # Pretend both the event and its nested resource moved up one version:
    # same logical attribute, next type version.
    event_dict['type_'] = "SampleComplexEvent_V3"
    event_dict['persisted_version'] = 3
    event_dict['new_resource']['type_'] = "SampleResource_V3"

    # Read the data after the nested resource's version bump
    deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
    event = deserializer.deserialize(event_dict)

    # new_resource survives; the V2-only new_attribute does not
    self.assertTrue('new_resource' in event)
    self.assertFalse('new_attribute' in event.new_resource)
    # The V3 attribute gets its default data
    self.assertEquals(event.new_resource.another_new_attribute['key'], 'new_value')
    # Values of surviving attributes are preserved
    self.assertEquals(event.new_resource.num, 9)
    self.assertEquals(event.num, 9)
    self.assertEquals(event.other_field, 'test value')
    # Reading does not yet bump the subsumed object's version
    self.assertEquals(event.new_resource.persisted_version, 2)

    # Create/update: top-level version unchanged, nested version updated
    event_dict = serializer.serialize(event, True)
    self.assertEquals(event_dict['persisted_version'], 3)
    self.assertEquals(event_dict['new_resource']['persisted_version'], 3)
def __init__(self, datastore_name=None, config=None, scope=None, profile=None):
    """
    @param datastore_name  Name of datastore within server. May be scoped to sysname
    @param config  A server config dict with connection params
    @param scope  Prefix for the datastore name (e.g. sysname) to separate multiple systems
    """
    PostgresDataStore.__init__(self,
                               datastore_name=datastore_name,
                               config=config or CFG.get_safe("server.postgresql"),
                               profile=profile or DataStore.DS_PROFILE.BASIC,
                               scope=scope)

    # Codecs for storing/retrieving IonObjects
    self._io_serializer = IonObjectSerializer()
    self._io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
def _results_from_response(self, response, id_only): deserializer = IonObjectDeserializer(obj_registry=get_obj_registry()) if not (response.has_key('hits') and response['hits'].has_key('hits')): return [] hits = response['hits']['hits'] if len(hits) > 0: if len(hits) >= SEARCH_BUFFER_SIZE: log.warning("Query results exceeded search buffer limitations") self.raise_search_buffer_exceeded() if id_only: return [str(i['_id']) for i in hits] results = map(deserializer.deserialize,hits) return results else: return []
def _results_from_response(self, response, id_only): deserializer = IonObjectDeserializer(obj_registry=get_obj_registry()) if not (response.has_key('hits') and response['hits'].has_key('hits')): return [] hits = response['hits']['hits'] if len(hits) > 0: if len(hits) >= SEARCH_BUFFER_SIZE: log.warning("Query results exceeded search buffer limitations") self.raise_search_buffer_exceeded() if id_only: return [str(i['_id']) for i in hits] results = map(deserializer.deserialize, hits) return results else: return []
def test_stream_ingestion_worker(self):
    """Round-trip one published granule through ingestion into the coverage."""
    self.start_ingestion_worker()

    context_ids, time_ctxt = self._create_param_contexts()
    pdict_id = self.dataset_management_client.create_parameter_dictionary(
        name='stream_ingestion_pdict',
        parameter_context_ids=context_ids,
        temporal_context='ingestion_timestamp')
    self.addCleanup(self.dataset_management_client.delete_parameter_dictionary, pdict_id)

    dataset_id = self.dataset_management_client.create_dataset(
        name='fake_dataset',
        description='fake_dataset',
        stream_id=self.stream_id,
        parameter_dictionary_id=pdict_id)
    self.addCleanup(self.dataset_management_client.delete_dataset, dataset_id)

    self.cov = self._create_coverage(dataset_id=dataset_id,
                                     parameter_dict_id=pdict_id,
                                     time_dom=self.time_dom,
                                     spatial_dom=self.spatial_dom)
    self.addCleanup(self.cov.close)

    # One record dictionary with known values
    rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
    rdt['conductivity'] = 1
    rdt['pressure'] = 2
    rdt['salinity'] = 3

    self.start_listener(dataset_id)
    self.publisher.publish(rdt.to_granule())

    self.data_modified = Event()
    self.data_modified.wait(30)

    cov = self.get_coverage(dataset_id)
    self.assertIsNotNone(cov.get_parameter_values('raw'))

    # Decode the serialized granules stored under 'raw' and verify values
    deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
    granule = retrieve_stream(dataset_id)
    rdt_complex = RecordDictionaryTool.load_from_granule(granule)
    rdt_complex['raw'] = [deserializer.deserialize(raw_gran)
                          for raw_gran in rdt_complex['raw']]
    for raw_gran in rdt_complex['raw']:
        decoded = RecordDictionaryTool.load_from_granule(raw_gran)
        self.assertIn(1, decoded['conductivity'])
        self.assertIn(2, decoded['pressure'])
        self.assertIn(3, decoded['salinity'])

    cov.close()
def test_version_del_attrib(self):
    """Verify attribute removal across a simulated V2 -> V3 type upgrade."""
    serializer = IonObjectSerializer()

    # Unknown fields at construction time must be rejected
    with self.assertRaises(AttributeError):
        IonObject('SampleResource_V2', {'num': 9, 'other_field': 'test value','more_new_attribute': {'key':'value'}})

    # V2 of SampleResource carries "new_attribute"
    resource = IonObject('SampleResource_V2', {'num': 9, 'other_field': 'test value','new_attribute': {'key':'value'}})
    resource_dict = serializer.serialize(resource, True)
    self.assertEquals(resource_dict['persisted_version'], 2)
    self.assertEquals('new_attribute' in resource_dict, True)

    # Pretend the type moved on to V3, which drops "new_attribute"
    resource_dict['type_'] = 'SampleResource_V3'
    deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
    resource = deserializer.deserialize(resource_dict)

    # Dropped attribute is gone; the V3-only attribute holds its default
    self.assertFalse('new_attribute' in resource)
    self.assertEquals(resource.another_new_attribute['key'], 'new_value')
    # Existing attributes survive with their data
    self.assertEquals(resource.num, 9)
    self.assertEquals(resource.other_field, 'test value')
    # Reading alone leaves the stored version at 2
    self.assertEquals(resource_dict['persisted_version'], 2)

    # An update advances the version
    resource_dict = serializer.serialize(resource, True)
    self.assertEquals(resource_dict['persisted_version'], 3)
def _construct_streams(self, stream_info):
    """Register field lists and publish rates for each configured stream.

    @param stream_info dict mapping stream_name -> stream config dict; each
           config holds either an inline 'stream_def_dict' or a
           'stream_definition_ref'
    """
    decoder = IonObjectDeserializer(obj_registry=get_obj_registry())
    for (stream_name, config) in stream_info.iteritems():
        try:
            # `in` replaces the deprecated dict.has_key()
            if 'stream_def_dict' in config:
                # Inline serialized stream definition: rehydrate it
                stream_def_dict = config['stream_def_dict']
                stream_def_dict['type_'] = 'StreamDefinition'
                stream_def_obj = decoder.deserialize(stream_def_dict)
                self._stream_defs[stream_name] = stream_def_obj
                rdt = RecordDictionaryTool(stream_definition=stream_def_obj)
            else:
                stream_def = config['stream_definition_ref']
                self._stream_defs[stream_name] = stream_def
                rdt = RecordDictionaryTool(stream_definition_id=stream_def)
            self._agent.aparam_streams[stream_name] = rdt.fields
            self._agent.aparam_pubrate[stream_name] = 0
        except Exception as e:
            # Log and continue so one bad stream does not break the rest.
            # (Fixed: missing space between agent name and message.)
            errmsg = 'Instrument agent %s ' % self._agent._proc_name
            errmsg += 'error constructing stream %s. ' % stream_name
            errmsg += str(e)
            log.error(errmsg)

    self._agent.aparam_set_pubrate = self.aparam_set_pubrate
def __init__(self, datastore_name=None, config=None, scope=None, profile=None):
    """
    @param datastore_name  Name of datastore within server. May be scoped to sysname
    @param config  A server config dict with connection params
    @param scope  Prefix for the datastore name (e.g. sysname) to separate multiple systems
    """
    PostgresDataStore.__init__(
        self,
        datastore_name=datastore_name,
        config=config or CFG.get_safe("server.postgresql"),
        profile=profile or DataStore.DS_PROFILE.BASIC,
        scope=scope)

    # IonObject Serializers
    self._io_serializer = IonObjectSerializer()
    self._io_deserializer = IonObjectDeserializer(
        obj_registry=get_obj_registry())
def _construct_stream_and_publisher(self, stream_name, stream_config):
    """Set up stream definition, parameter dictionary and publisher for one stream.

    @param stream_name   logical name of the stream
    @param stream_config config dict with routing/stream/exchange info plus
                         either 'stream_def_dict' or 'stream_definition_ref'
    """
    # granule_publish_rate
    # records_per_granule

    if log.isEnabledFor(logging.TRACE):  # pragma: no cover
        log.trace("%r: _construct_stream_and_publisher: " "stream_name:%r, stream_config:\n%s", self._platform_id, stream_name, self._pp.pformat(stream_config))

    decoder = IonObjectDeserializer(obj_registry=get_obj_registry())

    if 'stream_def_dict' in stream_config:
        # Preferred path: rehydrate the inline serialized StreamDefinition
        stream_def_dict = stream_config['stream_def_dict']
        stream_def_dict['type_'] = 'StreamDefinition'
        stream_def_obj = decoder.deserialize(stream_def_dict)
        self._stream_defs[stream_name] = stream_def_obj
        log.debug("%r: using stream_def_dict", self._platform_id)

    else:
        # TODO this case to be removed.
        stream_definition_ref = stream_config['stream_definition_ref']
        self._stream_defs[stream_name] = stream_definition_ref
        log.debug("%r: using stream_definition_ref", self._platform_id)

    routing_key = stream_config['routing_key']
    stream_id = stream_config['stream_id']
    exchange_point = stream_config['exchange_point']
    # Here the parameter dictionary comes from the stream config itself
    parameter_dictionary = stream_config['parameter_dictionary']

    self._data_streams[stream_name] = stream_id
    self._param_dicts[stream_name] = ParameterDictionary.load(parameter_dictionary)
    stream_route = StreamRoute(exchange_point=exchange_point, routing_key=routing_key)
    publisher = self._create_publisher(stream_id, stream_route)
    self._data_publishers[stream_name] = publisher
    log.debug("%r: created publisher for stream_name=%r", self._platform_id, stream_name)
def test_event_version_del_attrib(self):
    """Verify attribute removal for events across a simulated version bump."""
    serializer = IonObjectSerializer()

    # Unknown fields at construction time must be rejected
    with self.assertRaises(AttributeError):
        IonObject('SampleEvent_V2', {'num': 9, 'other_field': 'test value','more_new_attribute': {'key':'value'}})

    event = IonObject('SampleEvent_V2', {'num': 9, 'other_field': 'test value','new_attribute': {'key':'value'}})
    event_dict = serializer.serialize(event, True)
    self.assertEquals(event_dict['persisted_version'], 2)

    # Pretend the saved data now belongs to the V3 type
    event_dict['type_'] = 'SampleEvent_V3'
    # The V2 data still carries new_attribute
    self.assertEquals('new_attribute' in event_dict, True)

    # Read it back as V3, which has dropped new_attribute
    deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
    event = deserializer.deserialize(event_dict)

    # Dropped attribute is gone; the V3 attribute holds its default
    self.assertFalse('new_attribute' in event)
    self.assertEquals(event.another_new_attribute['key'], 'new_value')
    # Surviving attributes keep their data
    self.assertEquals(event.num, 9)
    self.assertEquals(event.other_field, 'test value')
    # Reading alone leaves the stored version at 2
    self.assertEquals(event_dict['persisted_version'], 2)

    # Create/update advances the version
    event_dict = serializer.serialize(event, True)
    self.assertEquals(event_dict['persisted_version'], 3)
def __init__(self):
    """Set up blame-aware ION object codecs for this interceptor."""
    Interceptor.__init__(self)
    # Blame variants of the codecs annotate messages with their origin
    self._io_serializer = IonObjectBlameSerializer()
    self._io_deserializer = IonObjectBlameDeserializer(obj_registry=get_obj_registry())
def __init__(self, id_factory):
    """Keep a codec pair plus the id generator used for new documents."""
    # Encoder/decoder for IonObject <-> dict conversion
    self.encoder = IonObjectSerializer()
    self.decoder = IonObjectDeserializer(obj_registry=get_obj_registry())
    self.id_factory = id_factory
def test_perf(self):
    """Benchmark serialization of nested containers and IonObjects across
    several codecs (ion serializer, json, simplejson, msgpack), logging the
    timing and payload size of each round-trip."""
    _io_serializer = IonObjectSerializer()
    _io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())

    def time_serialize(test_obj, name="?", has_ion=False):
        # Round-trip through the ion serializer first.
        with time_it(name + ", serialize"):
            os = _io_serializer.serialize(test_obj)
        with time_it(name + ", deserialize"):
            os2 = _io_deserializer.deserialize(os)
        count_objs(os)
        if has_ion:
            # The generic codecs below cannot encode IonObjects; feed them
            # the ion-serialized (plain dict) form instead.
            test_obj = os

        with time_it(name + ", json.dumps"):
            oj = json.dumps(test_obj)
        with time_it(name + ", json.loads"):
            o2 = json.loads(oj)
        log.info(" len(json): %s", len(oj))

        with time_it(name + ", simplejson.dumps"):
            oj = simplejson.dumps(test_obj)
        with time_it(name + ", simplejson.loads"):
            o2 = simplejson.loads(oj)
        log.info(" len(simplejson): %s", len(oj))

        with time_it(name + ", msgpack.packb"):
            o1 = msgpack.packb(test_obj)
        with time_it(name + ", msgpack.unpackb"):
            o2 = msgpack.unpackb(o1, use_list=1)
        log.info(" len(msgpack): %s", len(o1))

        # pickle/cPickle timings removed (dead commented-out code);
        # re-add locally if ever needed for comparison.
        log.info("----------------")

    # Large, deeply nested dicts with unicode keys and values
    with time_it("highly nested dict/list, create"):
        test_obj = create_test_object(4, 4, do_list=False, uvals=True, ukeys=True)
    time_serialize(test_obj, "highly nested dict/list")

    # Wider, shallower nesting, ASCII only
    with time_it("nested dict/list, create"):
        test_obj = create_test_object(3, 40, do_list=True, uvals=False, ukeys=False)
    time_serialize(test_obj, "nested dict/list")

    # Large string
    #value = ''.join(random.choice(allowed_chars) for x in xrange(1460000))
    value = ''.join(random.choice(allowed_chars) for x in xrange(500000))
    time_serialize(value, "long string")

    # ION objects (validation off)
    with time_it("create ion"):
        test_obj1 = create_test_object(2, 200, do_ion=True, do_list=False, do_dict=True, obj_validate=False)
    count_objs(test_obj1)
    time_serialize(test_obj1, "dict of ion nested", has_ion=True)

    # Full interceptor encode/decode path
    from pyon.core.interceptor.interceptor import Invocation
    from pyon.core.interceptor.encode import EncodeInterceptor
    encode = EncodeInterceptor()
    invocation = Invocation()
    invocation.message = test_obj1
    with time_it("ion object, encode"):
        encode.outgoing(invocation)
    with time_it("ion object, decode"):
        encode.incoming(invocation)
    count_objs(invocation.message)

    # ION objects with unicode content
    with time_it("create ion unicode"):
        test_obj1 = create_test_object(2, 200, do_ion=True, do_list=False, do_dict=True,
                                       obj_validate=False, uvals=True, ukeys=True)
    count_objs(test_obj1)
    time_serialize(test_obj1, "dict of ion nested unicode", has_ion=True)

    # Create objects with validation on (label typo "calidated" fixed)
    with time_it("create ion validated"):
        test_obj1 = create_test_object(2, 200, do_ion=True, do_list=False, do_dict=True, obj_validate=True)
    count_objs(test_obj1)
    time_serialize(test_obj1, "dict of ion nested validated", has_ion=True)
def __init__(self, *args, **kwargs):
    """Initialize the replay process and its IonObject deserializer."""
    super(ReplayProcess, self).__init__(*args, **kwargs)
    # Deserializer bound to the shared object registry.
    self.deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
def test_perf(self):
    """Benchmark serialization of nested containers and IonObjects across
    several codecs (ion serializer, json, simplejson, msgpack), logging the
    timing and payload size of each round-trip."""
    _io_serializer = IonObjectSerializer()
    _io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())

    def time_serialize(test_obj, name="?", has_ion=False):
        # Round-trip through the ion serializer first.
        with time_it(name + ", serialize"):
            os = _io_serializer.serialize(test_obj)
        with time_it(name + ", deserialize"):
            os2 = _io_deserializer.deserialize(os)
        count_objs(os)
        if has_ion:
            # The generic codecs below cannot encode IonObjects; feed them
            # the ion-serialized (plain dict) form instead.
            test_obj = os

        with time_it(name + ", json.dumps"):
            oj = json.dumps(test_obj)
        with time_it(name + ", json.loads"):
            o2 = json.loads(oj)
        log.info(" len(json): %s", len(oj))

        with time_it(name + ", simplejson.dumps"):
            oj = simplejson.dumps(test_obj)
        with time_it(name + ", simplejson.loads"):
            o2 = simplejson.loads(oj)
        log.info(" len(simplejson): %s", len(oj))

        with time_it(name + ", msgpack.packb"):
            o1 = msgpack.packb(test_obj)
        with time_it(name + ", msgpack.unpackb"):
            o2 = msgpack.unpackb(o1, use_list=1)
        log.info(" len(msgpack): %s", len(o1))

        # pickle/cPickle timings removed (dead commented-out code);
        # re-add locally if ever needed for comparison.
        log.info("----------------")

    # Large, deeply nested dicts with unicode keys and values
    with time_it("highly nested dict/list, create"):
        test_obj = create_test_object(4, 4, do_list=False, uvals=True, ukeys=True)
    time_serialize(test_obj, "highly nested dict/list")

    # Wider, shallower nesting, ASCII only
    with time_it("nested dict/list, create"):
        test_obj = create_test_object(3, 40, do_list=True, uvals=False, ukeys=False)
    time_serialize(test_obj, "nested dict/list")

    # Large string
    #value = ''.join(random.choice(allowed_chars) for x in xrange(1460000))
    value = ''.join(random.choice(allowed_chars) for x in xrange(500000))
    time_serialize(value, "long string")

    # ION objects (validation off)
    with time_it("create ion"):
        test_obj1 = create_test_object(2, 200, do_ion=True, do_list=False, do_dict=True, obj_validate=False)
    count_objs(test_obj1)
    time_serialize(test_obj1, "dict of ion nested", has_ion=True)

    # Full interceptor encode/decode path
    from pyon.core.interceptor.interceptor import Invocation
    from pyon.core.interceptor.encode import EncodeInterceptor
    encode = EncodeInterceptor()
    invocation = Invocation()
    invocation.message = test_obj1
    with time_it("ion object, encode"):
        encode.outgoing(invocation)
    with time_it("ion object, decode"):
        encode.incoming(invocation)
    count_objs(invocation.message)

    # ION objects with unicode content
    with time_it("create ion unicode"):
        test_obj1 = create_test_object(2, 200, do_ion=True, do_list=False, do_dict=True,
                                       obj_validate=False, uvals=True, ukeys=True)
    count_objs(test_obj1)
    time_serialize(test_obj1, "dict of ion nested unicode", has_ion=True)

    # Create objects with validation on (label typo "calidated" fixed)
    with time_it("create ion validated"):
        test_obj1 = create_test_object(2, 200, do_ion=True, do_list=False, do_dict=True, obj_validate=True)
    count_objs(test_obj1)
    time_serialize(test_obj1, "dict of ion nested validated", has_ion=True)
def __init__(self):
    Interceptor.__init__(self)
    # Blame-tracking serializer/deserializer pair used by this interceptor.
    self._io_deserializer = IonObjectBlameDeserializer(obj_registry=get_obj_registry())
    self._io_serializer = IonObjectBlameSerializer()
def __init__(self, *args, **kwargs):
    """Initialize replay state: an IonObject deserializer plus the events
    coordinating the replay lifecycle."""
    super(ReplayProcess, self).__init__(*args, **kwargs)
    self.deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
    # Lifecycle flags -- presumably gevent Events; TODO confirm Event import origin
    self.play = Event()
    self.publishing = Event()
    self.end = Event()
def create_test_object(depth=3, breadth=10, do_dict=True, do_list=True, do_ion=False,
                       uvals=False, ukeys=False, restype="Resource", obj_validate=None):
    """Build a randomized nested test structure of dicts/lists (optionally
    containing IonObjects) for serialization benchmarks.

    depth:        nesting levels; at level 0 leaves are random strings.
    breadth:      approximate children per container, divided among the
                  enabled child kinds.
    do_dict/do_list: which container kinds to generate at each level.
    do_ion:       wrap entries in IonObjects of type restype.
    uvals/ukeys:  append a non-ASCII char (euro sign) to roughly half of the
                  values/keys to exercise unicode handling.
    obj_validate: if not None, temporarily overrides the object registry's
                  validate_setattr flag while building.

    NOTE(review): if do_dict, do_list and do_ion are all False, num_kinds
    below is 0 and the breadth division raises ZeroDivisionError.
    """
    def get_value(min_len=5, max_len=10, uni=False):
        # Random slice of the module-level value_pool; when uni is set,
        # a euro sign is appended about half the time.
        rand_pos = random.randint(0, POOL_SIZE - max_len)
        key = value_pool[rand_pos:rand_pos + random.randint(min_len, max_len)]
        if uni and random.random() > 0.5:
            key += u'\u20ac'
        return key

    def get_key():
        return get_value(uni=ukeys)

    def create_test_col(level=0, ot=dict, no_ion=False):
        # Recursively build one container of type ot; no_ion prevents
        # nesting IonObjects inside IonObjects.
        if level == 0:
            return get_value(0, 15, uvals)
        if ot == dict:
            res_dict = {}
            # Split breadth across the kinds of children generated at this level.
            num_kinds = 1 if do_ion and no_ion else (1 if do_dict else 0) + (
                1 if do_list else 0)
            for i in xrange(breadth / num_kinds):
                if do_ion and not no_ion:
                    key = get_key()
                    res_obj = IonObject(restype, name="TestObject %s.%s" % (level, key))
                    res_obj.addl = create_test_col(level - 1, dict, no_ion=True)
                    res_dict[key] = res_obj
                else:
                    if do_dict:
                        res_dict[get_key()] = create_test_col(level - 1, dict)
                    if do_list:
                        res_dict[get_key()] = create_test_col(level - 1, list)
            return res_dict
        elif ot == list:
            res_list = []
            num_kinds = 1 if do_ion and no_ion else (1 if do_dict else 0) + (
                1 if do_list else 0)
            for i in xrange(breadth / num_kinds):
                if do_ion and not no_ion:
                    res_obj = IonObject(restype, name="TestObject %s.%s" % (level, random.randint(1000, 9999)))
                    res_obj.addl = create_test_col(level - 1, dict, no_ion=True)
                    res_list.append(res_obj)
                else:
                    if do_dict:
                        res_list.append(create_test_col(level - 1, dict))
                    if do_list:
                        res_list.append(create_test_col(level - 1, list))
            return res_list
        elif ot == "IonObject":
            # Not reachable from the entry call below (only dict is passed);
            # kept for direct callers of create_test_col.
            res_obj = IonObject(restype, name="TestObject %s.%s" % (level, random.randint(1000, 9999)))
            return res_obj

    if obj_validate is not None:
        # Temporarily toggle setattr validation on the shared registry,
        # restoring the previous flag afterwards.
        from pyon.core.bootstrap import get_obj_registry
        old_validate = get_obj_registry().validate_setattr
        get_obj_registry().validate_setattr = obj_validate
        test_obj = create_test_col(depth, dict)
        get_obj_registry().validate_setattr = old_validate
    else:
        test_obj = create_test_col(depth, dict)

    return test_obj