def _process_gateway_request(resource_id, operation, json_request, requester):
    if requester is not None:
        json_request["agentRequest"]["requester"] = requester

    serializer = IonObjectSerializer()
    serialized_msg = serializer.serialize(json_request)
    payload = simplejson.dumps(serialized_msg)

    response = _agent_gateway_request(resource_id + '/' + operation, payload)

    if GATEWAY_ERROR in response['data']:
        log.error(response['data'][GATEWAY_ERROR][GATEWAY_ERROR_MESSAGE])
        #raise BadRequest(response['data'][GATEWAY_ERROR][GATEWAY_ERROR_MESSAGE])
        ex_cls = response['data'][GATEWAY_ERROR][GATEWAY_ERROR_EXCEPTION]
        ex_msg = response['data'][GATEWAY_ERROR][GATEWAY_ERROR_MESSAGE]
        # Re-raise the server-side exception class if pyon defines it.
        if hasattr(pyex, ex_cls):
            raise getattr(pyex, ex_cls)(ex_msg)
        else:
            raise Exception(ex_msg)

    try:
        # Strip the IonObject type marker before handing the response back.
        if "type_" in response['data'][GATEWAY_RESPONSE]:
            del response['data'][GATEWAY_RESPONSE]["type_"]
    except Exception:
        pass
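# A minimal usage sketch for _process_gateway_request, assuming the gateway
# helpers above are in scope. The request shape mirrors gw_agent_execute_agent
# further below; the agent op and resource id here are hypothetical placeholders.
def _example_ping_agent(resource_id, requester=None):
    ping_request = {
        "agentRequest": {
            "agentId": resource_id,
            "agentOp": "ping_agent",
            "expiry": 0,
            "params": {}
        }
    }
    # Raises the server-reported exception on a gateway error; otherwise the
    # cleaned result is available under response['data'][GATEWAY_RESPONSE].
    return _process_gateway_request(resource_id, "ping_agent", ping_request, requester)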
def __init__(self, host=None, port=None, datastore_name='prototype', options="",
             profile=DataStore.DS_PROFILE.BASIC):
    log.debug('__init__(host=%s, port=%s, datastore_name=%s, options=%s)',
              host, port, datastore_name, options)

    self.host = host or CFG.server.couchdb.host
    self.port = port or CFG.server.couchdb.port
    # The scoped name of the datastore
    self.datastore_name = datastore_name
    self.auth_str = ""
    try:
        if CFG.server.couchdb.username and CFG.server.couchdb.password:
            self.auth_str = "%s:%s@" % (CFG.server.couchdb.username, CFG.server.couchdb.password)
            log.debug("Using username:password authentication to connect to datastore")
    except AttributeError:
        log.error("CouchDB username:password not configured correctly. Trying anonymous...")

    connection_str = "http://%s%s:%s" % (self.auth_str, self.host, self.port)
    #connection_str = "http://%s:%s" % (self.host, self.port)
    # TODO: Security risk to emit password into log. Remove later.
    log.info('Connecting to CouchDB server: %s', connection_str)
    self.server = couchdb.Server(connection_str)

    # Datastore specialization (views)
    self.profile = profile

    # Serializers
    self._io_serializer = IonObjectSerializer()
    # TODO: Not nice to have this class depend on ION objects
    self._io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())

    self._datastore_cache = {}
def __init__(self, container, datastore_name=""):
    self.container = container
    self.datastore_name = datastore_name

    # Object serialization/deserialization
    self._io_serializer = IonObjectSerializer()
    self._io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
def __init__(self, datastore_name='prototype'):
    self.datastore_name = datastore_name
    log.debug('Creating in-memory dict of dicts that will simulate data stores')
    self.root = {}

    # Serializers
    self._io_serializer = IonObjectSerializer()
    self._io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
def _serialize_port_assigments(self, port_assignments=None):
    serializer = IonObjectSerializer()
    serialized_port_assignments = {}
    if isinstance(port_assignments, dict):
        for device_id, platform_port in port_assignments.iteritems():
            # Flatten each platform port IonObject into a plain dict
            flatpp = serializer.serialize(platform_port)
            serialized_port_assignments[device_id] = flatpp
    return serialized_port_assignments
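# Hedged sketch of calling _serialize_port_assigments from within its owning
# class; the PlatformPort attribute and device id below are illustrative, not
# taken from the original code.
port_assignments = {
    'device_01': IonObject('PlatformPort', reference_designator='RD-01'),
}
serialized = self._serialize_port_assigments(port_assignments)
# Each value is now a plain dict (flattened by IonObjectSerializer), so the
# whole mapping can be embedded in a config that is later JSON/msgpack encoded.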
def obj_to_tree(definition):
    from pyon.core.object import IonObjectSerializer
    if not isinstance(definition, StreamDefinitionContainer):
        return

    serializer = IonObjectSerializer()
    definition = serializer.serialize(definition)
    tree = DefinitionTree.traverse(definition, definition['data_stream_id'])
    return tree
def size(self):
    '''
    Truly poor way to calculate the size of a granule: serialize it, pack it
    with msgpack, and return the length of the byte stream in bytes.
    '''
    granule = self.to_granule()
    serializer = IonObjectSerializer()
    flat = serializer.serialize(granule)
    byte_stream = msgpack.packb(flat, default=encode_ion)
    return len(byte_stream)
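# The serialize-then-msgpack pattern above generalizes to estimating the wire
# size of any IonObject. A minimal sketch, assuming msgpack and encode_ion are
# importable exactly as used in size():
def estimated_size(ion_obj):
    flat = IonObjectSerializer().serialize(ion_obj)
    return len(msgpack.packb(flat, default=encode_ion))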
def _process_gateway_request(service_name, operation, json_request, requester):
    if requester is not None:
        json_request["serviceRequest"]["requester"] = requester

    serializer = IonObjectSerializer()
    serialized_msg = serializer.serialize(json_request)
    payload = simplejson.dumps(serialized_msg)

    response = _service_gateway_request(service_name + '/' + operation, payload)
    return response
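# Hedged example of the "serviceRequest" payload shape this helper expects,
# mirroring the "agentRequest" structure used by the agent variant above. The
# service name, op and params are illustrative assumptions.
example_request = {
    "serviceRequest": {
        "serviceName": "resource_registry",
        "serviceOp": "read",
        "params": {"object_id": "abc123"}
    }
}
response = _process_gateway_request("resource_registry", "read", example_request, requester=None)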
def _build_stream_config(self):
    """
    Build the driver stream configurations for the 'parsed' and 'raw' streams.
    """
    # Create clients to create streams and look up parameter dictionaries.
    pubsub_client = PubsubManagementServiceClient(node=self.container.node)
    dataset_management = DatasetManagementServiceClient()
    encoder = IonObjectSerializer()

    # Create streams and subscriptions for each stream named in driver.
    self._stream_config = {}

    for stream_name, param_dict_name in [('parsed', 'ctd_parsed_param_dict'),
                                         ('raw', 'ctd_raw_param_dict')]:
        pd_id = dataset_management.read_parameter_dictionary_by_name(param_dict_name, id_only=True)
        stream_def_id = pubsub_client.create_stream_definition(name=stream_name,
                                                               parameter_dictionary_id=pd_id)
        stream_def = pubsub_client.read_stream_definition(stream_def_id)
        stream_def_dict = encoder.serialize(stream_def)
        pd = stream_def.parameter_dictionary
        stream_id, stream_route = pubsub_client.create_stream(name=stream_name,
                                                              exchange_point='science_data',
                                                              stream_definition_id=stream_def_id)
        self._stream_config[stream_name] = dict(routing_key=stream_route.routing_key,
                                                exchange_point=stream_route.exchange_point,
                                                stream_id=stream_id,
                                                parameter_dictionary=pd,
                                                stream_def_dict=stream_def_dict)
def __init__(self):
    BaseIngestionManagementService.__init__(self)

    xs_dot_xp = CFG.core_xps.science_data
    try:
        self.XS, xp_base = xs_dot_xp.split('.')
        self.XP = '.'.join([bootstrap.get_sys_name(), xp_base])
    except ValueError:
        raise StandardError('Invalid CFG for core_xps.science_data: "%s"; must have "xs.xp" structure' % xs_dot_xp)

    self.serializer = IonObjectSerializer()
    self.process_definition_id = None
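# Illustrative walk-through of the "xs.xp" split above, with a hypothetical
# config value; only the exchange-point part gets the sys name prefix.
xs_dot_xp = 'ioncore.science_data'          # assumed CFG.core_xps.science_data
XS, xp_base = xs_dot_xp.split('.')          # XS = 'ioncore'
XP = '.'.join(['mysys', xp_base])           # XP = 'mysys.science_data'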
def gw_agent_execute_agent(resource_id, cmd, requester=None):
    agent_cmd_params = IonObjectSerializer().serialize(cmd)

    agent_execute_request = {
        "agentRequest": {
            "agentId": resource_id,
            "agentOp": "execute_agent",
            "expiry": 0,
            "params": {"command": agent_cmd_params}
        }
    }

    ret_values = process_gateway_request(resource_id, "execute_agent", agent_execute_request, requester)
    ret_obj = IonObject('AgentCommandResult', ret_values)
    return ret_obj
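# Usage sketch, assuming an AgentCommand IonObject as used elsewhere in the
# codebase; the command name and resource id are illustrative.
cmd = IonObject('AgentCommand', command='get_resource_state')
result = gw_agent_execute_agent('123xyz', cmd)
# result is an AgentCommandResult IonObject built from the gateway response.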
def recv_packet(self, msg, stream_route, stream_id):
    validate_is_instance(msg, Granule, 'Incoming packet must be of type granule')

    cov = self.get_coverage(stream_id)
    if cov:
        cov.insert_timesteps(1)

        if 'raw' in cov.list_parameters():
            gran = IonObjectSerializer().serialize(msg)
            cov.set_parameter_values(param_name='raw', value=[gran])

        if 'ingestion_timestamp' in cov.list_parameters():
            t_now = time.time()
            ntp_time = TimeUtils.ts_to_units(cov.get_parameter_context('ingestion_timestamp').uom, t_now)
            cov.set_parameter_values(param_name='ingestion_timestamp', value=ntp_time)

        self.dataset_changed(self.get_dataset(stream_id), cov.num_timesteps)
def _gw_execute(self, op, resource_id, cmd, requester=None, timeout=300):
    agent_cmd_params = IonObjectSerializer().serialize(cmd)

    agent_execute_request = {
        "agentRequest": {
            "agentId": resource_id,
            "agentOp": op,
            # "expiry": 0,
            "timeout": timeout,
            "params": {
                "timeout": timeout,
                "command": agent_cmd_params
            }
        }
    }

    ret_values = _process_gateway_request(resource_id, op, agent_execute_request, requester)
    ret_obj = IonObject('AgentCommandResult', ret_values)
    return ret_obj
def __init__(self, datastore_name=None, config=None, scope=None, profile=None):
    """
    @param datastore_name  Name of datastore within server. May be scoped to sysname
    @param config          A server config dict with connection params
    @param scope           Prefix for the datastore name (e.g. sysname) to separate multiple systems
    @param profile         Datastore profile; defaults to DataStore.DS_PROFILE.BASIC
    """
    PostgresDataStore.__init__(self,
                               datastore_name=datastore_name,
                               config=config or CFG.get_safe("server.postgresql"),
                               profile=profile or DataStore.DS_PROFILE.BASIC,
                               scope=scope)

    # IonObject serializers
    self._io_serializer = IonObjectSerializer()
    self._io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
def __init__(self):
    Interceptor.__init__(self)
    self._io_serializer = IonObjectSerializer()
    self._io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())
def __init__(self, id_factory):
    self.encoder = IonObjectSerializer()
    self.decoder = IonObjectDeserializer(obj_registry=get_obj_registry())
    self.id_factory = id_factory
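# Roundtrip sketch for the encoder/decoder pair set up above: flatten an
# IonObject to a plain dict, then rebuild it through the object registry.
# The 'Resource' type and name are illustrative.
obj = IonObject('Resource', name='example')
flat = self.encoder.serialize(obj)       # plain dict carrying a 'type_' key
obj2 = self.decoder.deserialize(flat)    # IonObject of the original type
assert obj2.name == obj.name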
def __init__(self):
    BaseTransformManagementService.__init__(self)
    self.serializer = IonObjectSerializer()
def test_perf(self):
    _io_serializer = IonObjectSerializer()
    _io_deserializer = IonObjectDeserializer(obj_registry=get_obj_registry())

    def time_serialize(test_obj, name="?", has_ion=False):
        with time_it(name + ", serialize"):
            os = _io_serializer.serialize(test_obj)

        with time_it(name + ", deserialize"):
            os2 = _io_deserializer.deserialize(os)

        count_objs(os)

        if has_ion:
            test_obj = os

        with time_it(name + ", json.dumps"):
            oj = json.dumps(test_obj)

        with time_it(name + ", json.loads"):
            o2 = json.loads(oj)
        log.info("  len(json): %s", len(oj))

        with time_it(name + ", simplejson.dumps"):
            oj = simplejson.dumps(test_obj)

        with time_it(name + ", simplejson.loads"):
            o2 = simplejson.loads(oj)
        log.info("  len(simplejson): %s", len(oj))

        with time_it(name + ", msgpack.packb"):
            o1 = msgpack.packb(test_obj)

        with time_it(name + ", msgpack.unpackb"):
            o2 = msgpack.unpackb(o1, use_list=1)
        log.info("  len(msgpack): %s", len(o1))

        # with time_it(name + ", pickle.dumps"):
        #     op = pickle.dumps(test_obj)
        #
        # with time_it(name + ", pickle.loads"):
        #     o2 = pickle.loads(op)
        # log.info("  len(pickle): %s", len(op))
        #
        # with time_it(name + ", cPickle.dumps"):
        #     op = cPickle.dumps(test_obj)
        #
        # with time_it(name + ", cPickle.loads"):
        #     o2 = cPickle.loads(op)
        # log.info("  len(cPickle): %s", len(op))

        log.info("----------------")

    # Large nested
    with time_it("highly nested dict/list, create"):
        test_obj = create_test_object(4, 4, do_list=False, uvals=True, ukeys=True)

    time_serialize(test_obj, "highly nested dict/list")

    # Nested
    with time_it("nested dict/list, create"):
        test_obj = create_test_object(3, 40, do_list=True, uvals=False, ukeys=False)

    time_serialize(test_obj, "nested dict/list")

    # Large string
    #value = ''.join(random.choice(allowed_chars) for x in xrange(1460000))
    value = ''.join(random.choice(allowed_chars) for x in xrange(500000))

    time_serialize(value, "long string")

    # ION
    with time_it("create ion"):
        test_obj1 = create_test_object(2, 200, do_ion=True, do_list=False, do_dict=True, obj_validate=False)

    count_objs(test_obj1)
    time_serialize(test_obj1, "dict of ion nested", has_ion=True)

    from pyon.core.interceptor.interceptor import Invocation
    from pyon.core.interceptor.encode import EncodeInterceptor
    encode = EncodeInterceptor()
    invocation = Invocation()
    invocation.message = test_obj1

    with time_it("ion object, encode"):
        encode.outgoing(invocation)

    with time_it("ion object, decode"):
        encode.incoming(invocation)

    count_objs(invocation.message)

    # ION with unicode keys and values
    with time_it("create ion unicode"):
        test_obj1 = create_test_object(2, 200, do_ion=True, do_list=False, do_dict=True,
                                       obj_validate=False, uvals=True, ukeys=True)

    count_objs(test_obj1)
    time_serialize(test_obj1, "dict of ion nested unicode", has_ion=True)

    # Create objects with validation on
    with time_it("create ion validated"):
        test_obj1 = create_test_object(2, 200, do_ion=True, do_list=False, do_dict=True, obj_validate=True)

    count_objs(test_obj1)
    time_serialize(test_obj1, "dict of ion nested validated", has_ion=True)
def _generate_stream_config(self):
    log.debug("_generate_stream_config for %s", self.agent_instance_obj.name)
    dsm = self.clients.dataset_management
    psm = self.clients.pubsub_management

    agent_obj = self._get_agent()
    device_obj = self._get_device()

    streams_dict = {}
    for stream_cfg in agent_obj.stream_configurations:
        # Create a stream def for each param dict to match against the existing data products
        streams_dict[stream_cfg.stream_name] = {'param_dict_name': stream_cfg.parameter_dictionary_name}

    # Retrieve the output products
    # TODO: What about platforms? other things?
    device_id = device_obj._id
    data_product_objs = self.RR2.find_data_products_of_instrument_device_using_has_output_product(device_id)

    stream_config = {}
    for d in data_product_objs:
        stream_def_id = self.RR2.find_stream_definition_id_of_data_product_using_has_stream_definition(d._id)
        for stream_name, stream_info_dict in streams_dict.items():
            # Read objects from cache to be compared
            pdict = self.RR2.find_resource_by_name(RT.ParameterDictionary, stream_info_dict.get('param_dict_name'))
            stream_def_id = self._find_streamdef_for_dp_and_pdict(d._id, pdict._id)

            if stream_def_id:
                #model_param_dict = self.RR2.find_resources_by_name(RT.ParameterDictionary,
                #                                                   stream_info_dict.get('param_dict_name'))[0]
                #model_param_dict = self._get_param_dict_by_name(stream_info_dict.get('param_dict_name'))
                #stream_route = self.RR2.read(product_stream_id).stream_route
                product_stream_id = self.RR2.find_stream_id_of_data_product_using_has_stream(d._id)
                stream_def = psm.read_stream_definition(stream_def_id)
                stream_route = psm.read_stream_route(stream_id=product_stream_id)

                from pyon.core.object import IonObjectSerializer
                stream_def_dict = IonObjectSerializer().serialize(stream_def)
                stream_def_dict.pop('type_')

                if stream_name in stream_config:
                    log.warn("Overwriting stream_config[%s]", stream_name)

                stream_config[stream_name] = {
                    'routing_key'           : stream_route.routing_key,  # TODO: Serialize stream_route together
                    'stream_id'             : product_stream_id,
                    'stream_definition_ref' : stream_def_id,
                    'stream_def_dict'       : stream_def_dict,
                    'exchange_point'        : stream_route.exchange_point,
                    # TODO: This is redundant and very large - the param dict is in the stream_def_dict ???
                    'parameter_dictionary'  : stream_def.parameter_dictionary,
                }

    log.debug("Stream config generated")
    log.trace("generate_stream_config: %s", stream_config)
    return stream_config