def test_variables_atts_ds2_ds3(self):
    """Variable attributes must survive a second msg/dataset round trip."""
    # Round trip one: dataset -> message -> dataset
    self.msg1 = dap_tools.ds2dap_msg(self.ds1)
    self.ds2 = dap_tools.dap_msg2ds(self.msg1)
    # Round trip two: dataset -> message -> dataset
    self.msg2 = dap_tools.ds2dap_msg(self.ds2)
    self.ds3 = dap_tools.dap_msg2ds(self.msg2)
    # Compare the variable attributes of the two reconstructed datasets.
    self._test_variable_atts(self.ds2, self.ds3)
def test_variables_atts_ds2_ds3(self):
    """Check variable attributes after converting twice each way.

    NOTE(review): this is a duplicate of an identically named test above;
    only the last definition in the class will actually run.
    """
    # dataset -> message, then message -> dataset (first pass)
    self.msg1 = dap_tools.ds2dap_msg(self.ds1)
    self.ds2 = dap_tools.dap_msg2ds(self.msg1)
    # dataset -> message, then message -> dataset (second pass)
    self.msg2 = dap_tools.ds2dap_msg(self.ds2)
    self.ds3 = dap_tools.dap_msg2ds(self.msg2)
    # Variable attributes should be identical between the two passes.
    self._test_variable_atts(self.ds2, self.ds3)
def test_variables_ds1_ds2(self):
    """Variables must survive one dataset -> message -> dataset round trip."""
    # Convert the source dataset to a message and back again.
    self.msg1 = dap_tools.ds2dap_msg(self.ds1)
    self.ds2 = dap_tools.dap_msg2ds(self.msg1)
    # Compare the variables of the original and the reconstruction.
    self._test_variables(self.ds1, self.ds2)
def test_global_atts_ds1_ds2(self):
    """Global attributes must survive one dataset/message round trip."""
    # Forward conversion, then inverse conversion.
    self.msg1 = dap_tools.ds2dap_msg(self.ds1)
    self.ds2 = dap_tools.dap_msg2ds(self.msg1)
    # The reconstructed dataset's global attributes must match the source.
    self._test_global_atts(self.ds1, self.ds2)
def test_global_atts_ds1_ds2(self):
    """Round-trip ds1 through a message and verify global attributes.

    NOTE(review): duplicate of the identically named test above; only the
    last definition in the class body is effective.
    """
    # Encode to a message, then decode back to a dataset.
    self.msg1 = dap_tools.ds2dap_msg(self.ds1)
    self.ds2 = dap_tools.dap_msg2ds(self.msg1)
    # Global attributes should be preserved by the round trip.
    self._test_global_atts(self.ds1, self.ds2)
def test_variables_ds1_ds2(self):
    """Round-trip ds1 through a message and verify the variables.

    NOTE(review): duplicate of the identically named test above; only the
    last definition in the class body is effective.
    """
    # Encode to a message, then decode back to a dataset.
    self.msg1 = dap_tools.ds2dap_msg(self.ds1)
    self.ds2 = dap_tools.dap_msg2ds(self.msg1)
    # Variables should be preserved by the round trip.
    self._test_variables(self.ds1, self.ds2)
def op_data(self, content, headers, msg):
    """
    @brief: this method is invoked when data arrives off an AMQP Queue
    @param content Message object which could be DAP, Dictionary or String
    @param headers Ignored (except for the 'receiver' bookkeeping key)
    @param msg Used to route the reply, otherwise ignored
    @raise RuntimeError on a failed DAP save or an unrecognized message type
    """
    logging.info(self.__class__.__name__ + ', MSG Received: ' + str(headers))
    logging.info(self.__class__.__name__ + '; Calling data process!')
    # Keep track of how many messages you got
    self.receive_cnt[headers.get('receiver')] += 1

    # Unpack the message and save it to disk!
    datamessage = dataobject.DataObject.decode(content)
    if isinstance(datamessage, DAPMessageObject):
        dataset = dap_tools.dap_msg2ds(datamessage)

        # Call preserve DAP data: derive the dataset name and target path
        # from the configured filename.
        fname = self.params['filename']
        name = os.path.basename(fname)
        dataset.name = name
        path = fname.replace(name, '')
        logging.info('name: ' + name)
        logging.info('fname:' + fname)
        logging.info('path:' + path)

        retval = self._save_dap_dataset(dataset, path)
        logging.info("retval from _save_dap_dataset is:" + str(retval))
        # Translate the helper's numeric status codes into hard failures.
        if retval == 1:
            raise RuntimeError("Archive file does not exist")
        elif retval == 2:
            raise RuntimeError("Problem with NCA handler")

    elif isinstance(datamessage, (DictionaryMessageObject, StringMessageObject)):
        data = datamessage.data
        fname = self.params['filename']
        logging.info("Writing Dictionary or String to " + fname)
        # FIX: use a context manager so the file handle is closed even if
        # the write raises (the original leaked the handle on failure).
        with open(fname, "w") as f:
            f.write(data)
        #Call preserve dict or string
        #self._save_string_dataset(data,fname=self.params['filename'])

    else:
        raise RuntimeError(
            "Persister Service received an incompatible message.")

    # Later - these will be sent to a historical log for the dataset...
    notification = datamessage.notification
    timestamp = datamessage.timestamp
def op_data(self, content, headers, msg):
    """
    @brief: this method is invoked when data arrives off an AMQP Queue
    @param content Message object which could be DAP, Dictionary or String
    @param headers Ignored (except for the 'receiver' bookkeeping key)
    @param msg Used to route the reply, otherwise ignored
    @raise RuntimeError on a failed DAP save or an unrecognized message type
    """
    logging.info(self.__class__.__name__ + ", MSG Received: " + str(headers))
    logging.info(self.__class__.__name__ + "; Calling data process!")
    # Keep track of how many messages you got
    self.receive_cnt[headers.get("receiver")] += 1

    # Unpack the message and save it to disk!
    datamessage = dataobject.DataObject.decode(content)
    if isinstance(datamessage, DAPMessageObject):
        dataset = dap_tools.dap_msg2ds(datamessage)

        # Call preserve DAP data: derive the dataset name and target path
        # from the configured filename.
        fname = self.params["filename"]
        name = os.path.basename(fname)
        dataset.name = name
        path = fname.replace(name, "")
        logging.info("name: " + name)
        logging.info("fname:" + fname)
        logging.info("path:" + path)

        retval = self._save_dap_dataset(dataset, path)
        logging.info("retval from _save_dap_dataset is:" + str(retval))
        # Translate the helper's numeric status codes into hard failures.
        if retval == 1:
            raise RuntimeError("Archive file does not exist")
        elif retval == 2:
            raise RuntimeError("Problem with NCA handler")

    elif isinstance(datamessage, (DictionaryMessageObject, StringMessageObject)):
        data = datamessage.data
        fname = self.params["filename"]
        logging.info("Writing Dictionary or String to " + fname)
        # FIX: use a context manager so the file handle is closed even if
        # the write raises (the original leaked the handle on failure).
        with open(fname, "w") as f:
            f.write(data)
        # Call preserve dict or string
        # self._save_string_dataset(data,fname=self.params['filename'])

    else:
        raise RuntimeError("Persister Service received an incompatible message.")

    # Later - these will be sent to a historical log for the dataset...
    notification = datamessage.notification
    timestamp = datamessage.timestamp
def op_data(self, content, headers, msg):
    """
    @brief Receive a data message off the queue, hand the decoded payload to
           self.ondata, then either deliver results immediately or arrange a
           digest delivery after self.delivery_interval seconds.
    @param content Encoded DataObject (DAP, Dictionary or String message)
    @param headers AMQP headers; 'receiver' is used for per-queue bookkeeping
    @param msg Ignored here
    """
    logging.debug(self.__class__.__name__ + ', MSG Received: ' + str(headers))
    logging.info(self.__class__.__name__ + '; Calling data process!')

    # Keep a record of messages received
    #@Note this could get big! What todo?
    self.receive_cnt[headers.get('receiver')] += 1
    #self.received_msg.append(content) # Do not keep the messages!

    # Unpack the message and turn it into data
    datamessage = dataobject.DataObject.decode(content)
    if isinstance(datamessage, DAPMessageObject):
        data = dap_tools.dap_msg2ds(datamessage)
    elif isinstance(datamessage, (StringMessageObject, DictionaryMessageObject)):
        data = datamessage.data
    else:
        data = None

    notification = datamessage.notification
    timestamp = datamessage.timestamp

    # Build the keyword args for ondata
    args = dict(self.params)
    args.update(self.deliver)

    logging.debug('**ARGS to ondata:' + str(args))
    yield defer.maybeDeferred(self.ondata, data, notification, timestamp, **args)

    logging.info(self.__class__.__name__ + "; op_data: Finished data processing")

    # Is this a consumer with digest delivery?
    if not self.delivery_interval:
        # if not send the messages from ondata...
        yield self.deliver_messages()
        logging.info(self.__class__.__name__ + "; op_data: Finished sending results")
    else:
        # Do the digest thing...
        receiver = headers.get('receiver')
        # FIX: dict.has_key() is deprecated (removed in Python 3); use 'in'.
        if receiver in self.interval_cnt:
            self.interval_cnt[receiver] += 1
        else:
            self.interval_cnt[receiver] = 1

        logging.debug(self.__class__.__name__ + "; op_data: digest state: \n" +
                      "Last Delivered: " + str(self.last_delivered) + ";\n" +
                      "Loop Running: " + str(self.loop_running))

        # First time data has arrived?
        # FIX: compare to None with 'is', not '=='.
        if self.last_delivered is None:
            self.last_delivered = pu.currenttime()

        if not self.loop_running:
            # Is it already time to go?
            if self.last_delivered + self.delivery_interval <= pu.currenttime():
                yield self.digest()
            # if data has arrived but it is not yet time to deliver,
            # schedule a call back
            else:
                self.loop_running = True
                delta_t = (self.last_delivered + self.delivery_interval
                           - pu.currenttime())
                logging.debug('Scheduling a call back in %s seconds' % delta_t)
                #self.loop.start(delta_t)
                reactor.callLater(delta_t, self.digest)