def format(self, data, pretty_format=None):
    stream = StreamWithCallback(self.output_callback)
    stream.set_meta('data', data)
    stream.set_meta('index', 0)
    formatter = SGMLFormatter()
    formatter.open_tag('data', info=self.info)
    stream.set_meta('formatter', formatter)
    stream.set_meta('remaining', '')
    return stream
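# All of these format() variants share one contract: they return a
# StreamWithCallback that renders output lazily, deferring the real work to
# self.output_callback as the consumer reads. StreamWithCallback is framework
# code not shown here; the stand-in below is a minimal sketch of the behavior
# the variants appear to rely on (method names taken from the call sites,
# read() semantics assumed, not confirmed).
class StreamWithCallbackSketch(object):
    def __init__(self, callback):
        self._callback = callback
        self._meta = {}
        self._buffer = ''
    def set_meta(self, key, value):
        self._meta[key] = value
    def get_meta_value(self, key):
        return self._meta[key]
    def write(self, text):
        self._buffer += text
    def read(self, count):
        # Ask the producer for more output whenever the buffer runs dry;
        # by convention the callback returns None once it is exhausted.
        if not self._buffer:
            self._callback(self)
        out, self._buffer = self._buffer[:count], self._buffer[count:]
        return out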
def format(self, data):
    if self.debug:
        msglog.log('broadway', msglog.types.DB,
                   'Format called on %s.' % self.name)
    stream = StreamWithCallback(self.output_callback)
    stream.set_meta('data', data)
    stream.set_meta('index', 0)
    stream.set_meta('remaining', '')
    if self.prefix:
        stream.write(self.prefix)
    self._write_header(stream)
    return stream
def format(self, data):
    if self.debug:
        msglog.log('broadway', msglog.types.DB,
                   'Format called on %s.' % self.name)
    stream = StreamWithCallback(self.output_callback)
    stream.set_meta('data', data)
    stream.set_meta('index', 0)
    stream.set_meta('remaining', '')
    if self.prefix:
        stream.write(self.prefix + '\n')
    self._write_header(stream)
    return stream
def format(self, ws_dicts, pretty_format=None):
    stream = StreamWithCallback(self.output_callback)
    stream.set_meta('data', ws_dicts)
    stream.set_meta('index', 0)
    formatter = SGMLFormatter()
    # Insert overall open tag, plus top-level data tags:
    formatter.add_text(_xml._xml_prolog)
    formatter.open_tag('workout-sessions')
    formatter.open_tag('msg-time')
    formatter.add_text(_xml._convert_utc_sec_to_date_time_str(time.time()))
    formatter.close_tag('msg-time')
    formatter.open_tag('ics-serial-id')
    formatter.add_text(str(self._ics_serial_id))
    formatter.close_tag('ics-serial-id')
    stream.set_meta('formatter', formatter)
    stream.set_meta('remaining', '')
    return stream
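# The _xml helper module used above is not shown. A minimal sketch of the two
# members this variant calls, with assumed implementations (the prolog text
# and the timestamp format are guesses from the names, not confirmed):
import time

_xml_prolog = '<?xml version="1.0" encoding="UTF-8"?>\n'

def _convert_utc_sec_to_date_time_str(utc_sec):
    # Assumed ISO-8601-style rendering of a UTC epoch-seconds value.
    return time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime(utc_sec))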
def format(self, data):
    if self.debug:
        msglog.log('broadway', msglog.types.DB,
                   'Format called on %s.' % self.name)
    stream = StreamWithCallback(self.output_callback)
    stream.set_meta('data', data)
    stream.set_meta('index', 0)
    stream.set_meta('remaining', '')
    if self.prefix:
        stream.write(self.prefix + '\n')
    columns = self.parent.log.get_column_names()
    stream.set_meta('columns', columns)
    return stream
def format(self, data):
    stream = StreamWithCallback(self.output_callback)
    stream.set_meta('data', data)
    stream.set_meta('index', 0)
    stream.set_meta('remaining', '')
    if self.prefix:
        stream.write(self.prefix + self.eol)
    if data:
        self._write_header(stream)
    else:
        columns = self.log.get_column_names()
        stream.write(string.join(columns, self.header_delimiter) + self.eol)
        stream.set_meta('columns', columns)
    return stream
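# The delimited-text variants above defer row rendering to output_callback,
# which drains stream meta 'data' one record at a time and parks any
# partially written tail in 'remaining'. A minimal sketch of such a callback,
# written as an unbound method of the formatter and assuming dict records
# keyed by the stored column names; self.delimiter, self.eol, and
# get_meta_value() are assumptions inferred from the call sites:
import string

def output_callback_sketch(self, stream):
    # Flush any tail left over from the previous invocation.
    stream.write(stream.get_meta_value('remaining'))
    stream.set_meta('remaining', '')
    data = stream.get_meta_value('data')
    index = stream.get_meta_value('index')
    if index >= len(data):
        return None  # exhausted; by convention None ends the stream
    columns = stream.get_meta_value('columns')
    record = data[index]
    row = string.join([str(record[c]) for c in columns], self.delimiter)
    stream.write(row + self.eol)
    stream.set_meta('index', index + 1)
    return 1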
def format(self, data):
    if self.debug:
        msglog.log("broadway", msglog.types.DB,
                   "Format called on %s." % self.name)
    stream = StreamWithCallback(self.output_callback)
    stream.set_meta("data", data)
    stream.set_meta("index", 0)
    stream.set_meta("remaining", "")
    if self.prefix:
        stream.write(self.prefix + "\n")
    columns = self.parent.log.get_column_names()
    stream.set_meta("columns", columns)
    return stream
def format(self, data, pretty_format=None):
    # Organize all log data (list of time-based dicts) into a dict of
    # point-based lists. (A dict of lists could get REALLY large; we may
    # need to process only one point at a time...)
    # self._channels: K=col_name, V=col_dict
    # col_dict: K='column_node', 'values'; 'values' is a list of 2-tuples:
    # [(timestamp, value),]
    # Only want records for the preceding self.capture_period-hr period:
    end_time = time.time()
    start_time = self.parent.last_time() # ASSUME parent is a periodic exporter...
    # The line below is commented out in favor of the line above because FSG
    # tends to disable their FTP server (effectively) for days at a time, but
    # all the data gathered during those blackout periods should still go to
    # the FTP server when it reappears to the Mediator. Consequently, the FTP
    # server receives table-formatted data all the way back to the last
    # successful export, regardless of the actual size of that data:
    #start_time = end_time - (self.capture_period * 3600.0)
    data_to_send = 0
    data = data[:]
    self.debug_print('Data: %s' % data, None, 1)
    removed_channels = []
    for log_rec_dict in data:
        timestamp = log_rec_dict['timestamp']
        if (timestamp < start_time) or (timestamp > end_time):
            continue
        for channel_name in log_rec_dict.keys():
            if channel_name == 'timestamp':
                continue
            if not self._channels.has_key(channel_name):
                if channel_name not in removed_channels:
                    msglog.log('fsg:xml_formatter', msglog.types.ERR,
                               'Channel %s has been removed from the '
                               'configuration.' % channel_name)
                    removed_channels.append(channel_name)
                continue
            data_to_send = 1
            self._channels[channel_name]['values'].append(
                (timestamp, log_rec_dict[channel_name],))
    channel_names = self._channels.keys() # it's a list
    # Organize all data from the exception log, if any:
    exception_dicts = {} # K: trigger name, V: time-sorted list of
                         # 2-tuples (timestamp, message)
    if self._exception_log is not None:
        if self._PDO.exception_log_last_time > start_time:
            # Do not re-send already-sent data:
            start_time = self._PDO.exception_log_last_time + 0.00001
        exception_data = self._exception_log.get_range(
            'timestamp', start_time, end_time)
        for log_rec_dict in exception_data:
            trigger_node_url = log_rec_dict['trigger_node_url']
            trigger_node = as_node(trigger_node_url)
            assert isinstance(trigger_node, FsgComparisonTrigger), \
                   'Node %s should be FsgComparisonTrigger, is %s' \
                   % (trigger_node.name, trigger_node.__class__)
            timestamp = log_rec_dict['timestamp']
            trigger_node_msg = log_rec_dict['trigger_node_msg']
            if not exception_dicts.has_key(trigger_node_url):
                exception_dicts[trigger_node_url] = {
                    'trigger_node_url': trigger_node_url,
                    'timestamps': [(timestamp, trigger_node_msg,)],
                    }
            else:
                exception_dicts[trigger_node_url]['timestamps'].append(
                    (timestamp, trigger_node_msg,))
            self._PDO.exception_log_last_time = timestamp
        self._PDO.save()
    if (data_to_send == 0) and (len(exception_dicts) == 0):
        msglog.log('fsg:xml_formatter', msglog.types.INFO,
                   'No data or exceptions to send.')
        return None # nothing to send
    # Create an output stream to minimize the combined size of the XML file
    # and the remaining point_dicts contents during formatting:
    stream = StreamWithCallback(self.output_callback)
    stream.set_meta('channel_names', channel_names)
    # Pass in a list of "values" (dicts), to allow easy iteration:
    stream.set_meta('exception_data', exception_dicts.values())
    # Number of point time-value lists written to the XML output stream:
    stream.set_meta('index', 0)
    formatter = SGMLFormatter()
    # Write opening tags:
    formatter.open_tag('data', info=self.location_info,
                       key=self.location_key)
    formatter.open_tag('device', info=self.panel_info,
                       key=self.panel_key)
    output = formatter.output()
    self.debug_print(output, None, 1)
    stream.write(output)
    stream.set_meta('formatter', formatter)
    stream.set_meta('remaining', '')
    data_mode = 'channels'
    if data_to_send == 0:
        data_mode = 'exceptions' # no channel data, so skip 'em
    stream.set_meta('data_mode', data_mode)
    return stream
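# A hypothetical example of the log records this variant consumes: one dict
# per sample, keyed by 'timestamp' plus one key per configured channel (the
# channel names here are made up for illustration):
example_data = [
    {'timestamp': 1000000000.0, 'zone1_temp': 72.5, 'zone2_temp': 68.1},
    {'timestamp': 1000000060.0, 'zone1_temp': 72.6, 'zone2_temp': 68.0},
]
# Records with timestamps outside [self.parent.last_time(), time.time()] are
# skipped, and any channel key absent from self._channels is reported once
# via msglog and then ignored.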
def format(self, data):
    stream = StreamWithCallback(self.output_callback)
    stream.set_meta('data', data)
    stream.set_meta('index', 0)
    formatter = SGMLFormatter()
    stream.set_meta('formatter', formatter)
    stream.set_meta('remaining', '')
    stream.write('<?xml version="1.0" encoding="utf-8"?>\n')
    if self.add_smtp_envelope:
        # <Envelope>
        formatter.open_tag(
            'Envelope', **{
            'xmlns:xsd': 'http://www.w3.org/2001/XMLSchema',
            'xmlns:xsi': 'http://www.w3.org/2001/XMLSchema-instance',
            })
        # <Header/>
        formatter.open_close_tag(
            'Header', xmlns="http://schemas.xmlsoap.org/soap/envelope/")
        # <Body>
        formatter.open_tag(
            'Body', xmlns="http://schemas.xmlsoap.org/soap/envelope/")
    # <Process>
    formatter.open_tag(
        'Process',
        xmlns=("http://rddl.xmlinside.net/PowerMeasurement/data/ion/" +
               "pushmessaging/service/1/"))
    # <Data>
    attributes = {
        'id': self.data_id(),
        'xmlns': ("http://rddl.xmlinside.net/PowerMeasurement/data/ion/" +
                  "pushmessaging/2/"),
        'xmlns:ident': ("http://rddl.xmlinside.net/PowerMeasurement/data/" +
                        "ion/identity/1/"),
        }
    formatter.open_tag('Data', **attributes)
    # <ident:GeneratedBy/>
    attributes = {
        'ident:name': self.generated_by,
        'ident:namespace': self.generated_by_namespace,
        'ident:dateTime': W3CDateTime().as_string_local(),
        }
    if self.generated_by_type:
        attributes['ident:type'] = self.generated_by_type
    if self.generated_by_signature:
        attributes['ident:signature'] = self.generated_by_signature
    formatter.open_close_tag('ident:GeneratedBy', **attributes)
    # The <Configuration> element is not required according to Stephan Jones:
    # formatter.open_tag(
    #     'Configuration', **{
    #     'ident:template': self.configuration_template,
    #     'ident:owner': self.configuration_owner,
    #     'ident:tag1': self.configuration_tag1,
    #     'ident:tag2': self.configuration_tag2,
    #     'ident:revision': self.configuration_revision,
    #     'xmlns:ident': ('http://rddl.xmlinside.net/PowerMeasurement' +
    #                     '/data/ion/identity/1/'),
    #     'xmlns': ('http://rddl.xmlinside.net/PowerMeasurement/data' +
    #               '/ion/identity/1/')
    #     })
    # formatter.open_tag('ident:Timezone')
    # formatter.open_tag('ident:Bias')
    # formatter.add_text(self.timezone_bias)
    # formatter.close_tag('ident:Bias', True)
    # formatter.open_tag('ident:DSTBias')
    # formatter.add_text(self.timezone_dst_bias)
    # formatter.close_tag('ident:DSTBias', True)
    # formatter.open_tag('ident:DSTStart')
    # formatter.add_text(self.timezone_dst_start)
    # formatter.close_tag('ident:DSTStart', True)
    # formatter.open_tag('ident:DSTEnd')
    # formatter.add_text(self.timezone_dst_end)
    # formatter.close_tag('ident:DSTEnd', True)
    # formatter.close_tag('ident:Timezone')
    # formatter.close_tag('Configuration')
    # <Device>
    attributes = {
        'ident:name': self.device_name,
        'ident:namespace': self.device_namespace,
        'ident:type': self.device_type,
        }
    if self.device_signature:
        attributes['ident:signature'] = self.device_signature
    formatter.open_tag('Device', **attributes)
    # <ident:Configuration>
    attributes = {}
    if self.device_configuration_template:
        attributes['ident:template'] = self.device_configuration_template
    if self.device_configuration_owner:
        attributes['ident:owner'] = self.device_configuration_owner
    if self.device_configuration_tag1:
        attributes['ident:tag1'] = self.device_configuration_tag1
    if self.device_configuration_tag2:
        attributes['ident:tag2'] = self.device_configuration_tag2
    if self.device_configuration_revision:
        attributes['ident:revision'] = self.device_configuration_revision
    formatter.open_tag('ident:Configuration', **attributes)
    # <ident:Timezone>
    formatter.open_tag('ident:Timezone')
    # <ident:Bias>
    formatter.open_tag('ident:Bias')
    formatter.add_text(DST_BIAS.bias_text())
    # </ident:Bias>
    formatter.close_tag('ident:Bias', True)
    dst_bias_text = DST_BIAS.dst_bias_text()
    if dst_bias_text:
        # <ident:DSTBias>
        formatter.open_tag('ident:DSTBias')
        formatter.add_text(dst_bias_text)
        # </ident:DSTBias>
        formatter.close_tag('ident:DSTBias', True)
    this_year = time.localtime().tm_year
    last_year = this_year - 1
    for year in (last_year, this_year):
        dst_range_object = dst_range(year)
        dst_start = dst_range_object.dst_start_text()
        dst_end = dst_range_object.dst_end_text()
        if dst_start:
            # <ident:DSTStart>
            formatter.open_tag('ident:DSTStart')
            formatter.add_text(dst_start)
            # </ident:DSTStart>
            formatter.close_tag('ident:DSTStart', True)
        if dst_end:
            # <ident:DSTEnd>
            formatter.open_tag('ident:DSTEnd')
            formatter.add_text(dst_end)
            # </ident:DSTEnd>
            formatter.close_tag('ident:DSTEnd', True)
    # </ident:Timezone>
    formatter.close_tag('ident:Timezone')
    # </ident:Configuration>
    formatter.close_tag('ident:Configuration')
    # <DataRecorder>
    attributes = {
        'id': self.data_recorder_id,
        'label': self.data_recorder_label,
        }
    if self.data_recorder_handle:
        attributes['handle'] = self.data_recorder_handle
    formatter.open_tag('DataRecorder', **attributes)
    # <Channels>
    formatter.open_tag('Channels')
    channel_list = self._channel_map.values()
    def cmp_channel_list(c1, c2):
        return cmp(c1.mmafmt_channel_id, c2.mmafmt_channel_id)
    channel_list.sort(cmp_channel_list)
    for column_node in channel_list:
        # <Channel/>
        attributes = {
            'id': column_node.mmafmt_channel_id,
            'label': column_node.mmafmt_channel_label,
            }
        if column_node.mmafmt_channel_pos:
            attributes['pos'] = column_node.mmafmt_channel_pos
        formatter.open_close_tag('Channel', **attributes)
    # </Channels>
    formatter.close_tag('Channels')
    # <DataRecords>
    formatter.open_tag('DataRecords')
    return stream
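# The cmp-based channel sort above is Python 2 only. The same ordering can be
# expressed with a key function (valid on Python 2.4+ as well as Python 3);
# a self-contained demonstration with stand-in channel objects (the stub
# class is hypothetical, the attribute name is from the code above):
class _ChannelStub(object):
    def __init__(self, channel_id):
        self.mmafmt_channel_id = channel_id

channels = [_ChannelStub('03'), _ChannelStub('01'), _ChannelStub('02')]
channels.sort(key=lambda c: c.mmafmt_channel_id)
assert [c.mmafmt_channel_id for c in channels] == ['01', '02', '03']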
def format(self, data, pretty_format=None):
    stream = StreamWithCallback(self.output_callback)
    stream.set_meta('data', data)
    stream.set_meta('index', 0)
    stream.write(self.prefix + '<?xml version="1.0" encoding="UTF-8"?>')
    formatter = SGMLFormatter()
    stream.set_meta('formatter', formatter)
    stream.set_meta('remaining', '')
    return stream
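# Nearly every variant above drives an SGMLFormatter through the same five
# calls: open_tag, open_close_tag, add_text, close_tag, and output. The real
# class is framework code not shown here; this is a minimal sketch of the
# surface the call sites assume. In particular, output() is assumed to return
# and clear the text queued since the last call, and close_tag's second
# argument is assumed to append a newline.
class SGMLFormatterSketch(object):
    def __init__(self):
        self._pending = []
    def _render_attrs(self, attributes):
        # Sorted for deterministic output; real attribute order may differ.
        return ''.join([' %s="%s"' % item for item in sorted(attributes.items())])
    def open_tag(self, name, **attributes):
        self._pending.append('<%s%s>' % (name, self._render_attrs(attributes)))
    def open_close_tag(self, name, **attributes):
        self._pending.append('<%s%s/>' % (name, self._render_attrs(attributes)))
    def add_text(self, text):
        self._pending.append(text)
    def close_tag(self, name, add_newline=False):
        self._pending.append('</%s>' % name)
        if add_newline:
            self._pending.append('\n')
    def output(self):
        text = ''.join(self._pending)
        self._pending = []
        return text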