def output_callback(self, stream):
    """Stream callback: send up to ten workout sessions per invocation.

    Resumes any partially written output first; when the session list is
    exhausted, closes the 'workout-sessions' element, flushes, closes the
    stream, and logs the total count.
    """
    sessions = stream.get_meta_value('data')
    start = stream.get_meta_value('index')
    formatter = stream.get_meta_value('formatter')
    leftover = stream.get_meta_value('remaining')
    if leftover:
        # Push out as much of the previous short write as the stream accepts.
        leftover = leftover[stream.write(leftover):]
        stream.set_meta('remaining', leftover)
        if leftover:
            # Still blocked; resume on the next callback.
            return None
    for cursor in range(start, start + 10):
        try:
            session = sessions[cursor]
        except IndexError:
            # All sessions sent: emit the closing tag and finish up.
            formatter.close_tag('workout-sessions')
            stream.write(formatter.output())
            stream.close()
            msglog.log('mpx', msglog.types.INFO,
                       'Sent %u workout sessions to InSite server.'
                       % len(sessions))
            return None
        if not session.has_key('timestamp'):
            raise EIncompatiableFormat()
        # Render this session into the shared formatter, then drain it.
        _xml.WorkoutSession(session).get_xml(formatter)
        chunk = formatter.output()
        written = stream.write(chunk)
        stream.set_meta('index', cursor + 1)
        if written != len(chunk):
            # Short write: stash the unwritten tail for the next callback.
            stream.set_meta('remaining', chunk[written:])
            return None
    return None
def output_callback(self, stream):
    """Stream callback: format up to ten log entries per invocation.

    Resumes any partial previous write, then formats entries via
    self._build_buff, writing self.suffix and closing the stream once
    the data is exhausted.  Raises EBreakupTransfer when an entry's
    columns no longer match the log's columns (deferred to the next
    call if some entries from this batch were already written, so the
    consumer receives all complete data first).
    """
    data = stream.get_meta_value('data')
    index = stream.get_meta_value('index')
    remaining = stream.get_meta_value('remaining')
    # Column names come from the parent node's log, not from stream meta.
    columns = self.parent.log.get_column_names()
    if remaining:
        # Flush as much of the previously unwritten output as possible.
        remaining = remaining[stream.write(remaining):]
        stream.set_meta('remaining', remaining)
        if remaining:
            # Still blocked; try again on the next callback.
            return
    for i in range(index, index + 10):
        try:
            entry = data[i]
        except IndexError:
            # All entries sent: terminate the document and close.
            stream.write(self.suffix)
            stream.close()
            return
        if not entry.has_key('timestamp'):
            raise EIncompatiableFormat()
        if len(entry.keys()) != len(columns):
            if i != index:
                # Raise exception next time so all data read.
                return
            stream.write(self.suffix)
            raise EBreakupTransfer(entry, 'Different number of columns')
        ts = self.parent.time_function(entry['timestamp'])
        timestamp = time.strftime(self.timestamp_format, ts)
        # hack - strftime does not provide millisecond support
        ms = int((entry['timestamp'] * 1000) % 1000)
        utc_offset = time.strftime('%z', ts)
        # NOTE(review): assumes self.timestamp_format leaves '%'-style
        # slots for (milliseconds, offset-hours, offset-minutes) in the
        # strftime result -- confirm against the node's configuration.
        timestamp = timestamp % (ms, utc_offset[0:3], utc_offset[3:])
        strbuff = ''
        for column in columns[1:]:
            if not entry.has_key(column):
                if i != index:
                    # Defer the exception; see comment above.
                    return
                raise EBreakupTransfer(entry, 'Different columns')
            value = str(entry[column])
            strbuff += self._build_buff(column, value, timestamp)
        count = stream.write(strbuff)
        stream.set_meta('index', i + 1)
        if count != len(strbuff):
            if debug:
                msg = 'enernoc_formatter, data remains (%d,%d)' % \
                      (count, len(strbuff))
                msglog.log('broadway', msglog.types.INFO, msg)
            # Short write: stash the unwritten tail for the next callback.
            stream.set_meta('remaining', strbuff[count:])
            return
    return
def output_callback(self, stream):
    """Stream callback: emit up to ten log entries as quoted, delimited rows.

    Each row is the local-time timestamp followed by the entry's column
    values, each wrapped in double quotes and followed by
    self.data_delimiter, terminated by self.eol.  Writes self.suffix and
    closes the stream when the data is exhausted.  Raises
    EBreakupTransfer when an entry's shape no longer matches the
    expected columns (deferred to the next call if part of this batch
    was already written).

    Fixes vs. original: removed ts_tuple_utc/ts_str_utc, which were
    computed (time.gmtime + time.strftime per entry) but never used;
    removed commented-out dead code; stopped rebinding the dict local
    'entry' to the output string; row text built with str.join instead
    of repeated concatenation.
    """
    data = stream.get_meta_value('data')
    index = stream.get_meta_value('index')
    remaining = stream.get_meta_value('remaining')
    columns = stream.get_meta_value('columns')
    if remaining:
        # Flush as much of the previously unwritten output as possible.
        remaining = remaining[stream.write(remaining):]
        stream.set_meta('remaining', remaining)
        if remaining:
            # Still blocked; try again on the next callback.
            return
    for i in range(index, index + 10):
        try:
            entry = data[i]
        except IndexError:
            # All entries sent: terminate the document and close.
            stream.write(self.suffix)
            stream.close()
            return
        if not entry.has_key('timestamp'):
            raise EIncompatiableFormat()
        if len(entry.keys()) != len(columns):
            if i != index:
                # Raise exception next time so all data read.
                return
            stream.write(self.suffix)
            raise EBreakupTransfer(entry, 'Different number of columns')
        values = [self._calc_local_strftime(entry['timestamp'])]
        for column in columns[1:]:
            if not entry.has_key(column):
                if i != index:
                    # Defer the exception; see comment above.
                    return
                raise EBreakupTransfer(entry, 'Different columns')
            values.append(str(entry[column]))
        line = ''.join(['"' + value + '"' + self.data_delimiter
                        for value in values]) + self.eol
        count = stream.write(line)
        stream.set_meta('index', i + 1)
        if count != len(line):
            # Short write: stash the unwritten tail for the next callback.
            stream.set_meta('remaining', line[count:])
            return
    return None
def output_callback(self, stream):
    """Stream callback: emit up to ten log entries as EWebConnect CSV rows.

    Each row is 'date,time,value,...' with values rendered through
    self.value_format (None becomes the literal string 'None').  Values
    longer than self.max_value_length are logged as warnings but still
    written.  Closes the stream when the data is exhausted; raises
    EBreakupTransfer when an entry's shape no longer matches the
    expected columns.
    """
    rows = stream.get_meta_value('data')
    start = stream.get_meta_value('index')
    leftover = stream.get_meta_value('remaining')
    columns = stream.get_meta_value('columns')
    if leftover:
        # Push out as much of the previous short write as the stream accepts.
        leftover = leftover[stream.write(leftover):]
        stream.set_meta('remaining', leftover)
        if leftover:
            return
    for cursor in range(start, start + 10):
        try:
            record = rows[cursor]
        except IndexError:
            # Data exhausted: nothing more to format.
            stream.close()
            return
        if not record.has_key('timestamp'):
            raise EIncompatiableFormat()
        if len(record.keys()) != len(columns):
            if cursor != start:
                # Raise exception next time so all data read.
                return
            raise EBreakupTransfer(record, 'Different number of columns')
        moment = self.parent.time_function(record['timestamp'])
        fields = [time.strftime(self.date_format, moment),
                  time.strftime(self.time_format, moment)]
        for column in columns[1:]:
            if not record.has_key(column):
                if cursor != start:
                    # Defer the exception; see comment above.
                    return
                raise EBreakupTransfer(record, 'Different columns')
            cell = record[column]
            if cell is None:
                fields.append('None')
            else:
                fields.append(self.value_format % cell)
            if (self.max_value_length > 0 and
                len(fields[-1]) > self.max_value_length):
                msglog.log('EWebConnect Log Formatter', msglog.types.WARN,
                           'Len of %s > max_value_length' % fields[-1])
        row_text = ','.join(fields) + '\n'
        written = stream.write(row_text)
        stream.set_meta('index', cursor + 1)
        if written != len(row_text):
            # Short write: stash the unwritten tail for the next callback.
            stream.set_meta('remaining', row_text[written:])
            return
    return None
def output_callback(self, stream):
    """Stream callback: emit up to ten entries as <Tbl_analog_data> elements.

    On the first invocation (index == 0) opens the <root> element; when
    the data is exhausted, closes it, writes self.suffix, and closes the
    stream.  Each entry's timestamp is rendered in UTC, then the
    remaining keys (assumed to be 'Value1'..'ValueN' -- TODO confirm
    against the producer) are written as ValueN/StatusN attribute pairs,
    with StatusN taken from self.statusN.

    Bug fix vs. original: the resume index was stored as
    ``index + i + 1`` even though ``i`` already starts at ``index``,
    so every callback after the first doubled the index and silently
    skipped entries.  Now stores ``i + 1``, matching the other
    formatters in this file.
    """
    data = stream.get_meta_value('data')
    index = stream.get_meta_value('index')
    formatter = stream.get_meta_value('formatter')
    remaining = stream.get_meta_value('remaining')
    if remaining:
        # Flush as much of the previously unwritten output as possible.
        remaining = remaining[stream.write(remaining):]
        stream.set_meta('remaining', remaining)
        if remaining:
            # Still blocked; try again on the next callback.
            return None
    if index == 0:
        formatter.open_tag('root')
    for i in range(index, index + 10):
        try:
            entry = data[i]
        except IndexError:
            # All entries sent: close the document and the stream.
            formatter.close_tag('root')
            stream.write(formatter.output(self.pretty_format))
            stream.write(self.suffix)
            stream.close()
            return None
        if not entry.has_key('timestamp'):
            raise EIncompatiableFormat()
        ts = time.strftime(self.timestamp_format,
                           time.gmtime(entry['timestamp']))
        del(entry['timestamp'])
        formatter.open_tag('Tbl_analog_data',
                           STA_StationCode=self.station_code)
        formatter.add_attribute('STA_TimeBase', self.time_base)
        formatter.add_attribute('Date_Time', ts)
        for x in range(0, len(entry.keys())):
            value = entry['Value%s' % (x + 1)]
            status = getattr(self, 'status%s' % (x + 1))
            formatter.add_attribute('Value%s' % (x + 1), value)
            formatter.add_attribute('Status%s' % (x + 1), status)
        formatter.close_tag('Tbl_analog_data')
        output = formatter.output(self.pretty_format)
        count = stream.write(output)
        # Was: stream.set_meta('index', index + i + 1)  -- skipped entries.
        stream.set_meta('index', i + 1)
        if count != len(output):
            # Short write: stash the unwritten tail for the next callback.
            stream.set_meta('remaining', output[count:])
            return None
    return None
def output_callback(self, stream):
    """Stream callback: emit up to ten log entries as delimited text lines.

    Each line is the formatted timestamp followed by the entry's column
    values, joined with self.data_delimiter and newline-terminated.
    Writes self.suffix and closes the stream when the data is exhausted;
    raises EBreakupTransfer when an entry's shape no longer matches the
    expected columns.
    """
    entries = stream.get_meta_value('data')
    start = stream.get_meta_value('index')
    leftover = stream.get_meta_value('remaining')
    columns = stream.get_meta_value('columns')
    if leftover:
        # Push out as much of the previous short write as the stream accepts.
        leftover = leftover[stream.write(leftover):]
        stream.set_meta('remaining', leftover)
        if leftover:
            return
    for position in range(start, start + 10):
        try:
            record = entries[position]
        except IndexError:
            # Data exhausted: terminate the document and close.
            stream.write(self.suffix)
            stream.close()
            return
        if not record.has_key('timestamp'):
            raise EIncompatiableFormat()
        if len(record.keys()) != len(columns):
            if position != start:
                # Raise exception next time so all data read.
                return
            stream.write(self.suffix)
            raise EBreakupTransfer(record, 'Different number of columns')
        when = self.parent.time_function(record['timestamp'])
        fields = [time.strftime(self.timestamp_format, when)]
        for column in columns[1:]:
            if not record.has_key(column):
                if position != start:
                    # Defer the exception; see comment above.
                    return
                raise EBreakupTransfer(record, 'Different columns')
            fields.append(str(record[column]))
        text = self.data_delimiter.join(fields) + '\n'
        written = stream.write(text)
        stream.set_meta('index', position + 1)
        if written != len(text):
            # Short write: stash the unwritten tail for the next callback.
            stream.set_meta('remaining', text[written:])
            return
    return None
def output_callback(self, stream):
    """Stream callback: serialize up to ten log entries as XML <entry> tags.

    Each entry becomes <entry timestamp=...> containing one <value
    name=...> element per remaining key.  Closes the top-level 'data'
    element and the stream when the data is exhausted.  Note: mutates
    each entry dict by deleting its 'timestamp' key.
    """
    entries = stream.get_meta_value('data')
    start = stream.get_meta_value('index')
    formatter = stream.get_meta_value('formatter')
    leftover = stream.get_meta_value('remaining')
    if leftover:
        # Push out as much of the previous short write as the stream accepts.
        leftover = leftover[stream.write(leftover):]
        stream.set_meta('remaining', leftover)
        if leftover:
            return None
    for position in range(start, start + 10):
        try:
            record = entries[position]
        except IndexError:
            # Data exhausted: close the document and the stream.
            formatter.close_tag('data')
            stream.write(formatter.output())
            stream.close()
            return None
        if not record.has_key('timestamp'):
            raise EIncompatiableFormat()
        stamp = time.strftime(self.timestamp_format,
                              self.parent.time_function(record['timestamp']))
        del record['timestamp']
        formatter.open_tag('entry', timestamp=stamp)
        for name, value in record.items():
            formatter.open_tag('value', name=name)
            formatter.add_text(str(value))
            formatter.close_tag('value')
        formatter.close_tag('entry')
        fragment = formatter.output()
        sent = stream.write(fragment)
        stream.set_meta('index', position + 1)
        if sent != len(fragment):
            # Short write: stash the unwritten tail for the next callback.
            stream.set_meta('remaining', fragment[sent:])
            return None
    return None
def output_callback(self, stream):
    """Stream callback: emit up to ten log entries as <DR> elements.

    Each entry becomes a single <DR ts=... CH1=... .../> element whose
    channel attributes come from self._channel_map (sorted by channel
    id); None values and keys with no channel mapping are skipped.  On
    exhaustion, closes the DataRecords/DataRecorder/Device/Data
    elements (plus the SMTP envelope elements when
    self.add_smtp_envelope is set) and closes the stream.  Note:
    mutates each entry dict by deleting its 'timestamp' key.
    """
    data = stream.get_meta_value('data')
    index = stream.get_meta_value('index')
    formatter = stream.get_meta_value('formatter')
    remaining = stream.get_meta_value('remaining')
    if remaining:
        # Flush as much of the previously unwritten output as possible.
        remaining = remaining[stream.write(remaining):]
        stream.set_meta('remaining', remaining)
        if remaining:
            # Still blocked; try again on the next callback.
            return None
    for i in range(index, index + 10):
        try:
            entry = data[i]
        except IndexError:
            # All entries sent: close the still-open document elements
            # in reverse order of opening:
            # </DataRecords></DataRecorder></Device></Data>.
            formatter.close_tag('DataRecords')
            formatter.close_tag('DataRecorder')
            formatter.close_tag('Device')
            formatter.close_tag('Data')
            if self.add_smtp_envelope:
                # The document was wrapped in an SMTP envelope; close
                # </Process></Body></Envelope> as well.
                formatter.close_tag('Process')
                formatter.close_tag('Body')
                formatter.close_tag('Envelope')
            stream.write(formatter.output())
            stream.close()
            return None
        if not entry.has_key('timestamp'):
            raise EIncompatiableFormat()
        ts = W3CDateTime(entry['timestamp'])
        if self.parent.gm_time:
            ts = ts.as_string_utc()
        else:
            ts = ts.as_string_local()
        del(entry['timestamp'])
        attribute_pairs = [("ts", ts), ]
        channel_pairs = []
        keys = entry.keys()
        for key in keys:
            value = entry[key]
            if value is None:
                # Do not include error values in the DR's attributes.
                continue
            if not self._channel_map.has_key(key):
                # Survive deleted columns.
                continue
            channel_pairs.append(
                (self._channel_map[key].mmafmt_channel_id, value)
                )
        # Emit channel attributes in channel-id order after the timestamp.
        channel_pairs.sort()
        attribute_pairs.extend(channel_pairs)
        formatter.open_close_tag_ex('DR', attribute_pairs)
        output = formatter.output()
        count = stream.write(output)
        stream.set_meta('index', i + 1)
        if count != len(output):
            # Short write: stash the unwritten tail for the next callback.
            stream.set_meta('remaining', output[count:])
            return None
    return None