def get_pywws_data(dir_data):
    """Read the pywws data stores under *dir_data* and return a dict of
    summaries, or None if any store cannot be read.

    Result keys: 'a' latest calibrated record, 'g' last 24h of
    calibrated data, 'h' last 24h of hourly data, 'd' latest daily
    record, 'm' latest monthly record, 'y' year-to-date aggregates.
    """
    # year-to-date accumulators: maxima start at -inf, minima at +inf,
    # rain is a running total
    ydat = {'temp_out_max_hi': float('-inf'), 'temp_out_min_lo': float('inf'),
            'hum_out_max': float('-inf'), 'hum_out_min': float('inf'),
            'rel_pressure_max': float('-inf'), 'rel_pressure_min': float('inf'),
            'wind_gust': float('-inf'), 'rain': 0.0,
            'temp_in_max_hi': float('-inf'), 'temp_in_min_lo': float('inf'),
            'hum_in_max': float('-inf'), 'hum_in_min': float('inf')}
    ahora = datetime.utcnow()
    try:
        dat = DataStore.calib_store(dir_data)
        adat = dat[dat.nearest(ahora)]
        gdat = dat[dat.nearest(ahora) - timedelta(hours=24):]
        dat = DataStore.hourly_store(dir_data)
        hdat = dat[dat.nearest(ahora) - timedelta(hours=24):]
        dat = DataStore.daily_store(dir_data)
        ddat = dat[dat.nearest(ahora)]
        dat = DataStore.monthly_store(dir_data)
        mdat = dat[dat.nearest(ahora)]
        f1 = datetime(ahora.year, 1, 1, 0, 0, 0)  # first moment of the year
        # aggregate the monthly records since the start of the year
        for d in dat[dat.after(f1):]:
            for k in ydat:
                if 'min' in k:
                    ydat[k] = min(ydat[k], d[k])
                elif 'rain' in k:
                    ydat[k] = ydat[k] + d[k]
                else:
                    ydat[k] = max(ydat[k], d[k])
    except Exception:
        # was a bare "except:", which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed so only real errors return None
        return None
    return {'a': adat, 'g': gdat, 'h': hdat, 'd': ddat, 'm': mdat, 'y': ydat}
def CheckFixedBlock(ws, params, status, logger): fixed_block = ws.get_fixed_block(unbuffered=True) # check clocks try: s_time = DataStore.safestrptime( fixed_block['date_time'], '%Y-%m-%d %H:%M') except Exception: s_time = None if s_time: c_time = datetime.now().replace(second=0, microsecond=0) diff = abs(s_time - c_time) if diff > timedelta(minutes=2): logger.warning( "Computer and weather station clocks disagree by %s (H:M:S).", str(diff)) # store weather station type params.set('config', 'ws type', ws.ws_type) # store info from fixed block pressure_offset = fixed_block['rel_pressure'] - fixed_block['abs_pressure'] old_offset = eval(status.get('fixed', 'pressure offset', 'None')) if old_offset and abs(old_offset - pressure_offset) > 0.01: # re-read fixed block, as can get incorrect values logger.warning('Re-read fixed block') fixed_block = ws.get_fixed_block(unbuffered=True) if not fixed_block: return None pressure_offset = fixed_block['rel_pressure'] - fixed_block['abs_pressure'] if old_offset and abs(old_offset - pressure_offset) > 0.01: logger.warning( 'Pressure offset change: %g -> %g', old_offset, pressure_offset) params.unset('fixed', 'pressure offset') params.unset('fixed', 'fixed block') status.set('fixed', 'pressure offset', '%g' % (pressure_offset)) status.set('fixed', 'fixed block', str(fixed_block)) return fixed_block
def LiveLog(data_dir):
    """Get live data from a DataLogger and process/upload it as it
    arrives.

    Runs indefinitely; any exception from the live-data loop is logged
    rather than propagated.
    """
    logger = logging.getLogger('pywws.LiveLog')
    params = DataStore.params(data_dir)
    status = DataStore.status(data_dir)
    # localise application
    Localisation.SetApplicationLanguage(params)
    # open data file stores
    raw_data = DataStore.data_store(data_dir)
    calib_data = DataStore.calib_store(data_dir)
    hourly_data = DataStore.hourly_store(data_dir)
    daily_data = DataStore.daily_store(data_dir)
    monthly_data = DataStore.monthly_store(data_dir)
    # create a DataLogger object
    datalogger = DataLogger(params, status, raw_data)
    # create a RegularTasks object
    # NOTE(review): eval() of a config value -- trusted local file
    asynch = eval(params.get('config', 'asynchronous', 'False'))
    tasks = Tasks.RegularTasks(
        params, status, raw_data, calib_data, hourly_data, daily_data,
        monthly_data, asynch=asynch)
    # get live data
    try:
        for data, logged in datalogger.live_data(
                logged_only=(not tasks.has_live_tasks())):
            if logged:
                # process new data
                Process.Process(params, raw_data, calib_data, hourly_data,
                                daily_data, monthly_data)
                # do tasks
                tasks.do_tasks()
            else:
                tasks.do_live(data)
    except Exception as ex:
        # was "except Exception, ex" -- the comma form is removed in
        # Python 3; "as" works on Python 2.6+
        logger.exception(ex)
def get_readings(self):
    """Yield a Reading for every non-empty tracked key in each record
    newer than the last timestamp seen, advancing ``self.last_ts``."""
    store = DataStore.data_store(self.data_store)
    for record in store[self.last_ts:]:
        ts = record['idx']
        if ts <= self.last_ts:
            # the slice can include the already-seen boundary record
            continue
        self.last_ts = ts
        for field in self.keys:
            value = record[field]
            if value:
                yield Reading(READING_TYPE, value, ts, field)
def Hourly(data_dir):
    """Fetch new station data, regenerate the summaries and run the
    regular tasks.  Returns 0 on success, 1 if the tasks fail."""
    # configuration / status files
    params = DataStore.params(data_dir)
    status = DataStore.status(data_dir)
    # localise application
    Localisation.SetApplicationLanguage(params)
    # open every data file store
    raw_data = DataStore.data_store(data_dir)
    calib_data = DataStore.calib_store(data_dir)
    hourly_data = DataStore.hourly_store(data_dir)
    daily_data = DataStore.daily_store(data_dir)
    monthly_data = DataStore.monthly_store(data_dir)
    # fetch any new data from the station
    DataLogger(params, status, raw_data).log_data()
    # regenerate the hourly / daily / monthly summaries
    Process.Process(params, raw_data, calib_data, hourly_data,
                    daily_data, monthly_data)
    # run the regular tasks; a falsy result means failure
    tasks = Tasks.RegularTasks(params, status, raw_data, calib_data,
                               hourly_data, daily_data, monthly_data)
    if tasks.do_tasks():
        return 0
    return 1
def check_fixed_block(self): fixed_block = self.ws.get_fixed_block(unbuffered=True) # check clocks try: s_time = DataStore.safestrptime( fixed_block['date_time'], '%Y-%m-%d %H:%M') except Exception: s_time = None if s_time: c_time = datetime.now().replace(second=0, microsecond=0) diff = abs(s_time - c_time) if diff > timedelta(minutes=2): self.logger.warning( "Computer and weather station clocks disagree by %s (H:M:S).", str(diff)) # store weather station type self.params.set('config', 'ws type', self.ws.ws_type) # store info from fixed block self.status.unset('fixed', 'pressure offset') if not self.params.get('config', 'pressure offset'): self.params.set('config', 'pressure offset', '%g' % ( fixed_block['rel_pressure'] - fixed_block['abs_pressure'])) self.params.unset('fixed', 'fixed block') self.status.set('fixed', 'fixed block', str(fixed_block)) return fixed_block
def Reprocess(data_dir): # delete old format summary files print "Deleting old summaries" for summary in ["calib", "hourly", "daily", "monthly"]: for root, dirs, files in os.walk(os.path.join(data_dir, summary), topdown=False): print root for file in files: os.unlink(os.path.join(root, file)) os.rmdir(root) # create data summaries print "Generating hourly and daily summaries" params = DataStore.params(data_dir) raw_data = DataStore.data_store(data_dir) calib_data = DataStore.calib_store(data_dir) hourly_data = DataStore.hourly_store(data_dir) daily_data = DataStore.daily_store(data_dir) monthly_data = DataStore.monthly_store(data_dir) Process.Process(params, raw_data, calib_data, hourly_data, daily_data, monthly_data) return 0
def Reprocess(data_dir, update):
    # Rebuild all summary data from the raw store.  With *update*, first
    # migrate old raw records that packed extra status bits into the
    # high nibble of wind_dir.  Returns 0.
    logger = logging.getLogger('pywws-reprocess')
    raw_data = DataStore.data_store(data_dir)
    if update:
        # update old data to copy high nibble of wind_dir to status
        logger.warning("Updating status to include extra bits from wind_dir")
        count = 0
        for data in raw_data[:]:
            count += 1
            idx = data['idx']
            # periodic progress logging for potentially huge archives
            if count % 10000 == 0:
                logger.info("update: %s", idx.isoformat(' '))
            elif count % 500 == 0:
                logger.debug("update: %s", idx.isoformat(' '))
            if data['wind_dir'] is not None:
                if data['wind_dir'] >= 16:
                    # bits 4-7 belong in status; keep the 4-bit direction
                    data['status'] |= (data['wind_dir'] & 0xF0) << 4
                    data['wind_dir'] &= 0x0F
                    raw_data[idx] = data
                if data['status'] & 0x800:
                    # NOTE(review): presumably an 'invalid wind reading'
                    # flag bit -- confirm against the station protocol
                    data['wind_dir'] = None
                    raw_data[idx] = data
        raw_data.flush()
    # delete old format summary files
    logger.warning('Deleting old summaries')
    for summary in ['calib', 'hourly', 'daily', 'monthly']:
        # bottom-up walk so directories are empty before rmdir
        for root, dirs, files in os.walk(
                os.path.join(data_dir, summary), topdown=False):
            logger.info(root)
            for file in files:
                os.unlink(os.path.join(root, file))
            os.rmdir(root)
    # create data summaries
    logger.warning('Generating hourly and daily summaries')
    params = DataStore.params(data_dir)
    calib_data = DataStore.calib_store(data_dir)
    hourly_data = DataStore.hourly_store(data_dir)
    daily_data = DataStore.daily_store(data_dir)
    monthly_data = DataStore.monthly_store(data_dir)
    Process.Process(
        params, raw_data, calib_data, hourly_data, daily_data, monthly_data)
    return 0
def Reprocess(data_dir, update):
    # Regenerate all summary data; with *update*, first migrate raw
    # records that packed status bits into wind_dir's high nibble.
    logger = logging.getLogger('pywws.Reprocess')
    raw_data = DataStore.data_store(data_dir)
    if update:
        # update old data to copy high nibble of wind_dir to status
        logger.warning("Updating status to include extra bits from wind_dir")
        count = 0
        for data in raw_data[:]:
            count += 1
            idx = data['idx']
            # periodic progress logging
            if count % 10000 == 0:
                logger.info("update: %s", idx.isoformat(' '))
            elif count % 500 == 0:
                logger.debug("update: %s", idx.isoformat(' '))
            if data['wind_dir'] is not None:
                if data['wind_dir'] >= 16:
                    # bits 4-7 belong in status; keep the 4-bit direction
                    data['status'] |= (data['wind_dir'] & 0xF0) << 4
                    data['wind_dir'] &= 0x0F
                    raw_data[idx] = data
                if data['status'] & 0x800:
                    # NOTE(review): presumably an 'invalid wind reading'
                    # flag bit -- confirm against the station protocol
                    data['wind_dir'] = None
                    raw_data[idx] = data
        raw_data.flush()
    # delete old format summary files
    logger.warning('Deleting old summaries')
    for summary in ['calib', 'hourly', 'daily', 'monthly']:
        for root, dirs, files in os.walk(os.path.join(data_dir, summary),
                                         topdown=False):
            logger.info(root)
            for file in files:
                os.unlink(os.path.join(root, file))
            os.rmdir(root)
    # create data summaries
    logger.warning('Generating hourly and daily summaries')
    params = DataStore.params(data_dir)
    calib_data = DataStore.calib_store(data_dir)
    hourly_data = DataStore.hourly_store(data_dir)
    daily_data = DataStore.daily_store(data_dir)
    monthly_data = DataStore.monthly_store(data_dir)
    Process.Process(params, raw_data, calib_data, hourly_data,
                    daily_data, monthly_data)
    return 0
def Reprocess(data_dir): # delete old format summary files print 'Deleting old summaries' for summary in ['calib', 'hourly', 'daily', 'monthly']: for root, dirs, files in os.walk(os.path.join(data_dir, summary), topdown=False): print root for file in files: os.unlink(os.path.join(root, file)) os.rmdir(root) # create data summaries print 'Generating hourly and daily summaries' params = DataStore.params(data_dir) raw_data = DataStore.data_store(data_dir) calib_data = DataStore.calib_store(data_dir) hourly_data = DataStore.hourly_store(data_dir) daily_data = DataStore.daily_store(data_dir) monthly_data = DataStore.monthly_store(data_dir) Process.Process(params, raw_data, calib_data, hourly_data, daily_data, monthly_data) return 0
# fragment of a main(): copy CSV records (local-time timestamp in
# column 2) into a pywws raw data store; truncated at the end
for o, a in opts:
    if o == '--help':
        usage()
        return 0
# check arguments
if len(args) != 2:
    print >>sys.stderr, 'Error: 2 arguments required\n'
    print >>sys.stderr, __usage__.strip()
    return 2
# process arguments
in_name = args[0]
out_name = args[1]
# open input
in_file = open(in_name, 'r')
# open data file store
ds = DataStore.data_store(out_name)
# get time to go forward to
first_stored = ds.after(datetime.min)
if first_stored == None:
    first_stored = datetime.max
# copy any missing data
last_date = None
count = 0
for line in in_file:
    items = line.split(',')
    # column 2 holds a local-time timestamp; convert it to UTC
    local_date = DataStore.safestrptime(items[2].strip(), '%Y-%m-%d %H:%M:%S')
    local_date = local_date.replace(tzinfo=TimeZone.Local)
    date = local_date.astimezone(TimeZone.utc)
    if last_date and date < last_date:
        # clocks went back: the same local hour occurs twice, so bump
        # the second occurrence forward an hour
        date = date + timedelta(hours=1)
        print "Corrected DST ambiguity %s %s -> %s" % (
# tail of LogData's main(): getopt error handler, option parsing, launch
print >>sys.stderr, 'Error: %s\n' % msg
print >>sys.stderr, __usage__.strip()
return 1
# process options
clear = False
sync = None
verbose = 0
for o, a in opts:
    if o in ('-h', '--help'):
        print __usage__.strip()
        return 0
    elif o in ('-c', '--clear'):
        clear = True
    elif o in ('-s', '--sync'):
        sync = int(a)
    elif o in ('-v', '--verbose'):
        verbose += 1
# check arguments
if len(args) != 1:
    print >>sys.stderr, 'Error: 1 argument required\n'
    print >>sys.stderr, __usage__.strip()
    return 2
logger = ApplicationLogger(verbose)
root_dir = args[0]
# LogData's return value becomes the process exit status
return LogData(
    DataStore.params(root_dir), DataStore.status(root_dir),
    DataStore.data_store(root_dir), sync=sync, clear=clear)

if __name__ == "__main__":
    sys.exit(main())
# trailing line of the preceding method (truncated above)
return self.Upload(tweet)

def main(argv=None):
    # Entry point: post the file named in args[1] to Twitter using the
    # pywws configuration directory given in args[0].
    # Returns 0 on success, 1/2 for usage errors, 3 if the upload fails.
    if argv is None:
        argv = sys.argv
    try:
        opts, args = getopt.getopt(argv[1:], "h", ['help'])
    except getopt.error, msg:
        print >>sys.stderr, 'Error: %s\n' % msg
        print >>sys.stderr, __usage__.strip()
        return 1
    # process options
    for o, a in opts:
        if o in ('-h', '--help'):
            print __usage__.strip()
            return 0
    # check arguments
    if len(args) != 2:
        print >>sys.stderr, "Error: 2 arguments required"
        print >>sys.stderr, __usage__.strip()
        return 2
    logger = ApplicationLogger(1)
    params = DataStore.params(args[0])
    Localisation.SetApplicationLanguage(params)
    if ToTwitter(params).UploadFile(args[1]):
        return 0
    return 3

if __name__ == "__main__":
    sys.exit(main())
# tail of a forecast main(): print Zambretti forecasts for the latest
# hourly record and for the standard 09:00 local-time forecast point
except getopt.error, msg:
    print >>sys.stderr, 'Error: %s\n' % msg
    print >>sys.stderr, __usage__.strip()
    return 1
# process options
for o, a in opts:
    if o in ('-h', '--help'):
        print __usage__.strip()
        return 0
# check arguments
if len(args) != 1:
    print >>sys.stderr, "Error: 1 argument required"
    print >>sys.stderr, __usage__.strip()
    return 2
data_dir = args[0]
params = DataStore.params(data_dir)
Localisation.SetApplicationLanguage(params)
hourly_data = DataStore.hourly_store(data_dir)
idx = hourly_data.before(datetime.max)
print 'Zambretti (current):', Zambretti(params, hourly_data[idx])
# the 09:00 forecast: step back a day if 08:30 local has not passed yet
idx = idx.replace(tzinfo=utc).astimezone(Local)
if idx.hour < 8 or (idx.hour == 8 and idx.minute < 30):
    idx -= timedelta(hours=24)
idx = idx.replace(hour=9, minute=0, second=0)
idx = hourly_data.nearest(idx.astimezone(utc).replace(tzinfo=None))
lcl = idx.replace(tzinfo=utc).astimezone(Local)
print 'Zambretti (at %s):' % lcl.strftime('%H:%M %Z'), Zambretti(
    params, hourly_data[idx])
return 0

if __name__ == "__main__":
# tail of a service-upload main(); the enclosing "try:" sits just above
# this fragment
opts, args = getopt.getopt(
    argv[1:], "hcv", ['help', 'catchup', 'verbose'])
except getopt.error, msg:
    print >>sys.stderr, 'Error: %s\n' % msg
    print >>sys.stderr, __usage__.strip()
    return 1
# process options
catchup = False
verbose = 0
for o, a in opts:
    if o == '-h' or o == '--help':
        print __usage__.strip()
        return 0
    elif o == '-c' or o == '--catchup':
        catchup = True
    elif o == '-v' or o == '--verbose':
        verbose += 1
# check arguments
if len(args) != 2:
    print >>sys.stderr, "Error: 2 arguments required"
    print >>sys.stderr, __usage__.strip()
    return 2
logger = ApplicationLogger(verbose)
# without --catchup, ignore the stored "last update" time so the most
# recent record is always sent
return ToService(
    DataStore.params(args[0]), DataStore.status(args[0]),
    DataStore.calib_store(args[0]), args[1]).Upload(
        catchup=catchup, ignore_last_update=not catchup)

if __name__ == "__main__":
    sys.exit(main())
# process options for o, a in opts: if o == '-h' or o == '--help': print __usage__.strip() return 0 # check arguments if len(args) != 1: print >> sys.stderr, 'Error: 1 argument required\n' print >> sys.stderr, __usage__.strip() return 2 data_dir = args[0] # date & time range of data to be changed, in UTC! start = datetime(2013, 10, 26, 15, 23) stop = datetime(2013, 10, 30, 12, 47) # open data store raw_data = DataStore.data_store(data_dir) # change the data for data in raw_data[start:stop]: data['rain'] -= 263.1 raw_data[data['idx']] = data # make sure it's saved raw_data.flush() # clear calibrated data that needs to be regenerated calib_data = DataStore.calib_store(data_dir) del calib_data[start:] calib_data.flush() # done return 0 if __name__ == "__main__":
def LiveLog(data_dir):
    # Continuously read live data from the weather station, logging,
    # processing and running tasks as each record arrives.
    # Returns 0 normally, 3 if the station's fixed block is unreadable.
    logger = logging.getLogger('pywws.LiveLog')
    params = DataStore.params(data_dir)
    status = DataStore.status(data_dir)
    # localise application
    Localisation.SetApplicationLanguage(params)
    # connect to weather station
    ws_type = params.get('fixed', 'ws type')
    if ws_type:
        # migrate 'ws type' from the old [fixed] section to [config]
        params.unset('fixed', 'ws type')
        params.set('config', 'ws type', ws_type)
    ws_type = params.get('config', 'ws type', '1080')
    ws = WeatherStation.weather_station(
        ws_type=ws_type, params=params, status=status)
    fixed_block = CheckFixedBlock(ws, params, status, logger)
    if not fixed_block:
        logger.error("Invalid data from weather station")
        return 3
    # open data file stores
    raw_data = DataStore.data_store(data_dir)
    calib_data = DataStore.calib_store(data_dir)
    hourly_data = DataStore.hourly_store(data_dir)
    daily_data = DataStore.daily_store(data_dir)
    monthly_data = DataStore.monthly_store(data_dir)
    # create a RegularTasks object
    tasks = Tasks.RegularTasks(
        params, status, calib_data, hourly_data, daily_data, monthly_data)
    # get time of last logged data
    two_minutes = timedelta(minutes=2)
    last_stored = raw_data.before(datetime.max)
    if last_stored == None:
        last_stored = datetime.min
    if datetime.utcnow() < last_stored:
        raise ValueError('Computer time is earlier than last stored data')
    last_stored += two_minutes
    # get live data
    hour = timedelta(hours=1)
    next_hour = datetime.utcnow().replace(
        minute=0, second=0, microsecond=0) + hour
    next_ptr = None
    for data, ptr, logged in ws.live_data(
            logged_only=(not tasks.has_live_tasks())):
        now = data['idx']
        if logged:
            if ptr == next_ptr:
                # data is contiguous with last logged value
                raw_data[now] = data
            else:
                # catch up missing data
                Catchup(ws, logger, raw_data, now, ptr)
            next_ptr = ws.inc_ptr(ptr)
            # process new data
            Process.Process(params, status, raw_data, calib_data,
                            hourly_data, daily_data, monthly_data)
            # do tasks
            tasks.do_tasks()
            if now >= next_hour:
                next_hour += hour
                # re-check the station's fixed block once per hour
                fixed_block = CheckFixedBlock(ws, params, status, logger)
                if not fixed_block:
                    logger.error("Invalid data from weather station")
                    return 3
                # save any unsaved data
                raw_data.flush()
        else:
            tasks.do_live(data)
    return 0
def process(self, live_data, template_file):
    # Generator: read *template_file*, expanding #...# processing
    # directives against the weather data stores, yielding output text.
    # *live_data* may be None, in which case the latest calibrated
    # record stands in for it.

    def jump(idx, count):
        # step *count* records forward (+) or back (-) through data_set;
        # returns (index, True if the whole distance was covered)
        while count > 0:
            new_idx = data_set.after(idx + SECOND)
            if new_idx == None:
                break
            idx = new_idx
            count -= 1
        while count < 0:
            new_idx = data_set.before(idx)
            if new_idx == None:
                break
            idx = new_idx
            count += 1
        return idx, count == 0

    params = self.params
    if not live_data:
        idx = self.calib_data.before(datetime.max)
        if not idx:
            self.logger.error("No calib data - run Process.py first")
            return
        live_data = self.calib_data[idx]
    # local names made available to #calc# expressions via eval()
    pressure_trend_text = WeatherStation.pressure_trend_text
    wind_dir_text = WeatherStation.get_wind_dir_text()
    dew_point = WeatherStation.dew_point
    wind_chill = WeatherStation.wind_chill
    apparent_temp = WeatherStation.apparent_temp
    rain_hour = self._rain_hour
    rain_day = self._rain_day
    pressure_offset = eval(self.params.get('fixed', 'pressure offset'))
    fixed_block = eval(self.params.get('fixed', 'fixed block'))
    # start off with no time rounding
    round_time = None
    # start off in hourly data mode
    data_set = self.hourly_data
    # start off in utc
    time_zone = utc
    # jump to last item
    idx, valid_data = jump(datetime.max, -1)
    if not valid_data:
        self.logger.error("No summary data - run Process.py first")
        return
    data = data_set[idx]
    # open template file
    if sys.version_info[0] >= 3:
        tmplt = open(template_file, 'r', encoding=self.encoding)
    else:
        tmplt = open(template_file, 'r')
    # do the text processing
    while True:
        line = tmplt.readline()
        if line == '':
            break
        # odd-numbered split parts are the #...# directives
        parts = line.split('#')
        for i in range(len(parts)):
            if i % 2 == 0:
                # not a processing directive
                if i == 0 or parts[i] != '\n':
                    yield parts[i]
                continue
            if parts[i] and parts[i][0] == '!':
                # comment
                continue
            command = shlex.split(parts[i])
            if command == []:
                # empty command == print a single '#'
                yield '#'
            elif command[0] in data.keys() + ['calc']:
                # output a value
                if not valid_data:
                    continue
                # format is: key fmt_string no_value_string conversion
                # get value
                if command[0] == 'calc':
                    # NOTE(review): eval() of template text -- templates
                    # must be trusted input
                    x = eval(command[1])
                    del command[1]
                else:
                    x = data[command[0]]
                # adjust time
                if isinstance(x, datetime):
                    if round_time:
                        x += round_time
                    x = x.replace(tzinfo=utc)
                    x = x.astimezone(time_zone)
                # convert data
                if x != None and len(command) > 3:
                    x = eval(command[3])
                # get format
                fmt = '%s'
                if len(command) > 1:
                    fmt = command[1]
                # write output
                if x == None:
                    if len(command) > 2:
                        yield command[2]
                elif isinstance(x, datetime):
                    yield x.strftime(fmt)
                elif not self.use_locale:
                    yield fmt % (x)
                elif sys.version_info >= (2, 7) or '%%' not in fmt:
                    yield locale.format_string(fmt, x)
                else:
                    # old locale.format_string mishandles literal '%%'
                    yield locale.format_string(
                        fmt.replace('%%', '##'), x).replace('##', '%')
            elif command[0] == 'monthly':
                data_set = self.monthly_data
                idx, valid_data = jump(datetime.max, -1)
                data = data_set[idx]
            elif command[0] == 'daily':
                data_set = self.daily_data
                idx, valid_data = jump(datetime.max, -1)
                data = data_set[idx]
            elif command[0] == 'hourly':
                data_set = self.hourly_data
                idx, valid_data = jump(datetime.max, -1)
                data = data_set[idx]
            elif command[0] == 'raw':
                data_set = self.calib_data
                idx, valid_data = jump(datetime.max, -1)
                data = data_set[idx]
            elif command[0] == 'live':
                data_set = self.calib_data
                idx = datetime.max
                valid_data = True
                data = live_data
            elif command[0] == 'timezone':
                if command[1] == 'utc':
                    time_zone = utc
                elif command[1] == 'local':
                    time_zone = Local
                else:
                    self.logger.error("Unknown time zone: %s", command[1])
                    return
            elif command[0] == 'roundtime':
                if eval(command[1]):
                    round_time = timedelta(seconds=30)
                else:
                    round_time = None
            elif command[0] == 'jump':
                prevdata = data
                idx, valid_data = jump(idx, int(command[1]))
                data = data_set[idx]
            elif command[0] == 'goto':
                prevdata = data
                time_str = command[1]
                if '%' in time_str:
                    # expand strftime escapes relative to the current idx
                    lcl = idx.replace(tzinfo=utc).astimezone(time_zone)
                    time_str = lcl.strftime(time_str)
                new_idx = DataStore.safestrptime(time_str)
                new_idx = new_idx.replace(tzinfo=time_zone).astimezone(utc)
                new_idx = data_set.after(new_idx.replace(tzinfo=None))
                if new_idx:
                    idx = new_idx
                    data = data_set[idx]
                    valid_data = True
                else:
                    valid_data = False
            elif command[0] == 'loop':
                loop_count = int(command[1])
                loop_start = tmplt.tell()
            elif command[0] == 'endloop':
                loop_count -= 1
                if valid_data and loop_count > 0:
                    # rewind the file to replay the loop body
                    tmplt.seek(loop_start, 0)
            else:
                self.logger.error(
                    "Unknown processing directive: #%s#", parts[i])
                return
    tmplt.close()
    return
# tail of a plot main(): option parsing then wind-rose plotting
argv = sys.argv
try:
    opts, args = getopt.getopt(argv[1:], "h", ['help'])
except getopt.error, msg:
    print >> sys.stderr, 'Error: %s\n' % msg
    print >> sys.stderr, __usage__.strip()
    return 1
# process options
for o, a in opts:
    if o == '-h' or o == '--help':
        print __usage__.strip()
        return 0
# check arguments
if len(args) != 4:
    print >> sys.stderr, 'Error: 4 arguments required\n'
    print >> sys.stderr, __usage__.strip()
    return 2
logger = ApplicationLogger(2)
params = DataStore.params(args[0])
Localisation.SetApplicationLanguage(params)
# args: data_dir, temp_dir, template_file, output_file
return RosePlotter(params, DataStore.status(args[0]),
                   DataStore.calib_store(args[0]),
                   DataStore.hourly_store(args[0]),
                   DataStore.daily_store(args[0]),
                   DataStore.monthly_store(args[0]),
                   args[1]).DoPlot(args[2], args[3])

if __name__ == "__main__":
    sys.exit(main())
# tail of Process's main(): option parsing, then regenerate summaries
try:
    opts, args = getopt.getopt(argv[1:], "hv", ['help', 'verbose'])
except getopt.error, msg:
    print >>sys.stderr, 'Error: %s\n' % msg
    print >>sys.stderr, __usage__.strip()
    return 1
# process options
verbose = 0
for o, a in opts:
    if o in ('-h', '--help'):
        print __usage__.strip()
        return 0
    elif o in ('-v', '--verbose'):
        verbose += 1
# check arguments
if len(args) != 1:
    print >>sys.stderr, 'Error: 1 argument required\n'
    print >>sys.stderr, __usage__.strip()
    return 2
logger = ApplicationLogger(verbose)
data_dir = args[0]
return Process(DataStore.params(data_dir),
               DataStore.data_store(data_dir),
               DataStore.calib_store(data_dir),
               DataStore.hourly_store(data_dir),
               DataStore.daily_store(data_dir),
               DataStore.monthly_store(data_dir))

if __name__ == "__main__":
    sys.exit(main())
# tail of Template's main(): expand a template file into an output file
if argv is None:
    argv = sys.argv
try:
    opts, args = getopt.getopt(argv[1:], "", ['help'])
except getopt.error, msg:
    print >> sys.stderr, 'Error: %s\n' % msg
    print >> sys.stderr, __usage__.strip()
    return 1
# check arguments
if len(args) != 3:
    print >> sys.stderr, 'Error: 3 arguments required\n'
    print >> sys.stderr, __usage__.strip()
    return 2
# process options
for o, a in opts:
    if o == '--help':
        print __usage__.strip()
        return 0
logger = ApplicationLogger(1)
params = DataStore.params(args[0])
Localisation.SetApplicationLanguage(params)
# args: data_dir, template_file, output_file
return Template(params,
                DataStore.calib_store(args[0]),
                DataStore.hourly_store(args[0]),
                DataStore.daily_store(args[0]),
                DataStore.monthly_store(args[0])).make_file(
                    args[1], args[2])

if __name__ == "__main__":
    sys.exit(main())
def process(self, live_data, template_file):
    # Generator: expand #...# directives in *template_file* against the
    # weather data stores, yielding unicode output text.  *live_data*
    # may be None, in which case the latest calibrated record is used.

    def jump(idx, count):
        # step *count* records forward (+) or back (-) in data_set;
        # returns (index, True if the whole distance was covered)
        while count > 0:
            new_idx = data_set.after(idx + SECOND)
            if new_idx == None:
                break
            idx = new_idx
            count -= 1
        while count < 0:
            new_idx = data_set.before(idx)
            if new_idx == None:
                break
            idx = new_idx
            count += 1
        return idx, count == 0

    params = self.params
    if not live_data:
        idx = self.calib_data.before(datetime.max)
        if not idx:
            self.logger.error("No calib data - run pywws.Process first")
            return
        live_data = self.calib_data[idx]
    # get default character encoding of template input & output files
    self.encoding = params.get('config', 'template encoding', 'iso-8859-1')
    file_encoding = self.encoding
    if file_encoding == 'html':
        # 'html' means ascii bytes with XML character references
        file_encoding = 'ascii'
    # get conversions module to create its 'private' wind dir text
    # array, then copy it to deprecated wind_dir_text variable
    winddir_text(0)
    wind_dir_text = conversions._winddir_text_array
    # helpers made available to #calc# expressions via eval()
    hour_diff = self._hour_diff
    rain_hour = self._rain_hour
    rain_day = self._rain_day
    pressure_offset = eval(self.params.get('config', 'pressure offset'))
    fixed_block = eval(self.status.get('fixed', 'fixed block'))
    # start off with no time rounding
    round_time = None
    # start off in hourly data mode
    data_set = self.hourly_data
    # start off in utc
    time_zone = utc
    # start off with default use_locale setting
    use_locale = self.use_locale
    # jump to last item
    idx, valid_data = jump(datetime.max, -1)
    if not valid_data:
        self.logger.error("No summary data - run pywws.Process first")
        return
    data = data_set[idx]
    # open template file, if not already a file(like) object
    if hasattr(template_file, 'readline'):
        tmplt = template_file
    else:
        tmplt = open(template_file, 'rb')
    # do the text processing
    while True:
        line = tmplt.readline().decode(file_encoding)
        if not line:
            break
        # odd-numbered split parts are the #...# directives
        parts = line.split('#')
        for i in range(len(parts)):
            if i % 2 == 0:
                # not a processing directive
                if i == 0 or parts[i] != '\n':
                    yield parts[i]
                continue
            if parts[i] and parts[i][0] == '!':
                # comment
                continue
            # Python 2 shlex can't handle unicode
            if sys.version_info[0] < 3:
                parts[i] = parts[i].encode(file_encoding)
            command = shlex.split(parts[i])
            if sys.version_info[0] < 3:
                command = map(lambda x: x.decode(file_encoding), command)
            if command == []:
                # empty command == print a single '#'
                yield u'#'
            elif command[0] in data.keys() + ['calc']:
                # output a value
                if not valid_data:
                    continue
                # format is: key fmt_string no_value_string conversion
                # get value
                if command[0] == 'calc':
                    # NOTE(review): eval() of template text -- templates
                    # must be trusted input
                    x = eval(command[1])
                    del command[1]
                else:
                    x = data[command[0]]
                # adjust time
                if isinstance(x, datetime):
                    if round_time:
                        x += round_time
                    x = x.replace(tzinfo=utc)
                    x = x.astimezone(time_zone)
                # convert data
                if x is not None and len(command) > 3:
                    x = eval(command[3])
                # get format
                fmt = u'%s'
                if len(command) > 1:
                    fmt = command[1]
                # write output
                if x is None:
                    if len(command) > 2:
                        yield command[2]
                elif isinstance(x, datetime):
                    # Python 2 strftime wants/returns encoded bytes
                    if sys.version_info[0] < 3:
                        fmt = fmt.encode(file_encoding)
                    x = x.strftime(fmt)
                    if sys.version_info[0] < 3:
                        if self.encoding == 'html':
                            x = x.decode('ascii', errors='xmlcharrefreplace')
                        else:
                            x = x.decode(file_encoding)
                    yield x
                elif not use_locale:
                    yield fmt % (x)
                elif sys.version_info >= (2, 7) or '%%' not in fmt:
                    yield locale.format_string(fmt, x)
                else:
                    # old locale.format_string mishandles literal '%%'
                    yield locale.format_string(
                        fmt.replace('%%', '##'), x).replace('##', '%')
            elif command[0] == 'monthly':
                data_set = self.monthly_data
                idx, valid_data = jump(datetime.max, -1)
                data = data_set[idx]
            elif command[0] == 'daily':
                data_set = self.daily_data
                idx, valid_data = jump(datetime.max, -1)
                data = data_set[idx]
            elif command[0] == 'hourly':
                data_set = self.hourly_data
                idx, valid_data = jump(datetime.max, -1)
                data = data_set[idx]
            elif command[0] == 'raw':
                data_set = self.calib_data
                idx, valid_data = jump(datetime.max, -1)
                data = data_set[idx]
            elif command[0] == 'live':
                data_set = self.calib_data
                idx = datetime.max
                valid_data = True
                data = live_data
            elif command[0] == 'timezone':
                if command[1] == 'utc':
                    time_zone = utc
                elif command[1] == 'local':
                    time_zone = Local
                else:
                    self.logger.error("Unknown time zone: %s", command[1])
                    return
            elif command[0] == 'locale':
                use_locale = eval(command[1])
            elif command[0] == 'encoding':
                self.encoding = command[1]
                file_encoding = self.encoding
                if file_encoding == 'html':
                    file_encoding = 'ascii'
            elif command[0] == 'roundtime':
                if eval(command[1]):
                    round_time = timedelta(seconds=30)
                else:
                    round_time = None
            elif command[0] == 'jump':
                prevdata = data
                idx, valid_data = jump(idx, int(command[1]))
                data = data_set[idx]
            elif command[0] == 'goto':
                prevdata = data
                time_str = command[1]
                if '%' in time_str:
                    # expand strftime escapes relative to the current idx
                    lcl = idx.replace(tzinfo=utc).astimezone(time_zone)
                    time_str = lcl.strftime(time_str)
                new_idx = DataStore.safestrptime(time_str)
                new_idx = new_idx.replace(tzinfo=time_zone).astimezone(utc)
                new_idx = data_set.after(new_idx.replace(tzinfo=None))
                if new_idx:
                    idx = new_idx
                    data = data_set[idx]
                    valid_data = True
                else:
                    valid_data = False
            elif command[0] == 'loop':
                loop_count = int(command[1])
                loop_start = tmplt.tell()
            elif command[0] == 'endloop':
                loop_count -= 1
                if valid_data and loop_count > 0:
                    # rewind the file to replay the loop body
                    tmplt.seek(loop_start, 0)
            else:
                self.logger.error(
                    "Unknown processing directive: #%s#", parts[i])
                return
# tail of an older LogData main(): getopt error handler, options, launch
print >> sys.stderr, 'Error: %s\n' % msg
print >> sys.stderr, __usage__.strip()
return 1
# process options
clear = False
sync = None
verbose = 0
for o, a in opts:
    if o in ('-h', '--help'):
        print __usage__.strip()
        return 0
    elif o in ('-c', '--clear'):
        clear = True
    elif o in ('-s', '--sync'):
        sync = int(a)
    elif o in ('-v', '--verbose'):
        verbose += 1
# check arguments
if len(args) != 1:
    print >> sys.stderr, 'Error: 1 argument required\n'
    print >> sys.stderr, __usage__.strip()
    return 2
logger = ApplicationLogger(verbose)
root_dir = args[0]
# note: no explicit return value from log_data here
DataLogger(DataStore.params(root_dir), DataStore.status(root_dir),
           DataStore.data_store(root_dir)).log_data(sync=sync, clear=clear)

if __name__ == "__main__":
    sys.exit(main())
def catchup(self, last_date, last_ptr):
    # Copy station memory records backwards from last_ptr/last_date
    # until data already stored is reached, removing duplicates created
    # by an earlier interrupted catchup.
    fixed_block = self.ws.get_fixed_block(unbuffered=True)
    # get time to go back to
    last_stored = self.raw_data.before(datetime.max)
    if not last_stored:
        last_stored = datetime.min
    if self.status.get('data', 'ptr'):
        # resume point from a previous catchup: "hexptr,timestamp"
        saved_ptr, saved_date = self.status.get('data', 'ptr').split(',')
        saved_ptr = int(saved_ptr, 16)
        saved_date = DataStore.safestrptime(saved_date)
        saved_date = self.raw_data.nearest(saved_date)
        # advance the saved position to the newest stored record
        while saved_date < last_stored:
            saved_date = self.raw_data.after(saved_date + SECOND)
            saved_ptr = self.ws.inc_ptr(saved_ptr)
    else:
        saved_ptr = None
        saved_date = None
    last_stored += timedelta(seconds=fixed_block['read_period'] * 30)
    if last_date <= last_stored:
        # nothing to do
        return
    self.status.set(
        'data', 'ptr', '%06x,%s' % (last_ptr, last_date.isoformat(' ')))
    # data_count includes record currently being updated every 48 seconds
    max_count = fixed_block['data_count'] - 1
    count = 0
    duplicates = []
    while last_date > last_stored and count < max_count:
        data = self.ws.get_data(last_ptr)
        if last_ptr == saved_ptr:
            if any(data[key] != self.raw_data[saved_date][key] for key in (
                    'hum_in', 'temp_in', 'hum_out', 'temp_out',
                    'abs_pressure', 'wind_ave', 'wind_gust', 'wind_dir',
                    'rain', 'status')):
                # pointer matches but data is different, so no duplicates
                duplicates = None
                saved_ptr = None
                saved_date = None
            else:
                # potential duplicate data
                duplicates.append(last_date)
                saved_date = self.raw_data.before(saved_date)
                saved_ptr = self.ws.dec_ptr(saved_ptr)
        if (data['delay'] is None or
                data['delay'] > max(fixed_block['read_period'] * 2, 35)):
            # implausible delay value -> corrupt record; skip it and
            # assume the nominal logging interval
            self.logger.error('invalid data at %04x, %s',
                              last_ptr, last_date.isoformat(' '))
            last_date -= timedelta(minutes=fixed_block['read_period'])
        else:
            self.raw_data[last_date] = data
            count += 1
            last_date -= timedelta(minutes=data['delay'])
        last_ptr = self.ws.dec_ptr(last_ptr)
    if duplicates:
        # all candidate duplicates matched stored data -- delete them
        for d in duplicates:
            del self.raw_data[d]
        count -= len(duplicates)
    last_date = self.raw_data.nearest(last_date)
    next_date = self.raw_data.after(last_date + SECOND)
    if next_date:
        # warn if the catchup still left a hole in the record
        gap = (next_date - last_date).seconds // 60
        gap -= fixed_block['read_period']
        if gap > 0:
            self.logger.critical("%d minutes gap in data detected", gap)
    self.logger.info("%d catchup records", count)
# tail of an older service-upload main() (no ignore_last_update option)
try:
    opts, args = getopt.getopt(
        argv[1:], "hcv", ['help', 'catchup', 'verbose'])
except getopt.error, msg:
    print >>sys.stderr, 'Error: %s\n' % msg
    print >>sys.stderr, __usage__.strip()
    return 1
# process options
catchup = False
verbose = 0
for o, a in opts:
    if o == '-h' or o == '--help':
        print __usage__.strip()
        return 0
    elif o == '-c' or o == '--catchup':
        catchup = True
    elif o == '-v' or o == '--verbose':
        verbose += 1
# check arguments
if len(args) != 2:
    print >>sys.stderr, "Error: 2 arguments required"
    print >>sys.stderr, __usage__.strip()
    return 2
logger = ApplicationLogger(verbose)
# args: data_dir, service_name
return ToService(
    DataStore.params(args[0]), DataStore.status(args[0]),
    DataStore.calib_store(args[0]), args[1]).Upload(catchup=catchup)

if __name__ == "__main__":
    sys.exit(main())
def LiveLog(data_dir):
    """Run the live-logging loop for the weather station.

    Reads live data from the station, stores it, runs processing and
    regular tasks on each logged record, and re-checks the station's
    fixed block once an hour.

    data_dir -- directory containing the pywws data stores and config.

    Returns 0 on normal termination, 3 if the station returns invalid
    fixed-block data.  Raises ValueError if the computer clock is
    behind the most recent stored record.
    """
    logger = logging.getLogger('pywws.LiveLog')
    params = DataStore.params(data_dir)
    # localise application
    Localisation.SetApplicationLanguage(params)
    # connect to weather station
    # migrate 'ws type' from the old [config] section to [fixed]
    ws_type = params.get('config', 'ws type')
    if ws_type:
        params._config.remove_option('config', 'ws type')
        params.set('fixed', 'ws type', ws_type)
    ws_type = params.get('fixed', 'ws type', '1080')
    ws = WeatherStation.weather_station(ws_type=ws_type)
    # NOTE(review): called with 3 args here, but another revision of
    # CheckFixedBlock takes (ws, params, status, logger) — confirm which
    # version of the function this file pairs with.
    fixed_block = CheckFixedBlock(ws, params, logger)
    if not fixed_block:
        logger.error("Invalid data from weather station")
        return 3
    # open data file stores
    raw_data = DataStore.data_store(data_dir)
    calib_data = DataStore.calib_store(data_dir)
    hourly_data = DataStore.hourly_store(data_dir)
    daily_data = DataStore.daily_store(data_dir)
    monthly_data = DataStore.monthly_store(data_dir)
    # create a RegularTasks object
    tasks = Tasks.RegularTasks(
        params, calib_data, hourly_data, daily_data, monthly_data)
    # get time of last logged data
    two_minutes = timedelta(minutes=2)
    last_stored = raw_data.before(datetime.max)
    # idiom fix: compare to None with 'is', not '=='
    if last_stored is None:
        last_stored = datetime.min
    if datetime.utcnow() < last_stored:
        raise ValueError('Computer time is earlier than last stored data')
    last_stored += two_minutes
    # get live data
    hour = timedelta(hours=1)
    next_hour = datetime.utcnow().replace(
        minute=0, second=0, microsecond=0) + hour
    next_ptr = None
    for data, ptr, logged in ws.live_data(
            logged_only=(not tasks.has_live_tasks())):
        now = data['idx']
        if logged:
            if ptr == next_ptr:
                # data is contiguous with last logged value
                raw_data[now] = data
            else:
                # catch up missing data
                Catchup(ws, logger, raw_data, now, ptr)
            next_ptr = ws.inc_ptr(ptr)
            # process new data
            Process.Process(params, raw_data, calib_data, hourly_data,
                            daily_data, monthly_data)
            # do tasks
            tasks.do_tasks()
            if now >= next_hour:
                # hourly health check of the station's fixed block
                next_hour += hour
                fixed_block = CheckFixedBlock(ws, params, logger)
                if not fixed_block:
                    logger.error("Invalid data from weather station")
                    return 3
                params.flush()
        else:
            tasks.do_live(data)
    return 0
return OK def main(argv=None): if argv is None: argv = sys.argv try: opts, args = getopt.getopt(argv[1:], "h", ['help']) except getopt.error, msg: print >> sys.stderr, 'Error: %s\n' % msg print >> sys.stderr, __usage__.strip() return 1 # process options for o, a in opts: if o in ('-h', '--help'): print __usage__.strip() return 0 # check arguments if len(args) < 2: print >> sys.stderr, "Error: at least 2 arguments required" print >> sys.stderr, __usage__.strip() return 2 logger = ApplicationLogger(1) if Upload(DataStore.params(args[0])).upload(args[1:]): return 0 return 3 if __name__ == "__main__": sys.exit(main())
break self.disconnect() return OK def main(argv=None): if argv is None: argv = sys.argv try: opts, args = getopt.getopt(argv[1:], "h", ['help']) except getopt.error, msg: print >>sys.stderr, 'Error: %s\n' % msg print >>sys.stderr, __usage__.strip() return 1 # process options for o, a in opts: if o in ('-h', '--help'): print __usage__.strip() return 0 # check arguments if len(args) < 2: print >>sys.stderr, "Error: at least 2 arguments required" print >>sys.stderr, __usage__.strip() return 2 logger = ApplicationLogger(1) if Upload(DataStore.params(args[0])).upload(args[1:]): return 0 return 3 if __name__ == "__main__": sys.exit(main())
def main(argv=None): if argv is None: argv = sys.argv try: opts, args = getopt.getopt(argv[1:], "h", ['help']) except getopt.error, msg: print >> sys.stderr, 'Error: %s\n' % msg print >> sys.stderr, __usage__.strip() return 1 # process options for o, a in opts: if o in ('-h', '--help'): print __usage__.strip() return 0 # check arguments if len(args) != 2: print >> sys.stderr, "Error: 2 arguments required" print >> sys.stderr, __usage__.strip() return 2 logger = ApplicationLogger(1) params = DataStore.params(args[0]) Localisation.SetApplicationLanguage(params) if ToTwitter(params).UploadFile(args[1]): return 0 return 3 if __name__ == "__main__": sys.exit(main())
argv = sys.argv try: opts, args = getopt.getopt(argv[1:], "h", ['help']) except getopt.error, msg: print >>sys.stderr, 'Error: %s\n' % msg print >>sys.stderr, __usage__.strip() return 1 # process options for o, a in opts: if o == '-h' or o == '--help': print __usage__.strip() return 0 # check arguments if len(args) != 4: print >>sys.stderr, 'Error: 4 arguments required\n' print >>sys.stderr, __usage__.strip() return 2 logger = ApplicationLogger(2) params = DataStore.params(args[0]) status = DataStore.status(args[0]) Localisation.SetApplicationLanguage(params) return GraphPlotter( params, status, DataStore.calib_store(args[0]), DataStore.hourly_store(args[0]), DataStore.daily_store(args[0]), DataStore.monthly_store(args[0]), args[1] ).DoPlot(GraphFileReader(args[2]), args[3]) if __name__ == "__main__": sys.exit(main())
of.write(' </auto_update>\n') of.write(' </current_weather>\n') of.write('</response>\n') of.close() def main(argv=None): if argv is None: argv = sys.argv try: opts, args = getopt.getopt(argv[1:], "hv", ['help', 'verbose']) except getopt.error, msg: print >>sys.stderr, 'Error: %s\n' % msg print >>sys.stderr, __usage__.strip() return 1 # process options verbose = 0 for o, a in opts: if o == '-h' or o == '--help': print __usage__.strip() return 0 elif o == '-v' or o == '--verbose': verbose += 1 # check arguments if len(args) != 2: print >>sys.stderr, "Error: 2 arguments required" print >>sys.stderr, __usage__.strip() return 2 logger = ApplicationLogger(verbose) return YoWindow(DataStore.calib_store(args[0])).write_file(args[1]) if __name__ == "__main__": sys.exit(main())
argv = sys.argv try: opts, args = getopt.getopt(argv[1:], "h", ['help']) except getopt.error, msg: print >>sys.stderr, 'Error: %s\n' % msg print >>sys.stderr, __usage__.strip() return 1 # process options for o, a in opts: if o == '-h' or o == '--help': print __usage__.strip() return 0 # check arguments if len(args) != 4: print >>sys.stderr, 'Error: 4 arguments required\n' print >>sys.stderr, __usage__.strip() return 2 logger = ApplicationLogger(1) params = DataStore.params(args[0]) status = DataStore.status(args[0]) Localisation.SetApplicationLanguage(params) return GraphPlotter( params, status, DataStore.calib_store(args[0]), DataStore.hourly_store(args[0]), DataStore.daily_store(args[0]), DataStore.monthly_store(args[0]), args[1] ).DoPlot(args[2], args[3]) if __name__ == "__main__": sys.exit(main())
if o == '-h' or o == '--help': print __usage__.strip() return 0 elif o == '-n' or o == '--noaction': noaction = True # check arguments if len(args) != 1: print >>sys.stderr, 'Error: 1 argument required\n' print >>sys.stderr, __usage__.strip() return 2 data_dir = args[0] # date & time range of data to be changed, in UTC! start = datetime(2013, 10, 27, 11, 21) stop = datetime(2013, 10, 29, 18, 32) # open data store raw_data = DataStore.data_store(data_dir) # process the data aperture = timedelta(minutes=14, seconds=30) # make list of changes to apply after examining the data changes = [] for data in raw_data[start:stop]: if data['temp_out'] is None: continue # get temperatures at nearby times idx = data['idx'] temp_list = [] for local_data in raw_data[idx-aperture:idx+aperture]: temp = local_data['temp_out'] if temp is not None: temp_list.append(temp) if len(temp_list) < 3:
def process(self, live_data, template_file):
    """Generator: expand the template in ``template_file``, yielding
    output text fragments.

    Template directives are delimited by '#' pairs; data keys and
    'calc' expressions are substituted, and directives such as
    'monthly', 'daily', 'hourly', 'raw', 'live', 'timezone',
    'roundtime', 'jump', 'goto', 'loop'/'endloop' change the data
    source and position.

    live_data     -- latest data record, or falsy to use the most
                     recent calibrated record.
    template_file -- path of the template file to process.
    """
    def jump(idx, count):
        # Move `count` records forward (positive) or back (negative)
        # from `idx` in the current data_set.  Returns the new index
        # and True if the full distance was covered.
        while count > 0:
            new_idx = data_set.after(idx + SECOND)
            if new_idx == None:
                break
            idx = new_idx
            count -= 1
        while count < 0:
            new_idx = data_set.before(idx)
            if new_idx == None:
                break
            idx = new_idx
            count += 1
        return idx, count == 0
    params = self.params
    if not live_data:
        idx = self.calib_data.before(datetime.max)
        if not idx:
            self.logger.error("No calib data - run Process.py first")
            return
        live_data = self.calib_data[idx]
    # names made local so template 'calc' expressions can eval() them
    pressure_trend_text = WeatherStation.pressure_trend_text
    wind_dir_text = WeatherStation.get_wind_dir_text()
    dew_point = WeatherStation.dew_point
    wind_chill = WeatherStation.wind_chill
    apparent_temp = WeatherStation.apparent_temp
    rain_hour = self._rain_hour
    rain_day = self._rain_day
    # NOTE(review): eval of config values and template expressions —
    # safe only if config and templates are trusted input.
    pressure_offset = eval(self.params.get('fixed', 'pressure offset'))
    fixed_block = eval(self.params.get('fixed', 'fixed block'))
    # start off with no time rounding
    round_time = None
    # start off in hourly data mode
    data_set = self.hourly_data
    # start off in utc
    time_zone = utc
    # jump to last item
    idx, valid_data = jump(datetime.max, -1)
    if not valid_data:
        self.logger.error("No summary data - run Process.py first")
        return
    data = data_set[idx]
    # open template file
    if sys.version_info[0] >= 3:
        tmplt = open(template_file, 'r', encoding=self.encoding)
    else:
        tmplt = open(template_file, 'r')
    # do the text processing
    while True:
        line = tmplt.readline()
        if line == '':
            break
        # odd-numbered parts are processing directives, even are literal
        parts = line.split('#')
        for i in range(len(parts)):
            if i % 2 == 0:
                # not a processing directive
                if i == 0 or parts[i] != '\n':
                    yield parts[i]
                continue
            if parts[i] and parts[i][0] == '!':
                # comment
                continue
            command = shlex.split(parts[i])
            if command == []:
                # empty command == print a single '#'
                yield '#'
            elif command[0] in data.keys() + ['calc']:
                # output a value
                if not valid_data:
                    continue
                # format is: key fmt_string no_value_string conversion
                # get value
                if command[0] == 'calc':
                    x = eval(command[1])
                    del command[1]
                else:
                    x = data[command[0]]
                # adjust time
                if isinstance(x, datetime):
                    if round_time:
                        x += round_time
                    x = x.replace(tzinfo=utc)
                    x = x.astimezone(time_zone)
                # convert data
                if x != None and len(command) > 3:
                    x = eval(command[3])
                # get format
                fmt = '%s'
                if len(command) > 1:
                    fmt = command[1]
                # write output
                if x == None:
                    if len(command) > 2:
                        yield command[2]
                elif isinstance(x, datetime):
                    yield x.strftime(fmt)
                elif not self.use_locale:
                    yield fmt % (x)
                elif sys.version_info >= (2, 7) or '%%' not in fmt:
                    yield locale.format_string(fmt, x)
                else:
                    # work around old-Python locale handling of '%%'
                    yield locale.format_string(
                        fmt.replace('%%', '##'), x).replace('##', '%')
            elif command[0] == 'monthly':
                data_set = self.monthly_data
                idx, valid_data = jump(datetime.max, -1)
                data = data_set[idx]
            elif command[0] == 'daily':
                data_set = self.daily_data
                idx, valid_data = jump(datetime.max, -1)
                data = data_set[idx]
            elif command[0] == 'hourly':
                data_set = self.hourly_data
                idx, valid_data = jump(datetime.max, -1)
                data = data_set[idx]
            elif command[0] == 'raw':
                data_set = self.calib_data
                idx, valid_data = jump(datetime.max, -1)
                data = data_set[idx]
            elif command[0] == 'live':
                data_set = self.calib_data
                idx = datetime.max
                valid_data = True
                data = live_data
            elif command[0] == 'timezone':
                if command[1] == 'utc':
                    time_zone = utc
                elif command[1] == 'local':
                    time_zone = Local
                else:
                    self.logger.error("Unknown time zone: %s", command[1])
                    return
            elif command[0] == 'roundtime':
                if eval(command[1]):
                    round_time = timedelta(seconds=30)
                else:
                    round_time = None
            elif command[0] == 'jump':
                prevdata = data
                idx, valid_data = jump(idx, int(command[1]))
                data = data_set[idx]
            elif command[0] == 'goto':
                prevdata = data
                time_str = command[1]
                if '%' in time_str:
                    # strftime-expand relative to current index, local tz
                    lcl = idx.replace(tzinfo=utc).astimezone(time_zone)
                    time_str = lcl.strftime(time_str)
                new_idx = DataStore.safestrptime(time_str)
                new_idx = new_idx.replace(tzinfo=time_zone).astimezone(utc)
                new_idx = data_set.after(new_idx.replace(tzinfo=None))
                if new_idx:
                    idx = new_idx
                    data = data_set[idx]
                    valid_data = True
                else:
                    valid_data = False
            elif command[0] == 'loop':
                loop_count = int(command[1])
                loop_start = tmplt.tell()
            elif command[0] == 'endloop':
                # rewind to loop start until count exhausted or data runs out
                loop_count -= 1
                if valid_data and loop_count > 0:
                    tmplt.seek(loop_start, 0)
            else:
                self.logger.error(
                    "Unknown processing directive: #%s#", parts[i])
                return
    tmplt.close()
    return
######## # Main ######## # Global Variables AdaScreenNumber = 0 data = {} forecast_bom_today = "" forecast_bom_tomorrow = "" forecast_file_today = "" forecast_toggle = 0 global_init=True readings = {} # pywws data if config.getboolean('Output','PYWWS_PUBLISH'): ds = DataStore.data_store(config.get('PYWWS','STORAGE')) dstatus = DataStore.status(config.get('PYWWS','STORAGE')) if config.getboolean('Output','ADA_LCD'): AdaLcd.clear() if config.getboolean('Output','SENSEHAT_DISPLAY'): # Set up display PiSenseHat.clear() PiSenseHat.set_rotation(config.get('SenseHat','ROTATION')) if config.getboolean('Sensors','ENOCEAN'): eoCommunicator = eoSerialCommunicator(port=config.get('EnOcean','PORT')) eoCommunicator.start() # Warm up sensors print "Waiting for sensors to settle" for i in range(1,6): Sample() time.sleep(1)
params.set('twitter', 'key', access_token['oauth_token']) params.set('twitter', 'secret', access_token['oauth_token_secret']) print 'Success! Authorisation data has been stored in %s' % params._path return 0 def main(argv=None): if argv is None: argv = sys.argv try: opts, args = getopt.getopt(argv[1:], "h", ['help']) except getopt.error, msg: print >> sys.stderr, 'Error: %s\n' % msg print >> sys.stderr, __usage__.strip() return 1 # process options for o, a in opts: if o in ('-h', '--help'): print __usage__.strip() return 0 # check arguments if len(args) != 1: print >> sys.stderr, "Error: 1 argument required" print >> sys.stderr, __usage__.strip() return 2 return TwitterAuth(DataStore.params(args[0])) if __name__ == "__main__": sys.exit(main())
return 1 access_token = dict(urlparse.parse_qsl(content)) params.set('twitter', 'key', access_token['oauth_token']) params.set('twitter', 'secret', access_token['oauth_token_secret']) print 'Success! Authorisation data has been stored in %s' % params._path return 0 def main(argv=None): if argv is None: argv = sys.argv try: opts, args = getopt.getopt(argv[1:], "h", ['help']) except getopt.error, msg: print >>sys.stderr, 'Error: %s\n' % msg print >>sys.stderr, __usage__.strip() return 1 # process options for o, a in opts: if o in ('-h', '--help'): print __usage__.strip() return 0 # check arguments if len(args) != 1: print >>sys.stderr, "Error: 1 argument required" print >>sys.stderr, __usage__.strip() return 2 return TwitterAuth(DataStore.params(args[0])) if __name__ == "__main__": sys.exit(main())
def process(self, live_data, template_file):
    """Generator: expand a pywws template, yielding unicode output
    fragments.

    Template directives are delimited by '#' pairs; data keys and
    'calc' expressions are substituted, and directives ('monthly',
    'daily', 'hourly', 'raw', 'live', 'timezone', 'locale',
    'encoding', 'roundtime', 'jump', 'goto', 'loop'/'endloop')
    change data source, position and formatting.

    live_data     -- latest data record, or falsy to use the most
                     recent calibrated record.
    template_file -- template path, or a file-like object with
                     readline().

    NOTE(review): this chunk appears truncated — any cleanup after the
    main loop (e.g. closing tmplt) is not visible here.
    """
    def jump(idx, count):
        # Move `count` records forward (positive) or back (negative)
        # from `idx` in the current data_set.  Returns the new index
        # and True if the full distance was covered.
        while count > 0:
            new_idx = data_set.after(idx + SECOND)
            if new_idx == None:
                break
            idx = new_idx
            count -= 1
        while count < 0:
            new_idx = data_set.before(idx)
            if new_idx == None:
                break
            idx = new_idx
            count += 1
        return idx, count == 0
    params = self.params
    if not live_data:
        idx = self.calib_data.before(datetime.max)
        if not idx:
            self.logger.error("No calib data - run pywws.Process first")
            return
        live_data = self.calib_data[idx]
    # get default character encoding of template input & output files
    self.encoding = params.get('config', 'template encoding', 'iso-8859-1')
    file_encoding = self.encoding
    if file_encoding == 'html':
        file_encoding = 'ascii'
    # get conversions module to create its 'private' wind dir text
    # array, then copy it to deprecated wind_dir_text variable
    winddir_text(0)
    wind_dir_text = conversions._winddir_text_array
    # locals available to template 'calc' eval() expressions
    hour_diff = self._hour_diff
    rain_hour = self._rain_hour
    rain_day = self._rain_day
    # NOTE(review): eval of config/status values and template
    # expressions — safe only if those inputs are trusted.
    pressure_offset = eval(self.params.get('config', 'pressure offset'))
    fixed_block = eval(self.status.get('fixed', 'fixed block'))
    # start off with no time rounding
    round_time = None
    # start off in hourly data mode
    data_set = self.hourly_data
    # start off in utc
    time_zone = utc
    # start off with default use_locale setting
    use_locale = self.use_locale
    # jump to last item
    idx, valid_data = jump(datetime.max, -1)
    if not valid_data:
        self.logger.error("No summary data - run pywws.Process first")
        return
    data = data_set[idx]
    # open template file, if not already a file(like) object
    if hasattr(template_file, 'readline'):
        tmplt = template_file
    else:
        tmplt = open(template_file, 'rb')
    # do the text processing
    while True:
        line = tmplt.readline().decode(file_encoding)
        if not line:
            break
        # odd-numbered parts are processing directives, even are literal
        parts = line.split('#')
        for i in range(len(parts)):
            if i % 2 == 0:
                # not a processing directive
                if i == 0 or parts[i] != '\n':
                    yield parts[i]
                continue
            if parts[i] and parts[i][0] == '!':
                # comment
                continue
            # Python 2 shlex can't handle unicode
            if sys.version_info[0] < 3:
                parts[i] = parts[i].encode(file_encoding)
            command = shlex.split(parts[i])
            if sys.version_info[0] < 3:
                command = map(lambda x: x.decode(file_encoding), command)
            if command == []:
                # empty command == print a single '#'
                yield u'#'
            elif command[0] in data.keys() + ['calc']:
                # output a value
                if not valid_data:
                    continue
                # format is: key fmt_string no_value_string conversion
                # get value
                if command[0] == 'calc':
                    x = eval(command[1])
                    del command[1]
                else:
                    x = data[command[0]]
                # adjust time
                if isinstance(x, datetime):
                    if round_time:
                        x += round_time
                    x = x.replace(tzinfo=utc)
                    x = x.astimezone(time_zone)
                # convert data
                if x is not None and len(command) > 3:
                    x = eval(command[3])
                # get format
                fmt = u'%s'
                if len(command) > 1:
                    fmt = command[1]
                # write output
                if x is None:
                    if len(command) > 2:
                        yield command[2]
                elif isinstance(x, datetime):
                    # strftime needs bytes on Python 2, text elsewhere
                    if sys.version_info[0] < 3:
                        fmt = fmt.encode(file_encoding)
                    x = x.strftime(fmt)
                    if sys.version_info[0] < 3:
                        x = x.decode(file_encoding)
                    yield x
                elif not use_locale:
                    yield fmt % (x)
                elif sys.version_info >= (2, 7) or '%%' not in fmt:
                    yield locale.format_string(fmt, x)
                else:
                    # work around old-Python locale handling of '%%'
                    yield locale.format_string(
                        fmt.replace('%%', '##'), x).replace('##', '%')
            elif command[0] == 'monthly':
                data_set = self.monthly_data
                idx, valid_data = jump(datetime.max, -1)
                data = data_set[idx]
            elif command[0] == 'daily':
                data_set = self.daily_data
                idx, valid_data = jump(datetime.max, -1)
                data = data_set[idx]
            elif command[0] == 'hourly':
                data_set = self.hourly_data
                idx, valid_data = jump(datetime.max, -1)
                data = data_set[idx]
            elif command[0] == 'raw':
                data_set = self.calib_data
                idx, valid_data = jump(datetime.max, -1)
                data = data_set[idx]
            elif command[0] == 'live':
                data_set = self.calib_data
                idx = datetime.max
                valid_data = True
                data = live_data
            elif command[0] == 'timezone':
                if command[1] == 'utc':
                    time_zone = utc
                elif command[1] == 'local':
                    time_zone = Local
                else:
                    self.logger.error("Unknown time zone: %s", command[1])
                    return
            elif command[0] == 'locale':
                use_locale = eval(command[1])
            elif command[0] == 'encoding':
                self.encoding = command[1]
                file_encoding = self.encoding
                if file_encoding == 'html':
                    file_encoding = 'ascii'
            elif command[0] == 'roundtime':
                if eval(command[1]):
                    round_time = timedelta(seconds=30)
                else:
                    round_time = None
            elif command[0] == 'jump':
                prevdata = data
                idx, valid_data = jump(idx, int(command[1]))
                data = data_set[idx]
            elif command[0] == 'goto':
                prevdata = data
                time_str = command[1]
                if '%' in time_str:
                    # strftime-expand relative to current index, local tz
                    lcl = idx.replace(tzinfo=utc).astimezone(time_zone)
                    time_str = lcl.strftime(time_str)
                new_idx = DataStore.safestrptime(time_str)
                new_idx = new_idx.replace(tzinfo=time_zone).astimezone(utc)
                new_idx = data_set.after(new_idx.replace(tzinfo=None))
                if new_idx:
                    idx = new_idx
                    data = data_set[idx]
                    valid_data = True
                else:
                    valid_data = False
            elif command[0] == 'loop':
                loop_count = int(command[1])
                loop_start = tmplt.tell()
            elif command[0] == 'endloop':
                # rewind to loop start until count exhausted or data runs out
                loop_count -= 1
                if valid_data and loop_count > 0:
                    tmplt.seek(loop_start, 0)
            else:
                self.logger.error(
                    "Unknown processing directive: #%s#", parts[i])
                return
for o, a in opts: if o == '--help': usage() return 0 # check arguments if len(args) != 2: print >>sys.stderr, 'Error: 2 arguments required\n' print >>sys.stderr, __doc__.strip() return 2 # process arguments in_name = args[0] out_name = args[1] # open input in_file = open(in_name, 'r') # open data file store ds = DataStore.data_store(out_name) # get time to go forward to first_stored = ds.after(datetime.min) if first_stored == None: first_stored = datetime.max # copy any missing data last_date = None count = 0 for line in in_file: items = line.split(',') local_date = DataStore.safestrptime(items[2].strip(), '%Y-%m-%d %H:%M:%S') local_date = local_date.replace(tzinfo=TimeZone.Local) date = local_date.astimezone(TimeZone.utc) if last_date and date < last_date: date = date + timedelta(hours=1) print "Corrected DST ambiguity %s %s -> %s" % (
#!/usr/bin/env python from pywws import DataStore from pywws import Process from pywws import Tasks import os import sys try: data_dir = sys.argv[1] except: data_dir = "/apps/weather/weather_data/" # open configuration files params = DataStore.params(data_dir) status = DataStore.status(data_dir) # open data file stores raw_data = DataStore.data_store(data_dir) calib_data = DataStore.calib_store(data_dir) hourly_data = DataStore.hourly_store(data_dir) daily_data = DataStore.daily_store(data_dir) monthly_data = DataStore.monthly_store(data_dir) # Process data Process.Process(params,raw_data, calib_data, hourly_data, daily_data, monthly_data) # Do tasks (calculate aggregates, populate templates, draw graphs) Tasks.RegularTasks(params, status, raw_data, calib_data, hourly_data, daily_data, monthly_data).do_tasks()
['help', 'catchup', 'verbose']) except getopt.error, msg: print >> sys.stderr, 'Error: %s\n' % msg print >> sys.stderr, __usage__.strip() return 1 # process options catchup = False verbose = 0 for o, a in opts: if o == '-h' or o == '--help': print __usage__.strip() return 0 elif o == '-c' or o == '--catchup': catchup = True elif o == '-v' or o == '--verbose': verbose += 1 # check arguments if len(args) != 2: print >> sys.stderr, "Error: 2 arguments required" print >> sys.stderr, __usage__.strip() return 2 logger = ApplicationLogger(verbose) return ToService(DataStore.params(args[0]), DataStore.status(args[0]), DataStore.calib_store(args[0]), args[1]).Upload(catchup=catchup, ignore_last_update=not catchup) if __name__ == "__main__": sys.exit(main())
def catchup(self, last_date, last_ptr):
    """Read station records logged since the last stored reading into
    ``self.raw_data``, stepping backwards through the station's
    circular memory from ``last_ptr``.

    last_date -- timestamp of the newest record in station memory.
    last_ptr  -- station memory address of that record.
    """
    fixed_block = self.ws.get_fixed_block(unbuffered=True)
    # get time to go back to
    last_stored = self.raw_data.before(datetime.max)
    if not last_stored:
        last_stored = datetime.min
    if self.status.get('data', 'ptr'):
        # resume from the "hex_ptr,iso_date" pair saved on a previous run
        saved_ptr, saved_date = self.status.get('data', 'ptr').split(',')
        saved_ptr = int(saved_ptr, 16)
        saved_date = DataStore.safestrptime(saved_date)
        saved_date = self.raw_data.nearest(saved_date)
        # advance saved position to at least the newest stored record
        while saved_date < last_stored:
            saved_date = self.raw_data.after(saved_date + SECOND)
            saved_ptr = self.ws.inc_ptr(saved_ptr)
    else:
        saved_ptr = None
        saved_date = None
    # read_period is in minutes; allow half a period of slack
    last_stored += timedelta(seconds=fixed_block['read_period'] * 30)
    if last_date <= last_stored:
        # nothing to do
        return
    self.status.set('data', 'ptr',
                    '%06x,%s' % (last_ptr, last_date.isoformat(' ')))
    # data_count includes record currently being updated every 48 seconds
    max_count = fixed_block['data_count'] - 1
    count = 0
    duplicates = []
    # walk backwards through station memory until stored data is reached
    while last_date > last_stored and count < max_count:
        data = self.ws.get_data(last_ptr)
        if last_ptr == saved_ptr:
            if any(data[key] != self.raw_data[saved_date][key]
                   for key in ('hum_in', 'temp_in', 'hum_out', 'temp_out',
                               'abs_pressure', 'wind_ave', 'wind_gust',
                               'wind_dir', 'rain', 'status')):
                # pointer matches but data is different, so no duplicates
                duplicates = None
                saved_ptr = None
                saved_date = None
            else:
                # potential duplicate data
                duplicates.append(last_date)
                saved_date = self.raw_data.before(saved_date)
                saved_ptr = self.ws.dec_ptr(saved_ptr)
        if (data['delay'] is None or
                data['delay'] > max(fixed_block['read_period'] * 2, 35)):
            # implausible delay — skip this record, step back one period
            self.logger.error('invalid data at %04x, %s',
                              last_ptr, last_date.isoformat(' '))
            last_date -= timedelta(minutes=fixed_block['read_period'])
        else:
            self.raw_data[last_date] = data
            count += 1
            last_date -= timedelta(minutes=data['delay'])
        last_ptr = self.ws.dec_ptr(last_ptr)
    if duplicates:
        # remove records already stored on a previous run
        for d in duplicates:
            del self.raw_data[d]
        count -= len(duplicates)
    # warn about any remaining gap before previously stored data
    last_date = self.raw_data.nearest(last_date)
    next_date = self.raw_data.after(last_date + SECOND)
    if next_date:
        gap = (next_date - last_date).seconds // 60
        gap -= fixed_block['read_period']
        if gap > 0:
            self.logger.critical("%d minutes gap in data detected", gap)
    self.logger.info("%d catchup records", count)