def Reprocess(data_dir, update):
    logger = logging.getLogger('pywws.Reprocess')
    raw_data = DataStore.data_store(data_dir)
    if update:
        # update old data to copy high nibble of wind_dir to status
        logger.warning("Updating status to include extra bits from wind_dir")
        count = 0
        for data in raw_data[:]:
            count += 1
            idx = data['idx']
            if count % 10000 == 0:
                logger.info("update: %s", idx.isoformat(' '))
            elif count % 500 == 0:
                logger.debug("update: %s", idx.isoformat(' '))
            if data['wind_dir'] is not None:
                if data['wind_dir'] >= 16:
                    data['status'] |= (data['wind_dir'] & 0xF0) << 4
                    data['wind_dir'] &= 0x0F
                    raw_data[idx] = data
                if data['status'] & 0x800:
                    data['wind_dir'] = None
                    raw_data[idx] = data
        raw_data.flush()
    # delete old format summary files
    logger.warning('Deleting old summaries')
    for summary in ['calib', 'hourly', 'daily', 'monthly']:
        for root, dirs, files in os.walk(
                os.path.join(data_dir, summary), topdown=False):
            logger.info(root)
            for file in files:
                os.unlink(os.path.join(root, file))
            os.rmdir(root)
    # create data summaries
    logger.warning('Generating hourly and daily summaries')
    params = DataStore.params(data_dir)
    calib_data = DataStore.calib_store(data_dir)
    hourly_data = DataStore.hourly_store(data_dir)
    daily_data = DataStore.daily_store(data_dir)
    monthly_data = DataStore.monthly_store(data_dir)
    Process.Process(params, raw_data, calib_data,
                    hourly_data, daily_data, monthly_data)
    return 0
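
# A minimal sketch (not part of pywws) of the bit manipulation the update
# pass above performs: the high nibble of the raw wind_dir byte (bits 4-7)
# is moved into bits 8-11 of status, leaving wind_dir holding only the 0-15
# direction index; status bit 0x800 then marks an invalid wind direction.
# The record values here are illustrative only, not real station data.
def _demo_wind_dir_update():
    data = {'wind_dir': 0x8A, 'status': 0x000}        # hypothetical raw record
    if data['wind_dir'] >= 16:
        data['status'] |= (data['wind_dir'] & 0xF0) << 4  # 0x80 << 4 == 0x800
        data['wind_dir'] &= 0x0F                          # direction index 10
    if data['status'] & 0x800:
        data['wind_dir'] = None                       # direction flagged invalid
    assert data == {'wind_dir': None, 'status': 0x800}
    return data
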
def Hourly(data_dir):
    # get file locations
    params = DataStore.params(data_dir)
    # localise application
    Localisation.SetApplicationLanguage(params)
    # open data file stores
    raw_data = DataStore.data_store(data_dir)
    calib_data = DataStore.calib_store(data_dir)
    hourly_data = DataStore.hourly_store(data_dir)
    daily_data = DataStore.daily_store(data_dir)
    monthly_data = DataStore.monthly_store(data_dir)
    # get weather station data
    # LogData.LogData(params, raw_data)
    # do the processing
    Process.Process(params, raw_data, calib_data,
                    hourly_data, daily_data, monthly_data)
    # do tasks
    if not Tasks.RegularTasks(params, calib_data, hourly_data,
                              daily_data, monthly_data).do_tasks():
        return 1
    return 0
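
# A minimal command-line driver for Hourly() above -- a sketch only, not
# pywws's actual main(); it assumes the single argument is the pywws data
# directory and that the module-level imports used by Hourly() are in scope.
if __name__ == "__main__":
    import sys
    sys.exit(Hourly(sys.argv[1]))
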
def Reprocess(data_dir):
    # delete old format summary files
    print('Deleting old summaries')
    for summary in ['calib', 'hourly', 'daily', 'monthly']:
        for root, dirs, files in os.walk(
                os.path.join(data_dir, summary), topdown=False):
            print(root)
            for file in files:
                os.unlink(os.path.join(root, file))
            os.rmdir(root)
    # create data summaries
    print('Generating hourly and daily summaries')
    params = DataStore.params(data_dir)
    raw_data = DataStore.data_store(data_dir)
    calib_data = DataStore.calib_store(data_dir)
    hourly_data = DataStore.hourly_store(data_dir)
    daily_data = DataStore.daily_store(data_dir)
    monthly_data = DataStore.monthly_store(data_dir)
    Process.Process(params, raw_data, calib_data,
                    hourly_data, daily_data, monthly_data)
    return 0
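
# The os.walk(..., topdown=False) loop above removes a tree bottom-up:
# files first, then each emptied directory. The standard library can do the
# same in one call; a sketch of an equivalent (an alternative, not what the
# code above uses):
import shutil

def delete_old_summaries(data_dir):
    for summary in ['calib', 'hourly', 'daily', 'monthly']:
        # ignore_errors also tolerates a summary directory that never existed
        shutil.rmtree(os.path.join(data_dir, summary), ignore_errors=True)
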
def LiveLog(data_dir):
    logger = logging.getLogger('pywws.LiveLog')
    params = DataStore.params(data_dir)
    status = DataStore.status(data_dir)
    # localise application
    Localisation.SetApplicationLanguage(params)
    # open data file stores
    raw_data = DataStore.data_store(data_dir)
    calib_data = DataStore.calib_store(data_dir)
    hourly_data = DataStore.hourly_store(data_dir)
    daily_data = DataStore.daily_store(data_dir)
    monthly_data = DataStore.monthly_store(data_dir)
    # create a DataLogger object
    datalogger = DataLogger(params, status, raw_data)
    # create a RegularTasks object
    asynch = eval(params.get('config', 'asynchronous', 'False'))
    tasks = Tasks.RegularTasks(
        params, status, raw_data, calib_data,
        hourly_data, daily_data, monthly_data, asynch=asynch)
    # get live data
    try:
        for data, logged in datalogger.live_data(
                logged_only=(not tasks.has_live_tasks())):
            if logged:
                # process new data
                Process.Process(params, raw_data, calib_data,
                                hourly_data, daily_data, monthly_data)
                # do tasks
                tasks.do_tasks()
            else:
                tasks.do_live(data)
    except Exception as ex:
        logger.exception(ex)
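
# Reading the 'asynchronous' flag with eval() executes arbitrary text from
# the configuration file. A safer sketch (an alternative, not what pywws
# does) using ast.literal_eval, which accepts only Python literals such as
# True/False; the helper name and default are assumptions for illustration.
import ast

def get_bool_option(params, section, option, default='False'):
    return bool(ast.literal_eval(params.get(section, option, default)))
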
def LiveLog(data_dir):
    logger = logging.getLogger('pywws.LiveLog')
    params = DataStore.params(data_dir)
    # localise application
    Localisation.SetApplicationLanguage(params)
    # connect to weather station
    ws_type = params.get('config', 'ws type')
    if ws_type:
        params._config.remove_option('config', 'ws type')
        params.set('fixed', 'ws type', ws_type)
    ws_type = params.get('fixed', 'ws type', '1080')
    ws = WeatherStation.weather_station(ws_type=ws_type)
    fixed_block = CheckFixedBlock(ws, params, logger)
    if not fixed_block:
        logger.error("Invalid data from weather station")
        return 3
    # open data file stores
    raw_data = DataStore.data_store(data_dir)
    calib_data = DataStore.calib_store(data_dir)
    hourly_data = DataStore.hourly_store(data_dir)
    daily_data = DataStore.daily_store(data_dir)
    monthly_data = DataStore.monthly_store(data_dir)
    # create a RegularTasks object
    tasks = Tasks.RegularTasks(
        params, calib_data, hourly_data, daily_data, monthly_data)
    # get time of last logged data
    two_minutes = timedelta(minutes=2)
    last_stored = raw_data.before(datetime.max)
    if last_stored is None:
        last_stored = datetime.min
    if datetime.utcnow() < last_stored:
        raise ValueError('Computer time is earlier than last stored data')
    last_stored += two_minutes
    # get live data
    hour = timedelta(hours=1)
    next_hour = datetime.utcnow().replace(
        minute=0, second=0, microsecond=0) + hour
    next_ptr = None
    for data, ptr, logged in ws.live_data(
            logged_only=(not tasks.has_live_tasks())):
        now = data['idx']
        if logged:
            if ptr == next_ptr:
                # data is contiguous with last logged value
                raw_data[now] = data
            else:
                # catch up missing data
                Catchup(ws, logger, raw_data, now, ptr)
            next_ptr = ws.inc_ptr(ptr)
            # process new data
            Process.Process(params, raw_data, calib_data,
                            hourly_data, daily_data, monthly_data)
            # do tasks
            tasks.do_tasks()
            if now >= next_hour:
                next_hour += hour
                fixed_block = CheckFixedBlock(ws, params, logger)
                if not fixed_block:
                    logger.error("Invalid data from weather station")
                    return 3
                params.flush()
        else:
            tasks.do_live(data)
    return 0
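
# The ptr == next_ptr test above checks that a newly logged record landed in
# the station memory slot immediately after the last one read; any other
# value means records were missed and Catchup() must fetch them. A sketch of
# the circular-buffer arithmetic ws.inc_ptr() is expected to perform -- the
# address range and record size here are assumptions, not taken from pywws:
def inc_ptr_sketch(ptr, reading_len=16, data_start=0x100, data_end=0x10000):
    ptr += reading_len        # advance by one fixed-size history record
    if ptr >= data_end:
        ptr = data_start      # wrap around to the start of the history area
    return ptr
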