def test_DST(self):
    """Check the start-of-day routines across a spring-forward DST boundary."""
    os.environ['TZ'] = 'America/Los_Angeles'
    # The TZ environment change only takes effect for time.mktime() once
    # tzset() has been called (POSIX-only; without it the previous zone may
    # still be in force and the assertions below can fail spuriously).
    time.tzset()
    # Test start-of-day routines around a DST boundary:
    start_ts = time.mktime((2007, 3, 11, 1, 0, 0, 0, 0, -1))
    start_of_day = startOfDay(start_ts)
    start2 = startOfArchiveDay(start_of_day)
    # Check that this is, in fact, a DST boundary:
    self.assertEqual(start_of_day,
                     int(time.mktime((2007, 3, 11, 0, 0, 0, 0, 0, -1))))
    self.assertEqual(start2,
                     int(time.mktime((2007, 3, 10, 0, 0, 0, 0, 0, -1))))
def test_DST(self):
    """Exercise the start-of-day routines around a DST boundary."""
    os.environ['TZ'] = 'America/Los_Angeles'
    # 2007-03-11 01:00 local time sits just before the US spring-forward jump.
    boundary_ts = time.mktime((2007, 3, 11, 1, 0, 0, 0, 0, -1))
    day_start = startOfDay(boundary_ts)
    prev_day_start = startOfArchiveDay(day_start)
    # Both results should land on local midnight of the expected days,
    # confirming this really is a DST boundary:
    expected_day = int(time.mktime((2007, 3, 11, 0, 0, 0, 0, 0, -1)))
    expected_prev_day = int(time.mktime((2007, 3, 10, 0, 0, 0, 0, 0, -1)))
    self.assertEqual(day_start, expected_day)
    self.assertEqual(prev_day_start, expected_prev_day)
def get_record(self, record, dbmanager):
    """Augment record data with additional data derived from the archive.

    PVOutput requires 'energy' in each status record to be a cumulative
    value (either day or lifetime). Cumulative values are not normally
    included in a weewx record/packet so we need to calculate the data
    from the archive. In this case we will use the day cumulative total.
    Returns results in the same units as the record.

    Input:
        record: A weewx archive record containing the data to be added.
                Dictionary.
        dbmanager: Manager object for the database being used.

    Returns:
        A dictionary of values.
    """
    _time_ts = record['dateTime']
    _sod_ts = startOfDay(_time_ts)

    # Make a copy of the record, then start adding to it:
    _datadict = dict(record)

    # If the type 'energy' does not appear in the archive schema, or the
    # database is locked, an exception will be raised. Be prepared to catch
    # it.
    try:
        if 'dayEnergy' not in _datadict:
            _result = dbmanager.getSql(
                "SELECT SUM(energy), MIN(usUnits), MAX(usUnits) FROM %s "
                "WHERE dateTime>? AND dateTime<=?" % dbmanager.table_name,
                (_sod_ts, _time_ts))
            if _result is not None and _result[0] is not None:
                if not _result[1] == _result[2] == record['usUnits']:
                    raise ValueError(
                        "Inconsistent units (%s vs %s vs %s) when querying for dayEnergy" %
                        (_result[1], _result[2], record['usUnits']))
                _datadict['dayEnergy'] = _result[0]
            else:
                _datadict['dayEnergy'] = None
    # Python 3 compatible exception syntax; the original "except X, e" form
    # is a SyntaxError on Python 3.
    except weedb.OperationalError as e:
        syslog.syslog(
            syslog.LOG_DEBUG,
            "pvoutput: %s: Database OperationalError '%s'" %
            (self.protocol_name, e))
    # The documented contract promises a dictionary; the original fell off
    # the end and implicitly returned None.
    return _datadict
def __init__(self, config_dict, config_path, wu_config_dict,
             import_config_path, options):
    """Initialise the Weather Underground import source.

    Saves config/command-line state, validates the mandatory WU station ID
    and API key, sets up the per-day import counters and reports the
    options that will be used for the import.

    Input:
        config_dict: WeeWX config dictionary.
        config_path: path to the WeeWX config file (used in messages only).
        wu_config_dict: the WU import config stanza.
        import_config_path: path to the import config file.
        options: parsed command-line options.

    Raises:
        weewx.ViolatedPrecondition: if 'station_id' or 'api_key' is missing
            from the import config.
    """
    # call our parents __init__
    super(WUSource, self).__init__(config_dict, wu_config_dict, options)

    # save our import config path
    self.import_config_path = import_config_path
    # save our import config dict
    self.wu_config_dict = wu_config_dict

    # get our WU station ID; it is mandatory
    try:
        self.station_id = wu_config_dict['station_id']
    except KeyError:
        _msg = "Weather Underground station ID not specified in '%s'." % import_config_path
        raise weewx.ViolatedPrecondition(_msg)

    # get our WU API key; it is mandatory
    try:
        self.api_key = wu_config_dict['api_key']
    except KeyError:
        _msg = "Weather Underground API key not specified in '%s'." % import_config_path
        raise weewx.ViolatedPrecondition(_msg)

    # wind dir bounds; fall back to the full 0-360 range if the config
    # value is missing, malformed or out of order
    _wind_direction = option_as_list(
        wu_config_dict.get('wind_direction', '0,360'))
    try:
        if float(_wind_direction[0]) <= float(_wind_direction[1]):
            self.wind_dir = [
                float(_wind_direction[0]),
                float(_wind_direction[1])
            ]
        else:
            self.wind_dir = [0, 360]
    except (IndexError, TypeError):
        self.wind_dir = [0, 360]

    # some properties we know because of the format of the returned WU data
    # WU returns a fixed format date-time string
    self.raw_datetime_format = '%Y-%m-%d %H:%M:%S'

    # WU only provides hourly rainfall and a daily cumulative rainfall.
    # We use the latter so force 'cumulative' for rain.
    self.rain = 'cumulative'

    # initialise our import field-to-WeeWX archive field map
    self.map = None

    # For a WU import we might have to import multiple days but we can only
    # get one day at a time from WU. So our start and end properties
    # (counters) are datetime objects and our increment is a timedelta.
    # Get datetime objects for any date or date range specified on the
    # command line, if there wasn't one then default to today.
    self.start = dt.fromtimestamp(startOfDay(self.first_ts))
    self.end = dt.fromtimestamp(startOfDay(self.last_ts))
    # set our increment
    self.increment = datetime.timedelta(days=1)
    # property holding the current period being processed
    self.period = None

    # tell the user/log what we intend to do
    _msg = "Observation history for Weather Underground station '%s' will be imported." % self.station_id
    print(_msg)
    log.info(_msg)
    _msg = "The following options will be used:"
    # verbose messages go to the console only when --verbose was given,
    # but are always written to the debug log
    if self.verbose:
        print(_msg)
    log.debug(_msg)
    _msg = " config=%s, import-config=%s" % (config_path, self.import_config_path)
    if self.verbose:
        print(_msg)
    log.debug(_msg)
    if options.date:
        _msg = " station=%s, date=%s" % (self.station_id, options.date)
    else:
        # we must have --from and --to
        _msg = " station=%s, from=%s, to=%s" % (
            self.station_id, options.date_from, options.date_to)
    if self.verbose:
        print(_msg)
    log.debug(_msg)
    # obfuscate all but the last four characters of the API key before
    # displaying/logging it
    _obf_api_key_msg = '='.join(
        [' apiKey', '*' * (len(self.api_key) - 4) + self.api_key[-4:]])
    if self.verbose:
        print(_obf_api_key_msg)
    log.debug(_obf_api_key_msg)
    _msg = " dry-run=%s, calc_missing=%s, ignore_invalid_data=%s" % (
        self.dry_run, self.calc_missing, self.ignore_invalid_data)
    if self.verbose:
        print(_msg)
    log.debug(_msg)
    _msg = " tranche=%s, interval=%s, wind_direction=%s" % (
        self.tranche, self.interval, self.wind_dir)
    if self.verbose:
        print(_msg)
    log.debug(_msg)
    _msg = "Using database binding '%s', which is bound to database '%s'" % (
        self.db_binding_wx, self.dbm.database_name)
    print(_msg)
    log.info(_msg)
    _msg = "Destination table '%s' unit system is '%#04x' (%s)." % (
        self.dbm.table_name, self.archive_unit_sys,
        unit_nicknames[self.archive_unit_sys])
    print(_msg)
    log.info(_msg)
    if self.calc_missing:
        print("Missing derived observations will be calculated.")
    if options.date or options.date_from:
        print("Observations timestamped after %s and up to and" % timestamp_to_string(self.first_ts))
        print("including %s will be imported." % timestamp_to_string(self.last_ts))
    if self.dry_run:
        print("This is a dry run, imported data will not be saved to archive.")
def do_fix(self, np_ts):
    """Apply the interval weighting fix to the daily summaries.

    Daily summaries are re-weighted in tranches of self.trans_days days,
    each tranche in its own database transaction. Progress is checkpointed
    via the 'lastWeightPatch' metadata entry so an interrupted run can be
    resumed from np_ts.

    Input:
        np_ts: timestamp of the next day to be weighted, or None if no
               weighting has been done yet.
    """
    # do we need to weight? Only weight if next day to weight ts is None or
    # there are records in the archive from that day
    if np_ts is None or self.dbm.last_timestamp > np_ts:
        t1 = time.time()
        log.info("intervalweighting: Applying %s..." % self.name)
        _days = 0
        # Get the earliest daily summary ts and the obs that it came from
        first_ts, obs = self.first_summary()
        # Get the start and stop ts for our first transaction days
        _tr_start_ts = np_ts if np_ts is not None else first_ts
        _tr_stop_dt = datetime.datetime.fromtimestamp(_tr_start_ts) \
            + datetime.timedelta(days=self.trans_days)
        _tr_stop_ts = time.mktime(_tr_stop_dt.timetuple())
        # don't run past the last archive record
        _tr_stop_ts = min(startOfDay(self.dbm.last_timestamp), _tr_stop_ts)
        last_start = None
        while True:
            # one transaction per tranche so an interrupted run loses at
            # most trans_days of work
            with weedb.Transaction(self.dbm.connection) as _cursor:
                for _day_span in self.genSummaryDaySpans(_tr_start_ts,
                                                         _tr_stop_ts, obs):
                    # Get the weight to be applied for the day
                    _weight = self.get_interval(_day_span) * 60
                    # Get the current day stats in an accumulator
                    _day_accum = self.dbm._get_day_summary(_day_span.start)
                    # Set the unit system for the accumulator
                    _day_accum.unit_system = self.dbm.std_unit_system
                    # Weight the necessary accumulator stats, use a
                    # try..except in case something goes wrong
                    last_key = None
                    try:
                        for _day_key in self.dbm.daykeys:
                            last_key = _day_key
                            _day_accum[_day_key].wsum *= _weight
                            _day_accum[_day_key].sumtime *= _weight
                            # Do we have a vecstats accumulator?
                            if hasattr(_day_accum[_day_key], 'wsquaresum'):
                                # Yes, so update the weighted vector stats
                                _day_accum[_day_key].wsquaresum *= _weight
                                _day_accum[_day_key].xsum *= _weight
                                _day_accum[_day_key].ysum *= _weight
                                _day_accum[_day_key].dirsumtime *= _weight
                    except Exception as e:
                        # log the exception and re-raise it
                        log.info("intervalweighting: Interval weighting of '%s' daily summary "
                                 "for %s failed: %s"
                                 % (last_key,
                                    timestamp_to_string(_day_span.start,
                                                        format_str="%Y-%m-%d"),
                                    e))
                        raise
                    # Update the daily summary with the weighted accumulator
                    if not self.dry_run:
                        self.dbm._set_day_summary(_day_accum, None, _cursor)
                    _days += 1
                    # Save the ts of the weighted daily summary as the
                    # 'lastWeightPatch' value in the archive_day__metadata
                    # table
                    if not self.dry_run:
                        self.dbm._write_metadata('lastWeightPatch',
                                                 _day_span.start, _cursor)
                    # Give the user some information on progress
                    if _days % 50 == 0:
                        self._progress(_days, _day_span.start)
                    last_start = _day_span.start
            # Setup our next tranche
            # Have we reached the end, if so break to finish
            if _tr_stop_ts >= startOfDay(self.dbm.last_timestamp):
                break
            # More to process so set our start and stop for the next
            # transaction
            _tr_start_dt = datetime.datetime.fromtimestamp(_tr_stop_ts) \
                + datetime.timedelta(days=1)
            _tr_start_ts = time.mktime(_tr_start_dt.timetuple())
            _tr_stop_dt = datetime.datetime.fromtimestamp(_tr_start_ts) \
                + datetime.timedelta(days=self.trans_days)
            _tr_stop_ts = time.mktime(_tr_stop_dt.timetuple())
            # NOTE(review): the first tranche clamps with
            # startOfDay(self.dbm.last_timestamp) but subsequent tranches
            # clamp with the raw last_timestamp — confirm this asymmetry is
            # intentional.
            _tr_stop_ts = min(self.dbm.last_timestamp, _tr_stop_ts)
        # We have finished. Get rid of the no longer needed lastWeightPatch
        with weedb.Transaction(self.dbm.connection) as _cursor:
            _cursor.execute("DELETE FROM %s_day__metadata WHERE name=?"
                            % self.dbm.table_name, ('lastWeightPatch',))
        # Give the user some final information on progress,
        # mainly so the total tallies with the log
        self._progress(_days, last_start)
        print(file=sys.stdout)
        tdiff = time.time() - t1
        # We are done so log and inform the user
        log.info("intervalweighting: Calculated weighting "
                 "for %s days in %0.2f seconds." % (_days, tdiff))
        if self.dry_run:
            log.info("intervalweighting: "
                     "This was a dry run. %s was not applied." % self.name)
    else:
        # we didn't need to weight so inform the user
        log.info("intervalweighting: %s has already been applied." % self.name)
def __init__(self, config_dict, config_path, wu_config_dict, import_config_path, options, log): # call our parents __init__ super(WUSource, self).__init__(config_dict, wu_config_dict, options, log) # save our import config path self.import_config_path = import_config_path # save our import config dict self.wu_config_dict = wu_config_dict # get our WU station ID try: self.station_id = wu_config_dict['station_id'] except KeyError: raise weewx.ViolatedPrecondition("Weather Underground station ID not specified in '%s'." % import_config_path) # wind dir bounds _wind_direction = option_as_list(wu_config_dict.get('wind_direction', '0,360')) try: if float(_wind_direction[0]) <= float(_wind_direction[1]): self.wind_dir = [float(_wind_direction[0]), float(_wind_direction[1])] else: self.wind_dir = [0, 360] except: self.wind_dir = [0, 360] # some properties we know because of the format of the returned WU data # WU returns a fixed format date-time string self.raw_datetime_format = '%Y-%m-%d %H:%M:%S' # WU only provides hourly rainfall and a daily cumulative rainfall. # We use the latter so force 'cumulative' for rain. self.rain = 'cumulative' # initialise our import field-to-weewx archive field map self.map = None # For a WU import we might have to import multiple days but we can only # get one day at a time from WU. So our start and end properties # (counters) are datetime objects and our increment is a timedelta. # Get datetime objects for any date or date range specified on the # command line, if there wasn't one then default to today. self.start = dt.fromtimestamp(startOfDay(self.first_ts)) self.end = dt.fromtimestamp(startOfDay(self.last_ts)) # set our increment self.increment = datetime.timedelta(days=1) # tell the user/log what we intend to do _msg = "Observation history for Weather Underground station '%s' will be imported." 
% self.station_id self.wlog.printlog(syslog.LOG_INFO, _msg) _msg = "The following options will be used:" self.wlog.verboselog(syslog.LOG_DEBUG, _msg) _msg = " config=%s, import-config=%s" % (config_path, self.import_config_path) self.wlog.verboselog(syslog.LOG_DEBUG, _msg) if options.date: _msg = " station=%s, date=%s" % (self.station_id, options.date) else: # we must have --from and --to _msg = " station=%s, from=%s, to=%s" % (self.station_id, options.date_from, options.date_to) self.wlog.verboselog(syslog.LOG_DEBUG, _msg) _msg = " dry-run=%s, calc-missing=%s" % (self.dry_run, self.calc_missing) self.wlog.verboselog(syslog.LOG_DEBUG, _msg) _msg = " tranche=%s, interval=%s, wind_direction=%s" % (self.tranche, self.interval, self.wind_dir) self.wlog.verboselog(syslog.LOG_DEBUG, _msg) _msg = "Using database binding '%s', which is bound to database '%s'" % (self.db_binding_wx, self.dbm.database_name) self.wlog.printlog(syslog.LOG_INFO, _msg) _msg = "Destination table '%s' unit system is '%#04x' (%s)." % (self.dbm.table_name, self.archive_unit_sys, unit_nicknames[self.archive_unit_sys]) self.wlog.printlog(syslog.LOG_INFO, _msg) if self.calc_missing: print "Missing derived observations will be calculated." if options.date or options.date_from: print "Observations timestamped after %s and up to and" % (timestamp_to_string(self.first_ts), ) print "including %s will be imported." % (timestamp_to_string(self.last_ts), ) if self.dry_run: print "This is a dry run, imported data will not be saved to archive."
def __init__(self, config_dict, config_path, wu_config_dict, import_config_path, options, log): # call our parents __init__ super(WUSource, self).__init__(config_dict, wu_config_dict, options, log) # save our import config path self.import_config_path = import_config_path # save our import config dict self.wu_config_dict = wu_config_dict # get our WU station ID try: self.station_id = wu_config_dict['station_id'] except KeyError: raise weewx.ViolatedPrecondition( "Weather Underground station ID not specified in '%s'." % import_config_path) # wind dir bounds _wind_direction = option_as_list( wu_config_dict.get('wind_direction', '0,360')) try: if float(_wind_direction[0]) <= float(_wind_direction[1]): self.wind_dir = [ float(_wind_direction[0]), float(_wind_direction[1]) ] else: self.wind_dir = [0, 360] except: self.wind_dir = [0, 360] # some properties we know because of the format of the returned WU data # WU returns a fixed format date-time string self.raw_datetime_format = '%Y-%m-%d %H:%M:%S' # WU only provides hourly rainfall and a daily cumulative rainfall. # We use the latter so force 'cumulative' for rain. self.rain = 'cumulative' # initialise our import field-to-weewx archive field map self.map = None # For a WU import we might have to import multiple days but we can only # get one day at a time from WU. So our start and end properties # (counters) are datetime objects and our increment is a timedelta. # Get datetime objects for any date or date range specified on the # command line, if there wasn't one then default to today. self.start = dt.fromtimestamp(startOfDay(self.first_ts)) self.end = dt.fromtimestamp(startOfDay(self.last_ts)) # set our increment self.increment = datetime.timedelta(days=1) # tell the user/log what we intend to do _msg = "Observation history for Weather Underground station '%s' will be imported." 
% self.station_id self.wlog.printlog(syslog.LOG_INFO, _msg) _msg = "The following options will be used:" self.wlog.verboselog(syslog.LOG_DEBUG, _msg) _msg = " config=%s, import-config=%s" % (config_path, self.import_config_path) self.wlog.verboselog(syslog.LOG_DEBUG, _msg) _msg = " station=%s, date=%s" % (self.station_id, options.date) self.wlog.verboselog(syslog.LOG_DEBUG, _msg) _msg = " dry-run=%s, calc-missing=%s" % (self.dry_run, self.calc_missing) self.wlog.verboselog(syslog.LOG_DEBUG, _msg) _msg = " tranche=%s, interval=%s, wind_direction=%s" % ( self.tranche, self.interval, self.wind_dir) self.wlog.verboselog(syslog.LOG_DEBUG, _msg) _msg = "Using database binding '%s', which is bound to database '%s'" % ( self.db_binding_wx, self.dbm.database_name) self.wlog.printlog(syslog.LOG_INFO, _msg) _msg = "Destination table '%s' unit system is '%#04x' (%s)." % ( self.dbm.table_name, self.archive_unit_sys, unit_nicknames[self.archive_unit_sys]) self.wlog.printlog(syslog.LOG_INFO, _msg) if self.calc_missing: print "Missing derived observations will be calculated." if options.date: print "Observations timestamped after %s and up to and" % ( timestamp_to_string(self.first_ts), ) print "including %s will be imported." % (timestamp_to_string( self.last_ts), ) if self.dry_run: print "This is a dry run, imported data will not be saved to archive."
def do_fix(self, np_ts):
    """Apply the interval weighting fix to the daily summaries.

    Daily summaries are re-weighted in tranches of self.trans_days days,
    each tranche in its own database transaction. Progress is checkpointed
    via the 'lastWeightPatch' metadata entry so an interrupted run can be
    resumed from np_ts.

    Input:
        np_ts: timestamp of the next day to be weighted, or None if no
               weighting has been done yet.
    """
    # do we need to weight? Only weight if next day to weight ts is None or
    # there are records in the archive from that day
    if np_ts is None or self.dbm.last_timestamp > np_ts:
        t1 = time.time()
        syslog.syslog(syslog.LOG_INFO,
                      "intervalweighting: Applying %s..." % self.name)
        _days = 0
        # Get the earliest daily summary ts and the obs that it came from
        first_ts, obs = self.first_summary()
        # Get the start and stop ts for our first transaction days
        _tr_start_ts = np_ts if np_ts is not None else first_ts
        _tr_stop_dt = datetime.datetime.fromtimestamp(_tr_start_ts) + datetime.timedelta(days=self.trans_days)
        _tr_stop_ts = time.mktime(_tr_stop_dt.timetuple())
        # don't run past the last archive record
        _tr_stop_ts = min(startOfDay(self.dbm.last_timestamp), _tr_stop_ts)
        last_start = None
        while True:
            # one transaction per tranche so an interrupted run loses at
            # most trans_days of work
            with weedb.Transaction(self.dbm.connection) as _cursor:
                for _day_span in self.genSummaryDaySpans(_tr_start_ts, _tr_stop_ts, obs):
                    # Get the weight to be applied for the day
                    _weight = self.get_interval(_day_span) * 60
                    # Get the current day stats in an accumulator
                    _day_accum = self.dbm._get_day_summary(_day_span.start)
                    # Set the unit system for the accumulator
                    _day_accum.unit_system = self.dbm.std_unit_system
                    # Weight the necessary accumulator stats, use a
                    # try..except in case something goes wrong
                    last_key = None
                    try:
                        for _day_key in self.dbm.daykeys:
                            last_key = _day_key
                            _day_accum[_day_key].wsum *= _weight
                            _day_accum[_day_key].sumtime *= _weight
                            # Do we have a vecstats accumulator?
                            if hasattr(_day_accum[_day_key], 'wsquaresum'):
                                # Yes, so update the weighted vector stats
                                _day_accum[_day_key].wsquaresum *= _weight
                                _day_accum[_day_key].xsum *= _weight
                                _day_accum[_day_key].ysum *= _weight
                                _day_accum[_day_key].dirsumtime *= _weight
                    # "except Exception as e" works on Python 2.6+ and 3;
                    # the original "except Exception, e" is 2.x-only.
                    except Exception as e:
                        # log the exception and re-raise it
                        syslog.syslog(syslog.LOG_INFO,
                                      "intervalweighting: Interval weighting of '%s' daily summary "
                                      "for %s failed: %s"
                                      % (last_key,
                                         timestamp_to_string(_day_span.start,
                                                             format="%Y-%m-%d"),
                                         e))
                        raise
                    # Update the daily summary with the weighted accumulator
                    if not self.dry_run:
                        self.dbm._set_day_summary(_day_accum, None, _cursor)
                    _days += 1
                    # Save the ts of the weighted daily summary as the
                    # 'lastWeightPatch' value in the archive_day__metadata
                    # table
                    if not self.dry_run:
                        self.dbm._write_metadata('lastWeightPatch',
                                                 _day_span.start, _cursor)
                    # Give the user some information on progress
                    if _days % 50 == 0:
                        self._progress(_days, _day_span.start)
                    last_start = _day_span.start
            # Setup our next tranche
            # Have we reached the end, if so break to finish
            if _tr_stop_ts >= startOfDay(self.dbm.last_timestamp):
                break
            # More to process so set our start and stop for the next
            # transaction
            _tr_start_dt = datetime.datetime.fromtimestamp(_tr_stop_ts) + datetime.timedelta(days=1)
            _tr_start_ts = time.mktime(_tr_start_dt.timetuple())
            _tr_stop_dt = datetime.datetime.fromtimestamp(_tr_start_ts) + datetime.timedelta(days=self.trans_days)
            _tr_stop_ts = time.mktime(_tr_stop_dt.timetuple())
            _tr_stop_ts = min(self.dbm.last_timestamp, _tr_stop_ts)
        # We have finished. Get rid of the no longer needed lastWeightPatch
        with weedb.Transaction(self.dbm.connection) as _cursor:
            _cursor.execute("DELETE FROM %s_day__metadata WHERE name=?"
                            % self.dbm.table_name, ('lastWeightPatch',))
        # Give the user some final information on progress,
        # mainly so the total tallies with the log
        self._progress(_days, last_start)
        print >>sys.stdout
        tdiff = time.time() - t1
        # We are done so log and inform the user
        syslog.syslog(syslog.LOG_INFO,
                      "intervalweighting: calculated weighting for %s days in %0.2f seconds." % (_days, tdiff))
        if self.dry_run:
            syslog.syslog(syslog.LOG_INFO,
                          "intervalweighting: This was a dry run. %s was not applied." % self.name)