Code example #1
        def time_since(expression):
            """Time since a sql expression evaluted true"""
            db_manager = db_lookup()
            sql_stmt = "SELECT dateTime FROM %s WHERE %s AND dateTime <= %d ORDER BY dateTime DESC LIMIT 1" \
                       % (db_manager.table_name, expression, timespan.stop)

            row = db_manager.getSql(sql_stmt)
            val = timespan.stop - row[0] if row else None
            vt = ValueTuple(val, 'second', 'group_deltatime')
            vh = ValueHelper(vt,
                             context="long_delta",
                             formatter=self.generator.formatter,
                             converter=self.generator.converter)
            return vh
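
The helper above issues a single query against the WeeWX archive: the newest record no later than the end of the timespan that satisfies the caller's SQL expression, with the result wrapped in a ValueHelper as a delta time. A minimal, stdlib-only sketch of just that query shape (the schema below is a made-up stand-in, not a real WeeWX database):

    import sqlite3
    import time

    # Tiny stand-in for a WeeWX archive table; names mirror the examples on this page.
    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE archive (dateTime INTEGER PRIMARY KEY, outTemp REAL)")
    now = int(time.time())
    # Hourly records; temperatures drop below zero a few hours back.
    conn.executemany("INSERT INTO archive VALUES (?, ?)",
                     [(now - 3600 * h, 5.0 - 2.0 * h) for h in range(6)])

    # Same query shape as time_since(): newest row satisfying the expression,
    # no later than the end of the timespan.
    expression = "outTemp < 0"
    stop = now
    row = conn.execute("SELECT dateTime FROM archive WHERE %s AND dateTime <= ? "
                       "ORDER BY dateTime DESC LIMIT 1" % expression, (stop,)).fetchone()
    seconds_since = stop - row[0] if row else None
    print(seconds_since)   # 10800 with the sample data: three hours since outTemp < 0
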
Code example #2
File: test_cheetah.py Project: timtsm/weewx
 def test_ValueHelper(self):
     val_vh = ValueHelper(ValueTuple(20.0, 'degree_C', 'group_temperature'),
                          formatter=weewx.units.get_default_formatter())
     au = weewx.cheetahgenerator.AssureUnicode()
     filtered_value = au.filter(val_vh)
     self.assertEqual(filtered_value, u"20.0°C")
Code example #3
 def test_ValueHelper(self):
     val_vh = ValueHelper(ValueTuple(20.0, 'degree_C', 'group_temperature'))
     au = weewx.cheetahgenerator.AssureUnicode()
     filtered_value = au.filter(val_vh)
     self.assertEqual(filtered_value, u"68.0°F")
Code example #4
File: xfrostday.py Project: hoetzgit/hesweewx
    def get_extension_list(self, timespan, db_lookup):
        """Returns various tags related to longest periods of outTemp min<0 days.

        Parameters:
          timespan: An instance of weeutil.weeutil.TimeSpan. This will
                    hold the start and stop times of the domain of
                    valid times.

          db_lookup: This is a function that, given a data binding
                     as its only parameter, will return a database manager
                     object.

        Returns:
          lastfrost_day                 last day of the year with outTemp MIN < 0
          lastfrost_delta_time          days, hours, mins from lastfrost_day to now
          lasteis_day                   last day of the year with outTemp MAX < 0
          lasteis_delta_time            days, hours, mins from lasteis_day to now

          year_frost_minE_days:         Length of longest run of consecutive min<0
                                        days in current year
          year_frost_minE_days_time:    End dateTime of longest run of
                                        consecutive min<0 days in current year
          year_frost_minS_days_time:    Start dateTime of longest run of
                                        consecutive min<0 days in current year
          alltime_frost_minE_days:      Length of alltime longest run of
                                        consecutive min<0 days
          alltime_frost_minE_days_time: End dateTime of alltime longest run of
                                        consecutive min<0 days
          alltime_frost_minS_days_time: Start dateTime of alltime longest run of
                                        consecutive min<0 days

          year_frost_maxE_days:         Length of longest run of consecutive max<0
                                        days in current year
          year_frost_maxE_days_time:    End dateTime of longest run of
                                        consecutive max<0 days in current year
          year_frost_maxS_days_time:    Start dateTime of longest run of
                                        consecutive max<0 days in current year
          alltime_frost_maxE_days:      Length of alltime longest run of
                                        consecutive max<0 days
          alltime_frost_maxE_days_time: End dateTime of alltime longest run of
                                        consecutive max<0 days
          alltime_frost_maxS_days_time: Start dateTime of alltime longest run of
                                        consecutive max<0 days

        """

        t1 = time.time()

        # Get current record from the archive
        if not self.generator.gen_ts:
            self.generator.gen_ts = db_lookup().lastGoodStamp()
        current_rec = db_lookup().getRecord(self.generator.gen_ts)
        # Get our time unit
        (dateTime_type,
         dateTime_group) = getStandardUnitType(current_rec['usUnits'],
                                               'dateTime')

        ##
        ## Get timestamps we need for the periods of interest
        ##
        # Get time obj for midnight
        _mn_t = datetime.time(0)
        # Get date obj for now
        _today_d = datetime.datetime.today()
        # Get midnight 1st of the year as a datetime object and then get it as a
        # timestamp
        _first_of_year_dt = get_first_day(_today_d, 0, 1 - _today_d.month)
        _mn_first_of_year_dt = datetime.datetime.combine(
            _first_of_year_dt, _mn_t)
        _mn_first_of_year_ts = time.mktime(_mn_first_of_year_dt.timetuple())
        _year_ts = TimeSpan(_mn_first_of_year_ts, timespan.stop)

        _row = db_lookup().getSql(
            "SELECT MAX(dateTime) FROM archive_day_outTemp WHERE min < 0.0")
        lastfrost_ts = _row[0]
        _lastfrost_ts = None
        if lastfrost_ts is not None:
            try:
                _row = db_lookup().getSql(
                    "SELECT MAX(dateTime) FROM archive WHERE outTemp < 0.0 AND dateTime > ? AND dateTime <= ?",
                    (lastfrost_ts, lastfrost_ts + 86400))
                _lastfrost_ts = _row[0]
            except:
                _lastfrost_ts = None

        _row = db_lookup().getSql(
            "SELECT MAX(dateTime) FROM archive_day_outTemp WHERE max < 0.0")
        lasteis_ts = _row[0]
        _lasteis_ts = None
        if lasteis_ts is not None:
            try:
                _row = db_lookup().getSql(
                    "SELECT MAX(dateTime) FROM archive WHERE outTemp < 0.0 AND dateTime > ? AND dateTime <= ?",
                    (lasteis_ts, lasteis_ts + 86400))
                _lasteis_ts = _row[0]
            except:
                _lasteis_ts = None

        # Get our year stats vectors
        _outTemp_vector = []
        _time_vector = []
        for tspan in weeutil.weeutil.genDaySpans(_mn_first_of_year_ts,
                                                 timespan.stop):
            _row = db_lookup().getSql(
                "SELECT dateTime, min FROM archive_day_outTemp WHERE dateTime >= ? AND dateTime < ? ORDER BY dateTime",
                (tspan.start, tspan.stop))
            if _row is not None:
                _time_vector.append(_row[0])
                if _row[1] < 0:
                    fr = 2
                else:
                    fr = 0

                _outTemp_vector.append(fr)
        # Get our run of year min0 days
        _interim = []  # List to hold details of any runs we might find
        _index = 0  # Placeholder so we can track the start dateTime of any runs
        # Use itertools groupby method to make our search for a run easier
        # Step through each of the groups itertools has found
        for k, g in itertools.groupby(_outTemp_vector,
                                      key=lambda r: 1 if r > 0 else 0):
            _length = len(list(g))
            if k > 0:  # If we have a run of days with min below 0 degrees C, add it to our
                # list of runs
                _interim.append((k, _length, _index))
            _index += _length
        if _interim != []:
            # If we found a run (we want the longest one) then get our results
            (_temp, _year_minE_run, _position) = max(_interim,
                                                     key=lambda a: a[1])
            # Our 'time' is the day the run ends so we need to add on run-1 days
            _year_minE_time_ts = _time_vector[_position] + (_year_minE_run -
                                                            1) * 86400
            _year_minS_time_ts = _year_minE_time_ts - (86400 * _year_minE_run)
        else:
            # If we did not find a run then set our results accordingly
            _year_minE_run = 0
            _year_minE_time_ts = None
            _year_minS_time_ts = None

        # Get our year stats vectors
        _outTemp_vector = []
        _time_vector = []
        for tspan in weeutil.weeutil.genDaySpans(_mn_first_of_year_ts,
                                                 timespan.stop):
            _row = db_lookup().getSql(
                "SELECT dateTime, max FROM archive_day_outTemp WHERE dateTime >= ? AND dateTime < ? ORDER BY dateTime",
                (tspan.start, tspan.stop))
            if _row is not None:
                _time_vector.append(_row[0])
                if _row[1] < 0:
                    fr = 2
                else:
                    fr = 0

                _outTemp_vector.append(fr)
        # Get our run of year max0 days
        _interim = []  # List to hold details of any runs we might find
        _index = 0  # Placeholder so we can track the start dateTime of any runs
        # Use itertools groupby method to make our search for a run easier
        # Step through each of the groups itertools has found
        for k, g in itertools.groupby(_outTemp_vector,
                                      key=lambda r: 1 if r > 0 else 0):
            _length = len(list(g))
            if k > 0:  # If we have a run of days with max below 0 degrees C, add it to our
                # list of runs
                _interim.append((k, _length, _index))
            _index += _length
        if _interim != []:
            # If we found a run (we want the longest one) then get our results
            (_temp, _year_maxE_run, _position) = max(_interim,
                                                     key=lambda a: a[1])
            # Our 'time' is the day the run ends so we need to add on run-1 days
            _year_maxE_time_ts = _time_vector[_position] + (_year_maxE_run -
                                                            1) * 86400
            _year_maxS_time_ts = _year_maxE_time_ts - (86400 * _year_maxE_run)
        else:
            # If we did not find a run then set our results accordingly
            _year_maxE_run = 0
            _year_maxE_time_ts = None
            _year_maxS_time_ts = None

        # Get our alltime stats vectors
        _outTemp_vector = []
        _time_vector = []
        for tspan in weeutil.weeutil.genDaySpans(timespan.start,
                                                 timespan.stop):
            _row = db_lookup().getSql(
                "SELECT dateTime, min FROM archive_day_outTemp WHERE dateTime >= ? AND dateTime < ? ORDER BY dateTime",
                (tspan.start, tspan.stop))
            if _row is not None:
                _time_vector.append(_row[0])
                if _row[1] < 0:
                    fr = 2
                else:
                    fr = 0

                _outTemp_vector.append(fr)
        # Get our run of alltime min0 days
        _interim = []  # List to hold details of any runs we might find
        _index = 0  # Placeholder so we can track the start dateTime of any runs
        # Use itertools groupby method to make our search for a run easier
        # Step through each of the groups itertools has found
        for k, g in itertools.groupby(_outTemp_vector,
                                      key=lambda r: 1 if r > 0 else 0):
            _length = len(list(g))
            if k > 0:  # If we have a run of days with min below 0 degrees C, add it to our
                # list of runs
                _interim.append((k, _length, _index))
            _index += _length
        if _interim != []:
            # If we found a run (we want the longest one) then get our results
            (_temp, _alltime_minE_run, _position) = max(_interim,
                                                        key=lambda a: a[1])
            # Our 'time' is the day the run ends so we need to add on run-1 days
            _alltime_minE_time_ts = _time_vector[_position] + (
                _alltime_minE_run - 1) * 86400
            _alltime_minS_time_ts = _alltime_minE_time_ts - (86400 *
                                                             _alltime_minE_run)

        else:
            # If we did not find a run then set our results accordingly
            _alltime_minE_run = 0
            _alltime_minE_time_ts = None
            _alltime_minS_time_ts = None

        # Get our alltime stats vectors
        _outTemp_vector = []
        _time_vector = []
        for tspan in weeutil.weeutil.genDaySpans(timespan.start,
                                                 timespan.stop):
            _row = db_lookup().getSql(
                "SELECT dateTime, max FROM archive_day_outTemp WHERE dateTime >= ? AND dateTime < ? ORDER BY dateTime",
                (tspan.start, tspan.stop))
            if _row is not None:
                _time_vector.append(_row[0])
                if _row[1] < 0:
                    fr = 2
                else:
                    fr = 0

                _outTemp_vector.append(fr)
        # Get our run of alltime max0 days
        _interim = []  # List to hold details of any runs we might find
        _index = 0  # Placeholder so we can track the start dateTime of any runs
        # Use itertools groupby method to make our search for a run easier
        # Step through each of the groups itertools has found
        for k, g in itertools.groupby(_outTemp_vector,
                                      key=lambda r: 1 if r > 0 else 0):
            _length = len(list(g))
            if k > 0:  # If we have a run of days with max below 0 degrees C, add it to our
                # list of runs
                _interim.append((k, _length, _index))
            _index += _length
        if _interim != []:
            # If we found a run (we want the longest one) then get our results
            (_temp, _alltime_maxE_run, _position) = max(_interim,
                                                        key=lambda a: a[1])
            # Our 'time' is the day the run ends so we need to add on run-1 days
            _alltime_maxE_time_ts = _time_vector[_position] + (
                _alltime_maxE_run - 1) * 86400
            _alltime_maxS_time_ts = _alltime_maxE_time_ts - (86400 *
                                                             _alltime_maxE_run)

        else:
            # If we did not find a run then set our results accordingly
            _alltime_maxE_run = 0
            _alltime_maxE_time_ts = None
            _alltime_maxS_time_ts = None

        # Make our timestamps ValueHelpers to give more flexibility in how we can format them in our reports
        _lastfrost_vt = (_lastfrost_ts, dateTime_type, dateTime_group)
        _lastfrost_vh = ValueHelper(_lastfrost_vt,
                                    formatter=self.generator.formatter,
                                    converter=self.generator.converter)
        _lasteis_vt = (_lasteis_ts, dateTime_type, dateTime_group)
        _lasteis_vh = ValueHelper(_lasteis_vt,
                                  formatter=self.generator.formatter,
                                  converter=self.generator.converter)

        _delta_time = time.time() - _lastfrost_ts if _lastfrost_ts else None
        _delta_time_vt = (_delta_time, 'second', 'group_deltatime')
        _delta_time_vh = ValueHelper(_delta_time_vt,
                                     context="long_delta",
                                     formatter=self.generator.formatter,
                                     converter=self.generator.converter)
        _delta_eistime = time.time() - _lasteis_ts if _lasteis_ts else None
        _delta_eistime_vt = (_delta_eistime, 'second', 'group_deltatime')
        _delta_eistime_vh = ValueHelper(_delta_eistime_vt,
                                        context="long_delta",
                                        formatter=self.generator.formatter,
                                        converter=self.generator.converter)

        _year_minE_time_vt = (_year_minE_time_ts, dateTime_type,
                              dateTime_group)
        _year_minE_time_vh = ValueHelper(_year_minE_time_vt,
                                         formatter=self.generator.formatter,
                                         converter=self.generator.converter)
        _year_minS_time_vt = (_year_minS_time_ts, dateTime_type,
                              dateTime_group)
        _year_minS_time_vh = ValueHelper(_year_minS_time_vt,
                                         formatter=self.generator.formatter,
                                         converter=self.generator.converter)

        _year_maxE_time_vt = (_year_maxE_time_ts, dateTime_type,
                              dateTime_group)
        _year_maxE_time_vh = ValueHelper(_year_maxE_time_vt,
                                         formatter=self.generator.formatter,
                                         converter=self.generator.converter)
        _year_maxS_time_vt = (_year_maxS_time_ts, dateTime_type,
                              dateTime_group)
        _year_maxS_time_vh = ValueHelper(_year_maxS_time_vt,
                                         formatter=self.generator.formatter,
                                         converter=self.generator.converter)

        _alltime_minE_time_vt = (_alltime_minE_time_ts, dateTime_type,
                                 dateTime_group)
        _alltime_minE_time_vh = ValueHelper(_alltime_minE_time_vt,
                                            formatter=self.generator.formatter,
                                            converter=self.generator.converter)
        _alltime_minS_time_vt = (_alltime_minS_time_ts, dateTime_type,
                                 dateTime_group)
        _alltime_minS_time_vh = ValueHelper(_alltime_minS_time_vt,
                                            formatter=self.generator.formatter,
                                            converter=self.generator.converter)

        _alltime_maxE_time_vt = (_alltime_maxE_time_ts, dateTime_type,
                                 dateTime_group)
        _alltime_maxE_time_vh = ValueHelper(_alltime_maxE_time_vt,
                                            formatter=self.generator.formatter,
                                            converter=self.generator.converter)
        _alltime_maxS_time_vt = (_alltime_maxS_time_ts, dateTime_type,
                                 dateTime_group)
        _alltime_maxS_time_vh = ValueHelper(_alltime_maxS_time_vt,
                                            formatter=self.generator.formatter,
                                            converter=self.generator.converter)

        # Create a small dictionary with the tag names (keys) we want to use
        search_list_extension = {
            'lastfrost_day': _lastfrost_vh,
            'lastfrost_delta_time': _delta_time_vh,
            'lasteis_day': _lasteis_vh,
            'lasteis_delta_time': _delta_eistime_vh,
            'year_frost_minE_days': _year_minE_run,
            'year_frost_minE_days_time': _year_minE_time_vh,
            'year_frost_minS_days_time': _year_minS_time_vh,
            'year_frost_maxE_days': _year_maxE_run,
            'year_frost_maxE_days_time': _year_maxE_time_vh,
            'year_frost_maxS_days_time': _year_maxS_time_vh,
            'alltime_frost_minE_days': _alltime_minE_run,
            'alltime_frost_minE_days_time': _alltime_minE_time_vh,
            'alltime_frost_minS_days_time': _alltime_minS_time_vh,
            'alltime_frost_maxE_days': _alltime_maxE_run,
            'alltime_frost_maxE_days_time': _alltime_maxE_time_vh,
            'alltime_frost_maxS_days_time': _alltime_maxS_time_vh
        }

        t2 = time.time()
        log.debug("MyFrostDays SLE executed in %0.3f seconds", (t2 - t1))

        return [search_list_extension]
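
The frost-run logic above (repeated four times for year/alltime and min/max) reduces to one generic pattern: flag each day, group consecutive equal flags with itertools.groupby, and keep the longest qualifying group along with the index where it starts. A stripped-down, runnable sketch of that pattern on a plain list of daily flags (the data is invented for illustration):

    import itertools

    # 1 = a frost day (e.g. daily min < 0), 0 = not a frost day.
    flags = [0, 1, 1, 0, 1, 1, 1, 0, 0, 1]

    runs = []    # (length, start_index) of every run of frost days
    index = 0    # running index so we know where each group starts
    for key, group in itertools.groupby(flags):
        length = len(list(group))
        if key == 1:
            runs.append((length, index))
        index += length

    if runs:
        longest, start = max(runs)   # tuples compare by length first
        print(longest, start)        # -> 3 4: three consecutive frost days starting at day 4
    else:
        print("no frost run found")
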
Code example #5
File: wdastro.py Project: gjr80/weewx-weewx-wd
    def get_extension_list(self, timespan, db_lookup):
        """Create a search list with date-time of next perihelion and aphelion.

           Source: Earth perihelion and aphelion Table Courtesy of
                   Fred Espenak, www.Astropixels.com

        Parameters:
            timespan: An instance of weeutil.weeutil.TimeSpan. This will hold
                      the start and stop times of the domain of valid times.

            db_lookup: This is a function that, given a data binding as its only
                       parameter, will return a database manager object.

        Returns:
            next_perihelion: ValueHelper containing date-time of next perihelion
            next_aphelion: ValueHelper containing date-time of next aphelion
        """

        t1 = time.time()

        # get a timestamp for now
        search_ts = timespan.stop
        # wrap in a try..except just in case
        try:
            # find the index of the next perihelion
            next_perihelion_idx = bisect.bisect_left(self.perihelion,
                                                     search_ts)
            # find the index of the next aphelion
            next_aphelion_idx = bisect.bisect_left(self.aphelion, search_ts)
            # get ts of next perihelion
            next_perihelion_ts = self.perihelion[next_perihelion_idx]
            # get ts of next aphelion
            next_aphelion_ts = self.aphelion[next_aphelion_idx]
        except IndexError:
            # if an error then set them to None
            next_perihelion_ts = None
            next_aphelion_ts = None

        # make our ts into ValueHelpers
        next_perihelion_ts_vh = ValueHelper(
            (next_perihelion_ts, 'unix_epoch', 'group_time'),
            'current',
            formatter=self.generator.formatter,
            converter=self.generator.converter)
        next_aphelion_ts_vh = ValueHelper(
            (next_aphelion_ts, 'unix_epoch', 'group_time'),
            'current',
            formatter=self.generator.formatter,
            converter=self.generator.converter)

        # now create a small dictionary with suitable keys
        search_list_extension = {
            'next_perihelion': next_perihelion_ts_vh,
            'next_aphelion': next_aphelion_ts_vh
        }

        t2 = time.time()
        if weewx.debug >= 2:
            logdbg("EarthApsis SLE executed in %0.3f seconds" % (t2 - t1))

        return [search_list_extension]
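
The perihelion/aphelion lookup works because self.perihelion and self.aphelion are pre-sorted lists of timestamps: bisect.bisect_left returns the index of the first entry at or after 'now', and the IndexError guard covers the case where every entry is already in the past. A stdlib-only illustration with made-up timestamps:

    import bisect

    events = [100, 200, 300, 400]    # sorted epoch timestamps of future events

    def next_event(ts_list, now):
        """Return the first timestamp at or after now, or None if there is none."""
        idx = bisect.bisect_left(ts_list, now)
        try:
            return ts_list[idx]
        except IndexError:
            return None

    print(next_event(events, 250))   # 300
    print(next_event(events, 400))   # 400 (an exact match counts as the next event)
    print(next_event(events, 999))   # None
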
Code example #6
File: xstats.py Project: hoetzgit/hesweewx
    def get_extension_list(self, timespan, db_lookup):
        """Returns a search list extension with additions.

        timespan: An instance of weeutil.weeutil.TimeSpan. This holds
                  the start and stop times of the domain of valid times.

        db_lookup: Function that returns a database manager given a
                   data binding.
        """

        # First, create a TimespanBinder object for all time. This one is easy
        # because the object timespan already holds all valid times to be
        # used in the report.
        all_stats = TimespanBinder(timespan,
                                   db_lookup,
                                   context='alltime',
                                   formatter=self.generator.formatter,
                                   converter=self.generator.converter,
                                   skin_dict=self.generator.skin_dict)

        # Now create a TimespanBinder for the last seven days. This one we
        # will have to calculate. First, calculate the time at midnight, seven
        # days ago. The variable week_dt will be an instance of datetime.date.
        week_dt = datetime.date.fromtimestamp(timespan.stop) - datetime.timedelta(weeks=1)
        # Now convert it to unix epoch time:
        week_ts = time.mktime(week_dt.timetuple())
        # Now form a TimeSpanStats object, using the time span just calculated:
        seven_day_stats = TimespanBinder(TimeSpan(week_ts, timespan.stop),
                                         db_lookup,
                                         context='seven_day',
                                         formatter=self.generator.formatter,
                                         converter=self.generator.converter,
                                         skin_dict=self.generator.skin_dict)

        # Now use a similar process to get statistics for the last 30 days.
        days_dt = datetime.date.fromtimestamp(timespan.stop) - datetime.timedelta(days=30)
        days_ts = time.mktime(days_dt.timetuple())
        thirty_day_stats = TimespanBinder(TimeSpan(days_ts, timespan.stop),
                                          db_lookup,
                                          context='thirty_day',
                                          formatter=self.generator.formatter,
                                          converter=self.generator.converter,
                                          skin_dict=self.generator.skin_dict)


        # Now use a similar process to get statistics for last year.
        year = datetime.date.today().year
        start_ts = time.mktime((year - 1, 1, 1, 0, 0, 0, 0, 0, 0))
        stop_ts = time.mktime((year, 1, 1, 0, 0, 0, 0, 0, 0))
        last_year_stats = TimespanBinder(TimeSpan(start_ts, stop_ts),
                                          db_lookup,
                                          context='last_year',
                                          formatter=self.generator.formatter,
                                          converter=self.generator.converter)

        # Now use a similar process to get statistics for last year to date.
        year = datetime.date.today().year
        month = datetime.date.today().month
        day = datetime.date.today().day
        start_ts = time.mktime((year - 1, 1, 1, 0, 0, 0, 0, 0, 0))
        stop_ts = time.mktime((year - 1, month, day, 0, 0, 0, 0, 0, 0))
        last_year_todate_stats = TimespanBinder(TimeSpan(start_ts, stop_ts),
                                                db_lookup,
                                                context='last_year_todate',
                                                formatter=self.generator.formatter,
                                                converter=self.generator.converter)

        # Now use a similar process to get statistics for last calendar month.
        start_ts = time.mktime((year, month - 1, 1, 0, 0, 0, 0, 0, 0))
        stop_ts = time.mktime((year, month, 1, 0, 0, 0, 0, 0, 0)) - 1
        last_month_stats = TimespanBinder(TimeSpan(start_ts, stop_ts),
                                          db_lookup,
                                          context='last_month',
                                          formatter=self.generator.formatter,
                                          converter=self.generator.converter)

        # Get ts WeeWX was launched (first good stamp, hard-coded here)
        starttime = 1383250000
        # Database start time
        starttime_vt = (starttime, 'unix_epoch', 'group_time')
        starttime_vh = ValueHelper(starttime_vt,
                                   context='last_st',
                                   formatter=self.generator.formatter,
                                   converter=self.generator.converter)

        """Lazy evaluation of weewx uptime."""
        delta_time = time.time() -  starttime

        db_sta_end = ValueHelper(value_t=(delta_time, "second", "group_deltatime"),
                                 context='long_delta',
                                 formatter=self.generator.formatter,
                                 converter=self.generator.converter)

        return [{'alltime': all_stats,
                 'seven_day': seven_day_stats,
                 'thirty_day': thirty_day_stats,
                 'last_month': last_month_stats,
                 'db_sttime': starttime_vh,
                 'db_uptime': db_sta_end,
                 'last_year': last_year_stats,
                 'last_year_today': last_year_todate_stats,
               }]
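
Each additional TimespanBinder above needs an epoch timestamp for the start of its window: midnight seven days ago, midnight thirty days ago, 1 January of last year, and so on. The date arithmetic is plain standard library; a quick, runnable sketch of the same calculations (output depends on when it is run):

    import datetime
    import time

    stop_ts = time.time()    # stands in for timespan.stop

    # Midnight, seven days before the report time.
    week_dt = datetime.date.fromtimestamp(stop_ts) - datetime.timedelta(weeks=1)
    week_ts = time.mktime(week_dt.timetuple())

    # Midnight, thirty days before the report time.
    days_dt = datetime.date.fromtimestamp(stop_ts) - datetime.timedelta(days=30)
    days_ts = time.mktime(days_dt.timetuple())

    # 1 January of last year through 1 January of this year.
    year = datetime.date.today().year
    last_year_start = time.mktime((year - 1, 1, 1, 0, 0, 0, 0, 0, -1))
    last_year_stop = time.mktime((year, 1, 1, 0, 0, 0, 0, 0, -1))

    print(week_ts, days_ts, last_year_start, last_year_stop)
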
Code example #7
File: wdastro.py Project: gjr80/weewx-weewx-wd
    def get_extension_list(self, timespan, db_lookup):
        """Create a search list with various lunar perigee and apogee details.

        Parameters:
          timespan: An instance of weeutil.weeutil.TimeSpan. This will hold the
                    start and stop times of the domain of valid times.

          db_lookup: This is a function that, given a data binding as its only
                     parameter, will return a database manager object.

        Returns:
          moon_apsis: A list of tuples with details of each apogee/perigee in
                      the current year. Tuple format is:
                        (apsis_type, apsis_ts, apsis_distance)
                      where:
                        apsis_type is 'a' for apogee or 'p' for perigee
                        apsis_ts is a ValueHelper with the timestamp of the
                          apsis
                        apsis_distance is the distance in km of the moon from
                          earth at apsis.
          next_apogee_ts: ValueHelper containing date-time of next apogee
                          (could be next year)
          next_apogee_dist_km: Earth to Moon distance in km at next apogee
                               (WeeWX has no notion of km/mi so cannot use a
                               ValueHelper)
          next_perigee_ts: ValueHelper containing date-time of next perigee
                           (could be next year)
          next_perigee_dist_km: Earth to Moon distance in km at next perigee
                                (Weewx has no notion of km/mi so cannot use a
                                ValueHelper)
          max_apogee: Tuple with details of apogee where Moon is furthest from
                      Earth (ie max apogee) this year.
                      Format is:
                        (apsis_ts, apsis_distance)
                      where apsis_ts and apsis_distance as per moon_apsis above
          min_perigee: Tuple with details of perigee where Moon is closest to
                       Earth (ie min perigee) this year.
                       Format is:
                         (apsis_ts, apsis_distance)
                       where apsis_ts and apsis_distance as per moon_apsis
                       above
        """

        t1 = time.time()

        # get starting date for our list of apogee/perigees
        curr_year = datetime.date.fromtimestamp(timespan.stop).year
        ssk = math.floor((curr_year - 1999.97) * 13.2555)
        apsis_list = []
        # Get our list of apogees/perigees for the current year. List will
        # include last apogee/perigee from previous year and first
        # apogee/perigee from next year
        for z in range(0, 40):
            sk = ssk + z * 0.5
            apsis = 'p' if (sk - math.floor(sk)) < 0.25 else 'a'
            pa = self.moonpa(sk)
            pa_ts = (pa[0] - 2440587.5) * 86400.0
            # save our ts as a ValueHelper
            pa_ts_vh = ValueHelper((pa_ts, 'unix_epoch', 'group_time'),
                                   formatter=self.generator.formatter,
                                   converter=self.generator.converter)
            # add the latest event to our list
            apsis_list.append((apsis, pa_ts_vh, pa[2]))
            if datetime.date.fromtimestamp(pa_ts).year > curr_year:
                # if we have an apsis from next year then grab one more then
                # stop, we have enough
                sk = ssk + (z + 1) * 0.5
                apsis = 'p' if (sk - math.floor(sk)) < 0.25 else 'a'
                pa = self.moonpa(sk)
                pa_ts = (pa[0] - 2440587.5) * 86400.0
                # save our ts as a ValueHelper
                pa_ts_vh = ValueHelper((pa_ts, 'unix_epoch', 'group_time'),
                                       formatter=self.generator.formatter,
                                       converter=self.generator.converter)
                # add the latest event to our list
                apsis_list.append((apsis, pa_ts_vh, pa[2]))
                break

        # make sure our list is in date order
        apsis_list.sort(key=lambda ts: ts[1].raw)

        # get timestamps for start of this year and start of next year,
        # necessary so we can identify which events occur this year
        _tt = time.localtime(timespan.stop)
        _ts = time.mktime((_tt.tm_year, 1, 1, 0, 0, 0, 0, 0, -1))
        _ts_y = time.mktime((_tt.tm_year + 1, 1, 1, 0, 0, 0, 0, 0, -1))
        # get max apogee for the year (ie greatest distance to moon)
        max_apogee = max(apsis_list,
                         key=lambda ap: ap[2]
                         if _ts <= ap[1].raw < _ts_y else 0)
        max_apogee = (max_apogee[1], max_apogee[2])
        # get min perigee for the year (ie least distance to moon)
        min_perigee = min(apsis_list,
                          key=lambda ap: ap[2]
                          if _ts <= ap[1].raw < _ts_y else 1000000)
        min_perigee = (min_perigee[1], min_perigee[2])

        # split our apsis list into individual components so we can find the
        # next apogee and perigee
        apsis_type_list, apsis_ts_vh_list, apsis_dist_list = list(
            zip(*apsis_list))
        # ts list elements are ValueHelpers so we need to break it down further
        apsis_ts_list = [ts_vh.raw for ts_vh in apsis_ts_vh_list]
        try:
            # find the index of the next apogee or perigee
            next_apsis_idx = bisect.bisect_left(apsis_ts_list, timespan.stop)
            if apsis_type_list[next_apsis_idx] == 'a':
                # if an apogee then capture apogee/perigee details accordingly
                next_apogee_ts_vh = apsis_ts_vh_list[next_apsis_idx]
                next_apogee_dist = apsis_dist_list[next_apsis_idx]
                next_perigee_ts_vh = apsis_ts_vh_list[next_apsis_idx + 1]
                next_perigee_dist = apsis_dist_list[next_apsis_idx + 1]
            else:
                # if a perigee then capture apogee/perigee details accordingly
                next_perigee_ts_vh = apsis_ts_vh_list[next_apsis_idx]
                next_perigee_dist = apsis_dist_list[next_apsis_idx]
                next_apogee_ts_vh = apsis_ts_vh_list[next_apsis_idx + 1]
                next_apogee_dist = apsis_dist_list[next_apsis_idx + 1]
        except (IndexError, ValueError):
            # if we had an error then set everything to None
            next_apogee_ts_vh = ValueHelper((None, 'unix_epoch', 'group_time'),
                                            formatter=self.generator.formatter,
                                            converter=self.generator.converter)
            next_apogee_dist = None
            next_perigee_ts_vh = ValueHelper(
                (None, 'unix_epoch', 'group_time'),
                formatter=self.generator.formatter,
                converter=self.generator.converter)
            next_perigee_dist = None

        # now create a small dictionary with suitable keys
        search_list_extension = {
            'moon_apsis': apsis_list,
            'next_apogee_ts': next_apogee_ts_vh,
            'next_apogee_dist_km': next_apogee_dist,
            'next_perigee_ts': next_perigee_ts_vh,
            'next_perigee_dist_km': next_perigee_dist,
            'max_apogee': max_apogee,
            'min_perigee': min_perigee
        }

        t2 = time.time()
        if weewx.debug >= 2:
            logdbg("MoonApsis SLE executed in %0.3f seconds" % (t2 - t1))

        return [search_list_extension]
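
moonpa() (part of this extension, not shown here) evidently returns the apsis time as a Julian Date; the expression (pa[0] - 2440587.5) * 86400.0 is the standard Julian Date to Unix epoch conversion, since JD 2440587.5 falls on 1970-01-01 00:00 UTC. A stdlib-only sanity check of that conversion:

    import datetime

    def jd_to_epoch(jd):
        """Convert a Julian Date to Unix epoch seconds."""
        return (jd - 2440587.5) * 86400.0

    # One day after the Unix epoch origin:
    dt = datetime.datetime.fromtimestamp(jd_to_epoch(2440588.5), tz=datetime.timezone.utc)
    print(dt)    # 1970-01-02 00:00:00+00:00
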
Code example #8
File: wdastro.py Project: gjr80/weewx-weewx-wd
    def get_extension_list(self, timespan, db_lookup):
        """Returns a search list with details of the next Solar and Lunar eclipse.

           Details provided include epoch timestamp of the eclipse as well as
           the type. Note that the dictionary of eclipses is all eclipses, not
           just eclipses visible at the station's location, so the eclipse
           returned may not be visible to the user. Eclipse data is based upon
           NASA Solar and Lunar eclipse tables at the following sites:

           http://eclipse.gsfc.nasa.gov/solar.html
           http://eclipse.gsfc.nasa.gov/lunar.html

        Parameters:
          timespan: An instance of weeutil.weeutil.TimeSpan. This will
                    hold the start and stop times of the domain of
                    valid times.

          db_lookup: This is a function that, given a data binding
                     as its only parameter, will return a database manager
                     object.

        Returns:
          next_solar_eclipse: Timestamp of next solar eclipse
          next_solar_eclipse_type: Type of next solar eclipse. Can be 'Annular',
                                   'Hybrid', 'Partial' or 'Total'
          next_lunar_eclipse: Timestamp of next lunar eclipse
          next_lunar_eclipse_type: Type of next lunar eclipse. Can be 'Partial',
                                   'Penumbral' or 'Total'
        """

        t1 = time.time()

        # get a timestamp for now
        search_ts = timespan.stop
        # split our eclipse list tuples into individual lists
        solar_eclipse_ts_list, solar_eclipse_type_list = list(
            zip(*self.solar_eclipses))
        try:
            # find the index of the next solar eclipse
            next_solar_eclipse_idx = bisect.bisect_left(
                solar_eclipse_ts_list, search_ts)
            # get ts of next solar eclipse
            next_solar_eclipse_ts = (
                solar_eclipse_ts_list[next_solar_eclipse_idx] -
                self.delta_t(solar_eclipse_ts_list[next_solar_eclipse_idx]))
            # get the type code of next solar eclipse
            next_solar_eclipse_type = solar_eclipse_type_list[
                next_solar_eclipse_idx]
        except (IndexError, ValueError):
            # if an error then set them to None
            next_solar_eclipse_ts = None
            next_solar_eclipse_type = None

        # make our ts into a ValueHelper
        next_solar_eclipse_ts_vh = ValueHelper(
            (next_solar_eclipse_ts, 'unix_epoch', 'group_time'),
            'current',
            formatter=self.generator.formatter,
            converter=self.generator.converter)
        # look up the eclipse type
        next_solar_eclipse_type = self.solar_eclipse_type_lookup[
            next_solar_eclipse_type]

        # split our eclipse list tuples into individual lists
        lunar_eclipse_ts_list, lunar_eclipse_data_list = list(
            zip(*self.lunar_eclipses))
        try:
            # find the index of the next lunar eclipse
            next_lunar_eclipse_idx = bisect.bisect_left(
                lunar_eclipse_ts_list, search_ts)
            # get ts of next lunar eclipse
            next_lunar_eclipse_ts = (
                lunar_eclipse_ts_list[next_lunar_eclipse_idx] -
                self.delta_t(lunar_eclipse_ts_list[next_lunar_eclipse_idx]))
            # get the type code of next lunar eclipse
            next_lunar_eclipse_type = lunar_eclipse_data_list[
                next_lunar_eclipse_idx]
        except (IndexError, ValueError):
            # if an error then set them to None
            next_lunar_eclipse_ts = None
            next_lunar_eclipse_type = None

        # make our ts into a ValueHelper
        next_lunar_eclipse_ts_vh = ValueHelper(
            (next_lunar_eclipse_ts, 'unix_epoch', 'group_time'),
            'current',
            formatter=self.generator.formatter,
            converter=self.generator.converter)
        # look up the eclipse type
        next_lunar_eclipse_type = self.lunar_eclipse_type_lookup[
            next_lunar_eclipse_type]

        # Now create a small dictionary with suitable keys:
        search_list_extension = {
            'next_solar_eclipse': next_solar_eclipse_ts_vh,
            'next_solar_eclipse_type': next_solar_eclipse_type,
            'next_lunar_eclipse': next_lunar_eclipse_ts_vh,
            'next_lunar_eclipse_type': next_lunar_eclipse_type
        }

        t2 = time.time()
        if weewx.debug >= 2:
            logdbg("Eclipse SLE executed in %0.3f seconds" % (t2 - t1))

        return [search_list_extension]
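
The eclipse tables are lists of (timestamp, type_code) tuples, so zip(*...) is used to split them into parallel sequences: bisect searches the timestamps alone, and the matching type code is picked up by the same index. A stdlib-only sketch of that pairing pattern with invented data:

    import bisect

    # (epoch timestamp, type code) pairs, sorted by timestamp -- invented values.
    eclipses = [(100, 'P'), (250, 'T'), (400, 'A')]

    ts_list, type_list = zip(*eclipses)    # ((100, 250, 400), ('P', 'T', 'A'))

    search_ts = 180
    idx = bisect.bisect_left(ts_list, search_ts)
    try:
        next_ts, next_type = ts_list[idx], type_list[idx]
    except IndexError:
        next_ts, next_type = None, None

    print(next_ts, next_type)              # 250 T
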
Code example #9
    def get_extension_list(self, timespan, db_lookup):
        """Returns a search list extension with datetime of last rain and secs since then.

        Parameters:
          timespan: An instance of weeutil.weeutil.TimeSpan. This will
                    hold the start and stop times of the domain of 
                    valid times.

          db_lookup: This is a function that, given a data binding
                     as its only parameter, will return a database manager
                     object.

        Returns:
          last_rain:            A ValueHelper containing the datetime of the last rain
          time_since_last_rain: A ValueHelper containing the seconds since last rain
        """

        ##
        ## Get date and time of last rain
        ##
        ## Returns unix epoch of archive period of last rain
        ##
        ## Result is returned as a ValueHelper so standard Weewx formatting
        ## is available eg $last_rain.format("%d %m %Y")
        ##

        # Get ts for day of last rain from statsdb
        # Value returned is ts for midnight on the day the rain occurred
        # rain accumulation > 1 mm
        _row = db_lookup().getSql(
            "SELECT MAX(dateTime) FROM archive_day_rain WHERE sum > 0.1")

        last_rain_ts = _row[0]
        # Now if we found a ts then use it to limit our search on the archive
        # so we can find the last archive record during which it rained. Wrap
        # in a try statement just in case

        if last_rain_ts is not None:
            try:
                _row = db_lookup().getSql(
                    "SELECT MAX(dateTime) FROM archive WHERE rain > 0 AND dateTime > ? AND dateTime <= ?",
                    (last_rain_ts, last_rain_ts + 86400))
                last_rain_ts = _row[0]
            except:
                last_rain_ts = None
        else:
            # The 'you should never reach here' branch: belt-and-braces for a
            # new database with no rain recorded yet
            last_rain_ts = None

        # Wrap our ts in a ValueHelper
        last_rain_vt = (last_rain_ts, 'unix_epoch', 'group_time')
        last_rain_vh = ValueHelper(last_rain_vt,
                                   formatter=self.generator.formatter,
                                   converter=self.generator.converter)

        # The next idea is borrowed with thanks from weewx station.py.
        # Note this is the delta time from 'now', not from the last weewx db time
        #  - weewx uses time.time() but weewx-wd suggests timespan.stop
        delta_time = time.time() - last_rain_ts if last_rain_ts else None

        # Wrap our ts in a ValueHelper
        delta_time_vt = (delta_time, 'second', 'group_deltatime')
        delta_time_vh = ValueHelper(delta_time_vt,
                                    formatter=self.generator.formatter,
                                    converter=self.generator.converter)

        # Create a small dictionary with the tag names (keys) we want to use
        search_list_extension = {
            'last_rain': last_rain_vh,
            'time_since_last_rain': delta_time_vh
        }

        # uncomment to enable debugging
        #### logdbg("last_rain  = %s" % last_rain_ts )
        #### logdbg("delta_time = %s" % delta_time   )

        return [search_list_extension]
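
The last-rain lookup is a two-stage narrowing query: the daily summary table cheaply yields the midnight timestamp of the most recent rainy day, and the main archive is then searched only within that one day for the exact record. The same pattern against an in-memory SQLite database (schema and data are illustrative stand-ins only):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE archive_day_rain (dateTime INTEGER PRIMARY KEY, sum REAL)")
    conn.execute("CREATE TABLE archive (dateTime INTEGER PRIMARY KEY, rain REAL)")
    # Two days of data: the day starting at 86400*100 was dry, the next day had rain.
    conn.executemany("INSERT INTO archive_day_rain VALUES (?, ?)",
                     [(86400 * 100, 0.0), (86400 * 101, 0.4)])
    conn.executemany("INSERT INTO archive VALUES (?, ?)",
                     [(86400 * 101 + 3600, 0.1), (86400 * 101 + 7200, 0.3),
                      (86400 * 101 + 10800, 0.0)])

    # Stage 1: midnight of the most recent day with significant rain.
    day_ts = conn.execute(
        "SELECT MAX(dateTime) FROM archive_day_rain WHERE sum > 0.1").fetchone()[0]

    # Stage 2: last archive record with rain within that day.
    last_rain_ts = None
    if day_ts is not None:
        last_rain_ts = conn.execute(
            "SELECT MAX(dateTime) FROM archive WHERE rain > 0 AND dateTime > ? AND dateTime <= ?",
            (day_ts, day_ts + 86400)).fetchone()[0]

    print(last_rain_ts)    # 86400 * 101 + 7200, the last interval in which rain fell
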
Code example #10
File: junk2.py Project: cqlanus/docker-weewx
import weewx.units

from weewx.units import ValueHelper

value_t = (68.01, "degree_F", "group_temperature")
vh = ValueHelper(value_t)
x = str(vh)
print(type(x))
print(x)
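
Given examples #2 and #3 above, str() on a ValueHelper already yields a formatted, unit-labelled string; the snippet can also attach the default formatter (as example #2 does) to make that explicit. A hedged variation, assuming a WeeWX installation is importable:

    import weewx.units
    from weewx.units import ValueHelper, ValueTuple

    value_t = ValueTuple(68.01, "degree_F", "group_temperature")
    vh = ValueHelper(value_t, formatter=weewx.units.get_default_formatter())
    print(type(str(vh)))   # <class 'str'>
    print(str(vh))         # something like "68.0°F", depending on formatter defaults
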
Code example #11
    def get_extension_list(self, timespan, db_lookup):
        """Returns various tags related to longest periods of rainy/dry days.

           This SLE uses the stats database daily rainfall totals to determine
           the longest runs of consecutive dry or wet days over various periods
           (month, year, alltime). The SLE also determines the start date of
           each run.

           Period (xxx_days) tags are returned as integer numbers of days.
           Times (xx_time) tags are returned as dateTime ValueHelpers set to
           midnight (at start) of the first day of the run concerned. If the
           length of the run is 0 then the corresponding start time of the run
           is returned as None.

        Parameters:
          timespan: An instance of weeutil.weeutil.TimeSpan. This will
                    hold the start and stop times of the domain of
                    valid times.

          db_lookup: This is a function that, given a data binding
                     as its only parameter, will return a database manager
                     object.

        Returns:
          month_con_dry_days:        Length of longest run of consecutive dry
                                     days in current month
          month_con_dry_days_time:   End dateTime of longest run of
                                     consecutive dry days in current month
          month_con_wet_days:        Length of longest run of consecutive wet
                                     days in current month
          month_con_wet_days_time:   End dateTime of longest run of
                                     consecutive wet days in current month
          year_con_dry_days:         Length of longest run of consecutive dry
                                     days in current year
          year_con_dry_days_time:    End dateTime of longest run of
                                     consecutive dry days in current year
          year_con_wet_days:         Length of longest run of consecutive wet
                                     days in current year
          year_con_wet_days_time:    End dateTime of longest run of
                                     consecutive wet days in current year
          alltime_con_dry_days:      Length of alltime longest run of
                                     consecutive dry days
          alltime_con_dry_days_time: End dateTime of alltime longest run of
                                     consecutive dry days
          alltime_con_wet_days:      Length of alltime longest run of
                                     consecutive wet days
          alltime_con_wet_days_time: End dateTime of alltime longest run of
                                     consecutive wet days

        """

        t1 = time.time()

        ##
        ## Get units for use later with ValueHelpers
        ##
        # Get current record from the archive
        if not self.generator.gen_ts:
            self.generator.gen_ts = db_lookup().lastGoodStamp()
        current_rec = db_lookup().getRecord(self.generator.gen_ts)
        # Get our time unit
        (dateTime_type,
         dateTime_group) = getStandardUnitType(current_rec['usUnits'],
                                               'dateTime')
        #dateTime_type = unix_epoch
        #dateTime_group = dateTime
        ##
        ## Get timestamps we need for the periods of interest
        ##
        # Get time obj for midnight
        _mn_t = datetime.time(0)

        # Get date obj for now
        _today_d = datetime.datetime.today()

        # Get midnight 1st of the month as a datetime object and then get it as a
        # timestamp
        first_of_month_dt = get_first_day(_today_d)
        _mn_first_of_month_dt = datetime.datetime.combine(
            first_of_month_dt, _mn_t)
        _mn_first_of_month_ts = time.mktime(_mn_first_of_month_dt.timetuple())
        _month_ts = TimeSpan(_mn_first_of_month_ts, timespan.stop)

        # Get midnight 1st of the year as a datetime object and then get it as a
        # timestamp
        _first_of_year_dt = get_first_day(_today_d, 0, 1 - _today_d.month)
        _mn_first_of_year_dt = datetime.datetime.combine(
            _first_of_year_dt, _mn_t)
        _mn_first_of_year_ts = time.mktime(_mn_first_of_year_dt.timetuple())
        _year_ts = TimeSpan(_mn_first_of_year_ts, timespan.stop)

        # Get vectors of our month stats
        _rain_vector = []
        _time_vector = []

        # Step through each day in our month timespan and get our daily rain
        # total and timestamp. This is a day_archive version of the archive
        # getSqlVectors method.
        for tspan in weeutil.weeutil.genDaySpans(_mn_first_of_month_ts,
                                                 timespan.stop):
            _row = db_lookup().getSql(
                "SELECT dateTime, sum FROM archive_day_rain WHERE dateTime >= ? AND dateTime < ? ORDER BY dateTime",
                (tspan.start, tspan.stop))
            if _row is not None:
                _time_vector.append(_row[0])
                _rain_vector.append(_row[1])
        # As an aside let's get our number of rainy days this month
        _month_rainy_days = sum(1 for i in _rain_vector if i > 0)
        # Get our run of month dry days
        _interim = []  # List to hold details of any runs we might find
        _index = 0  # Placeholder so we can track the start dateTime of any runs
        # Use itertools groupby method to make our search for a run easier
        # Step through each of the groups itertools has found
        for k, g in itertools.groupby(_rain_vector):
            _length = len(list(g))
            if k == 0:  # If we have a run of 0s (ie no rain) add it to our
                # list of runs
                _interim.append((k, _length, _index))
            _index += _length
        if _interim != []:
            # If we found a run (we want the longest one) then get our results
            (_temp, _month_dry_run, _position) = max(_interim,
                                                     key=lambda a: a[1])
            # Our 'time' is the day the run ends so we need to add on run-1 days
            _month_dry_time_ts = _time_vector[_position] + (_month_dry_run -
                                                            1) * 86400
            _month_dryS_time_ts = _month_dry_time_ts - (86400 * _month_dry_run)
        else:
            # If we did not find a run then set our results accordingly
            _month_dry_run = 0
            _month_dry_time_ts = None
            _month_dryS_time_ts = None

        # Get our run of month rainy days
        _interim = []  # List to hold details of any runs we might find
        _index = 0  # Placeholder so we can track the start dateTime of any runs
        # Use itertools groupby method to make our search for a run easier
        # Step through each of the groups itertools has found
        for k, g in itertools.groupby(_rain_vector,
                                      key=lambda r: 1 if r > 0 else 0):
            _length = len(list(g))
            if k > 0:  # If we have a run of something > 0 (ie some rain) add
                # it to our list of runs
                _interim.append((k, _length, _index))
            _index += _length
        if _interim != []:
            # If we found a run (we want the longest one) then get our results
            (_temp, _month_wet_run, _position) = max(_interim,
                                                     key=lambda a: a[1])
            # Our 'time' is the day the run ends so we need to add on run-1 days
            _month_wet_time_ts = _time_vector[_position] + (_month_wet_run -
                                                            1) * 86400
            _month_wetS_time_ts = _month_wet_time_ts - (86400 * _month_wet_run)
        else:
            # If we did not find a run then set our results accordingly
            _month_wet_run = 0
            _month_wet_time_ts = None
            _month_wetS_time_ts = None

        # Get our year stats vectors
        _rain_vector = []
        _time_vector = []
        for tspan in weeutil.weeutil.genDaySpans(_mn_first_of_year_ts,
                                                 timespan.stop):
            _row = db_lookup().getSql(
                "SELECT dateTime, sum FROM archive_day_rain WHERE dateTime >= ? AND dateTime < ? ORDER BY dateTime",
                (tspan.start, tspan.stop))
            if _row is not None:
                _time_vector.append(_row[0])
                _rain_vector.append(_row[1])
        # As an aside let's get our number of rainy days this year
        _year_rainy_days = sum(1 for i in _rain_vector if i > 0)
        # Get our run of year dry days
        _interim = []  # List to hold details of any runs we might find
        _index = 0  # Placeholder so we can track the start dateTime of any runs
        # Use itertools groupby method to make our search for a run easier
        # Step through each of the groups itertools has found
        for k, g in itertools.groupby(_rain_vector):
            _length = len(list(g))
            if k == 0:  # If we have a run of 0s (ie no rain) add it to our
                # list of runs
                _interim.append((k, _length, _index))
            _index += _length
        if _interim != []:
            # If we found a run (we want the longest one) then get our results
            (_temp, _year_dry_run, _position) = max(_interim,
                                                    key=lambda a: a[1])
            # Our 'time' is the day the run ends so we need to add on run-1 days
            _year_dry_time_ts = _time_vector[_position] + (_year_dry_run -
                                                           1) * 86400
            _year_dryS_time_ts = _year_dry_time_ts - (86400 * _year_dry_run)
        else:
            # If we did not find a run then set our results accordingly
            _year_dry_run = 0
            _year_dry_time_ts = None
            _year_dryS_time_ts = None

        # Get our run of year rainy days
        _interim = []  # List to hold details of any runs we might find
        _index = 0  # Placeholder so we can track the start dateTime of any runs
        # Use itertools groupby method to make our search for a run easier
        # Step through each of the groups itertools has found
        for k, g in itertools.groupby(_rain_vector,
                                      key=lambda r: 1 if r > 0 else 0):
            _length = len(list(g))
            if k > 0:  # If we have a run of something > 0 (ie some rain) add
                # it to our list of runs
                _interim.append((k, _length, _index))
            _index += _length
        if _interim != []:
            # If we found a run (we want the longest one) then get our results
            (_temp, _year_wet_run, _position) = max(_interim,
                                                    key=lambda a: a[1])
            # Our 'time' is the day the run ends so we need to add on run-1 days
            _year_wet_time_ts = _time_vector[_position] + (_year_wet_run -
                                                           1) * 86400
            _year_wetS_time_ts = _year_wet_time_ts - (86400 * _year_wet_run)
        else:
            # If we did not find a run then set our results accordingly
            _year_wet_run = 0
            _year_wet_time_ts = None
            _year_wetS_time_ts = None

        # Get our alltime stats vectors
        _rain_vector = []
        _time_vector = []
        for tspan in weeutil.weeutil.genDaySpans(timespan.start,
                                                 timespan.stop):
            _row = db_lookup().getSql(
                "SELECT dateTime, sum FROM archive_day_rain WHERE dateTime >= ? AND dateTime < ? ORDER BY dateTime",
                (tspan.start, tspan.stop))
            if _row is not None:
                _time_vector.append(_row[0])
                _rain_vector.append(_row[1])
        # As an aside let's get our number of rainy days over all time
        _alltime_rainy_days = sum(1 for i in _rain_vector if i > 0)
        # Get our run of alltime dry days
        _interim = []  # List to hold details of any runs we might find
        _index = 0  # Placeholder so we can track the start dateTime of any runs
        # Use itertools groupby method to make our search for a run easier
        # Step through each of the groups itertools has found
        for k, g in itertools.groupby(_rain_vector):
            _length = len(list(g))
            if k == 0:  # If we have a run of 0s (ie no rain) add it to our
                # list of runs
                _interim.append((k, _length, _index))
            _index += _length
        if _interim:
            # If we found a run (we want the longest one) then get our results
            (_temp, _alltime_dry_run, _position) = max(_interim,
                                                       key=lambda a: a[1])
            # Our 'time' is the day the run ends so we need to add on run-1 days
            _alltime_dry_time_ts = _time_vector[_position] + (
                _alltime_dry_run - 1) * 86400
            _alltime_dryS_time_ts = _alltime_dry_time_ts - (86400 *
                                                            _alltime_dry_run)
        else:
            # If we did not find a run then set our results accordingly
            _alltime_dry_run = 0
            _alltime_dry_time_ts = None
            _alltime_dryS_time_ts = None

        # Get our run of alltime rainy days
        _interim = []  # List to hold details of any runs we might find
        _index = 0  # Placeholder so we can track the start dateTime of any runs
        # Use itertools groupby method to make our search for a run easier
        # Step through each of the groups itertools has found
        for k, g in itertools.groupby(_rain_vector,
                                      key=lambda r: 1 if r > 0 else 0):
            _length = len(list(g))
            if k > 0:  # If we have a run of something > 0 (ie some rain) add
                # it to our list of runs
                _interim.append((k, _length, _index))
            _index += _length
        if _interim:
            # If we found a run (we want the longest one) then get our results
            (_temp, _alltime_wet_run, _position) = max(_interim,
                                                       key=lambda a: a[1])
            # Our 'time' is the day the run ends so we need to add on run-1 days
            _alltime_wet_time_ts = _time_vector[_position] + (
                _alltime_wet_run - 1) * 86400
            _alltime_wetS_time_ts = _alltime_wet_time_ts - (86400 *
                                                            _alltime_wet_run)
        else:
            # If we did not find a run then set our results accordingly
            _alltime_wet_run = 0
            _alltime_wet_time_ts = None
            _alltime_wetS_time_ts = None

        # Make our timestamps ValueHelpers to give more flexibility in how we can format them in our reports
        _month_dry_time_vt = (_month_dry_time_ts, dateTime_type,
                              dateTime_group)
        _month_dry_time_vh = ValueHelper(_month_dry_time_vt,
                                         formatter=self.generator.formatter,
                                         converter=self.generator.converter)
        _month_wet_time_vt = (_month_wet_time_ts, dateTime_type,
                              dateTime_group)
        _month_wet_time_vh = ValueHelper(_month_wet_time_vt,
                                         formatter=self.generator.formatter,
                                         converter=self.generator.converter)
        _year_dry_time_vt = (_year_dry_time_ts, dateTime_type, dateTime_group)
        _year_dry_time_vh = ValueHelper(_year_dry_time_vt,
                                        formatter=self.generator.formatter,
                                        converter=self.generator.converter)
        _year_wet_time_vt = (_year_wet_time_ts, dateTime_type, dateTime_group)
        _year_wet_time_vh = ValueHelper(_year_wet_time_vt,
                                        formatter=self.generator.formatter,
                                        converter=self.generator.converter)
        _alltime_dry_time_vt = (_alltime_dry_time_ts, dateTime_type,
                                dateTime_group)
        _alltime_dry_time_vh = ValueHelper(_alltime_dry_time_vt,
                                           formatter=self.generator.formatter,
                                           converter=self.generator.converter)
        _alltime_wet_time_vt = (_alltime_wet_time_ts, dateTime_type,
                                dateTime_group)
        _alltime_wet_time_vh = ValueHelper(_alltime_wet_time_vt,
                                           formatter=self.generator.formatter,
                                           converter=self.generator.converter)
        # Start Time for dry and wet
        _month_dryS_time_vt = (_month_dryS_time_ts, 'unix_epoch', 'group_time')
        _month_dryS_time_vh = ValueHelper(_month_dryS_time_vt,
                                          formatter=self.generator.formatter,
                                          converter=self.generator.converter)
        _year_dryS_time_vt = (_year_dryS_time_ts, 'unix_epoch', 'group_time')
        _year_dryS_time_vh = ValueHelper(_year_dryS_time_vt,
                                         formatter=self.generator.formatter,
                                         converter=self.generator.converter)
        _alltime_dryS_time_vt = (_alltime_dryS_time_ts, 'unix_epoch',
                                 'group_time')
        _alltime_dryS_time_vh = ValueHelper(_alltime_dryS_time_vt,
                                            formatter=self.generator.formatter,
                                            converter=self.generator.converter)

        _month_wetS_time_vt = (_month_wetS_time_ts, 'unix_epoch', 'group_time')
        _month_wetS_time_vh = ValueHelper(_month_wetS_time_vt,
                                          formatter=self.generator.formatter,
                                          converter=self.generator.converter)
        _year_wetS_time_vt = (_year_wetS_time_ts, 'unix_epoch', 'group_time')
        _year_wetS_time_vh = ValueHelper(_year_wetS_time_vt,
                                         formatter=self.generator.formatter,
                                         converter=self.generator.converter)
        _alltime_wetS_time_vt = (_alltime_wetS_time_ts, 'unix_epoch',
                                 'group_time')
        _alltime_wetS_time_vh = ValueHelper(_alltime_wetS_time_vt,
                                            formatter=self.generator.formatter,
                                            converter=self.generator.converter)

        # Create a small dictionary with the tag names (keys) we want to use
        search_list_extension = {
            'month_con_dry_days': _month_dry_run,
            'month_con_dry_days_time': _month_dry_time_vh,
            'year_con_dry_days': _year_dry_run,
            'year_con_dry_days_time': _year_dry_time_vh,
            'alltime_con_dry_days': _alltime_dry_run,
            'alltime_con_dry_days_time': _alltime_dry_time_vh,
            'month_con_wet_days': _month_wet_run,
            'month_con_wet_days_time': _month_wet_time_vh,
            'year_con_wet_days': _year_wet_run,
            'year_con_wet_days_time': _year_wet_time_vh,
            'alltime_con_wet_days': _alltime_wet_run,
            'alltime_con_wet_days_time': _alltime_wet_time_vh,
            'month_rainy_days': _month_rainy_days,
            'year_rainy_days': _year_rainy_days,
            'alltime_rainy_days': _alltime_rainy_days,
            'month_con_dryS_days_time': _month_dryS_time_vh,
            'year_con_dryS_days_time': _year_dryS_time_vh,
            'alltime_con_dryS_days_time': _alltime_dryS_time_vh,
            'month_con_wetS_days_time': _month_wetS_time_vh,
            'year_con_wetS_days_time': _year_wetS_time_vh,
            'alltime_con_wetS_days_time': _alltime_wetS_time_vh
        }
        t2 = time.time()
        log.debug("MyXRainNo SLE executed in %0.3f seconds", t2 - t1)

        return [search_list_extension]
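
The run detection above leans entirely on itertools.groupby: map each day to wet/dry, group consecutive equal values, and keep the longest dry (or wet) group together with its start index. A minimal standalone sketch of that idea, assuming only a plain Python list of daily rain totals (the function and variable names are illustrative, not part of the extension):

import itertools

def longest_run(daily_rain, wet=False):
    """Return (length, start_index) of the longest dry (or wet) run of days."""
    runs = []
    index = 0
    # Collapse the daily totals into groups of consecutive wet or dry days
    for k, g in itertools.groupby(daily_rain, key=lambda r: 1 if r > 0 else 0):
        length = len(list(g))
        if (k > 0) == wet:
            runs.append((length, index))
        index += length
    return max(runs) if runs else (0, None)

# Example: six daily rain totals
print(longest_run([0.0, 0.0, 1.2, 0.0, 0.0, 0.0]))            # -> (3, 3) dry run
print(longest_run([0.0, 0.5, 2.0, 0.0, 0.0, 0.0], wet=True))  # -> (2, 1) wet run
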
Code example #12
File: xgreen.py  Project: hoetzgit/hesweewx
    def get_extension_list(self, timespan, db_lookup):
        """Returns green_day as dateTime, green_sum, warmTemp and coolTemp.

         Parameters:
           genDaySpans: An instance of weeutil.weeutil.genDaySpans. This will
                        take the start and stop times of the domain of
                        given times.

           db_lookup: This is a function that, given a data binding
                      as its only parameter, will return a database manager
                      object.

         Returns:
           green_sum: Growing green_sum, Numeric value only, not a ValueTuple.
                      if day.outTemp.avg > 0.0 get for
                              Jan day.outTemp.avg * 0.5
                              Feb day.outTemp.avg * 0.75
                              Mae day.outTemp.avg * 1
                              Summe as green_sum
           green_day: Get the datetime for the Day to date this year where
                      green_sum > 200 as datetime.
           coolT_sum: Kaeltesumme day.outTemp.avg less than 0 degree C from Nov year before to Mar this year
           warmT_sum: Waermesumme day.outTemp.avg more than 20 degree C from Jun to Aug of this year
         """

        t1 = time.time()

        # Get year and month for today
        today = datetime.date.today()
        ano = today.year
        anomo = today.month

        jan_ano = datetime.date(ano, 1, 1)
        feb_ano = datetime.date(ano, 2, 1)
        mae_ano = datetime.date(ano, 3, 1)
        maee_ano = datetime.date(ano, 3, 31)
        jun_ano = datetime.date(ano, 6, 1)
        auge_ano = datetime.date(ano, 8, 31)
        nov_ano = datetime.date(ano, 11, 1)
        novA_ano = datetime.date(ano - 1, 11, 1)

        # get timetuple of the days per year
        dat_ts = time.mktime(today.timetuple())
        jan_ano_ts = time.mktime(jan_ano.timetuple())
        feb_ano_ts = time.mktime(feb_ano.timetuple())
        jane_ano_ts = feb_ano_ts - 86400
        mae_ano_ts = time.mktime(mae_ano.timetuple())
        febe_ano_ts = mae_ano_ts - 86400
        maee_ano_ts = time.mktime(maee_ano.timetuple())
        jun_ano_ts = time.mktime(jun_ano.timetuple())
        maie_ano_ts = jun_ano_ts - 86400
        auge_ano_ts = time.mktime(auge_ano.timetuple())
        nov_ano_ts = time.mktime(nov_ano.timetuple())
        novA_ano_ts = time.mktime(novA_ano.timetuple())
        tavg_ts = None
        tavgS = 0.0
        warmS = 0.0
        coolS = 0.0

        # Compute green_sum as the sum of day.outTemp.avg for days above 0 degree C
        #      and green_day as the dateTime where green_sum first reaches 200
        if anomo > 5:
            #self.filename = '/home/weewx/bin/user/zzgreenDay'
            self.filename = '/home/weewx/archive/zzgreenDay'
            #self.filename1 = '/home/weewx/bin/user/zzgreenSum'
            self.filename1 = '/home/weewx/archive/zzgreenSum'
            try:
                with open(self.filename1) as f1:
                    tavgS = f1.read()
                    tavgS = float(tavgS)
                with open(self.filename) as f:
                    tavg_ts = f.read()
                    tavg_ts = float(tavg_ts)

            except Exception as e:
                log.error("greenDay cannot read green: %s", e)

        else:
            tavgS = 0.0
            tavg_ts = None
            try:
                for tspan in weeutil.weeutil.genDaySpans(
                        jan_ano_ts, jun_ano_ts):
                    _row = db_lookup().getSql(
                        "SELECT dateTime,wsum,sumtime FROM archive_day_outTemp WHERE dateTime>? AND dateTime<=?",
                        (tspan.start, tspan.stop))

                    if _row is None or _row[1] is None or _row[2] is None:
                        continue

                    date = datetime.datetime.fromtimestamp(_row[0])
                    mo_date = date.month
                    tavg0 = _row[1] / _row[2]

                    if tavg0 <= 0.0:
                        day_green = 0.0
                    else:
                        if mo_date == 1:
                            day_green = tavg0 * 0.5

                        elif mo_date == 2:
                            day_green = tavg0 * 0.75

                        elif mo_date > 2 and mo_date < 6:
                            day_green = tavg0

                        else:
                            day_green = 0.0

                    tavgS += day_green
                    if tavgS >= 200.0 and tavg_ts is None:
                        tavg_ts = _row[0]

            except weedb.DatabaseError:
                pass

            #with open("/home/weewx/bin/user/zzgreenSum", "w") as dat_gs:
            with open("/home/weewx/archive/zzgreenSum", "w") as dat_gs:
                dat_gs.write(str(tavgS))

            #with open("/home/weewx/bin/user/zzgreenDay", "w") as dat_gd:
            with open("/home/weewx/archive/zzgreenDay", "w") as dat_gd:
                dat_gd.write(str(tavg_ts))

        # call warmT_sum as sum of day.outTemp.avg if more than 20 degree C
        if 5 < anomo < 9:
            _warmS = []
            try:
                for tspan in weeutil.weeutil.genDaySpans(
                        jun_ano_ts, auge_ano_ts):
                    _row = db_lookup().getSql(
                        "SELECT dateTime,wsum,sumtime FROM archive_day_outTemp WHERE dateTime>? AND dateTime<=?",
                        (tspan.start, tspan.stop))
                    if _row is None or _row[1] is None or _row[2] is None:
                        continue

                    _warmS.append(_row[1] / _row[2])

                warmS = sum(i for i in _warmS if i > 20.0)

            except weedb.DatabaseError:
                pass

            #with open("/home/weewx/bin/user/zzwarmSum", "w") as dat_ws:
            with open("/home/weewx/archive/zzwarmSum", "w") as dat_ws:
                dat_ws.write(str(warmS))

        else:
            #self.filename_warm = '/home/weewx/bin/user/zzwarmSum'
            self.filename_warm = '/home/weewx/archive/zzwarmSum'
            try:
                with open(self.filename_warm) as f:
                    warmS = f.read()
                    warmS = float(warmS)
            except Exception as e:
                log.error("warmSum cannot read zzwarmSum: %s", e)

        # call coolT_sum as sum of day.outTemp.avg less than 0 degree C
        #if maee_ano_ts < dat_ts < nov_ano_ts:
        if 3 < anomo < 11:
            #self.filename_cool = '/home/weewx/bin/user/zzcoolSum'
            self.filename_cool = '/home/weewx/archive/zzcoolSum'
            try:
                with open(self.filename_cool) as f:
                    coolS = f.read()
                    coolS = float(coolS)
            except Exception as e:
                log.error("coolSum cannot read zzcoolSum: %s", e)

        else:
            if anomo > 10:
                coolsta = nov_ano_ts
                coolend = dat_ts
            else:
                coolsta = novA_ano_ts
                coolend = maee_ano_ts

            _cooS = []
            try:
                for tspan in weeutil.weeutil.genDaySpans(coolsta, coolend):
                    _row = db_lookup().getSql(
                        "SELECT dateTime,wsum,sumtime FROM archive_day_outTemp WHERE dateTime>? AND dateTime<=?",
                        (tspan.start, tspan.stop))
                    if _row is None or _row[1] is None or _row[2] is None:
                        continue

                    _cooS.append(_row[1] / _row[2])

                coolS = sum(i for i in _cooS if i < 0.0)
                coolS = abs(coolS)

            except weedb.DatabaseError:
                pass

            #with open("/home/weewx/bin/user/zzcoolSum", "w") as dat_cs:
            with open("/home/weewx/archive/zzcoolSum", "w") as dat_cs:
                dat_cs.write(str(coolS))

        # Wrap our ts in a ValueHelper
        tavg_vt = (tavg_ts, 'unix_epoch', 'group_time')
        tavg_vh = ValueHelper(tavg_vt,
                              formatter=self.generator.formatter,
                              converter=self.generator.converter)

        search_list_extension = {
            'green_sum': tavgS,
            'green_day': tavg_vh,
            'coolT_sum': coolS,
            'warmT_sum': warmS,
        }

        t2 = time.time()
        log.debug("xGreenDay SLE executed in %0.3f seconds", t2 - t1)
        #log.info("xGreenDay SLE executed in %0.3f seconds", t2 - t1)

        return [search_list_extension]
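
The weighting spelled out in the docstring (January counts half, February three quarters, March through May in full, only positive daily means contribute, and green_day is the first day the running total reaches 200) can be isolated in a few lines. A hedged sketch under those assumptions; the helper name and the (month, temperature) input format are illustrative only:

def green_sum(daily_avgs):
    """daily_avgs: iterable of (month, avg_temp_C) pairs, in date order.

    Returns (total, index) where index is the position of the first day on
    which the weighted running total reaches 200, or None if it never does.
    """
    weights = {1: 0.5, 2: 0.75, 3: 1.0, 4: 1.0, 5: 1.0}  # Jun-Dec contribute nothing
    total, green_day = 0.0, None
    for i, (month, tavg) in enumerate(daily_avgs):
        if tavg > 0.0:
            total += tavg * weights.get(month, 0.0)
        if green_day is None and total >= 200.0:
            green_day = i
    return total, green_day

# Example: two warm weeks in March reach the threshold on the 14th day
print(green_sum([(3, 15.0)] * 14))  # -> (210.0, 13)
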
Code example #13
File: xlastrain.py  Project: hoetzgit/hesweewx
    def get_extension_list(self, timespan, db_lookup):

        _row = db_lookup().getSql(
            "SELECT MAX(dateTime) FROM archive_day_snow WHERE sum > 0")
        lastsnow_ts = _row[0] if _row else None

        if lastsnow_ts is not None:
            try:
                _row = db_lookup().getSql(
                    "SELECT MAX(dateTime) FROM archive WHERE snow > 0 AND dateTime > ? AND dateTime <= ?",
                    (lastsnow_ts, lastsnow_ts + 86400))
                lastsnow_ts = _row[0] if _row else None
            except Exception:
                lastsnow_ts = None

        # Snow cover
        _row = db_lookup().getSql(
            "SELECT MAX(dateTime) FROM archive_day_snowTotal WHERE sum > 0")
        lastsnowT_ts = _row[0] if _row else None

        if lastsnowT_ts is not None:
            try:
                _row = db_lookup().getSql(
                    "SELECT MAX(dateTime) FROM archive WHERE snowTotal > 0 AND dateTime > ? AND dateTime <= ?",
                    (lastsnowT_ts, lastsnowT_ts + 86400))
                lastsnowT_ts = _row[0] if _row else None
            except Exception:
                lastsnowT_ts = None

        # Wrap our ts in a ValueHelper
        lastsnow_vt = (lastsnow_ts, 'unix_epoch', 'group_time')
        lastsnow_vh = ValueHelper(lastsnow_vt,
                                  formatter=self.generator.formatter,
                                  converter=self.generator.converter)

        delta_time = time.time() - lastsnow_ts if lastsnow_ts else None
        # Wrap our ts in a ValueHelper
        delta_time_vt = (delta_time, 'second', 'group_deltatime')
        delta_time_vh = ValueHelper(delta_time_vt,
                                    formatter=self.generator.formatter,
                                    converter=self.generator.converter)

        # Snow cover
        lastsnowT_vt = (lastsnowT_ts, 'unix_epoch', 'group_time')
        lastsnowT_vh = ValueHelper(lastsnowT_vt,
                                   formatter=self.generator.formatter,
                                   converter=self.generator.converter)

        deltaT_time = time.time() - lastsnowT_ts if lastsnowT_ts else None
        # Wrap our ts in a ValueHelper
        deltaT_time_vt = (deltaT_time, 'second', 'group_deltatime')
        deltaT_time_vh = ValueHelper(deltaT_time_vt,
                                     formatter=self.generator.formatter,
                                     converter=self.generator.converter)

        return [{
            'lastsnow_day': lastsnow_vh,
            'lastsnow_delta_time': delta_time_vh,
            'lastsnowT_day': lastsnowT_vh,
            'lastsnowT_delta_time': deltaT_time_vh
        }]
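
The two blocks above repeat the same wrap-a-timestamp-and-its-age pattern for the last snowfall and the last snow cover. A sketch of how that could be factored out, assuming ValueTuple and ValueHelper from weewx.units and the same formatter/converter objects; the helper name is illustrative, not part of the original extension:

import time
from weewx.units import ValueTuple, ValueHelper

def wrap_event(ts, formatter, converter):
    """Return (event time, time since event) as ValueHelpers; ts may be None."""
    event_vh = ValueHelper(ValueTuple(ts, 'unix_epoch', 'group_time'),
                           formatter=formatter, converter=converter)
    delta = time.time() - ts if ts else None
    delta_vh = ValueHelper(ValueTuple(delta, 'second', 'group_deltatime'),
                           context='long_delta',
                           formatter=formatter, converter=converter)
    return event_vh, delta_vh

# Usage inside get_extension_list() would then look like:
# lastsnow_vh, delta_time_vh = wrap_event(lastsnow_ts,
#                                         self.generator.formatter,
#                                         self.generator.converter)
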