Example No. 1
    def check_time_extents(self, ds):
        """
        Check that the values of time_coverage_start/time_coverage_end approximately match the data.
        """
        if not (hasattr(ds, 'time_coverage_start') and hasattr(ds, 'time_coverage_end')):
            return

        # Parse the ISO 8601 formatted dates
        try:
            t_min = dateparse(ds.time_coverage_start)
            t_max = dateparse(ds.time_coverage_end)
        except:
            return Result(BaseCheck.MEDIUM,
                          False,
                          'time_coverage_extents_match',
                          ['time_coverage attributes are not formatted properly. Use the ISO 8601:2004 date format, preferably the extended format.'])

        timevar = cfutil.get_time_variable(ds)

        if not timevar:
            return Result(BaseCheck.MEDIUM,
                          False,
                          'time_coverage_extents_match',
                          ['Could not find time variable to test extent of time_coverage_start/time_coverage_end, see CF-1.6 spec chapter 4.4'])

        # Time should be monotonically increasing, so we make that assumption here so we don't have to download THE ENTIRE ARRAY
        try:
            # num2date returns a naive datetime, but with the time adjusted to UTC.
            # we need to attach timezone information here, or the date
            # subtraction from t_min/t_max will assume that a naive timestamp is
            # in the same time zone and cause erroneous results.
            # Pendulum uses UTC by default, but we are being explicit here
            time0 = pendulum.instance(num2date(ds.variables[timevar][0],
                                      ds.variables[timevar].units), 'UTC')
            time1 = pendulum.instance(num2date(ds.variables[timevar][-1],
                                      ds.variables[timevar].units), 'UTC')
        except:
            return Result(BaseCheck.MEDIUM,
                          False,
                          'time_coverage_extents_match',
                          ['Failed to retrieve and convert times for variable %s.' % timevar])

        start_dt = abs(time0 - t_min)
        end_dt = abs(time1 - t_max)

        score = 2
        msgs = []
        if start_dt > timedelta(hours=1):
            msgs.append("Date time mismatch between time_coverage_start and actual "
                        "time values %s (time_coverage_start) != %s (time[0])" % (t_min.isoformat(), time0.isoformat()))
            score -= 1
        if end_dt > timedelta(hours=1):
            msgs.append("Date time mismatch between time_coverage_end and actual "
                        "time values %s (time_coverage_end) != %s (time[N])" % (t_max.isoformat(), time1.isoformat()))
            score -= 1

        return Result(BaseCheck.MEDIUM,
                      (score, 2),
                      'time_coverage_extents_match',
                      msgs)
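The comment above about attaching a timezone is the heart of this check: comparing a naive num2date result against the timezone-aware parsed attributes is at best ambiguous. A minimal sketch of the pattern, using illustrative values rather than real dataset attributes:

import pendulum
from datetime import datetime, timedelta

t_min = pendulum.parse("2020-01-01T00:00:00Z")      # aware, as dateparse would give us
naive_first = datetime(2020, 1, 1, 0, 30)           # num2date-style naive UTC value
time0 = pendulum.instance(naive_first, "UTC")       # attach UTC explicitly
assert abs(time0 - t_min) <= timedelta(hours=1)     # the comparison is now well defined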
Example No. 2
def test_datetime_primitives():
    dt = datetime.now()

    utc = UTC()
    assert utc.utcoffset(dt) == ZERO
    assert utc.utcoffset(None) == ZERO

    assert utc.tzname(dt) == "UTC"

    assert utc.dst(dt) == ZERO
    assert utc.dst(None) == ZERO

    p = pendulum.instance(dt)
    n = naive(p)
    assert n == dt
    assert type(n) == type(p)  # use pendulum naive type

    p2 = utcnow()

    assert p2.tz == p2.in_timezone("UTC").tz

    p3 = localnow()

    v = vanilla(p3)
    assert pendulum.instance(v) == p3

    tod = parse_time_of_day("2015-01-01 12:34:56")
    assert str(tod) == "12:34:56"

    d = pendulum.Date(2017, 1, 1)
    dt = combine_date_and_time(d, tod)
    assert str(dt) == "2017-01-01T12:34:56+00:00"

    sbrd = sqlbagrelativedelta(days=5, weeks=6, months=7)
    assert str(sbrd) == "7 months 47 days"
Example No. 3
File: base.py Project: leb2dg/SHARE
    def _validate_dates(self, start_date, end_date):
        assert not (bool(start_date) ^ bool(end_date)), 'Must specify both a start and end date or neither'
        assert isinstance(start_date, (datetime.timedelta, datetime.datetime, pendulum.Pendulum)) and isinstance(end_date, (datetime.timedelta, datetime.datetime, pendulum.Pendulum)), 'start_date and end_date must be either datetimes or timedeltas'
        assert not (isinstance(start_date, datetime.timedelta) and isinstance(end_date, datetime.timedelta)), 'Only one of start_date and end_date may be a timedelta'

        if isinstance(start_date, datetime.datetime):
            start_date = pendulum.instance(start_date)

        if isinstance(end_date, datetime.datetime):
            end_date = pendulum.instance(end_date)

        if isinstance(start_date, datetime.timedelta):
            start_date = pendulum.instance(end_date + start_date)

        if isinstance(end_date, datetime.timedelta):
            end_date = pendulum.instance(start_date + end_date)

        og_start, og_end = start_date, end_date
        start_date, end_date = self.shift_range(start_date, end_date)
        assert isinstance(start_date, pendulum.Pendulum) and isinstance(end_date, pendulum.Pendulum), 'shift_range must return a tuple of 2 datetimes'

        if (og_start, og_end) != (start_date, end_date):
            logger.warning('Date shifted from {} - {} to {} - {}. Disable shifting by passing shift_range=False'.format(og_start, og_end, start_date, end_date))

        assert start_date < end_date, 'start_date must be before end_date {} < {}'.format(start_date, end_date)

        return start_date, end_date
Example No. 4
    def __init__(self, start, end, absolute=False):
        super(Period, self).__init__()

        if not isinstance(start, pendulum.Date):
            if isinstance(start, datetime):
                start = pendulum.instance(start)
            else:
                start = pendulum.date(start.year, start.month, start.day)

            _start = start
        else:
            if isinstance(start, pendulum.DateTime):
                _start = datetime(
                    start.year,
                    start.month,
                    start.day,
                    start.hour,
                    start.minute,
                    start.second,
                    start.microsecond,
                    tzinfo=start.tzinfo,
                )
            else:
                _start = date(start.year, start.month, start.day)

        if not isinstance(end, pendulum.Date):
            if isinstance(end, datetime):
                end = pendulum.instance(end)
            else:
                end = pendulum.date(end.year, end.month, end.day)

            _end = end
        else:
            if isinstance(end, pendulum.DateTime):
                _end = datetime(
                    end.year,
                    end.month,
                    end.day,
                    end.hour,
                    end.minute,
                    end.second,
                    end.microsecond,
                    tzinfo=end.tzinfo,
                )
            else:
                _end = date(end.year, end.month, end.day)

        self._invert = False
        if start > end:
            self._invert = True

            if absolute:
                end, start = start, end
                _end, _start = _start, _end

        self._absolute = absolute
        self._start = start
        self._end = end
        self._delta = precise_diff(_start, _end)
Example No. 5
    def process_result_value(self, value, dialect):
        if value is not None:
            result = pendulum.instance(value)

            if value.tzinfo is not None:
                # remove timezone from timezone-aware fields
                offset_removed = vanilla(result)
                result = pendulum.instance(offset_removed)
            return result
Example No. 6
def parse_date(text, relative_to=None):
    """Converts a date string into a datetime.date

    This is relative to the relative_to date which defaults to today.

    :arg text: the text to parse
    :arg relative_to: (optional) the datetime object to parse dates
        relative to

    :returns: Pendulum (subclass of datetime)

    :raises ValueError: if the text is not parseable

    """
    # First, if it's a date, try parsing it with pendulum--this doesn't require
    # a relative-to date.
    try:
        return pendulum.instance(datetime.datetime.strptime(text, '%Y-%m-%d'))
    except ValueError:
        pass

    if relative_to is None:
        relative_to = pendulum.today()
    else:
        relative_to = pendulum.instance(relative_to)

    # Match on lowercase messages
    text = text.lower()

    # Today and tomorrow
    if text.startswith('tod'):
        return relative_to
    if text.startswith('tom'):
        return relative_to.add(days=1)

    # Day of week; parsed as after today
    # (day of week is 0-based where 0 is a sunday)
    today_index = relative_to.day_of_week
    pairs = [
        ('sunday', 0),
        ('monday', 1),
        ('tuesday', 2),
        ('wednesday', 3),
        ('thursday', 4),
        ('friday', 5),
        ('saturday', 6)
    ]
    for day, offset in pairs:
        if day.startswith(text):
            adjustment = (offset - today_index) % 7
            print(today_index, offset, adjustment)
            return relative_to.add(days=adjustment)

    # FIXME: Other things to support from taskwarrior:
    # http://taskwarrior.org/docs/dates.html#names
    raise ValueError('"%s" is not parseable' % text)
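A quick usage sketch of parse_date above; the reference date is passed explicitly so the relative results are deterministic (this assumes parse_date and its imports are in scope):

import pendulum

base = pendulum.datetime(2015, 6, 18)                            # a fixed reference date
assert parse_date("2015-06-18") == base                          # ISO dates need no reference
assert parse_date("tod", relative_to=base) == base               # "today"
assert parse_date("tom", relative_to=base) == base.add(days=1)   # "tomorrow"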
Example No. 7
    def parse(self, value, instance):  # type: (str, base.Entity) -> pendulum.DateTime
        config = getattr(instance, '_config', None) or utils.Config.factory()

        if isinstance(value, datetime.datetime):
            if self._is_naive(value):
                return pendulum.instance(value, config.timezone)

            return pendulum.instance(value)
        elif isinstance(value, pendulum.DateTime):
            return value

        try:
            return pendulum.parse(value, strict=False, dayfirst=config.day_first,
                                  yearfirst=config.year_first)
        except AttributeError:
            return pendulum.parse(value, strict=False)
Example No. 8
def test_minute():
    def run(now):
        assert pycron.is_now('* * * * *', now)
        assert pycron.is_now('9 * * * *', now)
        assert pycron.is_now('*/1 * * * *', now)
        assert pycron.is_now('*/3 * * * *', now)
        assert pycron.is_now('*/9 * * * *', now)
        assert pycron.is_now('3,9,25,16 * * * *', now)
        assert pycron.is_now('*/2 * * * *', now) is False
        assert pycron.is_now('*/4 * * * *', now) is False
        assert pycron.is_now('*/5 * * * *', now) is False
        assert pycron.is_now('*/12 * * * *', now) is False
        assert pycron.is_now('3,25,16 * * * *', now) is False
        assert pycron.is_now('0-10 * * * *', now)
        assert pycron.is_now('0-10 0-10 * * *', now)
        assert pycron.is_now('10-20 * * * *', now) is False
        assert pycron.is_now('10-20 10-20 * * *', now) is False
        assert pycron.is_now('1,2,5-10 * * * *', now)
        assert pycron.is_now('9,5-8 * * * *', now)
        assert pycron.is_now('10,20-30 * * * *', now) is False

        # Issue 14
        assert pycron.is_now('1-59/2 * * * *', now) is True
        assert pycron.is_now('1-59/4 * * * *', now) is True
        assert pycron.is_now('1-59/8 * * * *', now) is True

    now = datetime(2015, 6, 18, 0, 9)
    run(now)
    run(now.replace(tzinfo=utc))
    run(pendulum.instance(now))
    run(arrow.get(now))
    run(udatetime.from_string(now.isoformat()))
    run(Delorean(datetime=now, timezone='UTC').datetime)
Example No. 9
async def reset(request, token):
    try:
        reset = db.session.query(PasswordReset).filter_by(UUID=token).first()
        if not reset:
            return response.json({"error": "Invalid reset token"}, 404)

        if not reset.isValid:
            return response.json({"error": "Reset token has already been used"}, 404)

        if pendulum.now("UTC") > pendulum.instance(reset.expireTime):
            return response.json({"error": "Reset token has expired."}, 400)

        # Invalidate all resets for this user
        # db.session.query(PasswordReset).filter_by(userId=reset.userId).update(
        #     {"isValid": False}
        # )
        db.session.commit()

        user = utils.get_account_by_id(reset.userId)
        userData = user.serialize()
        userData["jwt"] = user.gen_token(expire_hours=1)
        userData["message"] = "Valid token provided. Prompt user to change password"

        return response.json(userData, 200)

    except Exception as e:
        return utils.exeption_handler(e, "Password reset confirmation failed", 500)
Example No. 10
def test_day_matching():
    def run(now):
        for i in range(0, 7):
            # Test day matching from Sunday onwards...
            now += timedelta(days=1)
            assert pycron.is_now('* * * * %s' % (pycron.DAY_NAMES[i]), now)
            assert pycron.is_now('* * * * %s' % (pycron.DAY_ABBRS[i]), now)
            # Test weekdays
            assert pycron.is_now('* * * * mon,tue,wed,thu,fri',
                                 now) is (True if i not in [0, 6] else False)
            assert pycron.is_now('* * * * monday,tuesday,wednesday,thursday,friday',
                                 now) is (True if i not in [0, 6] else False)
            assert pycron.is_now(
                '* * * * mon-fri', now) is (True if i not in [0, 6] else False)
            assert pycron.is_now(
                '* * * * monday-friday', now) is (True if i not in [0, 6] else False)
            assert pycron.is_now('* * * * mon,tue,wed,thu-fri',
                                 now) is (True if i not in [0, 6] else False)
            assert pycron.is_now('* * * * monday,tuesday,wednesday,thursday-friday',
                                 now) is (True if i not in [0, 6] else False)
            # Test weekends
            assert pycron.is_now(
                '* * * * sun,sat', now) is (True if i in [0, 6] else False)
            assert pycron.is_now(
                '* * * * sunday,saturday', now) is (True if i in [0, 6] else False)

    now = datetime(2015, 6, 20, 16, 7)
    run(now)
    run(now.replace(tzinfo=utc))
    run(pendulum.instance(now))
    run(arrow.get(now))
    run(udatetime.from_string(now.isoformat()))
    run(Delorean(datetime=now, timezone='UTC').datetime)
Example No. 11
    def test_add_basic(self, cmd, fake, config):
        start = pendulum.instance(fake.past_datetime(start_date='-9d'))
        result = cmd('add \'{}\' 1h2m2s \'{}\''.format(start.format('MMM D YYYY HH:mm:ss'), fake.sentence()))
        assert result.obj.exit_code == 0

        entry = TimeEntry.objects.get(result.created_id(), config=config)  # type: TimeEntry
        assert entry.start == start
        assert (entry.stop - entry.start).seconds == 3722
Example No. 12
 def default(self, obj):
     if hasattr(obj, 'serializable'):
         return obj.serializable
     if isinstance(obj, datetime.datetime):
         return pendulum.instance(obj).to_iso8601_string(extended=True)
     if isinstance(obj, decimal.Decimal):
         return str(obj)
     return super(CustomEncoder, self).default(obj)
Example No. 13
    def closest(self, dt1, dt2, *dts):
        """
        Get the closest date to the instance.

        :type dt1: datetime.datetime
        :type dt2: datetime.datetime
        :type dts: list[datetime.datetime,]

        :rtype: DateTime
        """
        dt1 = pendulum.instance(dt1)
        dt2 = pendulum.instance(dt2)
        dts = [dt1, dt2] + [pendulum.instance(x) for x in dts]
        dts = [(abs(self - dt), dt) for dt in dts]

        return min(dts)[1]
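For reference, this is how the method reads in use (a small sketch assuming pendulum's public DateTime API):

import pendulum

base = pendulum.datetime(2015, 5, 28)
near = pendulum.datetime(2015, 5, 27)
far = pendulum.datetime(2015, 6, 30)
assert base.closest(near, far) == near   # 1 day away beats 33 days away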
Example No. 14
def test_minute_ranges():
    for i in range(1, 59, 2):
        now = datetime(2015, 6, 18, 0, i)
        assert pycron.is_now('1-59/2 * * * *', now)
        assert pycron.is_now('1-59/2 * * * *', now.replace(tzinfo=utc))
        assert pycron.is_now('1-59/2 * * * *', pendulum.instance(now))
        assert pycron.is_now('1-59/2 * * * *', arrow.get(now))
        assert pycron.is_now('1-59/2 * * * *', udatetime.from_string(now.isoformat()))
        assert pycron.is_now('1-59/2 * * * *', Delorean(datetime=now, timezone='UTC').datetime)

    for i in range(0, 59, 2):
        now = datetime(2015, 6, 18, 0, i)
        assert pycron.is_now('1-59/2 * * * *', now) is False
        assert pycron.is_now('1-59/2 * * * *', now.replace(tzinfo=utc)) is False
        assert pycron.is_now('1-59/2 * * * *', pendulum.instance(now)) is False
        assert pycron.is_now('1-59/2 * * * *', arrow.get(now)) is False
        assert pycron.is_now('1-59/2 * * * *', udatetime.from_string(now.isoformat())) is False
        assert pycron.is_now('1-59/2 * * * *', Delorean(datetime=now, timezone='UTC').datetime) is False
Example No. 15
    def __set__(self, instance, value):  # type: (typing.Optional['base.Entity'], typing.Union[datetime.datetime, pendulum.DateTime]) -> None
        if value is None:
            return super().__set__(instance, value)

        config = instance._config or utils.Config.factory()

        if isinstance(value, datetime.datetime):
            if self._is_naive(value):
                value = pendulum.instance(value, config.timezone)
            else:
                value = pendulum.instance(value)
        elif isinstance(value, pendulum.DateTime):
            pass
        else:
            raise TypeError('Value being set to DateTimeField has to be either '
                            'a datetime.datetime or pendulum.DateTime object!')

        super().__set__(instance, value)
Example No. 16
def alerts():
    alerts = []
    now = pendulum.now('local')
    for alert in dataview.alerts:
        trigger_time = pendulum.instance(alert[0])
        start_time = pendulum.instance(alert[1])
        if start_time.date() == now.date():
            start = format_time(start_time)[1]
        else:
            start = format_datetime(start_time, short=True)[1]
        trigger = format_time(trigger_time)[1]
        command = ", ".join(alert[2])
        summary = alert[3]
        prefix = '#' if trigger_time < now else ' '
        alerts.append(f"{prefix} {trigger} ({command}) {summary} {start}")
    if alerts:
        return "\n".join(alerts)
    else:
        return "There are no alerts for today."
Example No. 17
def get_historical_metrics():
    metrics = {}

    metrics["briefs_total_count"] = []
    brief_day = func.date_trunc('day', Brief.published_at)
    briefs_by_day = select([brief_day, func.count(brief_day)])\
        .where(Brief.withdrawn_at.is_(None))\
        .where(Brief.published_at.isnot(None))\
        .order_by(brief_day)\
        .group_by(brief_day)
    for (day, count) in db.session.execute(briefs_by_day):
        metrics["briefs_total_count"].append({"value": count, "ts": pendulum.instance(day).to_iso8601_string()})

    metrics["brief_response_count"] = []
    brief_responses_day = func.date_trunc('day', BriefResponse.created_at)
    brief_responses_by_day = select([brief_responses_day, func.count(brief_responses_day)]) \
        .order_by(brief_responses_day) \
        .group_by(brief_responses_day)
    for (day, count) in db.session.execute(brief_responses_by_day):
        metrics["brief_response_count"].append({"value": count, "ts": pendulum.instance(day).to_iso8601_string()})

    metrics["buyer_count"] = []
    buyer_day = func.date_trunc('day', User.created_at)
    buyers_by_day = select([buyer_day, func.count(buyer_day)])\
        .where(User.email_address.contains("+").is_(False) | User.email_address.contains("digital.gov.au").is_(False))\
        .where(User.active.is_(True)) \
        .where(User.role == 'buyer') \
        .order_by(buyer_day)\
        .group_by(buyer_day)
    for (day, count) in db.session.execute(buyers_by_day):
        metrics["buyer_count"].append({"value": count, "ts": pendulum.instance(day).to_iso8601_string()})

    metrics["supplier_count"] = []
    supplier_day = func.date_trunc('day', Supplier.creation_time)
    suppliers_by_day = select([supplier_day, func.count(supplier_day)]) \
        .where(Supplier.abn != Supplier.DUMMY_ABN) \
        .order_by(supplier_day) \
        .group_by(supplier_day)
    for (day, count) in db.session.execute(suppliers_by_day):
        metrics["supplier_count"].append({"value": count, "ts": pendulum.instance(day).to_iso8601_string()})

    return jsonify(metrics)
Example No. 18
    def test_add_project(self, cmd, fake, config, factories):
        project = factories.ProjectFactory()
        start = pendulum.instance(fake.past_datetime(start_date='-9d'))
        end = start + pendulum.duration(hours=2)
        cmd('projects ls')
        result = cmd('add \'{}\' \'{}\' \'{}\' --project \'{}\''
                     .format(start.format('MMM D HH:mm:ss'), end.format('MMM D HH:mm:ss'), fake.sentence(), project.name))
        assert result.obj.exit_code == 0

        entry = TimeEntry.objects.get(result.created_id(), config=config)  # type: TimeEntry
        assert entry.project == project

        start = pendulum.instance(fake.past_datetime(start_date='-9d'))
        end = start + pendulum.duration(hours=2)
        result = cmd('add \'{}\' \'{}\' \'{}\' --project \'{}\''
                     .format(start.format('MMM D HH:mm:ss'), end.format('MMM D HH:mm:ss'), fake.sentence(), project.id))
        assert result.obj.exit_code == 0

        entry = TimeEntry.objects.get(result.created_id(), config=config)  # type: TimeEntry
        assert entry.project == project
Example No. 19
def convert_date(date_val, check_datetime=False, date_to_datetime=False):
    """Given a * date or datetime string.  Optionally checks the type parsed
       of the parsed value prior to being returned as a string"""
    utc = pendulum.timezone("UTC")
    if date_val is None or date_val == '*':
        if check_datetime:
            raise ValueError("Value is not datetime")
        return '*'
    else:
        d_raw = pendulum.parsing.parse_iso8601(date_val.strip())
        if (check_datetime and not isinstance(d_raw, datetime.datetime) and
            not date_to_datetime):
            raise ValueError("Value is not datetime")
        if isinstance(d_raw, datetime.datetime):
            pendulum_date = utc.convert(pendulum.instance(d_raw))
            # need to truncate/eliminate microseconds in order to work with solr
            if pendulum_date.microsecond == 0:
                return pendulum_date.to_iso8601_string()
            else:
                log.info("Datetime has nonzero microseconds, truncating to "
                         "zero for compatibility with Solr")
                return pendulum_date.replace(microsecond=0).to_iso8601_string()
        # if not a datetime, then it's a date
        elif isinstance(d_raw, datetime.date):
            if date_to_datetime:
                # any more elegant way to achieve conversion to datetime?
                dt_force = datetime.datetime.combine(d_raw,
                                                datetime.datetime.min.time())
                # probably don't strictly need tz argument, but doesn't hurt
                # to be explicit
                new_dt_str = pendulum.instance(dt_force,
                                               tz=utc).to_iso8601_string()
                log.info("Converted date {} to datetime {}".format(
                            d_raw.isoformat(), new_dt_str))
                return new_dt_str
            else:
                return d_raw.isoformat()

        else:
            # probably won't reach here, but not a bad idea to be defensive anyhow
            raise ValueError("Type {} is not handled by the datetime conversion routine")
Example No. 20
    def test_days_ago(self):
        today = pendulum.today()
        today_midnight = pendulum.instance(datetime.fromordinal(today.date().toordinal()))

        self.assertTrue(dates.days_ago(0) == today_midnight)

        self.assertTrue(dates.days_ago(100) == today_midnight + timedelta(days=-100))

        self.assertTrue(dates.days_ago(0, hour=3) == today_midnight + timedelta(hours=3))
        self.assertTrue(dates.days_ago(0, minute=3) == today_midnight + timedelta(minutes=3))
        self.assertTrue(dates.days_ago(0, second=3) == today_midnight + timedelta(seconds=3))
        self.assertTrue(dates.days_ago(0, microsecond=3) == today_midnight + timedelta(microseconds=3))
Example No. 21
    def is_same_day(self, dt):
        """
        Checks if the passed in date is the same day
        as the instance current day.

        :type dt: DateTime or datetime or str or int

        :rtype: bool
        """
        dt = pendulum.instance(dt)

        return self.to_date_string() == dt.to_date_string()
Example No. 22
    def test_add_tags(self, cmd, fake, config):
        start = pendulum.instance(fake.past_datetime(start_date='-9d'))
        end = start + pendulum.duration(hours=2)
        result = cmd('add \'{}\' \'{}\' \'{}\' --tags \'some tag,another tag\''.format(start.format('MMM D HH:mm:ss'),
                                                                                       end.format('MMM D HH:mm:ss'),
                                                                                       fake.sentence()))
        assert result.obj.exit_code == 0

        entry = TimeEntry.objects.get(result.created_id(), config=config)  # type: TimeEntry
        assert len(entry.tags) == 2
        assert 'some tag' in entry.tags
        assert 'another tag' in entry.tags
Example No. 23
    def login(self, acceptance_stale):
        midnight_sydney_time = datetime.combine(self.terms_manager.current_version.datetime, time(1, 23, 0))
        dt = pendulum.instance(midnight_sydney_time, tz='Australia/Sydney')
        dt = dt.in_timezone('UTC')

        if acceptance_stale:
            offset = timedelta(seconds=-1)
        else:
            offset = timedelta(seconds=1)

        terms_accepted_at = (dt + offset)
        self.login_as_buyer(terms_accepted_at=terms_accepted_at)
Example No. 24
def upload_ring_event(idx, ring_event):
    recording_id = ring_event["id"]
    date = pendulum.instance(ring_event["created_at"]).in_tz("America/Vancouver")
    date_path_kind = f"{PATH_BASE}{date.date()}/{ring_event['kind']}/"
    make_directory_if_not_exists(date_path_kind)
    date_path_kind_id = f"{date_path_kind}{date.hour}-{date.minute}-{recording_id}.mp4"
    print(f"{idx}: {date_path_kind_id}")
    if not Path(date_path_kind_id).is_file():
        print("Downloading")
        doorbell.recording_download(recording_id, date_path_kind_id)
    else:
        print("Already Present")
Example No. 25
    def is_birthday(self, dt=None):
        """
        Check if it's the birthday.
        Compares the day/month values of the two dates.

        :rtype: bool
        """
        if dt is None:
            dt = self.now(self.tz)

        instance = pendulum.instance(dt)

        return (self.month, self.day) == (instance.month, instance.day)
Example No. 26
def maybe_alerts(now):
    global current_datetime
    if dataview.alerts and not ('alerts' in settings and settings['alerts']):
        logger.warn("alerts have not been configured")
        return
    for alert in dataview.alerts:
        if alert[0].hour == now.hour and alert[0].minute == now.minute:
            alertdt = alert[0] 
            if not isinstance(alertdt, pendulum.DateTime):
                # rrule produces datetime.datetime objects
                alertdt = pendulum.instance(alertdt)
            startdt = alert[1]
            if not isinstance(startdt, pendulum.DateTime):
                # rrule produces datetime.datetime objects
                startdt = pendulum.instance(startdt)
            # when = startdt.diff_for_humans()
            if startdt >= alertdt:
                when = f"in {(startdt-alertdt).in_words()}"
            else:
                when = f"{(alertdt-startdt).in_words()} ago"
            start = format_datetime(startdt)[1]
            summary = alert[3]
            doc_id = alert[4]
            command_list = alert[2]
            item = dataview.db.get(doc_id=doc_id)
            location = item.get('l', '')
            description = item.get('d', '')
            if 'e' in command_list:
                command_list.remove('e')
                dataview.send_mail(doc_id)
            if 't' in command_list:
                command_list.remove('t')
                dataview.send_text(doc_id)
            commands = [settings['alerts'].get(x).format(start=start, when=when, summary=summary, location=location, description=description) for x in command_list]

            for command in commands:
                if command:
                    check_output(command)
Example No. 27
def convert_to_utc(value):
    """
    Returns the datetime with the default timezone attached if no timezone
    information was associated with it
    :param value: datetime
    :return: datetime with tzinfo
    """
    if not value:
        return value

    if not is_localized(value):
        value = pendulum.instance(value, TIMEZONE)

    return value.astimezone(utc)
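The same pattern written out as a self-contained sketch (the default zone and values here are illustrative stand-ins, not the module's actual TIMEZONE):

import datetime
import pendulum

DEFAULT_TZ = pendulum.timezone("Australia/Sydney")   # stand-in for the configured default zone
value = datetime.datetime(2021, 1, 1, 10, 0)         # naive wall-clock time
if value.utcoffset() is None:                        # "not localized"
    value = pendulum.instance(value, DEFAULT_TZ)
value_utc = value.astimezone(pendulum.timezone("UTC"))
assert value_utc.hour == 23 and value_utc.day == 31  # 10:00 AEDT == 23:00 UTC the day before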
Example No. 28
def test_last_minute():
    def run(now):
        assert pycron.is_now('* * * * *', now)
        assert pycron.is_now('59 * * * *', now)
        assert pycron.is_now('*/1 * * * *', now)
        # Issue 14
        assert pycron.is_now('1-59/2 * * * *', now) is True

    now = datetime(2015, 6, 18, 0, 59)
    run(now)
    run(now.replace(tzinfo=utc))
    run(pendulum.instance(now))
    run(arrow.get(now))
    run(udatetime.from_string(now.isoformat()))
    run(Delorean(datetime=now, timezone='UTC').datetime)
Example No. 29
def pen_from_fmt(s, z='local'):
    """
    >>> pen_from_fmt("20120622T0000")
    Date(2012, 6, 22)
    """
    if not isinstance(s, str):
        return pendulum.instance(s)
    if len(s) == 8:
        dt = pendulum.from_format(s, "YYYYMMDD", z)
        return dt.date()
    else:
        dt = pendulum.from_format(s, "YYYYMMDDTHHmm", z)
        if z in ['local', 'Factory'] and dt.hour == dt.minute == 0:
            dt = dt.date()
        return dt
Example No. 30
def test_day_names():
    def run(now):
        assert pycron.is_now('* * * * *', now)
        assert pycron.is_now('* * * * thu', now)
        assert pycron.is_now('* * * * thursday', now)
        assert pycron.is_now('* * * * */thu', now)
        assert pycron.is_now('* * * * */thursday', now)
        assert pycron.is_now('* * * * sun,wed,thu', now)
        assert pycron.is_now('* * * * sunday,wednesday,thursday', now)
        assert pycron.is_now('* * * * wed', now) is False
        assert pycron.is_now('* * * * wednesday', now) is False
        assert pycron.is_now('* * * * */wed', now) is False
        assert pycron.is_now('* * * * */wednesday', now) is False
        assert pycron.is_now('* * * * sun,wed,sat', now) is False
        assert pycron.is_now('* * * * sunday,wednesday,saturday', now) is False
        assert pycron.DOW_CHOICES[now.isoweekday()][1] == 'thursday'
        assert pycron.DOW_CHOICES[0][1] == 'sunday'
        assert pycron.is_now('* * * * sun-thu', now)
        assert pycron.is_now('* * * * sunday-thursday', now)
        assert pycron.is_now('* * * * fri-sat', now) is False
        assert pycron.is_now('* * * * friday-saturday', now) is False
        # Special cases, where the day names are more or less incorrectly set...
        assert pycron.is_now('* * * * thu-sun', now)
        assert pycron.is_now('* * * * thursday-sunday', now)
        assert pycron.is_now('* * * * wed-sun', now)
        assert pycron.is_now('* * * * wednesday-sunday', now)
        assert pycron.is_now('* * * * wed-mon', now)
        assert pycron.is_now('* * * * wednesday-monday', now)
        assert pycron.is_now('* * * * fri-sun', now) is False
        assert pycron.is_now('* * * * friday-sunday', now) is False
        assert pycron.is_now('* * * * fri-wed', now) is False
        assert pycron.is_now('* * * * friday-wednesday', now) is False

        # Test day matching for dividers
        assert pycron.is_now('* * * * monday-sunday/3', now) is False
        assert pycron.is_now('* * * * mon-sun/3', now) is False
        assert pycron.is_now('* * * * tuesday-sunday/2', now) is False
        assert pycron.is_now('* * * * tue-sun/2', now) is False

    now = datetime(2015, 6, 18, 16, 7)
    run(now)
    run(now.replace(tzinfo=utc))
    run(pendulum.instance(now))
    run(arrow.get(now))
    run(udatetime.from_string(now.isoformat()))
    run(Delorean(datetime=now, timezone='UTC').datetime)
Example No. 31
    def next_run(self):
        if not self.enabled:
            return

        instance = croniter(self.cron, start_time=self.last_run or self.created, ret_type=pendulum.DateTime)
        return pendulum.instance(next(instance))
Example No. 32
 def subtract(self, **kwargs):
     """"Returns a new MayaDT object with the given offsets."""
     return self.from_datetime(pendulum.instance(self.datetime()).subtract(**kwargs))
Example No. 33
def datetime_to_pendulum(dt: datetime.datetime) -> pendulum.DateTime:

    return pendulum.instance(dt).in_tz(app.config["TZ"])
Example No. 34
 def now(self):
     return pendulum.instance(self._scope['emt.now'])
Example No. 35
 def build_fhir_birthdate(cls, patient):
     try:
         return instance(patient.name.dob).to_iso8601_string()
     except:
         return None
Example No. 36
def _parse(text, **options):
    """
    Parses a string with the given options.

    :param text: The string to parse.
    :type text: str

    :rtype: mixed
    """
    # Handling special cases
    if text == 'now':
        return pendulum.now()

    parsed = base_parse(text, **options)

    if isinstance(parsed, datetime.datetime):
        return pendulum.datetime(
            parsed.year, parsed.month, parsed.day,
            parsed.hour, parsed.minute, parsed.second, parsed.microsecond,
            tz=parsed.tzinfo or options.get('tz', UTC)
        )

    if isinstance(parsed, datetime.date):
        return pendulum.date(parsed.year, parsed.month, parsed.day)

    if isinstance(parsed, datetime.time):
        return pendulum.time(
            parsed.hour, parsed.minute, parsed.second, parsed.microsecond
        )

    if isinstance(parsed, _Interval):
        if parsed.duration is not None:
            duration = parsed.duration

            if parsed.start is not None:
                dt = pendulum.instance(parsed.start, tz=options.get('tz', UTC))

                return pendulum.period(
                    dt, dt.add(
                        years=duration.years, months=duration.months,
                        weeks=duration.weeks, days=duration.remaining_days,
                        hours=duration.hours, minutes=duration.minutes,
                        seconds=duration.remaining_seconds,
                        microseconds=duration.microseconds
                    )
                )

            dt = pendulum.instance(parsed.end, tz=options.get('tz', UTC))

            return pendulum.period(
                dt.subtract(
                    years=duration.years, months=duration.months,
                    weeks=duration.weeks, days=duration.remaining_days,
                    hours=duration.hours, minutes=duration.minutes,
                    seconds=duration.remaining_seconds,
                    microseconds=duration.microseconds
                ),
                dt
            )

        return pendulum.period(
            pendulum.instance(parsed.start, tz=options.get('tz', UTC)),
            pendulum.instance(parsed.end, tz=options.get('tz', UTC))
        )

    if CDuration and isinstance(parsed, CDuration):
        return pendulum.duration(
            years=parsed.years, months=parsed.months, weeks=parsed.weeks, days=parsed.days,
            hours=parsed.hours, minutes=parsed.minutes, seconds=parsed.seconds,
            microseconds=parsed.microseconds
        )

    return parsed
Example No. 37
def schedule_execution_time_iterator(start_timestamp, cron_schedule,
                                     execution_timezone):
    check.float_param(start_timestamp, "start_timestamp")
    check.str_param(cron_schedule, "cron_schedule")
    check.opt_str_param(execution_timezone, "execution_timezone")
    timezone_str = execution_timezone if execution_timezone else pendulum.now(
    ).timezone.name

    start_datetime = pendulum.from_timestamp(start_timestamp, tz=timezone_str)

    date_iter = croniter(cron_schedule, start_datetime)

    # Go back one iteration so that the next iteration is the first time that is >= start_datetime
    # and matches the cron schedule
    next_date = to_timezone(
        pendulum.instance(date_iter.get_prev(datetime.datetime)), timezone_str)

    cron_parts = cron_schedule.split(" ")

    check.invariant(len(cron_parts) == 5)

    is_numeric = [part.isnumeric() for part in cron_parts]

    delta_fn = None

    # Special-case common intervals (hourly/daily/weekly/monthly) since croniter iteration can be
    # much slower than adding a fixed interval
    if cron_schedule.endswith(" * *") and all(is_numeric[0:3]):  # monthly
        delta_fn = lambda d, num: d.add(months=num)
        should_hour_change = False
    elif (all(is_numeric[0:2]) and is_numeric[4] and cron_parts[2] == "*"
          and cron_parts[3] == "*"):  # weekly
        delta_fn = lambda d, num: d.add(weeks=num)
        should_hour_change = False
    elif all(is_numeric[0:2]) and cron_schedule.endswith(" * * *"):  # daily
        delta_fn = lambda d, num: d.add(days=num)
        should_hour_change = False
    elif is_numeric[0] and cron_schedule.endswith(" * * * *"):  # hourly
        delta_fn = lambda d, num: d.add(hours=num)
        should_hour_change = True

    while True:
        if delta_fn:
            curr_hour = next_date.hour

            next_date_cand = delta_fn(next_date, 1)
            new_hour = next_date_cand.hour

            if not should_hour_change and new_hour != curr_hour:
                # If the hour changes during a daily/weekly/monthly schedule, it
                # indicates that the time shifted due to falling in a time that doesn't
                # exist due to a DST transition (for example, 2:30AM CST on 3/10/2019).
                # Instead, execute at the first time that does exist (the start of the hour),
                # but return to the original hour for all subsequent executions so that the
                # hour doesn't stay different permanently.

                check.invariant(new_hour == curr_hour + 1)
                yield next_date_cand.replace(minute=0)

                next_date_cand = delta_fn(next_date, 2)
                check.invariant(next_date_cand.hour == curr_hour)

            next_date = next_date_cand
        else:
            next_date = to_timezone(
                pendulum.instance(date_iter.get_next(datetime.datetime)),
                timezone_str)

        yield next_date
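Stripped of the dagster helpers, the core pattern above is: iterate croniter from an aware start time and normalize each result with pendulum. A hedged sketch assuming croniter is installed:

import datetime
import pendulum
from croniter import croniter

start = pendulum.datetime(2021, 3, 1, tz="US/Central")
date_iter = croniter("0 2 * * *", start)                    # daily at 02:00
first = pendulum.instance(date_iter.get_next(datetime.datetime)).in_tz("US/Central")
assert first == pendulum.datetime(2021, 3, 1, 2, 0, tz="US/Central")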
Example No. 38
def test_partitions_for_hourly_schedule_decorators_with_timezone():
    with instance_for_test() as instance:
        with pendulum.test(
                pendulum.create(2019, 2, 27, 0, 1, 1, tz="US/Central")):
            start_date = datetime(year=2019, month=1, day=1)

            # You can specify a start date with no timezone and it will be assumed to be
            # in the execution timezone

            @hourly_schedule(
                pipeline_name="foo_pipeline",
                start_date=start_date,
                execution_time=time(hour=0, minute=25),
                execution_timezone="US/Central",
            )
            def hourly_central_schedule(hourly_time):
                return {"hourly_time": hourly_time.isoformat()}

            assert hourly_central_schedule.execution_timezone == "US/Central"

            _check_partitions(
                hourly_central_schedule,
                HOURS_UNTIL_FEBRUARY_27,
                pendulum.instance(start_date, tz="US/Central"),
                DEFAULT_HOURLY_FORMAT_WITH_TIMEZONE,
                relativedelta(hours=1),
            )

            valid_time = pendulum.create(year=2019,
                                         month=1,
                                         day=27,
                                         hour=1,
                                         minute=25,
                                         tz="US/Central")
            context_with_valid_time = ScheduleExecutionContext(
                instance, valid_time)

            execution_data = hourly_central_schedule.get_execution_data(
                context_with_valid_time)
            assert len(execution_data) == 1
            assert isinstance(execution_data[0], RunRequest)
            assert execution_data[0].run_config == {
                "hourly_time":
                pendulum.create(year=2019,
                                month=1,
                                day=27,
                                hour=0,
                                tz="US/Central").isoformat()
            }

            # You can specify a start date in a different timezone and it will be transformed into the
            # execution timezone
            start_date_with_different_timezone = pendulum.create(
                2019, 1, 1, 0, tz="US/Pacific")

            @hourly_schedule(
                pipeline_name="foo_pipeline",
                start_date=start_date_with_different_timezone,
                execution_time=time(hour=0, minute=25),
                execution_timezone="US/Central",
            )
            def hourly_central_schedule_with_timezone_start_time(hourly_time):
                return {"hourly_time": hourly_time.isoformat()}

            _check_partitions(
                hourly_central_schedule_with_timezone_start_time,
                HOURS_UNTIL_FEBRUARY_27 -
                2,  # start date is two hours later since it's in PT
                start_date_with_different_timezone.in_tz("US/Central"),
                DEFAULT_HOURLY_FORMAT_WITH_TIMEZONE,
                relativedelta(hours=1),
            )
Example No. 39
    def _execute(self, session=None):
        """
        Initializes all components required to run a dag for a specified date range and
        calls helper method to execute the tasks.
        """
        ti_status = BackfillJob._DagRunTaskStatus()

        start_date = self.bf_start_date

        # Get DagRun schedule between the start/end dates, which will turn into dag runs.
        dagrun_start_date = timezone.coerce_datetime(start_date)
        if self.bf_end_date is None:
            dagrun_end_date = pendulum.now(timezone.utc)
        else:
            dagrun_end_date = pendulum.instance(self.bf_end_date)
        dagrun_infos = list(
            self.dag.iter_dagrun_infos_between(dagrun_start_date,
                                               dagrun_end_date))
        if self.run_backwards:
            tasks_that_depend_on_past = [
                t.task_id for t in self.dag.task_dict.values()
                if t.depends_on_past
            ]
            if tasks_that_depend_on_past:
                raise AirflowException(
                    f'You cannot backfill backwards because one or more '
                    f'tasks depend_on_past: {",".join(tasks_that_depend_on_past)}'
                )
            dagrun_infos = dagrun_infos[::-1]

        if not dagrun_infos:
            if not self.run_at_least_once:
                self.log.info(
                    "No run dates were found for the given dates and dag interval."
                )
                return
            dagrun_infos = [
                DagRunInfo.interval(dagrun_start_date, dagrun_end_date)
            ]

        # picklin'
        pickle_id = None

        if not self.donot_pickle and self.executor_class not in (
                executor_constants.LOCAL_EXECUTOR,
                executor_constants.SEQUENTIAL_EXECUTOR,
                executor_constants.DASK_EXECUTOR,
        ):
            pickle = DagPickle(self.dag)
            session.add(pickle)
            session.commit()
            pickle_id = pickle.id

        executor = self.executor
        executor.job_id = "backfill"
        executor.start()

        ti_status.total_runs = len(dagrun_infos)  # total dag runs in backfill

        try:
            remaining_dates = ti_status.total_runs
            while remaining_dates > 0:
                dagrun_infos_to_process = [
                    dagrun_info for dagrun_info in dagrun_infos
                    if dagrun_info.logical_date not in
                    ti_status.executed_dag_run_dates
                ]
                self._execute_dagruns(
                    dagrun_infos=dagrun_infos_to_process,
                    ti_status=ti_status,
                    executor=executor,
                    pickle_id=pickle_id,
                    start_date=start_date,
                    session=session,
                )

                remaining_dates = ti_status.total_runs - len(
                    ti_status.executed_dag_run_dates)
                err = self._collect_errors(ti_status=ti_status,
                                           session=session)
                if err:
                    raise BackfillUnfinished(err, ti_status)

                if remaining_dates > 0:
                    self.log.info(
                        "max_active_runs limit for dag %s has been reached "
                        " - waiting for other dag runs to finish",
                        self.dag_id,
                    )
                    time.sleep(self.delay_on_limit_secs)
        except (KeyboardInterrupt, SystemExit):
            self.log.warning("Backfill terminated by user.")

            # TODO: we will need to terminate running task instances and set the
            # state to failed.
            self._set_unfinished_dag_runs_to_failed(ti_status.active_runs)
        finally:
            session.commit()
            executor.end()

        self.log.info("Backfill done. Exiting.")
Example No. 40
def test_instance_timezone_aware_datetime_any_tzinfo():
    dt = datetime(2016, 8, 7, 12, 34, 56, tzinfo=tz.gettz("Europe/Paris"))
    now = pendulum.instance(dt)
    assert now.timezone_name == "+02:00"
Example No. 41
def test_instance_timezone_aware_datetime_pytz():
    now = pendulum.instance(datetime.now(pytz.timezone("Europe/Paris")))
    assert now.timezone_name == "Europe/Paris"
Example No. 42
def test_instance_naive_datetime_defaults_to_utc():
    now = pendulum.instance(datetime.now())
    assert now.timezone_name == "UTC"
Example No. 43
def launch_scheduled_runs_for_schedule(
    instance,
    logger,
    schedule_state,
    repo_location,
    end_datetime_utc,
    max_catchup_runs,
    debug_crash_flags=None,
):
    check.inst_param(instance, "instance", DagsterInstance)
    check.inst_param(schedule_state, "schedule_state", JobState)
    check.inst_param(end_datetime_utc, "end_datetime_utc", datetime.datetime)
    check.inst_param(repo_location, "repo_location", RepositoryLocation)

    latest_tick = instance.get_latest_job_tick(schedule_state.job_origin_id)

    if not latest_tick:
        start_timestamp_utc = schedule_state.job_specific_data.start_timestamp
    elif latest_tick.status == JobTickStatus.STARTED:
        # Scheduler was interrupted while performing this tick, re-do it
        start_timestamp_utc = latest_tick.timestamp
    else:
        start_timestamp_utc = latest_tick.timestamp + 1

    schedule_name = schedule_state.job_name
    repo_name = schedule_state.origin.external_repository_origin.repository_name

    check.invariant(
        repo_location.has_repository(repo_name),
        "Could not find repository {repo_name} in location {repo_location_name}"
        .format(repo_name=repo_name, repo_location_name=repo_location.name),
    )

    external_repo = repo_location.get_repository(repo_name)
    external_schedule = external_repo.get_external_schedule(schedule_name)

    timezone_str = external_schedule.execution_timezone
    if not timezone_str:
        timezone_str = pendulum.now().timezone.name
        logger.warn(
            f"Using the system timezone, {timezone_str}, for {external_schedule.name} as it did not specify "
            "an execution_timezone in its definition. Specifying an execution_timezone "
            "on all schedules will be required in the dagster 0.11.0 release.")

    end_datetime = end_datetime_utc.in_tz(timezone_str)

    tick_times = []
    for next_time in external_schedule.execution_time_iterator(
            start_timestamp_utc):
        if next_time.timestamp() > end_datetime.timestamp():
            break

        tick_times.append(next_time)

    if not tick_times:
        logger.info(f"No new runs for {schedule_name}")
        return

    if not external_schedule.partition_set_name and len(tick_times) > 1:
        logger.warning(
            f"{schedule_name} has no partition set, so not trying to catch up")
        tick_times = tick_times[-1:]
    elif len(tick_times) > max_catchup_runs:
        logger.warning(
            f"{schedule_name} has fallen behind, only launching {max_catchup_runs} runs"
        )
        tick_times = tick_times[-max_catchup_runs:]

    if len(tick_times) == 1:
        tick_time = tick_times[0].strftime(_SCHEDULER_DATETIME_FORMAT)
        logger.info(f"Evaluating schedule `{schedule_name}` at {tick_time}")
    else:
        times = ", ".join(
            [time.strftime(_SCHEDULER_DATETIME_FORMAT) for time in tick_times])
        logger.info(
            f"Evaluating schedule `{schedule_name}` at the following times: {times}"
        )

    for tick_time in tick_times:
        schedule_time = pendulum.instance(tick_time).in_tz(timezone_str)
        schedule_timestamp = schedule_time.timestamp()

        if latest_tick and latest_tick.timestamp == schedule_timestamp:
            tick = latest_tick
            logger.info("Resuming previously interrupted schedule execution")

        else:
            tick = instance.create_job_tick(
                JobTickData(
                    job_origin_id=external_schedule.get_external_origin_id(),
                    job_name=schedule_name,
                    job_type=JobType.SCHEDULE,
                    status=JobTickStatus.STARTED,
                    timestamp=schedule_timestamp,
                ))

            _check_for_debug_crash(debug_crash_flags, "TICK_CREATED")

        with _ScheduleLaunchContext(tick, instance, logger) as tick_context:

            _check_for_debug_crash(debug_crash_flags, "TICK_HELD")

            _schedule_runs_at_time(
                instance,
                logger,
                repo_location,
                external_repo,
                external_schedule,
                schedule_time,
                tick_context,
                debug_crash_flags,
            )
Example No. 44
def process_bidstacks(start_date, end_date, timeseries={}):
    """ Derives competition indicators from bids submitted by generators.  """

    print("Processing Bidstacks")
    # Get bidstacks for every time period.
    query = BidStack.objects(trading_period__gte=start_date,
                             trading_period__lte=end_date).fields(
                                 trading_period=1, id=1)
    print('Retrieved Bidstacks')

    i = 0

    for bidstack in query:
        # Get the trading period label.
        dt = pendulum.instance(bidstack.trading_period)

        # if(dt.hour == 12 and dt.minute == 0 and dt.day %5 == 0):
        # Filter based on hour so we don't process tonnes of them
        if (dt.hour in RESEARCH_HOURS and dt.minute == 0):
            timeseries[dt] = {} if not dt in timeseries else timeseries[dt]
            print("Bid Analysis", dt)
            # Grab the bids and order in economic dispatch order.
            bidstack = BidStack.objects.get(id=bidstack.id)
            # simple_bids, srmc_bids, lrmc_bids = settle(bidstack)
            # print("Got bid stacks.")

            # Grab demand data.
            demand_req = Demand.objects(date_time=dt)
            regional_demand = {d.region: d.demand for d in demand_req}
            total_demand = int(
                float(
                    sum([
                        regional_demand[region] for region in regional_demand
                    ])))

            # Grab price data
            price_req = Price.objects(date_time=dt, price_type='AEMO_SPOT')
            regional_prices = {p.region: p.price for p in price_req}
            # print(regional_prices)
            weighted_average_price = float(
                sum([
                    regional_prices[p] * regional_demand[p]
                    for p in regional_prices
                ])) / float(total_demand)
            # print(weighted_average_price)

            # Get a dict of all the residual supply indices, augmented with max network flows.
            network_residual_supply_indices = get_network_extended_residual_supply_indices(
                bidstack, regional_demand)

            timeseries[dt]['weighted_average_price'] = weighted_average_price

            timeseries[dt]['demand_ALL'] = total_demand
            timeseries[dt]['datetime'] = dt
            timeseries[dt]['price'] = regional_prices

            for key in regional_demand:
                timeseries[dt]['demand_' + key] = regional_demand[key]
            for key in regional_prices:
                timeseries[dt]['price_' + key] = regional_prices[key]

            for state in STATES:
                # Get a dict of all the bid-based market shares for this time period
                generator_bid_shares = get_generator_bid_market_shares(
                    bidstack, state)

                # Get a dict of all the residual supply indices for this time period
                residual_supply_indices = get_residual_supply_indices(
                    bidstack, total_demand, state)

                # Get a dict of all the pivotal supplier indices for this time period.
                pivotal_supplier_indices = get_pivotal_supplier_indices(
                    bidstack, total_demand, state)

                if state != "ALL":
                    # Get a dict of all firm weighted offers
                    firm_weighted_offer_prices = get_firm_volume_weighted_offer_price(
                        bidstack, state)
                    for firm in firm_weighted_offer_prices:
                        timeseries[dt][
                            firm.lower() + '_weighted_offer_price_' +
                            state] = firm_weighted_offer_prices[firm]

                    # Calculate average NERSI for all firms in the state.
                    for firm in network_residual_supply_indices[state]:
                        timeseries[dt][
                            firm.lower() + '_nersi_' +
                            state] = network_residual_supply_indices[state][
                                firm]
                    timeseries[dt]['average_nersi_' + state] = float(
                        sum([
                            network_residual_supply_indices[state][firm]
                            for firm in network_residual_supply_indices[state]
                        ])) / float(len(
                            network_residual_supply_indices[state]))
                    timeseries[dt]['minimum_nersi_' + state] = min([
                        network_residual_supply_indices[state][firm]
                        for firm in network_residual_supply_indices[state]
                    ])

                # Record results.
                timeseries[dt]['hhi_bids_' +
                               state] = get_hhi(generator_bid_shares)
                timeseries[dt]['entropy_bids_' +
                               state] = get_entropy(generator_bid_shares)
                timeseries[dt]['four_firm_concentration_ratio_bids_' +
                               state] = get_four_firm_concentration_ratio(
                                   generator_bid_shares)
                timeseries[dt]['average_rsi_' + state] = float(
                    sum([
                        residual_supply_indices[firm]
                        for firm in residual_supply_indices
                    ])) / float(len([f for f in residual_supply_indices]))
                timeseries[dt]['minimum_rsi_' + state] = min([
                    residual_supply_indices[firm]
                    for firm in residual_supply_indices
                ])
                timeseries[dt]['sum_psi_' + state] = sum([
                    pivotal_supplier_indices[firm]
                    for firm in pivotal_supplier_indices
                ])

                for firm in residual_supply_indices:
                    timeseries[dt][firm.lower() + '_rsi_' +
                                   state] = residual_supply_indices[firm]
                for firm in pivotal_supplier_indices:
                    timeseries[dt][firm.lower() + '_psi_' +
                                   state] = pivotal_supplier_indices[firm]

    print("Finished Processing Bidstack")
    return timeseries
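
The loop above relies on helpers such as get_hhi, get_entropy and the RSI/PSI getters that are defined elsewhere in the module. As a rough, self-contained illustration of the two concentration metrics only (not the module's actual definitions), assuming market shares are fractions summing to 1:

import math

def get_hhi(shares):
    # Herfindahl-Hirschman Index: sum of squared market shares
    # (often reported on a 0-10000 scale when shares are percentages).
    return sum(s ** 2 for s in shares.values())

def get_entropy(shares):
    # Shannon entropy of the share distribution; zero shares are skipped.
    return -sum(s * math.log(s) for s in shares.values() if s > 0)

shares = {'FIRM_A': 0.5, 'FIRM_B': 0.3, 'FIRM_C': 0.2}
print(get_hhi(shares))      # 0.38
print(get_entropy(shares))  # ~1.03
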
Exemplo n.º 45
0
 def _serialize(self, value, attr, obj, **kwargs):  # type: ignore
     if value is not None:
         dt = pendulum.instance(value)
         return dict(dt=dt.naive().to_iso8601_string(), tz=dt.tzinfo.name)
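
Only the serialize half of this marshmallow-style field is shown. A plausible counterpart (not part of the original, with pendulum imported as in the module above) that reverses the dt/tz dict produced by _serialize could look like:

def _deserialize(self, value, attr, data, **kwargs):  # type: ignore
    # Re-attach the stored zone name to the naive ISO 8601 string.
    if value is not None:
        return pendulum.parse(value['dt'], tz=value['tz'])
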
Exemplo n.º 46
0
 def fromtimestamp(cls, t, tz=None):
     return pendulum.instance(datetime.datetime.fromtimestamp(t, tz=tz),
                              tz=tz)
Exemplo n.º 47
0
 def build_fhir_deceased_datetime(cls, patient):
     # Return the date of death as an ISO 8601 string, or None if it is
     # missing or cannot be converted.
     try:
         return instance(patient.dod).to_iso8601_string()
     except:
         return None
Exemplo n.º 48
0
 def utcfromtimestamp(cls, t):
     return pendulum.instance(datetime.datetime.utcfromtimestamp(t),
                              tz=None)
Exemplo n.º 49
0
 async def on_reminder_timer_complete(self, timer: Timer):
     channel = self.bot.get_channel(timer.kwargs['channel_id'])
     member = self.bot.get_guild(timer.kwargs['guild_id']).get_member(timer.kwargs['author_id'])
     delta = (pendulum.instance(timer.expires_at) - pendulum.instance(timer.created_at)).in_words()
     await channel.send(f"{member.mention}, {delta} ago:\n{timer.kwargs['text']}")
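
The delta formatting used above, in isolation: subtracting two pendulum instances gives a period whose in_words() renders the span in plain English. The timestamps below are illustrative:

import datetime
import pendulum

created = pendulum.instance(datetime.datetime(2021, 1, 1, 12, 0, 0))
expires = pendulum.instance(datetime.datetime(2021, 1, 2, 15, 30, 0))
print((expires - created).in_words())  # e.g. "1 day 3 hours 30 minutes"
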
Exemplo n.º 50
0
 def fromordinal(cls, n):
     return pendulum.instance(datetime.datetime.fromordinal(n), tz=None)
Exemplo n.º 51
0
def serialize_fmt(dt):
    p_dt = pendulum.instance(dt)
    return dict(dt=p_dt.naive().to_iso8601_string(), tz=p_dt.tzinfo.name)
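
A round-trip check for serialize_fmt; the deserialize_fmt below is a hypothetical inverse that simply re-attaches the stored zone name:

import pendulum

def deserialize_fmt(d):
    # Parse the naive ISO string and localize it to the stored zone.
    return pendulum.parse(d['dt'], tz=d['tz'])

original = pendulum.datetime(2020, 6, 1, 9, 30, tz='Australia/Sydney')
assert deserialize_fmt(serialize_fmt(original)) == original
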
Exemplo n.º 52
0
 def now(self):
     return pendulum.instance(self.environ['wpp.now'], 'UTC')
Exemplo n.º 53
0
def human_date(date: datetime) -> str:
    return pendulum.instance(date).diff_for_humans()
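
Usage sketch: pendulum.instance treats a naive datetime as UTC, so passing a UTC timestamp yields the expected wording:

from datetime import datetime, timedelta

print(human_date(datetime.utcnow() - timedelta(hours=3)))  # e.g. "3 hours ago"
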
Exemplo n.º 54
0
 def _sys_now(self):
     return pendulum.instance(datetime.utcnow(), 'UTC')
Exemplo n.º 55
0
 def combine(cls, date, time):
     return pendulum.instance(datetime.datetime.combine(date, time),
                              tz=None)
Exemplo n.º 56
0
File: d3a_jobs.py Project: xg86/d3a
def start(scenario, settings, events):
    logging.getLogger().setLevel(logging.ERROR)

    job = get_current_job()
    job.save_meta()

    try:
        if settings is None:
            settings = {}
        else:
            settings = {
                k: v
                for k, v in settings.items() if v is not None and v != "None"
            }

        advanced_settings = settings.get('advanced_settings', None)
        if advanced_settings is not None:
            update_advanced_settings(ast.literal_eval(advanced_settings))

        if events is not None:
            events = ast.literal_eval(events)

        config_settings = {
            "start_date": (
                instance(datetime.combine(settings.get('start_date'),
                                          datetime.min.time()))
                if 'start_date' in settings else GlobalConfig.start_date),
            "sim_duration": (
                duration(days=settings['duration'].days)
                if 'duration' in settings else GlobalConfig.sim_duration),
            "slot_length": (
                duration(seconds=settings['slot_length'].seconds)
                if 'slot_length' in settings else GlobalConfig.slot_length),
            "tick_length": (
                duration(seconds=settings['tick_length'].seconds)
                if 'tick_length' in settings else GlobalConfig.tick_length),
            "market_maker_rate": settings.get(
                'market_maker_rate',
                str(ConstSettings.GeneralSettings.DEFAULT_MARKET_MAKER_RATE)),
            "market_count": settings.get('market_count',
                                         GlobalConfig.market_count),
            "cloud_coverage": settings.get('cloud_coverage',
                                           GlobalConfig.cloud_coverage),
            "pv_user_profile": settings.get('pv_user_profile', None),
            "iaa_fee": settings.get('iaa_fee', GlobalConfig.iaa_fee),
            "max_panel_power_W": settings.get(
                'max_panel_power_W',
                ConstSettings.PVSettings.MAX_PANEL_OUTPUT_W)
        }

        validate_global_settings(config_settings)

        config = SimulationConfig(**config_settings)

        spot_market_type = settings.get('spot_market_type', None)
        if spot_market_type is not None:
            ConstSettings.IAASettings.MARKET_TYPE = spot_market_type

        if scenario is None:
            scenario_name = "default_2a"
        elif scenario in available_simulation_scenarios:
            scenario_name = scenario
        else:
            scenario_name = 'json_arg'
            config.area = scenario

        kwargs = {
            "no_export": True,
            "pricing_scheme": 0,
            "seed": settings.get('random_seed', 0)
        }

        run_simulation(setup_module_name=scenario_name,
                       simulation_config=config,
                       simulation_events=events,
                       slowdown=settings.get('slowdown', 0),
                       redis_job_id=job.id,
                       kwargs=kwargs)
    except Exception:
        import traceback
        from d3a.d3a_core.redis_connections.redis_communication import publish_job_error_output
        publish_job_error_output(job.id, traceback.format_exc())
        logging.getLogger().error(
            f"Error on jobId {job.id}: {traceback.format_exc()}")
Exemplo n.º 57
0
 def astimezone(self, tz=None):
     return pendulum.instance(super(DateTime, self).astimezone(tz))
Exemplo n.º 58
0
 def strptime(cls, time, fmt):  # type: (str, str) -> DateTime
     return pendulum.instance(datetime.datetime.strptime(time, fmt))
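
Usage sketch, assuming the enclosing class is the DateTime wrapper named in the type hint: parsing is delegated to the stdlib, and the wrapped result supports pendulum arithmetic (naive input defaults to UTC):

dt = DateTime.strptime("2021-07-04 18:00", "%Y-%m-%d %H:%M")
print(dt.add(hours=6))  # e.g. 2021-07-05T00:00:00+00:00
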
Exemplo n.º 59
0
© 2018 Kyber, licensed under MIT
modified by https://github.com/EJH2
© 2019 EJH2
"""

import base64
import html
import re

import demoji
import pendulum

from api.emoji import EMOJI_LIST, EMOJI_REGEX, UNICODE_LIST

if not demoji.last_downloaded_timestamp() or pendulum.now() > \
        (pendulum.instance(demoji.last_downloaded_timestamp()).add(days=7)):
    demoji.download_codes()

demoji.set_emoji_pattern()
# This is taken from the Demoji module, because they decided to make the emoji pattern private
esc = (re.escape(c) for c in sorted(dict(demoji.stream_unicodeorg_emojifile(demoji.URL)), key=len, reverse=True))
UNICODE_EMOJI_PAT = re.compile(r"|".join(esc)).pattern
ESCAPED_EMOJI_PAT = fr'\\({UNICODE_EMOJI_PAT})'


def _encode_codeblock(m):
    return f'\x1AM{base64.b64encode(m.group(1).encode()).decode()}\x1AM'


def _encode_link(m):
    encoded_1 = base64.b64encode(m.group(1).encode()).decode()
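
The weekly refresh guard near the top of this example, isolated into a helper (the names below are illustrative, not part of the original): re-download when there is no cached timestamp or the cache is more than seven days old.

import pendulum

def cache_is_stale(last_downloaded, max_age_days=7):
    # No cached copy yet, or the cached copy has outlived its shelf life.
    if last_downloaded is None:
        return True
    return pendulum.now() > pendulum.instance(last_downloaded).add(days=max_age_days)
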
Exemplo n.º 60
0
def reply_cmds(update, bot):
    _message = update.message
    _chat_id = _message.chat.id
    _user = _message.from_user
    _reply_user = _message.reply_to_message.from_user
    _text = _message.text
    _text_array = _text.split()
    try:
        # Only bare (argument-less) reply commands are handled below.
        if len(_text_array) >= 2:
            pass
        else:
            if _text == "/log":
                answer = ""

                users = UserLogs.select().where(UserLogs.user_id == _reply_user.id,
                                                UserLogs.chat_id == _chat_id).order_by(UserLogs.date.desc()).limit(5)
                for log in users:
                    # Render each entry as "<age in words>:\n<text>".
                    date = pendulum.instance(log.date)
                    answer += str(date.diff_for_humans()) + ":\n" + str(log.text) + "\n\n"
                if answer != "":
                    update.message.reply_text(answer)
                else:
                    update.message.reply_text("Журнал пуст.")  # "The log is empty."
                return True
            elif "/info" in _text:
                user_query = User.select().where(User.user_id == _reply_user.id, User.chat_id == _chat_id).limit(1)
                if user_query.exists():
                    user_object = user_query.first()
                    # Like/dislike ratio; assumes rating_minus is never zero.
                    rating_value = float(user_object.rating_plus) / float(user_object.rating_minus)
                    # "Рейтинг" means "Rating".
                    bot.send_message(_chat_id, "Рейтинг " + get_username_or_name_sb(
                        _reply_user) + ": " + str("%.1f" % rating_value))
                return True
            elif _text in ("/sps", "/like"):
                print("{} liked {}".format(_user.first_name, _reply_user.first_name))
                if not check_rate_flood(_user.id, _reply_user.id):
                    user_query = User.select().where(User.user_id == _reply_user.id, User.chat_id == _chat_id).limit(1)
                    user_object = user_query.first()
                    if user_object:
                        user_object.rating_plus += 1
                        user_object.save()
                        bot.send_message(_chat_id, get_username_or_name(_user) + " → 🙂 → " + get_username_or_name(
                            _reply_user))
                try:
                    bot.delete_message(chat_id=_chat_id, message_id=_message.message_id)
                except Exception as e:
                    print("Permission: " + str(e))
                return True
            elif _text in ("/ban", "/dis"):
                print("{} disliked {}".format(_user.first_name, _reply_user.first_name))
                if not check_rate_flood(_user.id, _reply_user.id):
                    user_query = User.select().where(User.user_id == _reply_user.id, User.chat_id == _chat_id).limit(1)
                    user_object = user_query.first()
                    if user_object:
                        user_object.rating_minus += 1
                        user_object.save()
                        bot.send_message(_chat_id, get_username_or_name(_user) + " → 😡 → " + get_username_or_name(
                            _reply_user))
                try:
                    bot.delete_message(chat_id=_chat_id, message_id=_message.message_id)
                except Exception as e:
                    print("Permission: " + str(e))
                return True

    except Exception as e:
        update.message.reply_text("Exception: " + str(e))