Example #1
    def acquire_lock(self, key, lease_time=3600, **kwargs) -> Lock:

        lockfile = os.path.join(self.basepath, f'rssalertbot-{key}.lock')

        # create our release callback
        def release():
            self.release_lock(key)

        try:
            self.locks[key] = zc.lockfile.LockFile(lockfile, content_template='{pid};{hostname}')
            log.debug(f"Acquired lock '{key}' on {lockfile}")

        return Lock(release, expires=pendulum.now().add(seconds=lease_time))

        except zc.lockfile.LockError:

            # we've already got a lockfile here, so check its age - if it's
            # older than the lease time we can re-acquire, otherwise the lock
            # is denied
            stats = os.stat(lockfile)
            locked_at = pendulum.from_timestamp(stats.st_mtime)
            age = pendulum.now() - locked_at
            if age.in_seconds() > lease_time:
                log.debug(f"Acquired lock '{key}' on {lockfile}")
                return Lock(release, expires=pendulum.now().add(seconds=lease_time))

            # no you can't have this lock
            log.debug(f"Lock '{key}' denied")
            raise LockNotAcquired()
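Two details worth noting above: subtracting two pendulum datetimes yields a period, and `.add()` takes keyword units. A minimal, hedged sketch of the same lease-age check, assuming only a lockfile path and a lease length in seconds:

import os
import pendulum

LEASE_TIME = 3600  # seconds; hypothetical lease length

def lock_is_stale(lockfile):
    """Return True when the lockfile's mtime is older than the lease."""
    locked_at = pendulum.from_timestamp(os.stat(lockfile).st_mtime)
    age = pendulum.now() - locked_at
    return age.in_seconds() > LEASE_TIME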
Example #2
    def get_durations(log_records):
        """Calculate visit duration per day per IP"""

        timezone = cherrypy.engine.publish(
            "registry:local_timezone"
        ).pop()

        maximums = defaultdict(int)
        minimums = defaultdict(int)

        for row in log_records:
            timestamp = row["unix_timestamp"]
            formatted_timestamp = pendulum.from_timestamp(
                timestamp
            ).in_timezone(timezone).format('YYYY-MM-DD')

            lookup_key = (row["ip"], formatted_timestamp)

            if lookup_key not in maximums or timestamp > maximums[lookup_key]:
                maximums[lookup_key] = timestamp
                continue

            if lookup_key not in minimums or timestamp < minimums[lookup_key]:
                minimums[lookup_key] = timestamp

        durations = {
            lookup_key: pendulum.duration(
                seconds=(maximums[lookup_key] - minimums[lookup_key])
            )
            for lookup_key in maximums
            if minimums[lookup_key] > 0
        }

        return durations
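The per-day grouping above hinges on formatting each timestamp in the viewer's timezone before using it as a key; a small illustration, assuming the "America/New_York" zone:

import pendulum

ts = 1609459200  # 2021-01-01 00:00:00 UTC
key = pendulum.from_timestamp(ts).in_timezone("America/New_York").format("YYYY-MM-DD")
# key == '2020-12-31': the same instant falls on the previous local calendar day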
Example #3
    def save_date(self, feed, date: pendulum.DateTime):
        """
        Save the date for the current event.
        """
        # just in case someone didn't follow the type hints
        if isinstance(date, datetime.datetime):
            date = pendulum.from_timestamp(date.timestamp())

        datafile = os.path.join(self.basepath, f'last.{feed}.dat')
        with open(datafile, 'w') as f:
            f.write(str(date.in_tz('UTC')))
Example #4
def get(asset, base):
    log.debug('checking feeds for %s/%s at %s' % (asset, base, NAME))
    headers = {'content-type': 'application/json',
               'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:22.0) Gecko/20100101 Firefox/22.0'}

    data = requests.get('http://api.zb.com/data/v1/ticker?market={}_{}'.format(asset.lower(), base.lower()),
                        headers=headers).json()
    t = data['ticker']
    return FeedPrice(float(t['last']), asset, base,
                     volume=float(t['vol']),
                     last_updated=pendulum.from_timestamp(float(data['date']) / 1000),
                     provider=NAME)
Example #5
    def add(self, url=None, title=None, comments=None, tags=None, added=None):
        """Store a bookmarked URL and its metadata."""

        bookmark = self.find(url=url)

        if bookmark:
            sql = """UPDATE bookmarks SET title=?, tags=?, comments=?,
            updated=CURRENT_TIMESTAMP, deleted=NULL WHERE rowid=?"""

            values = (
                title,
                tags,
                comments,
                bookmark["rowid"]
            )

            self._update(sql, [values])
            return True

        sql = """INSERT INTO bookmarks
        (domain, url, added, added_date, title, tags, comments)
        VALUES (?, ?, ?, ?, ?, ?, ?)"""

        domain_and_url = self.domain_and_url(url)

        if added and added.isnumeric():
            numeric_timestamp = int(added)
            add_date = pendulum.from_timestamp(numeric_timestamp)
        else:
            add_date = pendulum.now()

        values = (
            domain_and_url[0],
            domain_and_url[1],
            add_date.format('YYYY-MM-DD HH:mm:ss'),
            add_date.to_date_string(),
            title,
            tags,
            comments
        )

        self._insert(sql, [values])

        cherrypy.engine.publish(
            "scheduler:add",
            2,
            "bookmarks:add:fulltext"
        )

        return True
Example #6
    def get_deltas(log_records):
        """Calculate elapsed time intervals between records"""

        deltas = []
        for index, row in enumerate(log_records):
            try:
                current_timestamp = pendulum.from_timestamp(
                    row["unix_timestamp"]
                )

                previous_timestamp = pendulum.from_timestamp(
                    log_records[index + 1]["unix_timestamp"]
                )

                delta = current_timestamp.diff_for_humans(
                    previous_timestamp,
                    True
                )
            except (KeyError, IndexError):
                delta = 0

            deltas.append(delta)

        return deltas
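Note that `diff_for_humans(other, True)` passes `absolute=True` positionally, so the delta string drops the "before"/"after" qualifier; roughly:

import pendulum

a = pendulum.from_timestamp(1600000000)
b = pendulum.from_timestamp(1600003600)
print(a.diff_for_humans(b))        # e.g. '1 hour before'
print(a.diff_for_humans(b, True))  # e.g. '1 hour'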
Example #7
 def _get_dayname(ts, idx, *, tz=None):
     """
     Returns the day name given a Unix timestamp, day index and (optionally) a timezone.
     """
     if pendulum is not None:
         p = pendulum.from_timestamp(ts, tz=tz)
         return p.format('dddd')
     else:
         # Fallback
         if idx == 0:
             return 'Today'
         elif idx == 1:
             return 'Tomorrow'
         else:
             return 'Day_%d' % idx
Example #8
    def last_run(self) -> pendulum.Pendulum:
        from share.models import CeleryProviderTask

        if self._last_run is not None:
            if isinstance(self._last_run, int):
                last_run = pendulum.from_timestamp(self._last_run)
            else:
                last_run = pendulum.parse(self._last_run)
        else:
            logger.debug('Finding last successful job')
            last_run = CeleryProviderTask.objects.filter(
                app_label=self.config.label,
                status=CeleryProviderTask.STATUS.succeeded,
            ).order_by(
                '-timestamp'
            ).values_list('timestamp', flat=True).first()
            if last_run:
                last_run = pendulum.instance(last_run)
            else:
                last_run = pendulum.from_timestamp(0)
            logger.info('Found last job %s', last_run)

        logger.info('Using last run of %s', last_run)
        return last_run
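The final fallback, `pendulum.from_timestamp(0)`, is simply the Unix epoch in UTC, so any real run date sorts after it:

import pendulum

epoch = pendulum.from_timestamp(0)
print(epoch)                # 1970-01-01T00:00:00+00:00
print(epoch.timezone_name)  # UTC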
Example #9
def get_all():
    feeds = requests.get('https://api.coinmarketcap.com/v1/ticker/').json()
    result = FeedSet()
    for f in feeds:
        try:
            price = float(f['price_usd'])
            volume = float(f['24h_volume_usd']) / price if f.get('24h_volume_usd') else None
            result.append(FeedPrice(price, f['symbol'], 'USD', volume=volume,
                                    last_updated=pendulum.from_timestamp(int(f['last_updated']))))
        except TypeError as e:
            # catches: TypeError: float() argument must be a string or a number, not 'NoneType'
            # on: f['price_usd']
            log.debug('Could not get USD price for feed: {}'.format(json.dumps(f, indent=4)))
            log.exception(e)
            pass
    return result
Example #10
    def ago_filter(value):
        """Calculate a human-readable time delta between a date in the past
        and now.

        If the date is provided as an integer, it is treated as a unix timestamp.

        """

        date = value
        if isinstance(value, int):
            date = pendulum.from_timestamp(value)

        zone = cherrypy.engine.publish(
            "registry:local_timezone"
        ).pop()

        return date.in_timezone(zone).diff_for_humans()
Example #11
    def localtime_filter(value, timezone=None):
        """Switch a datetime to the local timezone, then format it"""

        if not value:
            return ""

        if not timezone:
            timezone = cherrypy.engine.publish(
                "registry:local_timezone"
            ).pop()

        if isinstance(value, (int, float)):
            value = pendulum.from_timestamp(value)
        else:
            value = pendulum.instance(value)

        return value.in_timezone(timezone)
Example #12
def get(cur, base):
    log.debug('checking feeds for %s/%s at %s' % (cur, base, NAME))


    try:
        secret_key = core.config['credentials']['bitcoinaverage']['secret_key']
        public_key = core.config['credentials']['bitcoinaverage']['public_key']
    except KeyError:
        raise KeyError('config.yaml does not specify both "credentials.bitcoinaverage.secret_key" and '
                       '"credentials.bitcoinaverage.public_key" variables')


    client = RestfulClient(secret_key=secret_key, public_key=public_key)
    r = client.ticker_short_local()[cur + base]

    return FeedPrice(float(r['last']), cur, base,
                     last_updated=pendulum.from_timestamp(r['timestamp']),
                     provider=NAME)
Example #13
    async def process(self, timeout=60):
        """
        Fetch and process this feed.

        Args:
            timeout (int): HTTP timeout
        """

        self.log.info(f"Begining processing feed {self.name}, previous date {self.previous_date}")

        new_date = pendulum.datetime(1970, 1, 1, tz='UTC')
        now = pendulum.now('UTC')

        for entry in await self.fetch_and_parse(timeout):

            pubdate = dateutil.parser.parse(entry.published, tzinfos=rssalertbot.BOGUS_TIMEZONES)
            entry.published = pendulum.from_timestamp(pubdate.timestamp())
            # also save a prettified string format
            entry.datestring = self.format_timestamp_local(entry.published)

            # keep track of the newest entry date
            if entry.published > new_date:
                new_date = entry.published

            # skip anything that's stale
            if entry.published <= now.subtract(days=1):
                continue

            # and anything before the previous date
            if entry.published <= self.previous_date:
                continue

            self.log.debug(f"Found new entry {entry.published}")

            # alert on it
            await self.alert(entry)

            if not new_date:
                new_date = now

        self.save_date(new_date)
        self.log.info(f"End processing feed {self.name}, previous date {new_date}")
Example #14
 def handle(self, sender, target, tokens, bot):
     if len(tokens) < 2:
         self.send_help(sender, bot)
         return
     nick = tokens[1]
     config = self.get_config(bot)
     nick_record = config.get(nick.lower())
     msgs = []
     if nick_record is None:
         msgs.append('I have not seen {} in {}'.format(nick, bot.c.get('irc:channel')))
     else:
         diff = pendulum.from_timestamp(nick_record['time']).diff_for_humans()
         msgs.append('I saw {} in {} {}'.format(nick, bot.c.get('irc:channel'), diff))
         msgs.append(nick_record['text'])
     if bot.is_irc_channel(target):
         response_target = target
     else:
         response_target = sender
     for msg in msgs:
         bot.send_privmsg(response_target, msg)
Example #15
    def _deserialize(cls, encoded_var: Any) -> Any:
        """Helper function of depth first search for deserialization."""
        # JSON primitives (except for dict) are not encoded.
        if cls._is_primitive(encoded_var):
            return encoded_var
        elif isinstance(encoded_var, list):
            return [cls._deserialize(v) for v in encoded_var]

        if not isinstance(encoded_var, dict):
            raise ValueError(
                f"The encoded_var should be dict and is {type(encoded_var)}")
        var = encoded_var[Encoding.VAR]
        type_ = encoded_var[Encoding.TYPE]

        if type_ == DAT.DICT:
            return {k: cls._deserialize(v) for k, v in var.items()}
        elif type_ == DAT.DAG:
            return SerializedDAG.deserialize_dag(var)
        elif type_ == DAT.OP:
            return SerializedBaseOperator.deserialize_operator(var)
        elif type_ == DAT.DATETIME:
            return pendulum.from_timestamp(var)
        elif type_ == DAT.POD:
            if not HAS_KUBERNETES:
                raise RuntimeError(
                    "Cannot deserialize POD objects without kubernetes libraries installed!"
                )
            pod = PodGenerator.deserialize_model_dict(var)
            return pod
        elif type_ == DAT.TIMEDELTA:
            return datetime.timedelta(seconds=var)
        elif type_ == DAT.TIMEZONE:
            return decode_timezone(var)
        elif type_ == DAT.RELATIVEDELTA:
            return decode_relativedelta(var)
        elif type_ == DAT.SET:
            return {cls._deserialize(v) for v in var}
        elif type_ == DAT.TUPLE:
            return tuple(cls._deserialize(v) for v in var)
        else:
            raise TypeError(f'Invalid type {type_!s} in deserialization.')
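The DAT.DATETIME branch assumes datetimes were stored as POSIX timestamps on the way in; a hedged sketch of that round trip (the matching encoder is not shown here):

import pendulum

original = pendulum.datetime(2021, 6, 1, 12, 30, tz="UTC")
encoded = original.timestamp()               # float seconds since the epoch
restored = pendulum.from_timestamp(encoded)  # aware UTC DateTime again
assert restored == original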
Example #16
    def _deserialize(cls, encoded_var: Any) -> Any:  # pylint: disable=too-many-return-statements
        """Helper function of depth first search for deserialization."""
        # JSON primitives (except for dict) are not encoded.
        if cls._is_primitive(encoded_var):
            return encoded_var
        elif isinstance(encoded_var, list):
            return [cls._deserialize(v) for v in encoded_var]

        if not isinstance(encoded_var, dict):
            raise ValueError(
                f"The encoded_var should be dict and is {type(encoded_var)}")
        var = encoded_var[Encoding.VAR]
        type_ = encoded_var[Encoding.TYPE]

        if type_ == DAT.DICT:
            return {k: cls._deserialize(v) for k, v in var.items()}
        elif type_ == DAT.DAG:
            return SerializedDAG.deserialize_dag(var)
        elif type_ == DAT.OP:
            return SerializedBaseOperator.deserialize_operator(var)
        elif type_ == DAT.DATETIME:
            return pendulum.from_timestamp(var)
        elif type_ == DAT.POD:
            pod = PodGenerator.deserialize_model_dict(var)
            return pod
        elif type_ == DAT.TIMEDELTA:
            return datetime.timedelta(seconds=var)
        elif type_ == DAT.TIMEZONE:
            return Timezone(var)
        elif type_ == DAT.RELATIVEDELTA:
            if 'weekday' in var:
                var['weekday'] = relativedelta.weekday(
                    *var['weekday'])  # type: ignore
            return relativedelta.relativedelta(**var)
        elif type_ == DAT.SET:
            return {cls._deserialize(v) for v in var}
        elif type_ == DAT.TUPLE:
            return tuple([cls._deserialize(v) for v in var])
        else:
            raise TypeError(
                'Invalid type {!s} in deserialization.'.format(type_))
Example #17
    def calculate_workout_body_part(self, user_id):
        last_user_workouts = self.db.get_last_user_workouts(user_id)

        if len(last_user_workouts) > 1:
            recent_workout = last_user_workouts[0]
            previous_workout = last_user_workouts[1]

            today = pendulum.now()
            week_start = today.start_of('week')

            tz = pendulum.timezone('Europe/Paris')
            previous_workout_date = pendulum.from_timestamp(
                previous_workout["workout_date"])
            previous_workout_date = tz.convert(previous_workout_date)

            if week_start <= previous_workout_date:
                recent_workout_body_part = self.db.select_workout_by_id(
                    recent_workout['workout_id'])['workout'][0]["body_part"]
                previous_workout_body_part = self.db.select_workout_by_id(
                    previous_workout['workout_id'])['workout'][0]["body_part"]

                body_parts_array = numpy.arange(4)

                body_parts_array = body_parts_array[
                    body_parts_array != recent_workout_body_part]

                body_parts_array = body_parts_array[
                    body_parts_array != previous_workout_body_part]

                if len(body_parts_array) == 3:
                    return random.choice(body_parts_array)

                if len(body_parts_array) == 2:
                    if recent_workout_body_part != 0 and previous_workout_body_part != 0:
                        return body_parts_array[body_parts_array != 0][0]
                    else:
                        return random.choice(body_parts_array)
            else:
                return 0
        else:
            return 0
Example #18
 def parse_title(page):
     """Makes a pretty string containing page info."""
     bold = "\x02{}\x0F"
     if Showmore.page_is_scp(page):
         if page['scp_num']:
             if page['title']:
                 title_preview = "{}: {}".format(page['scp_num'].upper(),
                                                 bold.format(page['title']))
             else:
                 title_preview = bold.format(page['scp_num'].upper())
         else:
             title_preview = bold.format(page['title'])
     else:
         title_preview = bold.format(page['title'])
     return "{} · {} · {} · {} · {}".format(
         title_preview,
         "by " + " & ".join(page['authors']),
         ("+" if page['rating'] >= 0 else "") + str(page['rating']),
         pd.from_timestamp(page['date_posted']).diff_for_humans(),
         "https://scp-wiki.wikidot.com/" + page['fullname'],
     )
Example #19
 def dmf_list(self):
     """List resources in the current workspace. *Arguments*: none.
     """
     self._init_required('list')
     lines = [
         '| ID | Name(s) | Type | Modified | Description | ',
         '| -- | ------- | ---- | -------- | ----------- |'
     ]
     for rsrc in self._dmf.find():
         msince = pendulum.from_timestamp(rsrc.v['modified']) \
             .diff_for_humans()
         rowstr = '| {id} | {names} | {type} | {mdate} | {desc} |'.format(
             id=rsrc.id,
             names=','.join(rsrc.v['aliases']),
             type=rsrc.type,
             mdate=msince,
             desc=rsrc.v['desc'])
         lines.append(rowstr)
     listing = '\n'.join(lines)
     self._dmf_markdown(listing)
     return True
Example #20
    def __init__(self,
                 session,
                 deployment_id,
                 log_group_names=None,
                 out_file=sys.stderr):
        self.deployment_id = deployment_id
        self.status = None
        self.log_group_names = log_group_names or []

        self._client = session.client('codedeploy')
        self._list_deployment_targets = \
            self._client.get_paginator('list_deployment_targets').paginate
        self._out_file = out_file

        self._deploy_info = None
        self._target_ids = None
        self._target_lifecycle_events = {}
        self._targets = None
        self._log_watcher = LogWatcher(session, out_file=out_file)
        self._last_update_time = pendulum.from_timestamp(0)
        self._complete_time = None
Example #21
def get_external_schedule_execution(
    recon_repo,
    instance_ref,
    schedule_name,
    scheduled_execution_timestamp,
    scheduled_execution_timezone,
):
    check.inst_param(
        recon_repo,
        "recon_repo",
        ReconstructableRepository,
    )
    definition = recon_repo.get_definition()
    schedule_def = definition.get_schedule_def(schedule_name)
    with DagsterInstance.from_ref(instance_ref) as instance:

        scheduled_execution_time = (
            pendulum.from_timestamp(
                scheduled_execution_timestamp,
                tz=scheduled_execution_timezone,
            )
            if scheduled_execution_timestamp
            else None
        )

        schedule_context = ScheduleExecutionContext(instance, scheduled_execution_time)

        try:
            with user_code_error_boundary(
                ScheduleExecutionError,
                lambda: "Error occurred during the execution function for schedule "
                "{schedule_name}".format(schedule_name=schedule_def.name),
            ):
                return ExternalScheduleExecutionData.from_execution_data(
                    schedule_def.get_execution_data(schedule_context)
                )
        except ScheduleExecutionError:
            return ExternalScheduleExecutionErrorData(
                serializable_error_info_from_exc_info(sys.exc_info())
            )
Example #22
def take_and_send_measurements(sense, connection_status, connection_code,
                               mqttc, mqtt_topic, mqtt_topic_prefix,
                               device_identifier):
    time_epochmillis = int(time.time() * 1000)
    now = pendulum.from_timestamp(time_epochmillis / 1000.0, UTC)
    # see https://pythonhosted.org/sense-hat/api/#environmental-sensors
    # degrees Celsius
    temperature_value_raw = sense.get_temperature()
    # scale the measurement which comes from inside the case of the Raspberry Pi
    # in order to obtain measurements similar to usual factory floor temperatures
    temperature_value = (temperature_value_raw * 4.0) - 138.0
    #    temperature_value = 22.22
    # RH percentage
    relative_humidity_value = sense.get_humidity()
    #    relative_humidity_value = 44.44
    # Millibars
    pressure_value = sense.get_pressure()
    #    pressure_value = 1111.11
    send_measurements(sense, connection_status, connection_code, mqttc,
                      mqtt_topic, mqtt_topic_prefix, device_identifier,
                      time_epochmillis, now, temperature_value,
                      relative_humidity_value, pressure_value)
Example #23
def testCheckHeartbeat(loggingMixin, mocker, inputTimestamps, expectedTimestamps, expectedDeadReceivers):
    """ Test checking the heartbeat of given subsystems. """
    # Setup subsystems
    # Normally, we could just get the subsystems by calling `el.keys() for el in inputTimestamps`.
    # However, specifying the ``mockInputTimestamps`` and ``expectedTimestamps`` properly depends on the
    # order in which the values are inserted, which in turn depends on the order in which the subsystems
    # are iterated over. Py2 dict keys don't preserve order, so these tests will sometimes fail. By
    # specifying the order in the ``subsystems`` list, we don't have to get the keys from the dict.
    subsystems = ["EMC", "HLT"]
    monitor.parameters["subsystemList"] = subsystems
    # Setup input values
    # Converts into [emc1, hlt1, emc2, hlt2] so that the return values are in the appropriate order.
    mockInputTimestamps = [el[subsystem] for el in inputTimestamps for subsystem in subsystems]
    # Spot check the third element.
    assert mockInputTimestamps[2] == inputTimestamps[1]["EMC"]
    # Sanity check to ensure that we haven't missed any subsystems
    assert set(subsystems) == set(inputTimestamps[1].keys())
    # Setup expected values
    if isinstance(expectedTimestamps, int):
        logger.debug("Using fixed offset.")
        offset = expectedTimestamps
        expectedSource = inputTimestamps
    else:
        logger.debug("Using expected timestamps specified in the parametrization.")
        offset = 0
        expectedSource = expectedTimestamps
    expectedTimestamps = [pendulum.from_timestamp(el[subsystem] + offset, tz = "Europe/Zurich") for el in expectedSource for subsystem in subsystems]
    # Setup mocks using expected input and output values.
    mHeartbeat = mocker.MagicMock(side_effect = mockInputTimestamps)
    mocker.patch("overwatch.receiver.monitor.getHeartbeat", mHeartbeat)
    mPendulumNow = mocker.MagicMock(side_effect = expectedTimestamps)
    mocker.patch("overwatch.receiver.monitor.pendulum.now", mPendulumNow)

    deadReceivers = set()
    # Make the check as many times as specified.
    for edr in expectedDeadReceivers:
        deadReceivers = monitor.checkHeartbeat(deadReceivers)
        # The assertion here lets us test each step of checking the heartbeat.
        assert deadReceivers == edr
Example #24
    def execution_time_iterator(self, start_timestamp):
        check.float_param(start_timestamp, "start_timestamp")

        timezone_str = (
            self.execution_timezone if self.execution_timezone else pendulum.now().timezone.name
        )

        start_datetime = pendulum.from_timestamp(start_timestamp, tz=timezone_str)

        date_iter = croniter(self.cron_schedule, start_datetime)

        # Go back one iteration so that the next iteration is the first time that is >= start_datetime
        # and matches the cron schedule
        date_iter.get_prev(datetime.datetime)

        while True:
            next_date = pendulum.instance(date_iter.get_next(datetime.datetime)).in_tz(timezone_str)

            # During DST transitions, croniter returns datetimes that don't actually match the
            # cron schedule, so add a guard here
            if croniter.match(self.cron_schedule, next_date):
                yield next_date
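A compact sketch of the same croniter-plus-pendulum pattern, assuming a hypothetical hourly schedule in US/Central:

import datetime
import pendulum
from croniter import croniter

tz = "US/Central"
start = pendulum.from_timestamp(1600000000, tz=tz)
dates = croniter("0 * * * *", start)  # top of every hour
first = pendulum.instance(dates.get_next(datetime.datetime)).in_tz(tz)
second = pendulum.instance(dates.get_next(datetime.datetime)).in_tz(tz)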
Example #25
    def cancel(self, now=False):
        """
        Cancel a subscription
        """
        cancel = self._processor.cancel(self.plan_id, now=now)

        if cancel:
            if now:
                # delete it now
                subscription = self._get_subscription()
                subscription.ends_at = pendulum.now()
                subscription.trial_ends_at = None
                subscription.save()
                return True
            else:
                # update the ended at date
                subscription = self._get_subscription()
                subscription.ends_at = pendulum.from_timestamp(
                    cancel['current_period_end'])
                subscription.save()
                return True
        return False
Example #26
def test_timespan(session_or_test, get_real_object):
    obj = session_or_test

    timespan = get_real_object(obj).timespan
    assert timespan is not None
    span_start = timespan.lower
    assert span_start is not None
    assert timespan.upper is None

    assert abs(timespan.lower - pendulum.from_timestamp(obj.start_time).astimezone()) < _MIN_THRESHOLD

    duration = 10

    flux.current_timeline.sleep(duration) # pylint: disable=no-member

    obj.report_end()
    timespan = get_real_object(obj).timespan
    assert timespan is not None
    assert timespan.upper is not None
    assert timespan.lower == span_start

    assert abs((timespan.upper - timespan.lower) - datetime.timedelta(seconds=duration)) < _MIN_THRESHOLD
Example #27
def _add_calculated_fields(status: CarStatus, initial_status: CarStatus,
                           configuration: Configuration):
    start_time = pendulum.from_timestamp(configuration.startTime.timestamp(),
                                         tz='utc')
    now = pendulum.now(tz='utc')
    end_time = start_time.add(hours=configuration.hours)

    status.start_time = start_time
    status.end_time = end_time
    status.start_odometer = initial_status.odometer
    status.distance = status.odometer - initial_status.odometer if status.odometer and initial_status.odometer else 0
    status.time_since_start = pendulum.period(
        start_time, now, True) if now >= start_time else pendulum.period(
            now, now, True)
    status.time_to_end = pendulum.period(
        now, end_time, True) if now <= end_time else pendulum.period(
            now, now, True)

    if _car_laps_list:
        current_lap = _car_laps_list[-1]
        status.lap = current_lap.id
        status.lap_distance = current_lap.distance
Example #28
 def _save_data(self, d):
     self._update_last_time(d)
     if not d:
         log.error("bad data: %s" % (d))
         return ERR_BAD_PARAM
     timestamp = d.get("timestamp")
     if not timestamp:
         log.error("bad data without timestamp: %s" % (d))
         return ERR_BAD_PARAM
     cndt = self.cntz.convert(pendulum.from_timestamp(int(timestamp)))
     datestr = cndt.format("YYYYMMDD")
     fname = "%s_%s" % (d.get("ip_from"), d.get("ip_to"))
     dpath = os.path.join(settings.PINGDATA.data_dir, datestr)
     if not os.path.exists(dpath):
         log.info("make new dir: %s" % (dpath))
         os.makedirs(dpath)
     fpath = os.path.join(dpath, fname)
     with open(fpath, "a") as f:
         f.write(json_.dumps(d))
         f.write("\n")
         return 0
     return ERR_FAIL_TO_SAVE
Example #29
    def form_valid(self, form):
        now = pendulum.now()
        twenty_four_hours_ago = pendulum.now().subtract(hours=24)
        participation_metrics_request_datetime = self.request.session.get(
            'participation_metrics_request_datetime', 0)

        if pendulum.from_timestamp(
                participation_metrics_request_datetime).between(
                    now, twenty_four_hours_ago):
            messages.add_message(
                self.request, messages.WARNING,
                _('Sorry! You can request the participation metrics report once in 24 hours.'
                  ))
        else:
            self.request.session[
                'participation_metrics_request_datetime'] = now.int_timestamp
            form.generate_metrics()
            messages.add_message(
                self.request, messages.INFO,
                _('The participation metrics report will be emailed to you in a couple of hours'
                  ))
        return super(ParticipationMetricsFormView, self).form_valid(form)
Example #30
def decode_literal(node, prim):
    core_type, value = next(iter(node.items()))
    if prim in ['int', 'nat']:
        return int(value)
    if prim == 'timestamp':
        if core_type == 'int':
            return pendulum.from_timestamp(int(value))
        else:
            return pendulum.parse(value)
    if prim == 'mutez':
        return Decimal(value) / 10**6
    if prim == 'bool':
        return value == 'True'
    if prim == 'address' and core_type == 'bytes':
        prefix = {
            '0000': b'tz1',
            '0001': b'tz2',
            '0002': b'tz3'
        }  # TODO: check it's ttr
        return base58_encode(bytes.fromhex(value[4:]),
                             prefix[value[:4]]).decode()
    return value
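For the 'timestamp' branch, Michelson literals may arrive either as integer seconds or as RFC 3339 strings; both decode to the same aware datetime:

import pendulum

print(pendulum.from_timestamp(1585008000))     # int literal  -> 2020-03-24T00:00:00+00:00
print(pendulum.parse("2020-03-24T00:00:00Z"))  # string literal, same instant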
Example #31
    async def get_updates(self, moment: DateTime) -> List[Update]:
        result = []

        self.log(f"Requesting feed from {self.INDEX_URL}...")
        response, page = await fetch(self.INDEX_URL)
        rss = feedparser.parse(page)

        for entry in rss.entries:
            if "published_parsed" in entry:
                parsed_timestamp = entry.published_parsed
            else:
                parsed_timestamp = entry.updated_parsed
            published_on = pendulum.from_timestamp(mktime(parsed_timestamp))

            if published_on < moment:
                continue

            update = await self.process_entry(entry, published_on)
            if update is not None:
                result.append(update)

        return result
Example #32
    def emit(self, record) -> None:  # type: ignore
        # if we shouldn't log to cloud, don't emit
        if not prefect.context.config.logging.log_to_cloud:
            return

        try:
            from prefect.client import Client

            if self.client is None:
                self.client = Client()  # type: ignore

            assert isinstance(self.client, Client)  # mypy assert

            record_dict = record.__dict__.copy()
            # remove potentially non-json serializable formatting args
            record_dict.pop("args", None)

            log = dict()
            log["flow_run_id"] = prefect.context.get("flow_run_id", None)
            log["task_run_id"] = prefect.context.get("task_run_id", None)
            log["timestamp"] = pendulum.from_timestamp(
                record_dict.pop("created", time.time())
            ).isoformat()
            log["name"] = record_dict.pop("name", None)
            log["message"] = record_dict.pop("message", None)
            log["level"] = record_dict.pop("levelname", None)

            if record_dict.get("exc_text") is not None:
                log["message"] += "\n" + record_dict.pop("exc_text", "")
                record_dict.pop("exc_info", None)

            log["info"] = record_dict
            self.put(log)
        except Exception as exc:
            message = "Failed to write log with error: {}".format(str(exc))
            self.logger.critical(message)

            self.put(self._make_error_log(message))
Example #33
def populate_db(pair, sample_distance):
    now = NTPClient.request('time.google.com')
    pivot_dt = '1007681680000'
    while 1:
        print(pivot_dt)
        # Build url
        url = params['http_api_url'] + 'candles/trade:{sample_distance}:{pair}/' \
                                       'hist?limit={limit}&start={pivot_dt}&sort=1' \
            .format(sample_distance=sample_distance, pair=pair, limit=10000, pivot_dt=pivot_dt)
        print(url)
        # Request API
        json_response = requests.get(url)
        response = json.loads(json_response.text)
        time.sleep(3)
        if 'error' in response:

            # Check rate limit
            if response[1] == ERROR_CODE_RATE_LIMIT:
                print('Error: reached the limit number of requests. Wait 120 seconds...')
                time.sleep(120)
                continue
            # Check platform status
            elif response[1] == ERROR_CODE_START_MAINTENANCE:
                print('Error: platform is in maintenance. Forced to stop all requests.')
                break
        else:
            # Get last timestamp of request (in seconds, so divided by 1000)
            print(response)

            last_dt = int(response[::-1][0][0]) // 1000
            print('2')
            last_dt = pendulum.from_timestamp(last_dt)

            # Put it as new start datetime (in milliseconds, so multiplied by 1000)
            if pivot_dt == last_dt.int_timestamp * 1000:
                break
            pivot_dt = last_dt.int_timestamp * 1000
            client.write_api().write(record=serialize_points(response), bucket=pair)
Example #34
    def emit(self, record) -> None:  # type: ignore
        # if we shouldn't log to cloud, don't emit
        if not prefect.context.config.logging.log_to_cloud:
            return

        try:
            from prefect.client import Client

            if self.client is None:
                self.client = Client()  # type: ignore

            assert isinstance(self.client, Client)  # mypy assert

            record_dict = record.__dict__.copy()
            flow_run_id = prefect.context.get("flow_run_id", None)
            task_run_id = prefect.context.get("task_run_id", None)
            timestamp = pendulum.from_timestamp(
                record_dict.get("created", time.time()))
            name = record_dict.get("name", None)
            message = record_dict.get("message", None)
            level = record_dict.get("levelname", None)

            if record_dict.get("exc_text") is not None:
                message += "\n" + record_dict["exc_text"]
                record_dict.pop("exc_info", None)

            self.client.write_run_log(
                flow_run_id=flow_run_id,
                task_run_id=task_run_id,
                timestamp=timestamp,
                name=name,
                message=message,
                level=level,
                info=record_dict,
            )
        except Exception as exc:
            self.logger.critical("Failed to write log with error: {}".format(
                str(exc)))
Example #35
 def command(irc_c, msg, cmd):
     if defer.check(cmd, 'Secretary_Helen'):
         return
     cmd.expandargs(["first f",
                     "count c"])
     # have to account for .seen -f name
     if 'first' in cmd:
         cmd.args['root'].extend(cmd.args['first'])
     if 'count' in cmd:
         cmd.args['root'].extend(cmd.args['count'])
     if len(cmd.args['root']) < 1:
         raise CommandError("Specify a user and I'll tell you when I last "
                            "saw them")
     nick = cmd.args['root'][0]
     messages = DB.get_messages_from_user(nick, msg.raw_channel)
     if len(messages) == 0:
         raise MyFaultError("I've never seen {} in this channel."
                            .format(nick))
     if 'count' in cmd:
         msg.reply("I've seen {} {} times in this channel."
                   .format(nick, len(messages)))
         return
     if 'first' in cmd:
         message = messages[0]
         response = "I first saw {} {} saying: {}"
     else:
         if nick == msg.sender:
             msg.reply("I can see you right now, {}.".format(msg.sender))
             return
         message = messages[-1]
         response = "I last saw {} {} saying: {}"
     response = response.format(
         nick if nick == message['sender']
         else "{} as {}".format(nick, message['sender']),
         pd.from_timestamp(message['timestamp']).diff_for_humans(),
         gib.obfuscate(message['message'],
                       DB.get_channel_members(msg.raw_channel)))
     msg.reply(response)
Example #36
def get_all():
    feeds = requests.get('https://api.coinmarketcap.com/v1/ticker/').json()
    result = FeedSet()
    for f in feeds:
        try:
            price = float(f['price_usd'])
            volume = float(f['24h_volume_usd']) / price if f.get(
                '24h_volume_usd') else None
            result.append(
                FeedPrice(price,
                          f['symbol'],
                          'USD',
                          volume=volume,
                          last_updated=pendulum.from_timestamp(
                              int(f['last_updated']))))
        except TypeError as e:
            # catches: TypeError: float() argument must be a string or a number, not 'NoneType'
            # on: f['price_usd']
            log.debug('Could not get USD price for feed: {}'.format(
                json.dumps(f, indent=4)))
            log.exception(e)
            pass
    return result
Example #37
    def _process_metrics_interval(self, interval):
        """
        this method wraps up a completed metrics interval and really does too much:
        1. append to the interval_queue so we can look back over numerous data points
        2. display the metrics around this interval
        3. check if there are any alerts in the last 120 seconds of metric intervals
        """
        self.interval_queue.append(interval)
        # display some stats
        interval.display_stats()

        alert_window_hits = sum([i.hits for i in self.interval_queue])
        alert_dt = pendulum.from_timestamp(interval.end_ts)

        if (alert_window_hits / self.alert_window) > self.avg_hits_threshold:
            self._set_alert(True, alert_dt, alert_window_hits)
        else:
            self._set_alert(False, alert_dt, alert_window_hits)

        # popping off older intervals beyond the alert window to clean up
        num_intervals_lookback = int(self.alert_window / self.refresh_rate)
        if len(self.interval_queue) == num_intervals_lookback:
            self.interval_queue.popleft()
Example #38
    def init_from_logfile(self) -> None:
        '''Read plot ID and job start time from logfile.  Return true if we
           find all the info as expected, false otherwise'''
        assert self.logfile
        # Try reading for a while; it can take a while for the job to get started as it scans
        # existing plot dirs (especially if they are NFS).
        found_id = False
        found_log = False
        for attempt_number in range(3):
            with open(self.logfile, 'r') as f:
                with contextlib.suppress(UnicodeDecodeError):
                    for line in f:
                        m = re.match('^ID: ([0-9a-f]*)', line)
                        if m:
                            self.plot_id = m.group(1)
                            found_id = True
                        m = re.match(r'^Starting phase 1/4:.*\.\.\. (.*)', line)
                        if m:
                            # Mon Nov  2 08:39:53 2020
                            self.start_time = parse_chia_plot_time(m.group(1))
                            found_log = True
                            break  # Stop reading lines in file

            if found_id and found_log:
                break  # Stop trying
            else:
                time.sleep(1)  # Sleep and try again

        # If we couldn't find the line in the logfile, the job is probably just getting started
        # (and being slow about it).  In this case, use the last metadata change as the start time.
        # TODO: we never come back to this; e.g. plot_id may remain uninitialized.
        # TODO: should we just use the process start time instead?
        if not found_log:
            self.start_time = pendulum.from_timestamp(os.path.getctime(self.logfile))

        # Load things from logfile that are dynamic
        self.update_from_logfile()
Example #39
    def _deserialize(cls, encoded_var):  # pylint: disable=too-many-return-statements
        """Helper function of depth first search for deserialization."""
        from airflow.serialization.serialized_dag import SerializedDAG
        from airflow.serialization.serialized_baseoperator import SerializedBaseOperator
        # JSON primitives (except for dict) are not encoded.
        if cls._is_primitive(encoded_var):
            return encoded_var
        elif isinstance(encoded_var, list):
            return [cls._deserialize(v) for v in encoded_var]

        assert isinstance(encoded_var, dict)
        var = encoded_var[Encoding.VAR]
        type_ = encoded_var[Encoding.TYPE]

        if type_ == DAT.DICT:
            return {k: cls._deserialize(v) for k, v in var.items()}
        elif type_ == DAT.DAG:
            return SerializedDAG.deserialize_dag(var)
        elif type_ == DAT.OP:
            return SerializedBaseOperator.deserialize_operator(var)
        elif type_ == DAT.DATETIME:
            return pendulum.from_timestamp(var)
        elif type_ == DAT.TIMEDELTA:
            return datetime.timedelta(seconds=var)
        elif type_ == DAT.TIMEZONE:
            return pendulum.timezone(var)
        elif type_ == DAT.RELATIVEDELTA:
            if 'weekday' in var:
                var['weekday'] = relativedelta.weekday(*var['weekday'])
            return relativedelta.relativedelta(**var)
        elif type_ == DAT.SET:
            return {cls._deserialize(v) for v in var}
        elif type_ == DAT.TUPLE:
            return tuple([cls._deserialize(v) for v in var])
        else:
            raise TypeError(
                'Invalid type {!s} in deserialization.'.format(type_))
Example #40
 def generate_identity(self, age=(20, 30), addr_code=None):
     '''
         6-digit address code
         8-digit birth-date code
         3-digit sequence code: serial number among people born on the same day; odd = male, even = female
         1-digit check code
     '''
     if addr_code:
         acode = random.choice(addr_code.split())
         locations = [
             loc for loc in self.LOCATION if loc[0].startswith(acode)
         ]
     else:
         locations = self.LOCATION
     area = random.choice(locations)
     now = pendulum.now()
     tstart = now.subtract(years=age[1])
     tstop = now.subtract(years=age[0])
     rts = random.randint(tstart.int_timestamp, tstop.int_timestamp)
     birthday = pendulum.from_timestamp(rts).format('YYYYMMDD')
     code = str(random.randint(0, 999)).zfill(3)
     pre = area[0] + birthday + code
     vcode = self.identity_verify_code(pre)
     return pre + vcode, area[1]
Example #41
def water_usage(request, **kwargs):
    response_obj = {
        "units": "l",
        "data": [],
        "labels": []
    }
    period = kwargs['period']

    dt = pendulum.from_timestamp(kwargs['timestamp'])

    if period == 'day':
        objects = SensorUsage.get_usage_by_day(dt, "W")
        for obj in objects:
            response_obj["data"].append(obj.value)
            response_obj["labels"].append(f"{obj.datetime.hour}:{obj.datetime.minute}")

        return JsonResponse(response_obj)
    elif period == "month":
        observations = SensorUsage.get_usage_by_month(dt, "W")
        results = {
            "data": {},
        }
        for observation in observations:
            if observation.datetime.day not in results["labels"]:
                results["labels"][observation.datetime.day] = observation.value
            else:
                results["labels"][observation.datetime.day] += observation.value
    else:
        return JsonResponse({
            "error": "unknown period given, only day or month is supported"
        })

    return JsonResponse({
        "Success": False,
        "Error": "Unknown error"
    })
Example #42
    def add_war_logs(self, document):
        added_war_logs = []
        for war_log in document:
            # Add timestamps for update
            self.add_timestamps(war_log)

            # Add readable created dates
            created_date = pendulum.from_timestamp(war_log["createdDate"],
                                                   tz="UTC")
            local_created_date = created_date.in_timezone("America/Denver")
            war_log[CREATED_DATE_UTC] = created_date.to_datetime_string()
            war_log[
                CREATED_DATE_LOCAL] = local_created_date.to_datetime_string()

            try:
                results = self.war_logs.insert_one(war_log)
                if results.acknowledged:
                    added_war_logs.append(war_log)

                print(results)
            except DuplicateKeyError:
                pass

        return added_war_logs
Example #43
def get_form(atx, form_id, start_date, token=None, next_page=False):
    """Calls the API client get_form_responses() method with requisite params.

    The API limits responses to a max of 1000 per call, but it supports
        paging through responses using a `before` or `after` param.

    https://developer.typeform.com/responses/walkthroughs/#use-query-parameters-to-retrieve-specific-data

    By using a `sort` with `submitted_at` in ascending order in the initial call
        we can then use the `after` param that has the greatest `submitted_at`
        value in the initial response for the next call.

    :param atx: The Typeform API client.
    :param form_id: The form id to which responses are to be retrieved.
    :param start_date: Date to use in the `since` param of the API call.
    :param token: The token to use for the `after` param of the API call.
    :param next_page: Boolean if retrieving more records using the `after` param
        for paginating through responses.

    :return: API client response.
    """
    after = f'after token: {token} ' if token else ''
    LOGGER.info('Forms query - form: {} start_date: {} {}'.format(
        form_id,
        pendulum.from_timestamp(start_date).strftime("%Y-%m-%d %H:%M"), after))

    sort = None if next_page else 'submitted_at,asc'  # sorting isn't supported when using `after`

    return atx.client.get_form_responses(form_id,
                                         params={
                                             'since': start_date,
                                             'page_size':
                                             MAX_RESPONSES_PAGE_SIZE,
                                             'sort': sort,
                                             'after': token,
                                         })
Example #44
def route_list_users_recommendation(user_id: str):
    """Endpoint for getting recommended users."""
    user = User.objects.get_or_404(id=user_id)
    user.identify(request)

    recommendation = user.get_recommendation()

    last_recommended_at = pendulum.from_timestamp(
        recommendation.last_recommended_at, tz="Asia/Seoul")
    is_today_recommended = last_recommended_at.date() == pendulum.today().date()

    if is_today_recommended and len(recommendation.user_ids) >= 2:
        user_ids = recommendation.user_ids
    else:
        user_ids = user.list_recommended_user_ids()
        user_ids.extend(recommendation.user_ids)
        recommendation.user_ids = user_ids
        recommendation.last_recommended_at = pendulum.now().int_timestamp
        recommendation.save()

    users = User.list(id__in=user_ids[:MAXIMUM_RECOMMENDATION_SHOW_COUNT]).as_pymongo()
    users = sort_order_by_ids(user_ids, users)
    response = encode(list(users))
    return Response(response, mimetype="application/json")
Example #45
def check_for_payments(iota, t_hash, addr) -> Dict:
    payments = {}
    receiver_addr = AddressNoChecksum()._apply(TryteString(addr))
    t_bytes = bytes(t_hash)
    t_trytes = str(iota.get_trytes([t_bytes])["trytes"][0])
    transaction = Transaction.from_tryte_string(t_trytes)
    t_age = now() - from_timestamp(transaction.attachment_timestamp / 1000)
    if transaction.address == receiver_addr and t_age.in_minutes() < 60:
        logger.warning(
            f"[{from_timestamp(transaction.timestamp)}] Payment of {transaction.value}i found on receiving address {addr[:8]}..."
        )
        try:
            data = extract_json(transaction)
            username, topic, password = parse_payload(data)
            payments["username"] = username
            payments["topic"] = topic
            payments["password"] = password
            payments["expires_after"] = 10 * (transaction.value //
                                              VALUE_PER_TEN_SECONDS)
            payments["t_hash"] = transaction.hash
            payments["t_value"] = transaction.value
        except Exception as e:
            logger.exception(e)
    return payments
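The freshness check relies on subtracting two pendulum datetimes, which yields a period; a small sketch with a hypothetical millisecond attachment timestamp:

import pendulum

attachment_timestamp = 1600000000000  # milliseconds (hypothetical value)
age = pendulum.now() - pendulum.from_timestamp(attachment_timestamp / 1000)
if age.in_minutes() < 60:
    print("transaction is recent enough to count as a payment")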
Example #46
      #############################################
      uri = 'https://api.forecast.io/forecast/{api_key}/{lat},{lng},{time}'.format(
            api_key=FORECAST_API_KEY,
            time=local_time.isoformat(),
            lat=lat,
            lng=lng)

      resp = requests.get(uri)
      weather = json.loads(resp.text)

      #############################################
      # First record to establish some daily data
      #############################################
      if initial:
        # These are all UTC.
        sunrise = pendulum.from_timestamp(weather['daily']['data'][0]['sunriseTime'])
        sunset = pendulum.from_timestamp(weather['daily']['data'][0]['sunsetTime'])

        day = Day(sunrise=sunrise, sunset=sunset, date=utc_time.date())

        country = Country(name=location['country'])
        state = State(name=location['state'])
        #mt = State(name='Montana', country=us)
        city = City(name=location['city'])
        #msla = City(name='Missoula', state=mt)

        import ipdb
        ipdb.set_trace()

        initial = False
Example #47
def test_create_from_timestamp_with_timezone():
    d = pendulum.from_timestamp(0, timezone("America/Toronto"))
    assert d.timezone_name == "America/Toronto"
    assert_datetime(d, 1969, 12, 31, 19, 0, 0)
Example #48
 def check_date(timestr: str) -> date:
     if not timestr:
         return pendulum.from_timestamp(0, "local")
     return pendulum.from_format(timestr, "MMMM D, YYYY", "local")
Example #49
 def check_date(data: Dict[str, Union[str, int]], key: str) -> Union[DateTime, None]:
     timestamp = data.get(key)
     if timestamp:
         return pendulum.from_timestamp(timestamp, "UTC")
     else:
         return None
Example #50
    def shape_forecast(forecast):
        """Reduce an API response object to wanted values"""

        result = defaultdict()

        timezone = forecast.get("timezone")

        daily_block = forecast.get("daily", {})
        days = daily_block.get("data", [{}, {}])

        today = days[0]

        currently = forecast.get("currently", {})
        hourly = forecast.get("hourly", {})

        result["current_summary"] = currently.get("summary")

        result["current_temperature"] = math.ceil(
            currently.get("temperature")
        )

        result["current_time"] = pendulum.from_timestamp(
            currently.get("time"), tz=timezone
        )

        result["current_humidity"] = currently.get("humidity", 0)

        result["summary"] = today.get("summary")

        result["temperature"] = math.ceil(today.get("temperature", 0))

        result["sunrise"] = pendulum.from_timestamp(
            today.get("sunriseTime"), tz=timezone
        )

        result["sunset"] = pendulum.from_timestamp(
            today.get("sunsetTime"), tz=timezone
        )

        result["humidity"] = currently.get("humidity", 0)

        result["high"] = math.ceil(today.get("temperatureHigh"))

        result["high_at"] = pendulum.from_timestamp(
            today.get("temperatureHighTime"), tz=timezone
        )

        result["low"] = math.ceil(today.get("temperatureLow"))

        result["low_at"] = pendulum.from_timestamp(
            today.get("temperatureLowTime"), tz=timezone
        )

        result["precip_prob"] = currently.get("precipProbability", 0) * 100

        result["precip_type"] = currently.get("precipType")

        if "data" in hourly:
            result["hourly"] = []
            for hour in hourly["data"][0:24]:
                hour_clone = copy.copy(hour)
                hour_clone["time"] = pendulum.from_timestamp(
                    hour_clone["time"], tz=timezone
                )
                result["hourly"].append(hour_clone)

        return result
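As used throughout `shape_forecast`, passing `tz=` pins the converted value to a specific zone instead of the default UTC; for instance:

import pendulum

dt = pendulum.from_timestamp(1614556800, tz="America/Denver")
print(dt.timezone_name)         # America/Denver
print(dt.to_datetime_string())  # 2021-02-28 17:00:00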
Example #51
def test_create_from_timestamp_returns_pendulum():
    d = pendulum.from_timestamp(pendulum.datetime(1975, 5, 21, 22, 32, 5).timestamp())
    assert_datetime(d, 1975, 5, 21, 22, 32, 5)
    assert d.timezone_name == "UTC"