Ejemplo n.º 1
0
 async def ff_stream(self, length: str):
     """Fast-forward the current stream by the offset given in *length*.

     Reports 'Invalid parameter.' to the chat when the offset cannot be
     parsed as a timestamp.
     """
     delta = parse_timestamp(length)
     if not delta:
         await self._bot.say('Invalid parameter.')
         return
     position, _ = self._media_player.get_video_time()
     await self._seek_stream(position + delta)
Ejemplo n.º 2
0
def download_logs():
    """Download mailgun logs and store them in the database.

    Pages through the API 1000 entries at a time, stopping on an empty
    page or when an already-stored entry (matched by content hash) is
    seen, then saves the collected entries, committing every 100 rows.
    """
    # Keyed by content hash so duplicate entries arriving while we page
    # through the API are stored only once; insertion order is preserved.
    logs = OrderedDict()
    skip = 0

    # Fetch all unsaved logs and add them to the queue.
    fetch_more = True
    while fetch_more:
        print("fetching logs skip={}".format(skip))
        logs_tmp = api.get_logs(limit=1000, skip=skip)['items']
        if len(logs_tmp) == 0:
            break
        for log in logs_tmp:
            log_data = json.dumps(log)
            # sha256 requires bytes: encode the JSON text explicitly
            # (the original passed a str, which raises TypeError on Python 3).
            log_hash = hashlib.sha256(log_data.encode('utf-8')).hexdigest()
            if db.MailgunLog.objects.filter(log_hash=log_hash).exists():
                # Reached a log we already stored; stop paging.
                fetch_more = False
                break
            else:
                logs[log_hash] = (log_hash, log_data,
                                  parse_timestamp(log['created_at']))
        skip += 1000

    # Save collected entries, committing every 100 rows.
    print("Saving {0} logs to database".format(len(logs)))
    for i, (log_hash, data, timestamp) in enumerate(logs.values()):
        db.MailgunLog(
            log_hash=log_hash,
            data=data,
            timestamp=timestamp
        ).save()
        if (i + 1) % 100 == 0:
            transaction.commit()
    transaction.commit()
Ejemplo n.º 3
0
def download_logs():
    """Download mailgun logs and store them in the database.

    Fetches pages of 1000 entries until either an empty page or an
    already-stored entry (matched by content hash) is reached, then
    saves the new entries in batches of 100 per transaction commit.
    """
    # Keyed by hash so entries arriving while downloading are stored once;
    # insertion order is preserved for the save loop below.
    logs = OrderedDict()
    skip = 0

    # Fetch all unsaved logs.
    fetch_more = True
    while fetch_more:
        print("fetching logs skip={}".format(skip))
        logs_tmp = api.get_logs(limit=1000, skip=skip)['items']
        if len(logs_tmp) == 0:
            break
        for log in logs_tmp:
            log_data = json.dumps(log)
            # hashlib needs bytes on Python 3; the original hashed a str,
            # which raises TypeError there.
            log_hash = hashlib.sha256(log_data.encode('utf-8')).hexdigest()
            if db.MailgunLog.objects.filter(log_hash=log_hash).exists():
                # Reached an entry we already stored; stop paging.
                fetch_more = False
                break
            else:
                logs[log_hash] = (log_hash, log_data,
                                  parse_timestamp(log['created_at']))
        skip += 1000

    # Save and commit in batches of 100.
    print("Saving {0} logs to database".format(len(logs)))
    for i, (log_hash, data, timestamp) in enumerate(logs.values()):
        db.MailgunLog(log_hash=log_hash, data=data, timestamp=timestamp).save()
        if (i + 1) % 100 == 0:
            transaction.commit()
    transaction.commit()
    def query_by_timestamp(timestamp_lt, timestamp_gt):
        """
        Returns all the Trades within the "less than" and "greater than"
        timestamps

        Either bound may be None to leave that side unbounded.  Each
        bound is split by parse_timestamp into a (timestamp,
        nanoseconds) pair so that sub-second ordering is respected.
        """
        query = Trade.query()
        if timestamp_lt is not None:
            # Rebinds the parameter: from here on timestamp_lt is the
            # seconds component and nanoseconds_lt the sub-second part.
            timestamp_lt, nanoseconds_lt = parse_timestamp(timestamp_lt)
            # Strictly before the bound: earlier second, or same second
            # with a smaller nanosecond component.
            query = query.filter(
                ndb.OR(
                    Trade.timestamp < timestamp_lt,
                    ndb.AND(
                        Trade.timestamp == timestamp_lt,
                        Trade.timestamp_nanoseconds < nanoseconds_lt
                    )
                )
            )

        second_query = None
        if timestamp_gt is not None:
            timestamp_gt, nanoseconds_gt = parse_timestamp(timestamp_gt)
            # Strictly after the bound (mirror of the filter above).
            args = [
                ndb.OR(
                    Trade.timestamp > timestamp_gt,
                    ndb.AND(
                        Trade.timestamp == timestamp_gt,
                        Trade.timestamp_nanoseconds > nanoseconds_gt
                    )
                )
            ]
            if timestamp_lt is None:
                # Only one inequality bound, so it can live on the same query.
                query = query.filter(*args)
            else:
                # Both bounds present: needs a separate query (see below).
                second_query = Trade.query(*args)

        if second_query is None:
            return query
        else:
            # The Datastore rejects queries using inequality filtering
            # on more than one property. Therefore we need to evaluate
            # the diff between the two queries manually
            # NOTE(review): this materialises both key sets in memory,
            # and Trade.key.IN() on an empty set may raise — confirm
            # against the ndb version in use.
            query_keys = [x for x in query.iter(keys_only=True)]
            second_query_keys = [x for x in second_query.iter(keys_only=True)]
            shared_keys = set(query_keys) & set(second_query_keys)
            return Trade.query(Trade.key.IN(shared_keys))
Ejemplo n.º 5
0
 def __init__(self, title=None, timestamp=None, author=None, base_url=None, msg=None, link=None):
     """Store the post metadata and derive the filename and link."""
     self.title = title
     self.author = author
     self.msg = msg
     self.base_url = base_url
     # Parsed time object plus its string representation.
     self.time = utils.parse_timestamp(timestamp)
     self.timestamp = str(self.time)
     # Derived fields; these read the attributes assigned above.
     self.filename = self.create_filename(self.title)
     self.link = self.create_link()
Ejemplo n.º 6
0
def collect_match_info(response, profile_dict, matched_ideas, run_time):
    """Prepare information about matches made to be added to the
    matches database table. Return a list of dicts, where each
    dict is a row to add to the database.
    """
    # One row per matched idea; fields common to all rows come from the
    # participant profile and the edit response.
    return [
        {
            'participant_userid': profile_dict['userid'],
            'p_profile_pageid': profile_dict['profile_id'],
            'p_interest': None,
            'p_skill': None,
            'request_time': utils.parse_timestamp(profile_dict['cat_time']),
            'match_time': utils.parse_timestamp(response['newtimestamp']),
            'match_revid': response['newrevid'],
            'idea_pageid': idea['profileid'],
            'run_time': run_time,
        }
        for idea in matched_ideas
    ]
Ejemplo n.º 7
0
    def setup_information(self):
        """Fill in the comment panel: author line, formatted date and body.

        The original computed ``self.model.screen_controller.model`` into
        a local that was never used; that dead assignment is removed.
        """
        self._body.part_text_set("author", "By: %s" % self.model.prop["author"])

        date = parse_timestamp(self.model.prop["date"])
        self._body.part_text_set("date", "Date: %s" % \
                                            date.strftime("%b %d, %Y %I:%M%p"))

        # The widget renders markup, so turn newlines into <br> tags.
        comment_panel = self.model.prop["content"]
        comment_panel = comment_panel.replace("\n", "<br>")
        self.description.text_set(comment_panel)
Ejemplo n.º 8
0
 def ping_recv(self, msg):
     """Parse ping 1 (without flag & seq) and send ping 2."""
     self.cancel_job.cancel()
     # The first 11 characters carry the sender's timestamp.
     sent_at = parse_timestamp(msg[:11])
     now_ms = int(time() * 1000)
     self.latency = now_ms - sent_at
     logging.debug("latency: %dms" % self.latency)
     # Echo the remainder back with the ping-2 header prepended.
     reply = "1" + "2" + msg[11:]
     payload = self.cipher.encrypt(reply) + self.split_char
     if self.transport:
         self.transport.write(payload)
Ejemplo n.º 9
0
 def ping_recv(self, msg):
     """Parse ping 1 (without flag & seq) and send ping 2.

     The first 11 characters of *msg* hold the sender's timestamp
     (presumably milliseconds, to match ``int(time() * 1000)`` —
     confirm against parse_timestamp); the remainder is echoed back
     with a "12" header as the ping-2 reply.
     """
     #logging.debug("recv ping1")
     # A ping arrived, so the pending cancellation job is no longer needed.
     self.cancel_job.cancel()
     time0 = parse_timestamp(msg[:11])
     # Round-trip latency measured against the local clock.
     self.latency = int(time() * 1000) - time0
     logging.debug("latency: %dms" % self.latency)
     # "1" and "2" form the flag/sequence header stripped from ping 1.
     raw_packet = "1" + "2" + msg[11:]
     to_write = self.cipher.encrypt(raw_packet) + self.split_char
     if self.transport:
         #logging.debug("send ping2")
         self.transport.write(to_write)
Ejemplo n.º 10
0
    def isclearstates(self, timestamp):
        """Decide whether the server states should be cleared.

        Returns True when more than a day has passed since the last
        purge, or when ``self.hour_to_clear`` has never been set (i.e.
        this is the first start of the server); False otherwise.
        """
        hour_to_clear = 3  # clear states after this hour in the day

        hour, minute, second = utils.parse_timestamp(timestamp)
        try:
            dH, dM, dS = utils.parse_timestamp(timestamp - self.hour_to_clear)
        except AttributeError:
            # Narrowed from a bare except: the only documented failure is
            # self.hour_to_clear not existing on first start.  Initialise
            # it to today's purge hour and request an immediate clear.
            offset_to_htc = (hour - hour_to_clear) * 60 * 60
            self.hour_to_clear = timestamp - offset_to_htc
            return True

        # More than one day from last purge, return True.
        # NOTE(review): parse_timestamp appears to return an (hour,
        # minute, second) triple, in which case dH > 24 may never be
        # true — confirm what it yields for a duration argument.
        if dH > 24:
            self.hour_to_clear = timestamp
            return True
        else:
            return False
Ejemplo n.º 11
0
def parse(row: OrderedDict) -> OrderedDict:
    """Parse data types"""

    # Converters for known columns; anything else is kept as a string.
    converters = {
        'validity': int,
        'verification': int,
        'value': float,
        # Output timestamp in ISO 8601
        'timestamp': lambda s: utils.parse_timestamp(s).isoformat(),
    }

    # Cast each value with its converter (default: str), keeping order.
    return OrderedDict(
        (key, converters.get(key, str)(value)) for key, value in row.items()
    )
Ejemplo n.º 12
0
 def readings(self, session, **params) -> iter:
     """
     Stream CSV data as rows (one dictionary per row)
     """
     url = "{endpoint}/readings.csv".format(endpoint=self.endpoint)
     lines = session.call_iter(url, params=params)
     # The first line of the stream holds the column headers.
     fieldnames = next(csv.reader(lines))
     for raw in csv.DictReader(lines, fieldnames=fieldnames):
         try:
             yield OrderedDict(station=self.object_id,
                               measure=raw['measure'],
                               timestamp=utils.parse_timestamp(
                                   raw['dateTime']),
                               value=utils.parse_value(raw['value']))
         except ValueError:
             # Log the offending row before propagating the parse failure.
             LOGGER.error(raw)
             raise
Ejemplo n.º 13
0
    def get_archive(cls, session, date):
        """
        Historic Readings. The measurement readings are archived daily as dump files in CSV format.

        https://environment.data.gov.uk/flood-monitoring/doc/reference#historic-readings
        """
        for row in cls._get_archive(session=session, date=date):
            # Rename columns (list-of-pairs form keeps the key order).
            renamed = OrderedDict([
                ('timestamp', utils.parse_timestamp(row['dateTime'])),
                ('station', row['station']),
                ('station_reference', row['stationReference']),
                ('measure', row['measure']),
                ('unit_name', row['unitName']),
                ('value', utils.parse_value(row['value'])),
                ('datumType', row['datumType']),
                ('label', row['label']),
                ('parameter', row['parameter']),
                ('qualifier', row['qualifier']),
                ('period', row['period']),
                ('value_type', row['valueType']),
                ('observed_property', settings.PARAMETER_MAP[row['parameter']]),
            ])
            yield renamed
Ejemplo n.º 14
0
 def log(json_data):
     """Persist one chat message received as a JSON payload.

     Decodes *json_data*, upserts the sending user (refreshing their
     last-seen time) and the channel, then stores the message with the
     raw JSON attached.  Returns the saved Message entity.
     """
     logging.info('Received JSON: %s' % json_data)

     payload = json.loads(json_data)

     # Fields of the incoming payload; only 'server' has a default.
     nickname = payload.get('nickname')
     server = payload.get('server', 'unknown')
     channel = payload.get('channel')
     message_type = payload.get('message_type')
     message_content = payload.get('message_content')
     timestamp = parse_timestamp(payload.get('timestamp'))

     # Upsert the user and refresh their last-seen timestamp.
     user = get_or_create(User, server=server, nickname=nickname)
     user.last_seen_at = datetime.utcnow()
     user.put()

     # Rebinds 'channel' from the payload string to the Channel entity.
     channel = get_or_create(Channel, server=server, channel=channel)
     # store the message
     msg = Message(user=user, channel=channel, message_type=message_type,
         message_content=message_content, json=json_data,
         timestamp=timestamp)
     msg.put()
     return msg
     
Ejemplo n.º 15
0
    def process_request(self, request_message):
        """Apply one client request to the token-server state machine.

        Parses the request, optionally purges stale state, validates the
        request against the current token/reservation state, then applies
        it.  Returns True when the resulting state should be broadcast to
        all users, False otherwise.
        """
        request_type, userId, userInfo, timestamp = self.parse_request(
            request_message)
        self.request_type = request_type
        self.request_userId = userId

        # reset the server states (renew user list and everything) at some point in a day
        if self.isclearstates(timestamp):
            hour, minute, second = utils.parse_timestamp(timestamp)
            print("Clear server states at {}:{}:{}".format(
                hour, minute, second))
            self.reset()

        # add to user list if the request came from new user
        broadcast_decision = False
        if userId not in self.userList:
            userInfo['reserving'] = False
            userInfo['acquiring'] = False
            userInfo['returns'] = 0
            self.userList.update({userId: userInfo})
            broadcast_decision = True

        # if new connection/user call for a status update, broadcast the system status
        if request_type is self._requestTypes['update']: return True

        #
        # NOTE: the request could be sent from other people, so the userId and the userInfo
        #       may not be the user of interest, we need to change them for further editing
        # NOTE(review): `if None: pass` is always false; it exists only so the
        # branches below align as a uniform elif chain.
        if None: pass
        elif request_type == self._requestTypes['force-return']:
            # Redirect the request at whoever currently holds the token.
            userId = self.find_user(lambda x: x['acquiring'])
            request_type = self._requestTypes[
                'return']  # pretend this is a normal request
        elif request_type == self._requestTypes['force-cancel']:
            # Redirect the request at whoever currently holds the reservation.
            userId = self.find_user(lambda x: x['reserving'])
            request_type = self._requestTypes[
                'cancel']  # pretend this is a normal request

        # find_user may have returned a falsy value (nobody matched).
        if not userId:
            print("can't find user of interest. request:({})".format(
                request_type))
            return broadcast_decision

        userInfo = self.userList[userId]

        #
        # stop log ill logics
        # Each guard below rejects a request that is invalid in the
        # current state and leaves the state unchanged.
        #
        if self.token_userId and request_type == self._requestTypes['acquire']:
            print("can't acquire when somebody is keeping it.")
            return broadcast_decision
        if self.reserve_userId and userId != self.reserve_userId and request_type == self._requestTypes[
                'acquire']:
            print("can't acquire when other has reserved it.")
            return broadcast_decision
        if self.reserve_userId and request_type == self._requestTypes[
                'reserve']:
            print("can't reserve when somebody has already reserved.")
            return broadcast_decision
        if userId == self.token_userId and request_type == self._requestTypes[
                'reserve']:
            print("can't reserve when keeping the token.")
            return broadcast_decision
        if not self.token_userId and request_type == self._requestTypes[
                'reserve']:
            print(
                "can't reserve when the token is free (why not acquiring it).")
            return broadcast_decision
        if userId != self.token_userId and request_type is self._requestTypes[
                'return']:
            print("can't return the token without keeping it.")
            return broadcast_decision
        if userId == self.reserve_userId and request_type == self._requestTypes[
                'reserve']:
            print("can't reserve twice.")
            return broadcast_decision
        if userId != self.reserve_userId and request_type == self._requestTypes[
                'cancel']:
            print("can't cancel without reserving.")
            return broadcast_decision

        #
        # process legitimate logics
        # NOTE(review): `if False: pass` is the same elif-alignment device
        # as above.
        #
        if False: pass
        elif request_type is self._requestTypes['return']:
            userInfo['returns'] += 1  # record a completed acquiring
            userInfo['acquiring'] = False
            self.token_userId = False
            self.token_time = -1
            broadcast_decision = True
            # if someone reserved the token, start the reservation timer (initialize the timestamp)
            if self.find_user(lambda x: x['reserving']):
                self.reserve_time = timestamp

        elif request_type is self._requestTypes['acquire']:
            self.token_userId = userId
            self.token_time = timestamp
            userInfo['acquiring'] = True  # record keeping the token
            # if acquire request from reserve user, then reset reserve time and reserve_user
            if userId == self.reserve_userId:
                self.reserve_userId = False  # release reservation
                self.reserve_time = -1
                userInfo['reserving'] = False
            broadcast_decision = True

        elif request_type is self._requestTypes['reserve']:
            userInfo['reserving'] = True  # record reserving the token
            self.reserve_userId = userId
            broadcast_decision = True

        elif request_type is self._requestTypes['cancel']:
            # cancel reservation
            userInfo['reserving'] = False
            self.reserve_userId = False
            self.reserve_time = -1
            broadcast_decision = True

        else:
            print("Unkown request_type {}".format(request_type))

        return broadcast_decision
Ejemplo n.º 16
0
 async def seek_stream(self, timestamp: str):
     """Jump the stream to the absolute position given by *timestamp*.

     Reports 'Invalid parameter.' to the chat when the timestamp cannot
     be parsed.
     """
     target = parse_timestamp(timestamp)
     if not target:
         await self._bot.say('Invalid parameter.')
         return
     await self._seek_stream(target)