Example #1
    def test_format_timestamp(self):
        # specific datetime
        now = datetime.datetime(2013, 4, 28, 10, 0, 0)
        self.assertEqual(utils.format_timestamp(now),
                         "20130428 100000")

        # timedelta
        now -= datetime.timedelta(seconds=30)
        self.assertEqual(utils.format_timestamp(now),
                         "20130428 095930")

        # string
        self.assertEqual(utils.format_timestamp("20130428 095930"),
                         "20130428 095930")
Example #2
    def to_json(self):
        import json

        return json.dumps(
            self,
            default=lambda o: o.__dict__
            if type(o) is not datetime else utils.format_timestamp(o))
Example #3
def transform_metric_data(metric_data, task_cnt=3):
    if metric_data["scores"]:
        metric_data["top_tasks"] = CustomSorter.sort_scores(metric_data["scores"])[:task_cnt]
    if "timestamp" in metric_data:
        metric_data["timestamp_formatted"] = utils.format_timestamp(metric_data["timestamp"])
        if "previous_timestamp" in metric_data:
            metric_data["time_diff"] = utils.timediff(metric_data["timestamp"], metric_data["previous_timestamp"])
    return metric_data
Example #4
def generate_river_obj(redis_client, river_name):
    """
    Return a dict that matches the river.js spec.
    """
    river_key = 'rivers:%s' % river_name
    river_updates = [cPickle.loads(update) for update in redis_client.lrange(river_key, 0, -1)]
    return {
        'updatedFeeds': {
            'updatedFeed': river_updates,
        },
        'metadata': {
            'docs': 'http://riverjs.org/',
            'whenGMT': format_timestamp(arrow.utcnow()),
            'whenLocal': format_timestamp(arrow.utcnow().to('local')),
            'version': '3',
            'secs': '',
        },
    }
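The docstring above says the returned dict follows the river.js spec. As a hedged illustration (not taken from the source), the helper below shows one way such a dict is commonly serialized, wrapped in the onGetRiverStream JSONP-style callback that river.js files conventionally use; the function name and file handling are assumptions.

import json

def write_river_file(river_obj, path):
    # Hypothetical helper: serialize the dict from generate_river_obj() and
    # wrap it in the callback conventionally expected by river.js readers.
    # The callback name and output path are assumptions, not from the source.
    with open(path, 'w') as fp:
        fp.write('onGetRiverStream(%s)' % json.dumps(river_obj, indent=2))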
Example #5
    def decode(socket):
        fixed = bytearray(6)
        socket.recv_into(fixed, flags=MSG_WAITALL)
        file_path_length = byte_utils.bytes_to_char(fixed, 0)
        file_is_directory = byte_utils.bytes_to_boolean(fixed, 1)
        file_last_modified = byte_utils.bytes_to_unsigned_int(fixed, 2)
        file_size = None
        if not file_is_directory:
            fixed = bytearray(4)
            socket.recv_into(fixed, flags=MSG_WAITALL)
            file_size = byte_utils.bytes_to_unsigned_int(fixed, 0)
        strings = bytearray(file_path_length)
        socket.recv_into(strings, flags=MSG_WAITALL)
        file_path = byte_utils.bytes_to_string(strings, file_path_length, 0)
        if utils.DEBUG_LEVEL >= 3:
            utils.log_message("DEBUG", "Decoded send file packet: ")
            utils.log_message("DEBUG",
                              "File path length: " + str(file_path_length))
            utils.log_message("DEBUG",
                              "Is directory: " + str(file_is_directory))
            utils.log_message(
                "DEBUG", "Last modified: " +
                str(utils.format_timestamp(file_last_modified)))
            utils.log_message("DEBUG", "File size: " + str(file_size))
            utils.log_message("DEBUG", "File Path: " + str(file_path))
        # Stream the file's contents into File().write() in CHUNK_SIZE (1024-byte) chunks if it is not a directory
        if not file_is_directory:
            chunk_size = min(SendFilePacket.CHUNK_SIZE, file_size)
            remaining = file_size
            file_wrapper = File()
            received_bytes_acc = 0
            while remaining > 0:
                if utils.DEBUG_LEVEL >= 3:
                    utils.log_message("DEBUG",
                                      "Chunk size: " + str(chunk_size))
                chunk = bytearray(chunk_size)
                received_bytes = socket.recv_into(chunk, flags=MSG_WAITALL)
                received_bytes_acc += received_bytes
                file_wrapper.write(chunk)
                remaining -= received_bytes
                chunk_size = min(chunk_size, remaining)
            file_wrapper.close()
            if utils.DEBUG_LEVEL >= 1:
                utils.log_message(
                    "DEBUG", "File size is " + str(file_size) +
                    " and received bytes are " + str(received_bytes_acc))
                utils.log_message(
                    "DEBUG",
                    "File is located in " + str(file_wrapper.get_path()))
        else:
            file_wrapper = Directory()

        packet = SendFilePacket(
            FileInfo(file_path, file_is_directory, file_last_modified,
                     file_size, file_wrapper))
        return packet
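The decoder above reads a fixed 6-byte header (a 1-byte path length, a 1-byte directory flag, and a 4-byte last-modified value), an optional 4-byte file size, and then the path bytes. The sending-side byte_utils encoder is not shown in the source, so the sketch below is only a guess at the matching header layout using struct; the big-endian byte order in particular is an assumption.

import struct

def encode_send_file_header(file_path, is_directory, last_modified, file_size=None):
    # Assumed mirror of decode() above: unsigned char path length, boolean
    # directory flag, 4-byte unsigned last-modified timestamp, then an
    # optional 4-byte unsigned size for regular files, then the path bytes.
    path_bytes = file_path.encode("utf-8")
    header = struct.pack(">B?I", len(path_bytes), is_directory, last_modified)
    if not is_directory:
        header += struct.pack(">I", file_size)
    return header + path_bytes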
Example #6
 def pretty_print_result(cls, result, points):
     for rec in result:
         if len(points) == 0:
             print(rec)
         else:
             print(utils.format_timestamp(rec[PointEnum.ts]))
             for point in points:
                 print("{point}: {value}".format(point=point,
                                                 value=rec[point]))
             print(os.linesep)
Example #7
 def receive_object(send_file_packet):
     """Receives a send file packet, and processes it"""
     info = send_file_packet.file_info
     utils.log_message("INFO", "Receiving object: " + info.path)
     info.file_wrapper.move(os.path.join(self.directory.get_path(), info.path))
     info.file_wrapper.set_timestamp(info.last_modified)
     if utils.DEBUG_LEVEL >= 3:
         utils.log_message("DEBUG", "Object has been moved to: " + str(info.file_wrapper.get_path()))
         utils.log_message("DEBUG", "Timestamp has been set to: " + str(utils.format_timestamp(info.file_wrapper.get_timestamp())))
     return 0
Example #8
    def decode(socket):
        fixed = bytearray(6)
        socket.recv_into(fixed, flags=MSG_WAITALL)
        username_length = byte_utils.bytes_to_char(fixed, 0)
        directory_name_length = byte_utils.bytes_to_char(fixed, 1)
        files_count = byte_utils.bytes_to_unsigned_int(fixed, 2)
        dynamic = bytearray(username_length + directory_name_length)
        socket.recv_into(dynamic, flags=MSG_WAITALL)
        username = byte_utils.bytes_to_string(dynamic, username_length, 0)
        directory_name = byte_utils.bytes_to_string(dynamic,
                                                    directory_name_length,
                                                    username_length)
        if utils.DEBUG_LEVEL >= 3:
            utils.log_message("DEBUG", "Decoded login packet: ")
            utils.log_message("DEBUG",
                              "Username length: " + str(username_length))
            utils.log_message(
                "DEBUG",
                "Directory name length: " + str(directory_name_length))
            utils.log_message("DEBUG", "Files count: " + str(files_count))
            utils.log_message("DEBUG", "Username: "******"DEBUG",
                              "Directory name: " + str(directory_name))
            utils.log_message("DEBUG", "Files: ")
        # Parse all file info
        files = []
        for count in range(files_count):
            if utils.DEBUG_LEVEL >= 2:
                utils.log_message(
                    "DEBUG", "Waiting for file info " + str(count) + "/" +
                    str(files_count))
            fixed = bytearray(6)
            socket.recv_into(fixed, flags=MSG_WAITALL)
            file_path_length = byte_utils.bytes_to_char(fixed, 0)
            file_is_directory = byte_utils.bytes_to_boolean(fixed, 1)
            file_last_modified = byte_utils.bytes_to_unsigned_int(fixed, 2)
            strings = bytearray(file_path_length)
            socket.recv_into(strings, flags=MSG_WAITALL)
            file_path = byte_utils.bytes_to_string(strings, file_path_length,
                                                   0)
            if utils.DEBUG_LEVEL >= 3:
                utils.log_message("DEBUG",
                                  "File path length: " + str(file_path_length))
                utils.log_message("DEBUG",
                                  "Is directory: " + str(file_is_directory))
                utils.log_message(
                    "DEBUG", "File timestamp: " +
                    str(utils.format_timestamp(file_last_modified)))
                utils.log_message("DEBUG", "File path: " + str(file_path))
            files.append(
                FileInfo(file_path, file_is_directory, file_last_modified))

        packet = LoginPacket(username, directory_name, files)
        return packet
Example #9
    def newgroups(self, since):
        """Send a NEWGROUPS command.

        Get all newsgroups created after ``since``.

        ``since`` can be a datetime, timedelta, or pre-formatted
        string (e.g., 'yymmdd hhmmss').

        If it's a timedelta, subtract from datetime.utcnow first.
        """
        ts = utils.format_timestamp(since)
        self.last_response, groups = self._server.longcmd("NEWGROUPS %s" % ts)
        return make_group_result(groups)
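The docstring above summarizes format_timestamp's contract: it accepts a datetime, a timedelta (subtracted from utcnow first), or an already formatted string. A minimal usage sketch, assuming the same utils module exercised by the test in Example #1:

import datetime
import utils  # the module the examples above exercise (assumed importable)

# A concrete datetime is formatted directly (see the test in Example #1).
utils.format_timestamp(datetime.datetime(2013, 4, 28, 10, 0, 0))  # -> "20130428 100000"

# A timedelta is first subtracted from the current UTC time.
utils.format_timestamp(datetime.timedelta(minutes=30))

# A pre-formatted string is passed through unchanged.
utils.format_timestamp("20130428 095930")  # -> "20130428 095930"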
Example #10
    def populate_feed_update(self, entry):
        obj = {
            'id': str(self.redis_client.incr('id-generator')),
            'pubDate': format_timestamp(self.entry_timestamp(entry)),
        }

        # If both <title> and <description> exist:
        #   title -> <title>
        #   body -> <description>
        if entry.get('title') and entry.get('description'):
            obj['title'] = self.clean_text(entry.get('title'))
            obj['body'] = self.clean_text(entry.get('description'))

            # Drop the body if it's just a duplicate of the title.
            if obj['title'] == obj['body']:
                obj['body'] = ''

        # If <description> exists but <title> doesn't:
        #   title -> <description>
        #   body -> ''
        #
        # See http://scripting.com/2014/04/07/howToDisplayTitlelessFeedItems.html
        # for an ad-hoc spec.
        elif not entry.get('title') and entry.get('description'):
            obj['title'] = self.clean_text(entry.get('description'))
            obj['body'] = ''

        # If neither of the above works but <title> exists:
        #   title -> <title>
        #   body -> ''
        #
        # A rare occurrence -- just about everybody uses both <title>
        # and <description> and those with title-less feeds just use
        # <description> (in keeping with the RSS spec) -- but the
        # Nieman Journalism Lab's RSS feed [1] needs this conditional
        # so I assume it's not the only one out there.
        #
        # [1] http://www.niemanlab.org/feed/
        elif entry.get('title'):
            obj['title'] = self.clean_text(entry.get('title'))
            obj['body'] = ''

        if entry.get('link'):
            obj['link'] = entry.get('link')

        if entry.get('comments'):
            obj['comments'] = entry.get('comments')

        return obj
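The comments above spell out how <title> and <description> map onto the update's title and body. As an illustration only, the hypothetical call below shows the title-less case; the builder instance and the entry dict are made up, not taken from the source.

# Hypothetical feedparser-style entry with a description but no title.
entry = {
    'title': '',
    'description': 'A short, title-less item.',
    'link': 'http://example.com/item',
}

# `builder` stands for an assumed instance of the class defining populate_feed_update().
update = builder.populate_feed_update(entry)
# Expected shape: the description becomes the title and the body stays empty, e.g.
# {'id': ..., 'pubDate': ..., 'title': 'A short, title-less item.', 'body': '', 'link': ...}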
Example #11
    def set_last_session_timedelta(self):
        # Display the last session and total timedelta for the selected project.
        project_name = self.ui.projects_combo_box.currentText()
        if len(project_name) == 0: # when a task is removed currentText() returns ''
            self.current_index = 0
            self.ui.projects_combo_box.setCurrentIndex(self.current_index)
            project_name = self.ui.projects_combo_box.currentText()

        total = self.state.get_total_timedelta(project_name)
        timestamp, ongoing = self.state.get_last_session_timedelta(project_name)
        timestamp, font_size = format_timestamp(str(timestamp))
        self.ui.label_time_spent_value.setText(timestamp)
        self.ui.label_time_spent_value.setStyleSheet(f"font-size: {font_size}px")
        self.ui.label_total.setText(f"Total: {total}")
        self.toggle_labels(ongoing)
Example #12
 def receive_object(send_file_packet):
     """Receives a send file packet, and processes it"""
     info = send_file_packet.file_info
     utils.log_message("INFO", "Receiving object: " + info.path)
     info.file_wrapper.move(
         os.path.join(self.directory.get_path(), info.path))
     info.file_wrapper.set_timestamp(info.last_modified)
     if utils.DEBUG_LEVEL >= 3:
         utils.log_message(
             "DEBUG", "Object has been moved to: " +
             str(info.file_wrapper.get_path()))
         utils.log_message(
             "DEBUG", "Timestamp has been set to: " + str(
                 utils.format_timestamp(
                     info.file_wrapper.get_timestamp())))
     return 0
Example #13
    def decode(socket):
        fixed = bytearray(6)
        socket.recv_into(fixed, flags=MSG_WAITALL)
        file_path_length = byte_utils.bytes_to_char(fixed, 0)
        file_is_directory = byte_utils.bytes_to_boolean(fixed, 1)
        file_last_modified = byte_utils.bytes_to_unsigned_int(fixed, 2)
        file_size = None
        if not file_is_directory:
            fixed = bytearray(4)
            socket.recv_into(fixed, flags=MSG_WAITALL)
            file_size = byte_utils.bytes_to_unsigned_int(fixed, 0)
        strings = bytearray(file_path_length)
        socket.recv_into(strings, flags=MSG_WAITALL)
        file_path = byte_utils.bytes_to_string(strings, file_path_length, 0)
        if utils.DEBUG_LEVEL >= 3:
            utils.log_message("DEBUG", "Decoded send file packet: ")
            utils.log_message("DEBUG", "File path length: " + str(file_path_length))
            utils.log_message("DEBUG", "Is directory: " + str(file_is_directory))
            utils.log_message("DEBUG", "Last modified: " + str(utils.format_timestamp(file_last_modified)))
            utils.log_message("DEBUG", "File size: " + str(file_size))
            utils.log_message("DEBUG", "File Path: " + str(file_path))
        # Stream the file's contents into File().write() in CHUNK_SIZE (1024-byte) chunks if it is not a directory
        if not file_is_directory:
            chunk_size = min(SendFilePacket.CHUNK_SIZE, file_size)
            remaining = file_size
            file_wrapper = File()
            received_bytes_acc = 0
            while remaining > 0:
                if utils.DEBUG_LEVEL >= 3:
                    utils.log_message("DEBUG", "Chunk size: " + str(chunk_size))
                chunk = bytearray(chunk_size)
                received_bytes = socket.recv_into(chunk, flags=MSG_WAITALL)
                received_bytes_acc += received_bytes
                file_wrapper.write(chunk)
                remaining -= received_bytes
                chunk_size = min(chunk_size, remaining)
            file_wrapper.close()
            if utils.DEBUG_LEVEL >= 1:
                utils.log_message("DEBUG", "File size is " + str(file_size) + " and received bytes are " + str(
                    received_bytes_acc))
                utils.log_message("DEBUG", "File is located in " + str(file_wrapper.get_path()))
        else:
            file_wrapper = Directory()

        packet = SendFilePacket(FileInfo(file_path, file_is_directory, file_last_modified, file_size, file_wrapper))
        return packet
Example #14
    def decode(socket):
        fixed = bytearray(6)
        socket.recv_into(fixed, flags=MSG_WAITALL)
        username_length = byte_utils.bytes_to_char(fixed, 0)
        directory_name_length = byte_utils.bytes_to_char(fixed, 1)
        files_count = byte_utils.bytes_to_unsigned_int(fixed, 2)
        dynamic = bytearray(username_length + directory_name_length)
        socket.recv_into(dynamic, flags=MSG_WAITALL)
        username = byte_utils.bytes_to_string(dynamic, username_length, 0)
        directory_name = byte_utils.bytes_to_string(dynamic, directory_name_length, username_length)
        if utils.DEBUG_LEVEL >= 3:
            utils.log_message("DEBUG", "Decoded login packet: ")
            utils.log_message("DEBUG", "Username length: " + str(username_length))
            utils.log_message("DEBUG", "Directory name length: " + str(directory_name_length))
            utils.log_message("DEBUG", "Files count: " + str(files_count))
            utils.log_message("DEBUG", "Username: "******"DEBUG", "Directory name: " + str(directory_name))
            utils.log_message("DEBUG", "Files: ")
        # Parse all file info
        files = []
        for count in range(files_count):
            if utils.DEBUG_LEVEL >= 2:
                utils.log_message("DEBUG", "Waiting for file info " + str(count) + "/" + str(files_count))
            fixed = bytearray(6)
            socket.recv_into(fixed, flags=MSG_WAITALL)
            file_path_length = byte_utils.bytes_to_char(fixed, 0)
            file_is_directory = byte_utils.bytes_to_boolean(fixed, 1)
            file_last_modified = byte_utils.bytes_to_unsigned_int(fixed, 2)
            strings = bytearray(file_path_length)
            socket.recv_into(strings, flags=MSG_WAITALL)
            file_path = byte_utils.bytes_to_string(strings, file_path_length, 0)
            if utils.DEBUG_LEVEL >= 3:
                utils.log_message("DEBUG", "File path length: " + str(file_path_length))
                utils.log_message("DEBUG", "Is directory: " + str(file_is_directory))
                utils.log_message("DEBUG", "File timestamp: " + str(utils.format_timestamp(file_last_modified)))
                utils.log_message("DEBUG", "File path: " + str(file_path))
            files.append(FileInfo(file_path, file_is_directory, file_last_modified))

        packet = LoginPacket(username, directory_name, files)
        return packet
Example #15
    def run(self):
        while True:
            feed_url = self.inbox.get()
            try:
                feed_content = self.request_feed(feed_url)
            except requests.exceptions.RequestException as ex:
                logger.exception('Failed to check %s' % feed_url)
                future = arrow.utcnow() + timedelta(seconds=60*60)
                fmt = format_timestamp(future.to('local'))
                logger.info('Next check for %s: %s (%d seconds)' % (feed_url, fmt, 60*60))
                self.redis_client.zadd('next_check', feed_url, future.timestamp)
            else:
                try:
                    feed_parsed = feedparser.parse(feed_content)
                except ValueError as ex:
                    logger.exception('Failed to parse %s' % feed_url)
                    break

                feed_key = '%s:entries' % feed_url
                new_feed = (self.redis_client.llen(feed_key) == 0)

                feed_updates = []
                timestamps = []

                for entry in feed_parsed.entries:
                    # We must keep track of feed updates so they're only seen
                    # once. Here's how that happens:
                    #
                    # Redis stores a list at `feed_key` that contains
                    # the 1000 (by default) most recently seen feed
                    # update fingerprints. See self.entry_fingerprint
                    # for how the fingerprint is calculated.
                    #
                    # If the fingerprint hasn't been seen before, add it
                    # to `feed_key`.
                    #
                    # If it has, this feed update has already been seen
                    # so we can skip it.
                    if self.new_entry(feed_key, entry):
                        self.add_feed_entry(feed_key, entry)
                    else:
                        continue

                    update = self.populate_feed_update(entry)
                    feed_updates.append(update)
                    timestamps.append(self.entry_timestamp(entry))

                timestamp_key = '%s:timestamps' % feed_url

                # Add any new timestamps found during this check
                if timestamps:
                    logger.info('%d new entries for %s' % (len(timestamps), feed_url))
                    new_timestamps = [obj.timestamp for obj in timestamps]
                    self.redis_client.lpush(timestamp_key, *new_timestamps)
                    self.redis_client.sort(timestamp_key, desc=True, store=timestamp_key)
                    self.redis_client.ltrim(timestamp_key, 0, 99)
                else:
                    logger.info('No new entries for %s' % feed_url)

                history = self.redis_client.lrange(timestamp_key, 0, 9 if timestamps else 8)
                if not timestamps:
                    # See http://goo.gl/X6QhWN for why we do this
                    history.insert(0, arrow.utcnow().timestamp)

                delta = self.average_update_interval(history)

                # Don't check more than once a minute
                if delta.seconds < 60:
                    delta = timedelta(seconds=60)

                # Cap the next check at two hours.
                elif delta.seconds > (2*60*60):
                    logger.debug('Randomly scheduling %s' % feed_url)
                    delta = timedelta(seconds=random.uniform(60*60, 2*60*60))

                future_update = arrow.utcnow() + delta
                fmt = format_timestamp(future_update.to('local'))

                logger.info('Next check for %s: %s (%d seconds)' % (feed_url, fmt, delta.seconds))
                self.redis_client.zadd('next_check', feed_url, future_update.timestamp)

                # Keep --initial most recent updates if this is the
                # first time we've seen the feed
                if new_feed:
                    feed_updates = feed_updates[:self.cli_args.initial]

                if feed_updates:
                    river_update = {
                        'feedDescription': feed_parsed.feed.get('description', ''),
                        'feedTitle': feed_parsed.feed.get('title', ''),
                        'feedUrl': feed_url,
                        'item': feed_updates,
                        'websiteUrl': feed_parsed.feed.get('link', ''),
                        'whenLastUpdate': format_timestamp(arrow.utcnow()),
                    }

                    for river_name in self.redis_client.smembers('%s:rivers' % feed_url):
                        river_key = 'rivers:%s' % river_name
                        self.redis_client.lpush(river_key, cPickle.dumps(river_update))
                        self.redis_client.ltrim(river_key, 0, self.cli_args.entries - 1)
                        self.redis_client.sadd('updated_rivers', river_name)

                    firehose_key = 'rivers:firehose'
                    self.redis_client.lpush(firehose_key, cPickle.dumps(river_update))
                    self.redis_client.ltrim(firehose_key, 0, self.cli_args.entries - 1)
                    self.redis_client.sadd('updated_rivers', 'firehose')

            finally:
                self.inbox.task_done()
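The scheduling comments near the end of run() clamp the next check to at least one minute and randomize anything beyond two hours. The helper below restates that clamping logic as a standalone sketch for clarity; the function itself is not part of the source, and it mirrors the original's use of delta.seconds rather than total_seconds().

import random
from datetime import timedelta

def clamp_check_interval(delta):
    # Same rules as the inline logic in run() above: never poll a feed more
    # than once a minute, and spread very quiet feeds randomly between one
    # and two hours rather than honoring their full average interval.
    if delta.seconds < 60:
        return timedelta(seconds=60)
    if delta.seconds > 2 * 60 * 60:
        return timedelta(seconds=random.uniform(60 * 60, 2 * 60 * 60))
    return delta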