Exemplo n.º 1
0
        def read_process_queues_and_report(outs_q, errs_q):
            """Drain the worker's stdout/stderr queues and react to them.

            Raises RuntimeError when the device reports an 'unknown' state
            on stdout, or when anything at all arrived on the error queue.
            In both cases the worker is closed first.
            """
            for out_line in get_nowait_from_queue(outs_q):
                cleaned = out_line.strip('\n')
                logger.debug('Command output: %s', cleaned)
                if cleaned == 'unknown':
                    worker.close()
                    raise RuntimeError(
                        'Phone "%s" has an unknown state. Please check device authorization and state'
                        % self.source)

            stderr_chunks = get_nowait_from_queue(errs_q)
            if stderr_chunks:
                worker.close()
                # FIXME
                # keep in mind this Sierra bug:
                # https://discussions.apple.com/thread/7690618
                # that causes:
                # RuntimeError:
                # There were errors trying to test connection to the phone 0x819CE30D080A6.
                # Errors :
                # ['cfgutil: error: Error Domain=NSCocoaErrorDomain Code=4097
                # "connection to service named com.apple.configurator.xpc.InternetService"
                # UserInfo={NSDebugDescription=connection to service named com.apple.configurator.xpc.InternetService}
                # FIXME
                raise RuntimeError(
                    'There were errors trying to test connection to the phone %s. Errors :%s'
                    % (self.source, stderr_chunks))
Exemplo n.º 2
0
 def _collect_data(self, end=False):
     """Pair queued data items with their stats by timestamp and emit them.

     Matched (data, stat) pairs go straight to the listeners; unmatched
     items wait in ``data_cache``/``stat_cache`` until their counterpart
     arrives.  When *end* is true, leftover data entries are flushed with
     zero-valued stats.
     """
     data_batch = get_nowait_from_queue(self.results)
     stats_batch = get_nowait_from_queue(self.stats_results)
     logger.debug("Data timestamps: %s" % [d.get('ts') for d in data_batch])
     logger.debug("Stats timestamps: %s" % [d.get('ts') for d in stats_batch])
     for entry in data_batch:
         ts = entry['ts']
         try:
             # EAFP: pop the matching stat if we already have it
             stat_item = self.stat_cache.pop(ts)
         except KeyError:
             self.data_cache[ts] = entry
         else:
             self.__notify_listeners(entry, stat_item)
     for entry in stats_batch:
         ts = entry['ts']
         try:
             data_item = self.data_cache.pop(ts)
         except KeyError:
             self.stat_cache[ts] = entry
         else:
             self.__notify_listeners(data_item, entry)
     if end and self.data_cache:
         logger.info('Timestamps without stats:')
         for ts in sorted(self.data_cache):
             logger.info(ts)
             self.__notify_listeners(self.data_cache[ts],
                                     StatsReader.stats_item(ts, 0, 0))
Exemplo n.º 3
0
 def read_process_queues_and_report(outs_q, errs_q):
     """Log everything currently queued on the command's stdout/stderr.

     Stdout lines are logged at debug level, stderr lines at warning
     level; nothing is raised here.
     """
     for out_chunk in get_nowait_from_queue(outs_q):
         logger.debug('Command \'%s\' output: %s', cmd, out_chunk.strip())
     for err_chunk in get_nowait_from_queue(errs_q):
         logger.warning('Errors in command \'%s\' output: %s', cmd, err_chunk.strip())
Exemplo n.º 4
0
 def _collect_data(self, end=False):
     """Join data results and stats results on timestamp, push pairs on.

     Items whose counterpart has not arrived yet are parked in the
     caches.  With ``end=True`` any data still lacking stats is reported
     with zero-valued stats.
     """
     _missing = object()  # sentinel: distinguishes "absent" from a cached None
     incoming_data = get_nowait_from_queue(self.results)
     incoming_stats = get_nowait_from_queue(self.stats_results)
     logger.debug("Data timestamps: %s" % [d.get('ts') for d in incoming_data])
     logger.debug("Stats timestamps: %s" % [d.get('ts') for d in incoming_stats])
     for data_item in incoming_data:
         ts = data_item['ts']
         stat_item = self.stat_cache.pop(ts, _missing)
         if stat_item is _missing:
             self.data_cache[ts] = data_item
         else:
             self.__notify_listeners(data_item, stat_item)
     for stat_item in incoming_stats:
         ts = stat_item['ts']
         data_item = self.data_cache.pop(ts, _missing)
         if data_item is _missing:
             self.stat_cache[ts] = stat_item
         else:
             self.__notify_listeners(data_item, stat_item)
     if end and self.data_cache:
         logger.info('Timestamps without stats:')
         for ts in sorted(self.data_cache):
             logger.info(ts)
             self.__notify_listeners(self.data_cache[ts], StatsReader.stats_item(ts, 0, 0))
Exemplo n.º 5
0
 def _read_chunk(self):
     """Pull raw chunks off the source queue and assemble log entries.

     A chunk matching ``log_fmt_regexp`` starts a new entry; anything
     else is glued onto the buffered entry (multiline log records) or
     dropped as trash when nothing is buffered.  Returns the list of
     finished entries, or None (after a one-second back-off) when the
     queue was empty.
     """
     raw_chunks = get_nowait_from_queue(self.source)
     if not raw_chunks:
         time.sleep(1)
         return None
     completed = []
     for raw in raw_chunks:
         if isinstance(raw, bytes):
             raw = raw.decode(
                 'utf-8')  # not sure if this is a good practice
     # we need this for multiline log entries concatenation
         parsed = self.log_fmt_regexp.match(raw)
         if parsed is None:
             if self.buffer:
                 # continuation line: append text to the pending entry
                 self.buffer[0]['value'] = self.buffer[0]['value'] + str(raw)
             else:
                 logger.warning(
                     'Trash data in logs, dropped data: \n%s', raw)
         elif self.buffer:
             # a new entry begins, so the buffered one is complete
             completed.append(self.buffer.pop(0))
             self.buffer.append(parsed.groupdict())
         else:
             self.buffer.append(parsed.groupdict())
     return completed
Exemplo n.º 6
0
    def run(self):
        """Worker loop: drain the source queue, group incoming frames by
        metric type, and fan the grouped buffers out to every client.

        Repeats roughly once per second until the interrupt event is set,
        then signals completion via the finished event.
        """
        while not self._interrupted.is_set():
            self.data_buffer = {}
            self.metrics_buffer = {}
            for df, metric in get_nowait_from_queue(self.source):
                if metric.type not in self.data_buffer:
                    # first frame of this metric type in the cycle
                    self.data_buffer[metric.type] = df
                    self.metrics_buffer[metric.type] = metric
                else:
                    # extend the existing buffer with the new frame
                    self.data_buffer[metric.type] = pd.concat(
                        [self.data_buffer[metric.type], df])
                if self._interrupted.is_set():
                    break
            logger.debug('Buffer after routing: %s', self.data_buffer)
            for client in self.clients:
                for metric_type in self.data_buffer:
                    client.put(self.data_buffer[metric_type],
                               self.metrics_buffer[metric_type])

            if self._interrupted.is_set():
                break
            time.sleep(1)
        self._finished.set()
Exemplo n.º 7
0
        def read_process_queues_and_report(outs_q, errs_q):
            """Inspect queued command output, failing fast on bad device state.

            Raises RuntimeError (after closing the worker) if the device
            reports 'unknown' on stdout, or if any stderr output arrived.
            """
            for out_line in get_nowait_from_queue(outs_q):
                cleaned = out_line.strip('\n')
                logger.debug('Command output: %s', cleaned)
                if cleaned == 'unknown':
                    worker.close()
                    raise RuntimeError(
                        'Phone "%s" has an unknown state. Please check device authorization and state'
                        % self.source)

            err_lines = get_nowait_from_queue(errs_q)
            if err_lines:
                worker.close()
                raise RuntimeError(
                    'There were errors trying to test connection to the phone %s. Errors :%s'
                    % (self.source, err_lines))
Exemplo n.º 8
0
 def __get_from_queue_prepare_and_send(self):
     """Drain the uploader's inner queue, batch rows by data type, and
     POST each non-empty batch to the server as a TSV INSERT.

     Chunks that still fail after retries (``RetryError``) are logged
     and dropped rather than re-queued -- delivery is best-effort.
     """
     pending_batch = self.__prepare_batch_of_chunks(
         get_nowait_from_queue(self.uploader.inner_queue))
     for type_ in self.uploader.data_types_to_tables:
         if pending_batch[type_]:
             # the elements are already strings: join directly instead of
             # wrapping them in a pass-through generator expression
             prepared_body = "".join(pending_batch[type_])
             url = "{addr}/?query={query}".format(
                 addr=self.uploader.addr,
                 query="INSERT INTO {db}.{type} FORMAT TSV".format(
                     db=self.uploader.dbname,
                     type=self.uploader.data_types_to_tables[type_]))
             try:
                 send_chunk(url, prepared_body)
             except RetryError:
                 logger.warning(
                     'Failed to send chunk via uploader. Dropped')
                 logger.debug(
                     'Failed to send chunk via uploader. Dropped: %s %s',
                     url, prepared_body)
Exemplo n.º 9
0
 def _read_chunk(self):
     """Drain the source queue and assemble complete log entries.

     A chunk matching ``log_fmt_regexp`` opens a new entry; non-matching
     chunks are appended to the buffered entry (multiline records) or
     dropped when there is nothing to attach them to.  Returns the list
     of completed entries, or None (implicitly) when the queue was empty
     after a one-second back-off.
     """
     data = get_nowait_from_queue(self.source)
     if not data:
         time.sleep(1)
     else:
         ready_to_go_chunks = []
         for chunk in data:
             match = self.log_fmt_regexp.match(chunk)
             # we need this for multiline log entries concatenation
             if match:
                 if not self.buffer:
                     self.buffer.append(match.groupdict())
                 else:
                     ready_to_go_chunk = self.buffer.pop(0)
                     self.buffer.append(match.groupdict())
                     ready_to_go_chunks.append(ready_to_go_chunk)
             else:
                 if not self.buffer:
                     # logger.warn() is deprecated (removed in Python
                     # 3.13); warning() is the supported spelling
                     logger.warning('Trash data in logs, dropped data: \n%s',
                                    chunk)
                 else:
                     self.buffer[0][
                         'value'] = self.buffer[0]['value'] + str(chunk)
         return ready_to_go_chunks
Exemplo n.º 10
0
 def get(self):
     """Generator that repeatedly yields whatever is currently queued on
     ``incoming_session_data``.

     The finished flag is checked only *after* each yield (do-while
     style), so at least one batch is always produced.
     """
     done = False
     while not done:
         yield get_nowait_from_queue(self.incoming_session_data)
         done = self.finished