    def _data_handler(self):
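        # Background consumer: drain queued data units and write them to the
        # database until the stop event is set and the queue is empty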
        logger.debug(
            f"{self.__class__.__name__} started with event {id(self._event)}")
        while True:
            if self.queue.empty():
                if self._event.is_set():
                    break
                # Wait briefly instead of busy-spinning on an empty queue
                self._event.wait(0.1)
                continue

            item = self.queue.get()
            try:
                logger.debug(
                    f"Dequeued item: '{item}' (current queue size {self.queue.qsize()})"
                )
                insert_sql_str, rows = item.sql_data
                result = self.execute(
                    insert_sql_str,
                    rows) if rows else self.execute(insert_sql_str)
                item.result = result
                logger.debug("Insert item: {}\n\t{}\n\t{}".format(
                    type(item).__name__, insert_sql_str,
                    '\n\t'.join([str(r) for r in rows])))
            except Exception as e:
                f, l = get_error_info()
                logger.error(
                    f"Unexpected error occurred on {type(item).__name__}: {e}; File: {f}:{l}"
                )
            else:
                logger.debug(
                    f"Item {type(item).__name__} successfully handled")
        logger.debug("Background task stopped")
    def on_connection(self):
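        # Generator yielding a logged-in content object; presumably wrapped as
        # a context manager by the caller. Errors inside the fault-tolerance
        # budget are treated as non-critical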
        try:
            with self._lock:
                self.login()

                yield self.content_object
        except RunnerError as e:
            self._session_errors.append(e)
            logger.warn(
                "Non-critical error on {name}; Reason: {error} (Attempt {real} of {allowed})"
                .format(
                    name=self.host_alias,
                    error=e,
                    real=len(self._session_errors),
                    allowed=self._fault_tolerance,
                ))
        except Exception as e:
            logger.error(
                "Critical error on {name}; Reason: {error} (Attempt {real} of {allowed})"
                .format(
                    name=self.host_alias,
                    error=e,
                    real=len(self._session_errors),
                    allowed=self._fault_tolerance,
                ))
            GlobalErrors().append(e)
        else:
            if self._session_errors:
                logger.debug(
                    f"Host '{self}': runtime errors accumulated during the tolerance period have been cleared"
                )
            self._session_errors.clear()
        finally:
            self.exit()
    def __call__(self, outputs, datetime=None) -> bool:
        command_out = outputs.get('stdout', None)
        time_output = outputs.get('stderr', None)
        rc = outputs.get('rc')
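        # The 'rc' option may list several acceptable return codes separated
        # by '|', e.g. '0|1'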
        try:
            exp_rc = self.options.get('rc', None)
            if exp_rc:
                if rc not in [int(_rc) for _rc in re.split(r'\s*\|\s*', exp_rc)]:
                    raise AssertionError(
                        f"Returned rc {rc} does not match expected {exp_rc}\nStdOut:\n\t{command_out}\nStdErr:\n\t{time_output}")
            data = time_output.split(',')
            row_dict = DotDict(**{k: v.replace('%', '') for (k, v) in [entry.split(':', 1) for entry in data]})
            for k in row_dict.keys():
                if k == 'Command':
                    continue
                row_dict.update({k: float(row_dict[k])})
            logger.info(f"Command: {row_dict.get('Command')} [Rc: {row_dict.get('Rc')}]")

            row = self.table.template(self.host_id, None, *row_dict.values(), -1)
            du = model.data_factory(self.table, row, output=command_out, datetime=datetime)

            self.data_handler(du)
            return True
        except Exception as e:
            f, li = get_error_info()
            logger.error(f"{self.__class__.__name__}: {e}; File: {f}:{li}")
            raise RunnerError(f"{self}", f"{e}; File: {f}:{li}")
    def __call__(self, output: dict) -> bool:
        out = output.get('stdout', None)
        err = output.get('stderr', None)

        total_output = '\n'.join(str(s) for s in (out, err) if s)

        rc = output.get('rc', -1)

        exp_rc = self.options.get('rc', None)
        expected = self.options.get('expected', None)
        prohibited = self.options.get('prohibited', None)

        errors = []
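        # 'expected'/'prohibited' support '|'-separated alternatives (any may
        # match) and '&'-separated conjunctions (all must match)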
        if exp_rc:
            if rc not in [int(_rc) for _rc in re.split(r'\s*\|\s*', exp_rc)]:
                errors.append(f"Rc [{rc}] does not match expected - {exp_rc}")
        if expected:
            if not any([pattern in total_output for pattern in re.split(r'\s*\|\s*', expected)]) or \
                    not all([pattern in total_output for pattern in re.split(r'\s*\&\s*', expected)]):
                errors.append(
                    "Output does not contain expected pattern [{}]".format(
                        expected))
        if prohibited:
            if any([pattern in total_output for pattern in re.split(r'\s*\|\s*', prohibited)]) or \
                    not all([pattern not in total_output for pattern in re.split(r'\s*\&\s*', prohibited)]):
                errors.append("Output contains prohibited pattern [{}]".format(
                    prohibited))

        if errors:
            st = 'Fail'
            msg = "\nErrors:\n\t{}\n\tRC: {}\nOutput:\n\t{}".format(
                '\n\t'.join(errors), rc,
                '\n\t'.join(total_output.splitlines()))
            logger.error(msg)
        else:
            st = 'Pass'
            msg = 'Output:\n\t{}'.format('\n\t'.join(
                total_output.splitlines()))
        output_ref = db.CacheLines().upload(msg)
        du = model.data_factory(
            self.table,
            self.table.template(self.host_id, None,
                                self.options.get('command'), rc, st,
                                output_ref))
        self.data_handler(du)

        if st != 'Pass':
            if self._tolerance == -1:
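                # A tolerance of -1 disables the error budget entirely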
                pass
            elif self._tolerance_counter == self._tolerance:
                raise RunnerError(
                    f"{self}: Error count reached tolerance ({self._tolerance})",
                    msg)
            else:
                self._tolerance_counter += 1

        return st == 'Pass'
 def execute(self, sql_text, *rows):
     try:
         return self._db.execute(sql_text, *rows)
     except Exception as e:
         logger.error("DB execute error: {}\n{}\n{}".format(
             e, sql_text, '\n\t'.join(str(r) for r in rows)))
         raise
 def __format__(self, format_spec):
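     # Temporarily swap the instance format template, render, and restore the
     # original template in 'finally'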
     old_format = self.__str_format
     try:
         if format_spec != '':
             self.__str_format = "{{:{new_format}}}{{}}".format(
                 new_format=format_spec)
         return str(self)
     except Exception as e:
         logger.error(f"Formatting with spec '{format_spec}' failed: {e}")
     finally:
         self.__str_format = old_format
 def stop(self, timeout=5):
     if self._event:
         self._event.set()
     while len(self._threads) > 0:
         th = self._threads.pop(0)
         try:
             th.join(timeout)
             if th.is_alive():
                 logger.warn(f"Thread '{th.name}' is still alive after a {timeout}s join timeout")
             else:
                 logger.debug(f"Thread '{th.name}' gracefully stopped")
         except Exception as e:
             logger.error(
                 f"Thread '{th.name}' graceful stop failed; Error raised: {e}"
             )
 def is_continue_expected(self):
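     # Poll the stop conditions: fault-tolerance breach, external stop event,
     # internal stop event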
     if not self._evaluate_tolerance():
         self.parameters.event.set()
         logger.error("Stop requested due to critical error")
         return False
     if self.parameters.event.is_set():
         logger.info("Stop requested by external source")
         return False
     if self._internal_event.is_set():
         logger.info("Stop requested internally")
         return False
     return True
 def _evaluate_tolerance(self):
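     # Once session errors reach the configured fault tolerance, record a
     # PlugInError and request a stop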
     if len(self._session_errors) >= self._fault_tolerance:
         e = PlugInError(
             f"{self}",
             "PlugIn '{}' stop invoked; error count reached the limit ({})".format(
                 self.host_alias,
                 self._fault_tolerance,
             ), *self._session_errors)
         logger.error(f"{e}")
         GlobalErrors().append(e)
         return False
     return True
 def __parse(self, bitrate_str: str):
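     # BITRATE_REGEX (defined elsewhere) is expected to capture a numeric
     # value and a unit suffix, e.g. '100Mb'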
     try:
         m = BITRATE_REGEX.match(str(bitrate_str))
         if m is None:
             raise AttributeError(
                 "Wrong bitrate format ({})".format(bitrate_str))
         self.__number = float(m.groups()[0])
         self.__format_number()
         self.set_format(m.groups()[1])
     except Exception as e:
         logger.error(
             "Cannot parse bitrate value string '{}' with error: {}".format(
                 bitrate_str, e))
         raise
    def start(self, event=None):
        # Avoid a mutable default argument: a shared Event() instance would
        # persist across calls
        event = event or Event()
        if self._db.is_new:
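            # Bootstrap the schema on a fresh database: create every table
            # registered with TableSchemaService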
            for name, table in TableSchemaService().tables.items():
                try:
                    assert not self._db.table_exist(
                        table.name), f"Table '{name}' already exists"
                    self._db.execute(
                        sql_engine.create_table_sql(table.name, table.fields,
                                                    table.foreign_keys))
                except AssertionError as e:
                    logger.warn(f"{e}")
                except Exception as e:
                    logger.error(f"Cannot create table '{name}' -> Error: {e}")
                    raise
        self._event = event

        dh = Thread(name='DataHandler', target=self._data_handler, daemon=True)
        dh.start()
        self._threads.append(dh)
def upload_file_to_portal(link_title, file_path):
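    # Attach a file to the Report Portal launch log; the MIME type is
    # hard-coded to PNG, as this helper appears to be used for chart images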
    if not PORTAL:
        return

    try:
        _, file_name = os.path.split(file_path)
        with open(file_path, 'rb') as file_reader:
            file_data = file_reader.read()
        portal_logger.info(link_title, attachment={
            'name': file_name,
            'data': file_data,
            'mime': 'image/png'
        })
        return True
    except RobotServiceException as e:
        logger.error(f"Cannot upload file '{file_path}'; Reason: {e}")
    except Exception as e:
        logger.error(f"Unexpected error during upload file '{file_path}'; Reason: {e}")
    return False
    def generate_module_statistics(self, period=None, plugin=None, alias=None, **options):
        """
        Generate a chart presenting collected monitor data in a visual style.

        Arguments:
        - period: time period to chart data for (optional)
        - plugin: name of the plugin whose data should be charted (optional)
        - alias: host connection alias (optional)
        - options: extra plugin/chart specific filters
        :Return - html link to chart file

        Note: if Report Portal is used, chart files will be uploaded into the launch report (See `Report Portal integration`)
        """
        if not os.path.exists(self._image_path):
            os.makedirs(self._image_path, exist_ok=True)

        module: HostModule = HostRegistryCache().get_connection(alias)
        chart_plugins = module.get_plugin(plugin, **options)
        chart_title = self._create_chart_title(period, plugin, f"{module}", **options)
        marks = _get_period_marks(period, module.host_id) if period else {}

        body_data = []
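        # Render every affiliated chart of each matched plugin and collect
        # (name, relative path) pairs for the HTML report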
        for chart_plugin in chart_plugins:
            for chart in chart_plugin.affiliated_charts():
                try:
                    sql_query = chart.compose_sql_query(host_name=chart_plugin.host_alias, **marks)
                    logger.debug("{}{}\n{}".format(chart_plugin.type, f'_{period}' if period is not None else '', sql_query))
                    sql_data = db.DataHandlerService().execute(sql_query)
                    for picture_name, file_path in generate_charts(chart, sql_data, self._image_path, prefix=chart_title):
                        relative_image_path = os.path.relpath(file_path, os.path.normpath(
                            os.path.join(self._output_dir, self._log_path)))
                        body_data.append((picture_name, relative_image_path))
                        upload_file_to_portal(picture_name, file_path)
                except Exception as e:
                    logger.error(f"Chart generation error: {e}")

        html_link_path = create_html(self._output_dir, self._log_path, chart_title, *body_data)
        html_link_text = f"Chart for <a href=\"{html_link_path}\">'{chart_title}'</a>"
        logger.warn(html_link_text, html=True)
        return html_link_text
 def generate_chart_data(self, query_results: Iterable[Iterable]) \
         -> List[Tuple[str, Iterable, Iterable, Iterable[Iterable]]]:
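     # Group query rows by their type column (the first field), then split
     # each group into x-axis values, y-axis names and data points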
     result = []
     for type_ in set(
             i[0] for i in query_results
             if any(i[0].startswith(section) for section in self._sections)):
         try:
             data = [i[1:] for i in query_results if i[0] == type_]
             x_axes = self.x_axes(data, 1)
             y_axes = self.y_axes(data)
             data = [i[2:] for i in data]
             data = [u[0:len(y_axes)] for u in data]
             chart_data = f"{type_}", x_axes, y_axes, data
             logger.debug(
                 "Create chart data: {}\n{}\n{}\n{} entries".format(
                     type_, x_axes, y_axes, len(data)))
             result.append(chart_data)
         except Exception as e:
             f, l = get_error_info()
             logger.error(f"Chart generation error: {e}; File: {f}:{l}")
     return result
    def __call__(self, output) -> bool:
        try:
            stdout = output.get('stdout')
            stderr = output.get('stderr')
            rc = output.get('rc')
            assert rc == 0, f"Last {self.__class__.__name__} ended with rc: {rc}\n{stderr}"
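            # atop emits one 'ATOP' snapshot header per interval; process each
            # portion separately and skip timestamps already seen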
            for atop_portion in [
                    e.strip() for e in stdout.split('ATOP') if e.strip() != ''
            ]:
                lines = atop_portion.splitlines()
                f_line = lines.pop(0)
                ts = '_'.join(re.split(
                    r'\s+', f_line)[2:4]) + f".{datetime.now().strftime('%S')}"
                system_portion, process_portion = '\n'.join(lines).split(
                    'PID', 1)
                process_portion = 'PID\t' + process_portion
                if ts not in self._ts_cache:
                    self._ts_cache.append(ts)
                    self.data_handler(
                        aTopSystem_DataUnit(self.table['system'], self.host_id,
                                            *system_portion.splitlines()))
                    if ProcessMonitorRegistry().is_active:
                        data_portion = self._data_unit_class(
                            self.table['process'],
                            self.host_id,
                            *process_portion.splitlines()[1:],
                            processes_id=self.id)
                        self.data_handler(data_portion)

        except Exception as e:
            f, li = get_error_info()
            logger.error(
                f"{self.__class__.__name__}: Unexpected error: {type(e).__name__}: {e}; File: {f}:{li}"
            )
        else:
            return True
        return False
 def time_string_reformat(time_stamp):
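     # from_format/to_format are taken from the enclosing scope; returns None
     # (and logs) when the timestamp does not match from_format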
     try:
         return datetime.strptime(time_stamp,
                                  from_format).strftime(to_format)
     except Exception as e:
         logger.error(f"Cannot convert time string '{time_stamp}': {e}")
 def _generate_atop_system_level(input_text, columns_template, *defaults):
     header_regex = re.compile(r'(.+)\|(.+)\|(.+)\|(.+)\|(.+)\|(.+)\|')
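     # Each atop system-level line consists of six '|'-separated columns:
     # a type tag followed by five value fields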
     res = []
     row_mapping = namedtuple(
         'ROW', ('Col1', 'Col2', 'Col3', 'Col4', 'Col5', 'SUB_ID'))
     for line in header_regex.findall(input_text):
         try:
             type_, data_ = aTopParser._normalize_line(*line)
             sub_id = type_
             pattern = OrderedDict()
             if type_ in ('PRC', 'PAG'):
                 pattern.update(
                     **{
                         k: aTopParser.try_time_string_to_secs(v)
                         for k, v in
                         [re.split(r'\s+', s.strip(), maxsplit=2) for s in data_]
                     })
             elif type_ in ['CPU', 'cpu']:
                 pattern.update(
                     **{
                         k: v.replace('%', '')
                         for k, v in
                         [re.split(r'\s+', s.strip(), maxsplit=1) for s in data_]
                     })
                 if type_ == 'cpu':
                     for k, v in pattern.items():
                         if k.startswith('cpu'):
                             _cpu_str, _wait = re.split(r'\s+', v, maxsplit=1)
                             pattern.pop(k)
                             pattern.update({'wait': _wait})
                             sub_id = k.replace('cpu', 'cpu_').upper()
                             break
                     type_ = 'CPU'
                 else:
                     sub_id = 'CPU_All'
             elif type_ == 'CPL':
                 pattern.update(
                     **{
                         k: v
                         for k, v in
                         [re.split(r'\s+', s.strip(), maxsplit=1) for s in data_]
                     })
             elif type_ in ['MEM', 'SWP']:
                 pattern.update(
                     **{
                         k: v
                         for k, v in
                         [re.split(r'\s+', s.strip(), maxsplit=1) for s in data_]
                     })
                 for k in pattern.keys():
                     pattern[k] = Size(pattern[k]).set_format('M').number
             elif type_ in ['LVM', 'DSK', 'NET']:
                 items = [re.split(r'\s+', s.strip()) for s in data_]
                 for item in items:
                     if len(item) == 1 or item[1] == '----':
                         pattern.update({'source': '-1'})
                         sub_id = f"{type_}_{item[0]}"
                     elif len(item) >= 2:
                         pattern.update({item[0]: item[1].replace('%', '')})
                     else:
                         pattern.update(
                             {item[0]: re.sub(r'[\sKbpms%]+', '', item[1])})
             else:
                 raise TypeError(f"Unknown line type: {' '.join(line)}")
             pattern.update(SUB_ID=sub_id)
             res.append(
                 columns_template(*[
                     *defaults, type_,
                     json.dumps(row_mapping(
                         *pattern.keys()), indent=True), *pattern.values()
                 ]))
         except ValueError as e:
             logger.error(f"aTop parse error: {e}")
         except Exception as e:
             f, l = get_error_info()
             logger.error(
                 "aTop unknown parse error: {}; File: {}:{}\n{}".format(
                     e, f, l, line))
             raise
     return res