Пример #1
0
def generate_charts(chart: ChartAbstract,
                    sql_data,
                    abs_image_path,
                    prefix=None,
                    **marks):
    """Render chart images for every data portion produced by `chart`.

    Arguments:
    - chart: chart descriptor providing `generate_chart_data` and `file_name`
    - sql_data: raw rows passed through to `chart.generate_chart_data`
    - abs_image_path: directory where image files are written
    - prefix: optional file-name prefix
    - marks: reserved for vertical mark lines (not implemented yet)

    Yields: (TITLE, file_path) for each successfully rendered chart.
    Raises: RuntimeError when chart data generation itself fails, or — after
    all portions were attempted — when any sub chart failed to render.
    """
    errors = []
    title = ''
    try:
        for data in chart.generate_chart_data(sql_data):
            try:
                title, x, y, chart_data = data
                file_name = chart.file_name.format(name=title.lower())
                file_name = f"{prefix}_{file_name}" if prefix else file_name
                # Replace whitespace and characters unsafe in file names.
                file_path = os.path.join(abs_image_path,
                                         re.sub(r'\s+|@|:', '_', file_name))
                if os.path.exists(file_path):
                    os.remove(file_path)
                plt.style.use('classic')
                df = pd.DataFrame(chart_data, columns=y, index=x)
                y_limit = _get_y_limit(chart_data)
                # NOTE: a discarded `df.cumsum()` call was removed here — it
                # returned a new frame and had no effect.
                mp = df.plot(legend=True)
                for label in mp.axes.get_xticklabels():
                    label.set_rotation(25)
                    label.set_x(10)
                plt.ylim(0, y_limit * 1.3)
                plt.xlabel('Time')
                plt.title(title)
                # TODO: Add vertical mark line on chart (see `marks`)
                plt.savefig(file_path)
                # Close the figure explicitly; matplotlib keeps every figure
                # alive otherwise, leaking memory across iterations.
                plt.close(mp.figure)
                yield title.upper(), file_path
            except Exception as e:
                f, li = get_error_info()
                errors.append(f"{e}; File; {f}:{li}")
    except Exception as e:
        f, l = get_error_info()
        raise RuntimeError(
            f"Probably SQL query failed; Reason: {e}; File: {f}:{l}")
    else:
        if errors:
            raise RuntimeError(
                "Following sub charts creation error:\n\t{}".format(
                    '\n\t'.join(f"{i}. {e}" for i, e in enumerate(errors, 1))))
    def __call__(self, outputs, datetime=None) -> bool:
        """Validate a command result, parse its `time` statistics and emit a data unit.

        `outputs` is the command result mapping ('stdout', 'stderr', 'rc');
        statistics are expected on stderr as comma-separated `key:value` pairs.
        Returns True on success; raises RunnerError on any failure.
        """
        command_out = outputs.get('stdout', None)
        time_output = outputs.get('stderr', None)
        rc = outputs.get('rc')
        try:
            exp_rc = self.options.get('rc', None)
            if exp_rc:
                # Expected rc may list alternatives, e.g. "0|1|2".
                allowed = [int(_rc) for _rc in re.split(r'\s*\|\s*', exp_rc)]
                if rc not in allowed:
                    raise AssertionError(
                        f"Result return rc {rc} not match expected\nStdOut:\n\t{command_out}\nStdErr:\n\t{time_output}")
            pairs = (entry.split(':', 1) for entry in time_output.split(','))
            row_dict = DotDict(**{name: value.replace('%', '') for name, value in pairs})
            # Every field except the command name itself is numeric.
            for name in row_dict.keys():
                if name != 'Command':
                    row_dict.update({name: float(row_dict[name])})
            logger.info(f"Command: {row_dict.get('Command')} [Rc: {row_dict.get('Rc')}]")

            # Trailing -1 fills the last template slot (placeholder value).
            row = self.table.template(self.host_id, None, *(list(row_dict.values()) + [-1]))
            du = model.data_factory(self.table, row, output=command_out, datetime=datetime)

            self.data_handler(du)
            return True
        except Exception as e:
            f, li = get_error_info()
            logger.error(f"{self.__class__.__name__}: {e}; File: {f}:{li}")
            raise RunnerError(f"{self}", f"{e}; File: {f}:{li}")
    def cache_line(line_data):
        """Cache a text line in the LinesCache table, deduplicated by MD5 hash.

        `line_data` is an (order_id, line) pair. Returns a tuple
        (output_ref, order_id, line_ref) where output_ref is the ORDER_ID
        already mapped to this line in LinesCacheMap, if any.
        """
        order_id, line = line_data
        output_ref = None
        is_output_required = True
        try:
            digest = hashlib.md5(line.encode('utf-8')).hexdigest()
            cached = DataHandlerService().execute(
                f"SELECT LINE_ID FROM LinesCache WHERE HashTag == '{digest}'"
            )
            if cached:
                # Line already stored — reuse its id.
                line_ref = cached[0][0]
            else:
                DataHandlerService().execute(
                    insert_sql('LinesCache', ['LINE_ID', 'HashTag', 'Line']),
                    *(None, digest, line))
                line_ref = DataHandlerService().get_last_row_id

            if is_output_required:
                mapping = DataHandlerService().execute(
                    f"SELECT ORDER_ID FROM LinesCacheMap WHERE LINE_REF == {line_ref}"
                )
                if mapping:
                    output_ref = mapping[0][0]
                    if output_ref != order_id:
                        is_output_required = False

        except Exception as e:
            f, li = get_error_info()
            raise type(e)(f"Unexpected error: {e}; File: {f}:{li}")
        return output_ref, order_id, line_ref
    def _data_handler(self):
        """Background worker: drain `self.queue` and execute queued SQL items.

        Runs until `self._event` is set AND the queue is fully drained.
        Failures of individual items are logged but do not stop the loop.
        """
        # Local import: the file-level import block is outside this chunk.
        from queue import Empty
        logger.debug(
            f"{self.__class__.__name__} Started with event {id(self._event)}")
        while True:
            try:
                # Blocking get with a timeout replaces the former
                # empty()/continue busy-wait, which spun a CPU core at 100%.
                item = self.queue.get(timeout=0.1)
            except Empty:
                # is_set() is the non-deprecated spelling of isSet().
                if self._event.is_set():
                    break
                continue
            try:
                logger.debug(
                    f"Deque item: '{item}' (Current queue size {self.queue.qsize()})"
                )
                insert_sql_str, rows = item.sql_data
                result = self.execute(
                    insert_sql_str,
                    rows) if rows else self.execute(insert_sql_str)
                item.result = result
                logger.debug("Insert item: {}\n\t{}\n\t{}".format(
                    type(item).__name__, insert_sql_str,
                    '\n\t'.join([str(r) for r in rows])))
            except Exception as e:
                f, l = get_error_info()
                logger.error(
                    f"Unexpected error occurred on {type(item).__name__}: {e}; File: {f}:{l}"
                )
            else:
                logger.debug(
                    f"Item {type(item).__name__} successfully handled")
        logger.debug("Background task stopped invoked")
Пример #5
0
    def start_monitor_plugin(self, plugin_name, *args, alias=None, **options):
        """Start a monitoring plugin on the host addressed by `alias`.

        Arguments:
        - plugin_name: must be one of the loaded plugin names (table column 'Class')
        - alias: host monitor alias (current connection if omitted)
        - options: plugin options, e.g. interval=..., persistent=yes/no

        Extra parameters relevant for a particular plugin can be found in the
        `BuiltIn plugins` section.
        """
        try:
            module: HostModule = self._modules.get_connection(alias)
            module.plugin_start(plugin_name, *args, **options)
        except Exception as e:
            file_, line_ = get_error_info()
            raise BuiltIn().fatal_error(f"{e}; File: {file_}:{line_}")
Пример #6
0
 def generate_chart_data(self, query_results: Iterable[Iterable]) \
         -> List[Tuple[str, Iterable, Iterable, Iterable[Iterable]]]:
     """Group raw query rows by chart section and build plot-ready tuples.

     Arguments:
     - query_results: rows whose first column is the section/type name;
       only rows whose type starts with one of `self._sections` are used.

     Returns a list of (title, x_axes, y_axes, data) tuples. Errors for an
     individual type are logged and that type is skipped.
     """
     # Materialize once: the previous version iterated `query_results`
     # several times, so a generator argument silently produced no charts.
     rows = list(query_results)
     result = []
     types = {row[0] for row in rows
              if any(row[0].startswith(section) for section in self._sections)}
     for type_ in types:
         try:
             data = [row[1:] for row in rows if row[0] == type_]
             x_axes = self.x_axes(data, 1)
             y_axes = self.y_axes(data)
             # Drop the axis columns, then trim each row to the y-axis count.
             data = [row[2:] for row in data]
             data = [row[0:len(y_axes)] for row in data]
             chart_data = f"{type_}", x_axes, y_axes, data
             logger.debug(
                 "Create chart data: {}\n{}\n{}\n{} entries".format(
                     type_, x_axes, y_axes, len(data)))
             result.append(chart_data)
         except Exception as e:
             f, l = get_error_info()
             logger.error(f"Chart generation error: {e}; File: {f}:{l}")
     return result
Пример #7
0
    def __call__(self, output) -> bool:
        """Parse captured `atop` output and forward portions to the data handler.

        `output` is a command result mapping with 'stdout', 'stderr', 'rc'.
        stdout is split on the literal 'ATOP' marker into report portions;
        each portion is split into a system part and a process part at the
        first 'PID' header. Returns True on success, False if any exception
        was caught (it is logged, not re-raised).
        """
        # table_template = self.table.template
        try:
            stdout = output.get('stdout')
            stderr = output.get('stderr')
            rc = output.get('rc')
            assert rc == 0, f"Last {self.__class__.__name__} ended with rc: {rc}\n{stderr}"
            for atop_portion in [
                    e.strip() for e in stdout.split('ATOP') if e.strip() != ''
            ]:
                lines = atop_portion.splitlines()
                # First line carries the report header; fields 2:4 form the
                # timestamp, with current seconds appended for uniqueness.
                f_line = lines.pop(0)
                ts = '_'.join(re.split(
                    r'\s+', f_line)[2:4]) + f".{datetime.now().strftime('%S')}"
                # Everything before the first 'PID' is system-level data;
                # the rest is the per-process table (restore its header).
                system_portion, process_portion = '\n'.join(lines).split(
                    'PID', 1)
                process_portion = 'PID\t' + process_portion
                # Skip portions whose timestamp was already handled.
                if ts not in self._ts_cache:
                    self._ts_cache.append(ts)
                    self.data_handler(
                        aTopSystem_DataUnit(self.table['system'], self.host_id,
                                            *system_portion.splitlines()))
                    # Process-level data only while monitoring is active.
                    if ProcessMonitorRegistry().is_active:
                        data_portion = self._data_unit_class(
                            self.table['process'],
                            self.host_id,
                            *process_portion.splitlines()[1:],
                            processes_id=self.id)
                        self.data_handler(data_portion)

        except Exception as e:
            f, li = get_error_info()
            logger.error(
                f"{self.__class__.__name__}: Unexpected error: {type(e).__name__}: {e}; File: {f}:{li}"
            )
        else:
            return True
        return False
Пример #8
0
    def __init__(self, parameters, data_handler, *monitor_processes,
                 **user_options):
        """Configure the atop monitoring plugin flows (setup/command/teardown).

        Opens an SSH connection once to detect the remote OS name, then
        registers the SSH command sequences:
        - Setup: kill stale atop, recreate the working folder, start
          `atop -a -w` in the background;
        - Command: read the atop raw file back and parse it with aTopParser;
        - Teardown: kill the background atop process.

        Any failure is re-raised as the same exception type with file/line
        info appended.
        """
        try:
            SSH_PlugInAPI.__init__(self, parameters, data_handler,
                                   *monitor_processes, **user_options)

            self.file = 'atop.dat'
            self.folder = '~/atop_temp'
            self._time_delta = None
            self._os_name = None
            # Detect remote OS once; used to pick the date format below.
            with self.on_connection() as ssh:
                self._os_name = self._get_os_name(ssh)

            self._name = f"{self.name}-{self._os_name}"

            # NOTE(review): sudo flags alternate between self.sudo_expected
            # and hard-coded True within this flow — confirm intentional.
            self.set_commands(
                FlowCommands.Setup,
                SSHLibraryCommand(SSHLibrary.execute_command,
                                  'killall -9 atop',
                                  sudo=self.sudo_expected,
                                  sudo_password=self.sudo_password_expected),
                SSHLibraryCommand(SSHLibrary.execute_command,
                                  f'rm -rf {self.folder}',
                                  sudo=True,
                                  sudo_password=True),
                SSHLibraryCommand(SSHLibrary.execute_command,
                                  f'mkdir -p {self.folder}',
                                  sudo=self.sudo_expected,
                                  sudo_password=self.sudo_password_expected),
                SSHLibraryCommand(
                    SSHLibrary.start_command,
                    # Persistent mode keeps atop attached; otherwise nohup it.
                    "{nohup} atop -a -w {folder}/{file} {interval} &".format(
                        nohup='' if self.persistent else 'nohup',
                        folder=self.folder,
                        file=self.file,
                        interval=int(self.interval)),
                    sudo=self.sudo_expected,
                    sudo_password=self.sudo_password_expected))

            self.set_commands(
                FlowCommands.Command,
                SSHLibraryCommand(
                    SSHLibrary.execute_command,
                    # Read samples collected since `-b <time>` (OS-specific
                    # date format).
                    f"atop -r {self.folder}/{self.file} -b `date +{self.OS_DATE_FORMAT[self.os_name]}`",
                    sudo=True,
                    sudo_password=True,
                    return_rc=True,
                    return_stderr=True,
                    parser=aTopParser(
                        self.id,
                        host_id=self.host_id,
                        table={
                            'system': self.affiliated_tables()[0],
                            'process': self.affiliated_tables()[1]
                        },
                        data_handler=self._data_handler,
                        counter=self.iteration_counter,
                        interval=self.parameters.interval,
                        data_unit=process_data_unit_factory(self._os_name))))

            self.set_commands(
                FlowCommands.Teardown,
                SSHLibraryCommand(SSHLibrary.execute_command,
                                  'killall -9 atop',
                                  sudo=True,
                                  sudo_password=True))
        except Exception as e:
            f, l = get_error_info()
            raise type(e)(f"{e}; File: {f}:{l}")
Пример #9
0
 def _generate_atop_system_level(input_text, columns_template, *defaults):
     """Parse atop system-level report lines into `columns_template` rows.

     Arguments:
     - input_text: raw atop output; each system line has six '|'-separated
       cells (PRC/PAG, CPU/cpu, CPL, MEM/SWP, LVM/DSK/NET).
     - columns_template: row factory (e.g. a namedtuple) receiving
       (*defaults, type_, column-mapping json, *values).
     - defaults: leading values prepended to every produced row.

     Returns the list of produced rows. ValueError per line is logged and
     the line skipped; any other error is logged and re-raised.
     """
     header_regex = re.compile(r'(.+)\|(.+)\|(.+)\|(.+)\|(.+)\|(.+)\|')
     res = []
     row_mapping = namedtuple(
         'ROW', ('Col1', 'Col2', 'Col3', 'Col4', 'Col5', 'SUB_ID'))
     for line in header_regex.findall(input_text):
         try:
             type_, data_ = aTopParser._normalize_line(*line)
             sub_id = type_
             pattern = OrderedDict()
             # NOTE: positional maxsplit for re.split is deprecated since
             # Python 3.13 — keyword form is used throughout below.
             if type_ in ('PRC', 'PAG'):
                 # Values are time strings, converted to seconds.
                 pattern.update(
                     **{
                         k: aTopParser.try_time_string_to_secs(v)
                         for k, v in
                         [re.split(r'\s+', s.strip(), maxsplit=2) for s in data_]
                     })
             elif type_ in ['CPU', 'cpu']:
                 pattern.update(
                     **{
                         k: v.replace('%', '')
                         for k, v in
                         [re.split(r'\s+', s.strip(), maxsplit=1) for s in data_]
                     })
                 if type_ == 'cpu':
                     # Per-core line: fold 'cpuN' into a 'wait' field and
                     # derive the CPU_N sub id.
                     for k, v in pattern.items():
                         if k.startswith('cpu'):
                             _cpu_str, _wait = re.split(r'\s+', v, maxsplit=1)
                             # Mutating during iteration is safe here only
                             # because we break immediately afterwards.
                             pattern.pop(k)
                             pattern.update({'wait': _wait})
                             sub_id = k.replace('cpu', 'cpu_').upper()
                             break
                     type_ = 'CPU'
                 else:
                     sub_id = 'CPU_All'
             elif type_ == 'CPL':
                 pattern.update(
                     **{
                         k: v
                         for k, v in
                         [re.split(r'\s+', s.strip(), maxsplit=1) for s in data_]
                     })
             elif type_ in ['MEM', 'SWP']:
                 pattern.update(
                     **{
                         k: v
                         for k, v in
                         [re.split(r'\s+', s.strip(), maxsplit=1) for s in data_]
                     })
                 # Normalize all sizes to megabytes.
                 for k in pattern.keys():
                     pattern[k] = Size(pattern[k]).set_format('M').number
             elif type_ in ['LVM', 'DSK', 'NET']:
                 items = [re.split(r'\s+', s.strip()) for s in data_]
                 for item in items:
                     if len(item) == 1 or item[1] == '----':
                         # Device-name cell (or placeholder) → becomes sub id.
                         pattern.update({'source': '-1'})
                         sub_id = f"{type_}_{item[0]}"
                     elif len(item) >= 2:
                         pattern.update({item[0]: item[1].replace('%', '')})
                     else:
                         # NOTE(review): unreachable — re.split always yields
                         # at least one element, so len(item) is 1 or >= 2.
                         pattern.update(
                             {item[0]: re.sub(r'[\sKbpms%]+', '', item[1])})
             else:
                 raise TypeError(f"Unknown line type: {' '.join(line)}")
             pattern.update(SUB_ID=sub_id)
             res.append(
                 columns_template(*[
                     *defaults, type_,
                     json.dumps(row_mapping(
                         *pattern.keys()), indent=True), *pattern.values()
                 ]))
         except ValueError as e:
             logger.error(f"aTop parse error: {e}")
         except Exception as e:
             f, l = get_error_info()
             logger.error(
                 "aTop unknown parse error: {}; File: {}:{}\n{}".format(
                     e, f, l, line))
             raise
     return res