Example #1
0
def monitoringExtenders(network_list, network_setup, polling_frequency,
                        influxServer, dest_file, logger, system_command_lst):
    """Poll every extender in network_list every polling_frequency seconds.

    Results are pushed to the Influx server described by influxServer and
    appended to one CSV file per extender (so each extender has its own
    follow-up file), named "<dest_file>-<extender name>.csv".

    Args:
        network_list: list of extender dicts; each must provide a 'name' key.
            This function adds 'CSVFile' and 'CSVWriter' entries to each dict.
        network_setup: network configuration forwarded to DoExtenderMonitoring.
        polling_frequency: seconds to sleep between polling rounds.
        influxServer: dict with 'Server_name', 'Server_port' and 'DB_name'.
        dest_file: root file name used to build each extender's CSV path.
        logger: logger used for progress/diagnostic messages.
        system_command_lst: iterable of (command, command_type) pairs used to
            build the CSV header and run the monitoring commands.

    Loops until interrupted with Ctrl-C (KeyboardInterrupt), then closes all
    CSV files and returns None.
    """
    # Build the CSV header: one DATE column plus one column per command.
    # Some command types expand into one column per sub-metric;
    # chanim_info / vmstat_info / loadavg_info are module-level lists.
    csv_header = ['DATE']
    for command, command_type in system_command_lst:
        if "WIFI_CHANIM" in command_type:
            csv_header.extend(command_type + "-" + chanim
                              for chanim in chanim_info)
        elif "VMSTAT" in command_type:
            csv_header.extend(command_type + "-" + vmstat
                              for vmstat in vmstat_info)
        elif "LOADAVG" in command_type:
            csv_header.extend(command_type + "-" + loadavg
                              for loadavg in loadavg_info)
        else:
            csv_header.append(command_type)

    # One CSV file per extender: "<dest_file>-<name>.csv".
    for extender in network_list:
        extender_csv_name = dest_file + '-' + extender['name'].strip(
            " ") + ".csv"
        # newline='' is required by the csv module to avoid blank rows on
        # Windows. Files stay open across polling rounds; closed at the end.
        extender_csv_file = open(extender_csv_name, 'w+', newline='')
        extender['CSVFile'] = extender_csv_file
        # Keep the DictWriter on the extender dict so later polling rounds
        # can append rows keyed by column name.
        csv_writer = csv.DictWriter(extender['CSVFile'], fieldnames=csv_header)
        extender['CSVWriter'] = csv_writer
        extender['CSVWriter'].writeheader()

        logger.info("Creating file {:20} mode {:2}".format(
            extender['CSVFile'].name, extender['CSVFile'].mode))

    logger.info("Creating Data Base {}".format(influxServer["Server_name"]))
    # Bypass any configured proxy when talking to the Influx host.
    os.environ['NO_PROXY'] = influxServer["Server_name"]
    client = InfluxDBClient(host=influxServer["Server_name"],
                            port=influxServer["Server_port"],
                            ssl=False,
                            proxies=None)
    client.create_database(influxServer["DB_name"])
    logger.info("Creation of Data Base {} {}".format(
        influxServer["Server_name"], client.get_list_database()))

    # Poll forever; only Ctrl-C stops the loop.
    while True:
        try:
            # Launch the monitoring commands on every extender.
            DoExtenderMonitoring(network_list, network_setup, logger,
                                 system_command_lst, client)
            # Wait for the next polling round.
            time.sleep(int(polling_frequency))

        except KeyboardInterrupt:
            # Keyboard interruption stops the polling loop.
            logger.info("KEYBOARD interrupt stop monitoring")
            break
        else:
            logger.debug("Out of polling wait launch commands")

    # End of monitoring: close all opened CSV files.
    for extender in network_list:
        logger.info("Closing file {}".format(extender['CSVFile'].name))
        extender['CSVFile'].close()

    return
Example #2
0
class GlobalContainer(object):

    log_root = "logs"
    log_level = 30
    log_size = 5 * 1024 * 1024
    log_number = 10
    debug_mode = "True"

    influx_url = "http://*****:*****@localhost/{db_schema}')
            cmd = (
                f'mysql+mysqlconnector://{self.mysql_user}:{self.mysql_password}@{self.mysql_host}/{self.mysql_db}'
            )
            self.logger.debug(cmd)
            self.eng = create_engine(cmd)
            #Base = declarative_base()

            #base.Base.metadata.bind = eng
            #base.Base.metadata.create_all()
            common.base.Base.metadata.create_all(self.eng, checkfirst=True)

            Session = sessionmaker(bind=self.eng)
            self.ses = Session()
        except Exception as e:
            self.logger.exception('Crash!', exc_info=e)
            sys.exit(99)

    def connectInfluxDatabase(self):
        """Connect to InfluxDB according to self.influx_version.

        v1: stores a DataFrameClient in self.influxClient.
        v2: stores an InfluxDBClient in self.influxClient and creates
            self.influx_query_api and self.influx_write_api (batching writer).
        Exits the process with code 99 on any failure.
        """
        try:
            self.logger.debug(
                f'Connecting to Influx with: Host:{self.influx_host}, Port: {self.influx_port}, User: {self.influx_user}, DB: {self.influx_db}'
            )
            if (self.influx_version == 1):
                # v1 API: DataFrame-oriented client with user/password auth.
                self.influxClient = DataFrameClient(self.influx_host,
                                                    self.influx_port,
                                                    self.influx_user,
                                                    self.influx_pwd,
                                                    self.influx_db)

            elif (self.influx_version == 2):
                # Retry writes aggressively: 20 attempts, linear backoff
                # (exponential_base=1 disables the exponential growth).
                retries = WritesRetry(total=20,
                                      backoff_factor=1,
                                      exponential_base=1)

                self.influxClient = InfluxDBClient(
                    url=f"http://{self.influx_host}:{self.influx_port}",
                    token=self.influx_token,
                    org=self.influx_org,
                    retries=retries,
                    timeout=180_000)

                self.influx_query_api = self.influxClient.query_api()

                # Batching writer: synchronous write type, flush every 10 s,
                # retry with exponential backoff up to 25 times.
                self.influx_write_api = self.influxClient.write_api(
                    write_options=WriteOptions(
                        batch_size=500,
                        write_type=WriteType.synchronous,
                        flush_interval=10_000,
                        jitter_interval=2_000,
                        retry_interval=30_000,
                        max_retries=25,
                        max_retry_delay=60_000,
                        exponential_base=2))

        except Exception as e:
            self.logger.exception('Crash!', exc_info=e)
            sys.exit(99)

    def resetDatabases(self):
        """Drop and recreate both backing stores (MySQL and Influx)."""
        try:
            self.logger.warning("Resetting Databases")
            # Delegate to the per-backend reset helpers, MySQL first.
            self.resetMySQLDatabases()
            self.resetInfluxDatabases()
        except Exception as e:
            self.logger.exception('Crash!', exc_info=e)

    def resetMySQLDatabases(self):
        """Drop and recreate every table mapped by common.base.Base.

        Destructive: all data in the mapped MySQL tables is lost.
        Exceptions are logged and swallowed (no process exit here, unlike
        resetInfluxDatabases).
        """
        try:
            self.logger.warning("Resetting MySQL-Database")

            # Order matters: drop first, then recreate the full schema.
            # checkfirst=True makes both calls tolerant of missing tables.
            common.base.Base.metadata.drop_all(self.eng, checkfirst=True)
            common.base.Base.metadata.create_all(self.eng, checkfirst=True)

        except Exception as e:
            self.logger.exception('Crash!', exc_info=e)

    def resetInfluxDatabases(self):
        """Drop and recreate the Influx database (v1) or bucket (v2).

        Destructive: all time-series data in self.influx_db is lost.
        Exits the process with code -99 on any failure.
        """
        try:
            self.logger.warning("Resetting Influx-Database")

            if (self.influx_version == 1):
                # v1 API: plain database drop/create on the stored client.
                self.influxClient.drop_database(self.influx_db)
                self.influxClient.create_database(self.influx_db)
            else:
                # v2 API: buckets replace databases; use a short-lived client
                # that is closed by the context manager.
                with InfluxDBClient(
                        url=f"http://{self.influx_host}:{self.influx_port}",
                        token=self.influx_token,
                        org=self.influx_org,
                        timeout=180_000) as client:

                    buckets_api = client.buckets_api()

                    # Delete the bucket if it already exists.
                    my_bucket = buckets_api.find_bucket_by_name(self.influx_db)
                    if my_bucket is not None:
                        buckets_api.delete_bucket(my_bucket)

                    # Resolve the organization id by name. Use the local
                    # client (not self.influxClient) so this works even if
                    # connectInfluxDatabase() has not been called yet.
                    org = next(
                        o for o in client.organizations_api().find_organizations()
                        if o.name == self.influx_org)

                    # Keep data forever; shard group duration of 90 days.
                    retention_rules = BucketRetentionRules(
                        type="forever",
                        every_seconds=0,
                        shard_group_duration_seconds=60 * 60 * 24 * 90)
                    buckets_api.create_bucket(
                        bucket_name=self.influx_db,
                        retention_rules=retention_rules,
                        org_id=org.id)

        except Exception as e:
            self.logger.exception('Crash!', exc_info=e)
            # NOTE(review): sibling methods exit with 99 on crash; this one
            # uses -99 — kept as-is to preserve the observable exit code.
            sys.exit(-99)

    def writeJobStatus(self,
                       Status,
                       StartDate=None,
                       EndDate=None,
                       statusMessage=None,
                       SuccessDate=None):
        """Create or update this job's ScriptStatus row.

        Args:
            Status: new status string; StatusDateTime is set to now().
            StartDate / EndDate / SuccessDate: optional datetimes; each is
                only written when not None.
            statusMessage: optional free-text status message.

        Error/warning counters and messages are always copied from
        self.numErrors / self.errMsg / self.numWarnings / self.warnMsg.
        Exceptions are logged and swallowed (best effort).
        """
        try:
            # Single first() instead of count() + first(): one query round
            # trip rather than two.
            jobStatus = self.ses.query(ScriptStatus).filter(
                ScriptStatus.Name == self.jobName).first()

            if jobStatus is None:
                self.logger.debug(
                    f'ScriptStatus {self.jobName} not found, creating...')
                jobStatus = ScriptStatus(self.jobName)

                self.ses.add(jobStatus)
                self.ses.commit()

            jobStatus.StatusDateTime = datetime.datetime.now()
            jobStatus.Status = Status

            # Optional fields: only overwrite when explicitly provided.
            if SuccessDate is not None:
                jobStatus.LastSuccessDateTime = SuccessDate

            if StartDate is not None:
                jobStatus.StartDateTime = StartDate

            if EndDate is not None:
                jobStatus.EndDateTime = EndDate

            if statusMessage is not None:
                jobStatus.StatusMessage = statusMessage

            # Error/warning bookkeeping is written unconditionally.
            jobStatus.ErrorNumbers = self.numErrors
            jobStatus.ErrorMessage = self.errMsg

            jobStatus.WarningNumbers = self.numWarnings
            jobStatus.WarningMessage = self.warnMsg

            self.ses.add(jobStatus)
            self.ses.commit()

        except Exception as e:
            self.logger.exception('Crash!', exc_info=e)

    def chunk(self, seq, size):
        """Yield successive slices of *seq* of length *size* (last may be shorter)."""
        for start in range(0, len(seq), size):
            yield seq[start:start + size]

    def writeJobMessage(self, logType, logObject, logObjectId, message):
        """ Writes a message into the log table
                logType: Error, Warning, Info, Debug
                logObject: Stock, Depot, Script,...
                LogObjectId: ISIN, Depot-Name,...
                Message: Message
        """
        try:
            # Persist one LogMessage row tagged with the current run id.
            row = LogMessage(self.runId, logType, logObject, logObjectId,
                             message)
            self.ses.add(row)
            self.ses.commit()
        except Exception as e:
            self.logger.exception('Crash!', exc_info=e)

    def iQuery(self, qry):
        """Executes the flow query against the innodb"""
        # NOTE(review): despite the docstring, this appears to run a Flux
        # query against InfluxDB (v2 API) via query_data_frame — confirm.

        loc = locals()
        logger = logging.getLogger(__name__)
        res = None

        try:
            msg = f"Starting iQuery with {loc}"
            logger.debug(msg)
            # Record progress in the job-status table (best effort).
            self.writeJobStatus("Running", statusMessage=msg)

            # Short-lived client, closed automatically by the context manager.
            with InfluxDBClient(
                    url=f"http://{self.influx_host}:{self.influx_port}",
                    token=self.influx_token,
                    org=self.influx_org,
                    timeout=180_000) as client:
                res = client.query_api().query_data_frame(qry)

            self.writeJobStatus("Running", statusMessage=msg + " - DONE")
            logger.debug(msg + " - DONE")

            return res

        except Exception as e:
            logger.exception(f'Crash iQuery with {loc}!', exc_info=e)
            # Accumulate error counters for later writeJobStatus calls; the
            # exception is not re-raised, so on failure there is no explicit
            # return value here.
            self.numErrors += 1
            self.errMsg += f"Crash iQuery with {loc}; "