Example #1
0
File: db.py — Project: franzmelchiori/alyvix
class DbManager():
    """Persist Alyvix performance data in a per-suite SQLite database.

    The database defaults to <python home>/share/alyvix/alyvix_data.db and
    moves next to the Robot Framework suite (named after the suite) when
    running inside a Robot context.
    """

    def __init__(self):
        """Work out the db folder and file name; the connection is opened later."""

        # project helpers for perf data and run info (see their modules)
        self._perf_manager = PerfManager()
        self._info_manager = InfoManager()
        # default location: <python home>/share/alyvix/alyvix_data.db
        self._db_home = os.path.split(sys.executable)[0] + os.sep + "share" + os.sep + "alyvix"
        self._db_name = "alyvix_data"

        if self._info_manager.get_info("ROBOT CONTEXT") is True:

            # inside Robot Framework: keep the db next to the suite file,
            # named after the suite
            self._db_home = os.path.dirname(os.path.abspath(self._info_manager.get_info("SUITE SOURCE")))

            self._db_name = self._info_manager.get_info("SUITE NAME")

            """
            if self._info_manager.get_info("TEST CASE NAME") is not None:
                self._db_name = self._db_name + "_" + self._info_manager.get_info("TEST CASE NAME").replace(" ", "_")
            """

        self._db_name = self._db_name + ".db"

        # filled in by connect(); _db_is_new forces the first thresholds and
        # sorting rows to be written (see the _insert_* methods)
        self._connection = None
        self._cursor = None
        self._db_is_new = False

    def connect(self):
        """Open the SQLite db (the file is created on first connect) and cache a cursor."""
        self._connection = sqlite3.connect(self._db_home + os.sep + self._db_name)
        # Row factory enables name-based access (row["x_warn"]) used by the inserts
        self._connection.row_factory = sqlite3.Row
        self._cursor = self._connection.cursor()

    def close(self):
        """Commit any pending statements and close the connection."""
        self._connection.commit()
        self._connection.close()

    def _create_tables(self):
        self._create_runs_table()
        self._create_thresholds_table()
        self._create_sorting_table()

    def _create_runs_table(self):
        query = "CREATE TABLE runs (start_time integer primary key"
        for perf in self._perf_manager.get_all_perfdata():
            query = query + ", " + perf.name + " integer"
        query += ")"
        self._cursor.execute(query)

    def _create_thresholds_table(self):
        query = "CREATE TABLE thresholds (start_time integer primary key"
        for perf in self._perf_manager.get_all_perfdata():
            query = query + ", " + perf.name + "_warn integer, " + perf.name + "_crit integer"
        query += ")"
        self._cursor.execute(query)

    def _create_sorting_table(self):
        query = "CREATE TABLE sorting (start_time integer primary key"
        for perf in self._perf_manager.get_all_perfdata():
            query = query + ", " + perf.name + "_index integer"
        query += ")"
        self._cursor.execute(query)

    def _check_runs_columns(self):
        query = "PRAGMA table_info(runs);"
        rows = self._cursor.execute(query).fetchall()

        for perf in self._perf_manager.get_all_perfdata():

            perf_name_present = False

            for row in rows:

                if row[1] == perf.name:
                    perf_name_present = True
                    break

            #check and add new columns
            if perf_name_present is False:

                query = "ALTER TABLE runs ADD COLUMN " + perf.name + " integer;"
                self._cursor.execute(query)

    def _check_thresholds_columns(self):
        query = "PRAGMA table_info(thresholds);"
        rows = self._cursor.execute(query).fetchall()

        for perf in self._perf_manager.get_all_perfdata():

            perf_warn_present = False
            perf_crit_present = False

            for row in rows:

                if row[1] == perf.name + "_warn":
                    perf_warn_present = True

                if row[1] == perf.name + "_crit":
                    perf_crit_present = True

            #check and add new columns
            if perf_warn_present is False:

                query = "ALTER TABLE thresholds ADD COLUMN " + perf.name + "_warn integer;"
                self._cursor.execute(query)

            #check and add new columns
            if perf_crit_present is False:

                query = "ALTER TABLE thresholds ADD COLUMN " + perf.name + "_crit integer;"
                self._cursor.execute(query)

    def _check_sorting_columns(self):
        query = "PRAGMA table_info(sorting);"
        rows = self._cursor.execute(query).fetchall()

        for perf in self._perf_manager.get_all_perfdata():

            perf_name_present = False

            for row in rows:

                if row[1] == perf.name + "_index":
                    perf_name_present = True
                    break

            #check and add new columns
            if perf_name_present is False:

                query = "ALTER TABLE sorting ADD COLUMN " + perf.name + "_index integer;"
                self._cursor.execute(query)


    def _insert_runs(self):

        #check and add new columns
        self._check_runs_columns()

        start_time = self._info_manager.get_info("START TIME")
        query = "INSERT INTO runs (start_time"
        for perf in self._perf_manager.get_all_perfdata():
            query = query + ", " + perf.name
        query = query + ") VALUES (" + str(start_time)

        for perf in self._perf_manager.get_all_perfdata():
            if perf.value is not None and perf.value != "":
                query = query + ", " + str(int(perf.value * 1000))
            else:
                query = query + ", null"

        query += ")"
        self._cursor.execute(query)

    def _insert_thresholds(self):

        #check and add new columns
        self._check_thresholds_columns()

        #get last row of thresholds table
        query = "select * from thresholds where start_time ORDER BY start_time DESC LIMIT 1"
        last_rows = self._cursor.execute(query).fetchone()

        total_null = 0

        #count null values
        if last_rows is not None:
            for row in last_rows:
                if row is None:
                    total_null += 1

        #get perfdata of current run
        current_perfdata = self._perf_manager.get_all_perfdata()

        for perf in current_perfdata:

            #if warning or critical are empty, then we dont have to compare warning or critical column
            if perf.warning_threshold == "" or perf.warning_threshold is None and total_null > 0:
                total_null -= 1

            if perf.critical_threshold == "" or perf.critical_threshold is None and total_null > 0:
                total_null -= 1

        if last_rows is not None:
            total_columns = len(last_rows) - 1
        else:
            total_columns = 0

        start_time = self._info_manager.get_info("START TIME")
        query = "INSERT INTO thresholds (start_time"

        for perf in current_perfdata:
            query = query + ", " + perf.name + "_warn, " + perf.name + "_crit"
        query = query + ") VALUES (" + str(start_time)

        different_from_last = self._db_is_new

        #check if perfdata items of current run are > (or <) than last row columns
        if len(current_perfdata) * 2 != (total_columns - total_null):
            different_from_last = True

        for perf in self._perf_manager.get_all_perfdata():

            if perf.warning_threshold is not None and perf.warning_threshold != "":
                query = query + ", " + str(int(perf.warning_threshold * 1000))
                if last_rows is not None and last_rows[perf.name + "_warn"] != int(perf.warning_threshold * 1000):
                    different_from_last = True
            else:
                query = query + ", null"
                if last_rows is not None and last_rows[perf.name + "_warn"] is not None:
                    different_from_last = True

            if perf.critical_threshold is not None and perf.critical_threshold != "":
                query = query + ", " + str(int(perf.critical_threshold * 1000))
                if last_rows is not None and last_rows[perf.name + "_crit"] != int(perf.critical_threshold * 1000):
                    different_from_last = True
            else:
                query = query + ", null"
                if last_rows is not None and last_rows[perf.name + "_crit"] is not None:
                    different_from_last = True

        if different_from_last is True:

            query = query + ")"
            self._cursor.execute(query)

    def _insert_sorting(self):

        #check and add new columns
        self._check_sorting_columns()

        #get last row of sorting table
        query = "select * from sorting where start_time ORDER BY start_time DESC LIMIT 1"
        last_rows = self._cursor.execute(query).fetchone()

        total_null = 0

        #count null values
        if last_rows is not None:
            for row in last_rows:
                if row is None:
                    total_null += 1

        if last_rows is not None:
            total_columns = len(last_rows) - 1
        else:
            total_columns = 0

        start_time = self._info_manager.get_info("START TIME")

        query = "INSERT INTO sorting (start_time"
        for perf in self._perf_manager.get_all_perfdata():
            query = query + ", " + perf.name + "_index"
        query = query + ") VALUES (" + str(start_time)

        different_from_last = self._db_is_new

        #check if perfdata items of current run are > (or <) than last row columns
        if len(self._perf_manager.get_all_perfdata()) != (total_columns - total_null):
            different_from_last = True

        for perf in self._perf_manager.get_all_perfdata():
            query = query + ", " + str(perf.counter)

            if last_rows is not None and last_rows[perf.name + "_index"] != perf.counter:
                different_from_last = True

        if different_from_last is True:
            query = query + ")"
            self._cursor.execute(query)

    def store_perfdata(self, dbname=None):

        if dbname != None and dbname != "":
            self._db_home = os.path.split(dbname)[0]
            self._db_name = os.path.split(dbname)[1]

            self._info_manager.set_info("DB FILE", self._db_home + os.sep + self._db_name)

        #if not os.path.isfile(self._db_home + os.sep + self._db_name):
        if not os.path.isdir(self._db_home):
            os.makedirs(self._db_home)
            self.connect()
            self._create_tables()
            self._db_is_new = True
        elif not os.path.isfile(self._db_home + os.sep + self._db_name):
            self.connect()
            self._create_tables()
            self._db_is_new = True
        else:
            self.connect()

        self._insert_runs()
        self._insert_thresholds()
        self._insert_sorting()

        self.close()

    def publish_perfdata(self, type="csv", start_date=None, end_date=None, filename=None,
                         testcase_name=None, max_age=24):

        if type == "perfmon":
            try:
                full_file_name = get_python_lib() + os.sep + "alyvix" + os.sep + "extra" + os.sep + "alyvixservice.ini"

                config = ConfigParser.ConfigParser()
                config.read(full_file_name)

                db_file = self._info_manager.get_info("DB FILE")

                if db_file is not None:

                    self._db_home = os.path.split( self._info_manager.get_info("DB FILE"))[0]
                    self._db_name = os.path.split( self._info_manager.get_info("DB FILE"))[1]

                if testcase_name is None or testcase_name == "":
                    testcase_name = self._info_manager.get_info('TEST CASE NAME')

                if testcase_name is None or testcase_name == "":
                    testcase_name = self._info_manager.get_info('SUITE NAME')

                try:
                    if not config.has_section('db_path'):
                        config.add_section('db_path')
                except:
                    pass

                try:
                    if not config.has_section('db_max_age'):
                        config.add_section('db_max_age')
                except:
                    pass

                try:
                    config.get('general', "polling_frequency")
                except:
                    try:
                        config.add_section('general')
                    except:
                        config.set('general', 'polling_frequency', '500')

                try:
                    config.get('general', "push_frequency")
                except:
                    try:
                        config.add_section('general')
                    except:
                        config.set('general', 'push_frequency', '2')

                config.set('db_path', testcase_name, self._db_home + os.sep + self._db_name)

                config.set('db_max_age', testcase_name, str(max_age))

                with open(full_file_name, 'w') as configfile:
                    config.write(configfile)
            except:
                pass

        if type == "csv":

            start_date_dt = None
            end_date_dt = None

            for fmt in ('%Y-%m-%d', '%Y-%m-%d %H:%M' , '%Y-%m-%d %H:%M:%S'):
                try:
                    start_date_dt = datetime.datetime.strptime(start_date, fmt)
                except ValueError:
                    pass

            for fmt in ('%Y-%m-%d', '%Y-%m-%d %H:%M' , '%Y-%m-%d %H:%M:%S'):
                try:
                    end_date_dt = datetime.datetime.strptime(end_date, fmt)
                except ValueError:
                    pass

            if start_date_dt is None:
                raise Exception('invalid start date!')

            if end_date_dt is None:
                raise Exception('invalid end date!')

            if start_date_dt >= end_date_dt:
                raise Exception('end date must be greate than start date!')

            db_file = self._info_manager.get_info("DB FILE")

            if db_file is not None:

                self._db_home = os.path.split( self._info_manager.get_info("DB FILE"))[0]
                self._db_name = os.path.split( self._info_manager.get_info("DB FILE"))[1]


            csv_name = filename

            if csv_name is None or filename == "":
                csv_home = os.path.split(sys.executable)[0] + os.sep + "share" + os.sep + "alyvix"
                csv_name = "alyvix_data"

                if self._info_manager.get_info("ROBOT CONTEXT") is True:

                    csv_home = os.path.dirname(os.path.abspath(self._info_manager.get_info("SUITE SOURCE")))

                    csv_name = self._info_manager.get_info("SUITE NAME")

                csv_name = csv_home + os.sep + csv_name + ".csv"


            csv_file = open(csv_name, 'w')
            csv_writer = csv.writer(csv_file)

            self.connect()

            #get last row of sorting table
            query = "select * from sorting ORDER BY start_time DESC LIMIT 1"
            last_sorting_rows = self._cursor.execute(query).fetchone()

            #print self._cursor.description

            perf_to_query = []

            for key in last_sorting_rows.keys():

                if key == "start_time":
                    continue

                value = last_sorting_rows[key]

                if value is not None:
                    perf_to_query.append(key.replace("_index", ""))

            query = "select datetime(start_time, 'unixepoch','localtime') as start_time"

            for column in perf_to_query:
                query = query + ", " + column

            query = query + " from runs where CAST(strftime('%s', datetime(start_time, 'unixepoch', 'localtime')) AS INT) between CAST(strftime('%s', '" + start_date + "') AS INT) and CAST(strftime('%s', '" + end_date + "') AS INT)"

            rows = self._cursor.execute(query).fetchall()

            csv_header = []
            csv_header.append("start_time")

            for perf_column in perf_to_query:
                csv_header.append(perf_column)

            csv_writer.writerow(csv_header)

            for row in rows:

                csv_row = []

                #start_date_dt = datetime.datetime.utcfromtimestamp(row["start_time"])

                #start_date_str = start_date_dt.strftime("%Y-%m-%d %H:%M:%S")

                csv_row.append(row["start_time"])

                for perf_column in perf_to_query:
                    csv_row.append(row[perf_column])

                csv_writer.writerow(csv_row)

            self.close()

            csv_file.close()
Example #2
0
File: db.py — Project: franzmelchiori/alyvix
class DbManager():
    """Persist Alyvix performance data in a per-suite SQLite database."""

    def __init__(self):
        """Work out the db folder and file name; the connection is opened later."""

        self._perf_manager = PerfManager()
        self._info_manager = InfoManager()
        # default location: <python home>/share/alyvix/alyvix_data.db
        self._db_home = os.path.split(
            sys.executable)[0] + os.sep + "share" + os.sep + "alyvix"
        self._db_name = "alyvix_data"

        if self._info_manager.get_info("ROBOT CONTEXT") is True:

            # inside Robot Framework: keep the db next to the suite file,
            # named after the suite
            self._db_home = os.path.dirname(
                os.path.abspath(self._info_manager.get_info("SUITE SOURCE")))

            self._db_name = self._info_manager.get_info("SUITE NAME")
            """
            if self._info_manager.get_info("TEST CASE NAME") is not None:
                self._db_name = self._db_name + "_" + self._info_manager.get_info("TEST CASE NAME").replace(" ", "_")
            """

        self._db_name = self._db_name + ".db"

        # filled in by connect(); _db_is_new forces the first thresholds and
        # sorting rows to be written (see the _insert_* methods)
        self._connection = None
        self._cursor = None
        self._db_is_new = False

    def connect(self):
        """Open the SQLite db (the file is created on first connect) and cache a cursor."""
        self._connection = sqlite3.connect(self._db_home + os.sep +
                                           self._db_name)
        # Row factory enables name-based access (row["x_warn"]) used by the inserts
        self._connection.row_factory = sqlite3.Row
        self._cursor = self._connection.cursor()

    def close(self):
        """Commit any pending statements and close the connection."""
        self._connection.commit()
        self._connection.close()

    def _create_tables(self):
        """Create the three Alyvix tables: runs, thresholds and sorting."""
        self._create_runs_table()
        self._create_thresholds_table()
        self._create_sorting_table()

    def _create_runs_table(self):
        """Create ``runs``: one integer (millisecond) column per perf item."""
        query = "CREATE TABLE runs (start_time integer primary key"
        for perf in self._perf_manager.get_all_perfdata():
            query = query + ", " + perf.name + " integer"
        query += ")"
        self._cursor.execute(query)

    def _create_thresholds_table(self):
        """Create ``thresholds``: a ``_warn`` and a ``_crit`` column per perf item."""
        query = "CREATE TABLE thresholds (start_time integer primary key"
        for perf in self._perf_manager.get_all_perfdata():
            query = query + ", " + perf.name + "_warn integer, " + perf.name + "_crit integer"
        query += ")"
        self._cursor.execute(query)

    def _create_sorting_table(self):
        """Create ``sorting``: an ``_index`` (display order) column per perf item."""
        query = "CREATE TABLE sorting (start_time integer primary key"
        for perf in self._perf_manager.get_all_perfdata():
            query = query + ", " + perf.name + "_index integer"
        query += ")"
        self._cursor.execute(query)

    def _check_runs_columns(self):
        """Align ``runs`` with the current perf set by adding any missing column."""
        query = "PRAGMA table_info(runs);"
        rows = self._cursor.execute(query).fetchall()

        for perf in self._perf_manager.get_all_perfdata():

            perf_name_present = False

            for row in rows:

                # row[1] of PRAGMA table_info output is the column name
                if row[1] == perf.name:
                    perf_name_present = True
                    break

            #check and add new columns
            if perf_name_present is False:

                query = "ALTER TABLE runs ADD COLUMN " + perf.name + " integer;"
                self._cursor.execute(query)

    def _check_thresholds_columns(self):
        """Add any missing ``<name>_warn`` / ``<name>_crit`` column to ``thresholds``."""
        query = "PRAGMA table_info(thresholds);"
        rows = self._cursor.execute(query).fetchall()

        for perf in self._perf_manager.get_all_perfdata():

            perf_warn_present = False
            perf_crit_present = False

            for row in rows:

                # row[1] of PRAGMA table_info output is the column name
                if row[1] == perf.name + "_warn":
                    perf_warn_present = True

                if row[1] == perf.name + "_crit":
                    perf_crit_present = True

            #check and add new columns
            if perf_warn_present is False:

                query = "ALTER TABLE thresholds ADD COLUMN " + perf.name + "_warn integer;"
                self._cursor.execute(query)

            #check and add new columns
            if perf_crit_present is False:

                query = "ALTER TABLE thresholds ADD COLUMN " + perf.name + "_crit integer;"
                self._cursor.execute(query)

    def _check_sorting_columns(self):
        """Add any missing ``<name>_index`` column to ``sorting``."""
        query = "PRAGMA table_info(sorting);"
        rows = self._cursor.execute(query).fetchall()

        for perf in self._perf_manager.get_all_perfdata():

            perf_name_present = False

            for row in rows:

                # row[1] of PRAGMA table_info output is the column name
                if row[1] == perf.name + "_index":
                    perf_name_present = True
                    break

            #check and add new columns
            if perf_name_present is False:

                query = "ALTER TABLE sorting ADD COLUMN " + perf.name + "_index integer;"
                self._cursor.execute(query)

    def _insert_runs(self):
        """Insert one ``runs`` row for the current run (values stored as ms)."""
        #check and add new columns
        self._check_runs_columns()

        start_time = self._info_manager.get_info("START TIME")
        query = "INSERT INTO runs (start_time"
        for perf in self._perf_manager.get_all_perfdata():
            query = query + ", " + perf.name
        query = query + ") VALUES (" + str(start_time)

        for perf in self._perf_manager.get_all_perfdata():
            if perf.value is not None and perf.value != "":
                # perf values are stored as integer milliseconds
                query = query + ", " + str(int(perf.value * 1000))
            else:
                query = query + ", null"

        query += ")"
        self._cursor.execute(query)

    def _insert_thresholds(self):

        #check and add new columns
        self._check_thresholds_columns()

        #get last row of thresholds table
        query = "select * from thresholds where start_time ORDER BY start_time DESC LIMIT 1"
        last_rows = self._cursor.execute(query).fetchone()

        total_null = 0

        #count null values
        if last_rows is not None:
            for row in last_rows:
                if row is None:
                    total_null += 1

        #get perfdata of current run
        current_perfdata = self._perf_manager.get_all_perfdata()

        for perf in current_perfdata:

            #if warning or critical are empty, then we dont have to compare warning or critical column
            if perf.warning_threshold == "" or perf.warning_threshold is None and total_null > 0:
                total_null -= 1

            if perf.critical_threshold == "" or perf.critical_threshold is None and total_null > 0:
                total_null -= 1

        if last_rows is not None:
            total_columns = len(last_rows) - 1
        else:
            total_columns = 0

        start_time = self._info_manager.get_info("START TIME")
        query = "INSERT INTO thresholds (start_time"

        for perf in current_perfdata:
            query = query + ", " + perf.name + "_warn, " + perf.name + "_crit"
        query = query + ") VALUES (" + str(start_time)

        different_from_last = self._db_is_new

        #check if perfdata items of current run are > (or <) than last row columns
        if len(current_perfdata) * 2 != (total_columns - total_null):
            different_from_last = True

        for perf in self._perf_manager.get_all_perfdata():

            if perf.warning_threshold is not None and perf.warning_threshold != "":
                query = query + ", " + str(int(perf.warning_threshold * 1000))
                if last_rows is not None and last_rows[
                        perf.name + "_warn"] != int(
                            perf.warning_threshold * 1000):
                    different_from_last = True
            else:
                query = query + ", null"
                if last_rows is not None and last_rows[perf.name +
                                                       "_warn"] is not None:
                    different_from_last = True

            if perf.critical_threshold is not None and perf.critical_threshold != "":
                query = query + ", " + str(int(perf.critical_threshold * 1000))
                if last_rows is not None and last_rows[
                        perf.name + "_crit"] != int(
                            perf.critical_threshold * 1000):
                    different_from_last = True
            else:
                query = query + ", null"
                if last_rows is not None and last_rows[perf.name +
                                                       "_crit"] is not None:
                    different_from_last = True

        if different_from_last is True:

            query = query + ")"
            self._cursor.execute(query)

    def _insert_sorting(self):
        """Insert the perf display order; skipped when identical to the last row."""
        #check and add new columns
        self._check_sorting_columns()

        #get last row of sorting table
        query = "select * from sorting where start_time ORDER BY start_time DESC LIMIT 1"
        last_rows = self._cursor.execute(query).fetchone()

        total_null = 0

        #count null values
        if last_rows is not None:
            for row in last_rows:
                if row is None:
                    total_null += 1

        if last_rows is not None:
            total_columns = len(last_rows) - 1
        else:
            total_columns = 0

        start_time = self._info_manager.get_info("START TIME")

        query = "INSERT INTO sorting (start_time"
        for perf in self._perf_manager.get_all_perfdata():
            query = query + ", " + perf.name + "_index"
        query = query + ") VALUES (" + str(start_time)

        different_from_last = self._db_is_new

        #check if perfdata items of current run are > (or <) than last row columns
        if len(self._perf_manager.get_all_perfdata()) != (total_columns -
                                                          total_null):
            different_from_last = True

        for perf in self._perf_manager.get_all_perfdata():
            query = query + ", " + str(perf.counter)

            if last_rows is not None and last_rows[perf.name +
                                                   "_index"] != perf.counter:
                different_from_last = True

        if different_from_last is True:
            query = query + ")"
            self._cursor.execute(query)

    def store_perfdata(self, dbname=None):

        if dbname != None and dbname != "":
            self._db_home = os.path.split(dbname)[0]
            self._db_name = os.path.split(dbname)[1]

            self._info_manager.set_info("DB FILE",
                                        self._db_home + os.sep + self._db_name)

        #if not os.path.isfile(self._db_home + os.sep + self._db_name):
        if not os.path.isdir(self._db_home):
            os.makedirs(self._db_home)
            self.connect()
            self._create_tables()
            self._db_is_new = True
        elif not os.path.isfile(self._db_home + os.sep + self._db_name):
            self.connect()
            self._create_tables()
            self._db_is_new = True
        else:
            self.connect()

        self._insert_runs()
        self._insert_thresholds()
        self._insert_sorting()

        self.close()

    def publish_perfdata(self,
                         type="csv",
                         start_date=None,
                         end_date=None,
                         filename=None,
                         testcase_name=None,
                         max_age=24):

        if type == "perfmon":
            try:
                full_file_name = get_python_lib(
                ) + os.sep + "alyvix" + os.sep + "extra" + os.sep + "alyvixservice.ini"

                config = ConfigParser.ConfigParser()
                config.read(full_file_name)

                db_file = self._info_manager.get_info("DB FILE")

                if db_file is not None:

                    self._db_home = os.path.split(
                        self._info_manager.get_info("DB FILE"))[0]
                    self._db_name = os.path.split(
                        self._info_manager.get_info("DB FILE"))[1]

                if testcase_name is None or testcase_name == "":
                    testcase_name = self._info_manager.get_info(
                        'TEST CASE NAME')

                if testcase_name is None or testcase_name == "":
                    testcase_name = self._info_manager.get_info('SUITE NAME')

                try:
                    if not config.has_section('db_path'):
                        config.add_section('db_path')
                except:
                    pass

                try:
                    if not config.has_section('db_max_age'):
                        config.add_section('db_max_age')
                except:
                    pass

                try:
                    config.get('general', "polling_frequency")
                except:
                    try:
                        config.add_section('general')
                    except:
                        config.set('general', 'polling_frequency', '500')

                try:
                    config.get('general', "push_frequency")
                except:
                    try:
                        config.add_section('general')
                    except:
                        config.set('general', 'push_frequency', '2')

                config.set('db_path', testcase_name,
                           self._db_home + os.sep + self._db_name)

                config.set('db_max_age', testcase_name, str(max_age))

                with open(full_file_name, 'w') as configfile:
                    config.write(configfile)
            except:
                pass

        if type == "csv":

            start_date_dt = None
            end_date_dt = None

            for fmt in ('%Y-%m-%d', '%Y-%m-%d %H:%M', '%Y-%m-%d %H:%M:%S'):
                try:
                    start_date_dt = datetime.datetime.strptime(start_date, fmt)
                except ValueError:
                    pass

            for fmt in ('%Y-%m-%d', '%Y-%m-%d %H:%M', '%Y-%m-%d %H:%M:%S'):
                try:
                    end_date_dt = datetime.datetime.strptime(end_date, fmt)
                except ValueError:
                    pass

            if start_date_dt is None:
                raise Exception('invalid start date!')

            if end_date_dt is None:
                raise Exception('invalid end date!')

            if start_date_dt >= end_date_dt:
                raise Exception('end date must be greate than start date!')

            db_file = self._info_manager.get_info("DB FILE")

            if db_file is not None:

                self._db_home = os.path.split(
                    self._info_manager.get_info("DB FILE"))[0]
                self._db_name = os.path.split(
                    self._info_manager.get_info("DB FILE"))[1]

            csv_name = filename

            if csv_name is None or filename == "":
                csv_home = os.path.split(
                    sys.executable)[0] + os.sep + "share" + os.sep + "alyvix"
                csv_name = "alyvix_data"

                if self._info_manager.get_info("ROBOT CONTEXT") is True:

                    csv_home = os.path.dirname(
                        os.path.abspath(
                            self._info_manager.get_info("SUITE SOURCE")))

                    csv_name = self._info_manager.get_info("SUITE NAME")

                csv_name = csv_home + os.sep + csv_name + ".csv"

            csv_file = open(csv_name, 'w')
            csv_writer = csv.writer(csv_file)

            self.connect()

            #get last row of sorting table
            query = "select * from sorting ORDER BY start_time DESC LIMIT 1"
            last_sorting_rows = self._cursor.execute(query).fetchone()

            #print self._cursor.description

            perf_to_query = []

            for key in last_sorting_rows.keys():

                if key == "start_time":
                    continue

                value = last_sorting_rows[key]

                if value is not None:
                    perf_to_query.append(key.replace("_index", ""))

            query = "select datetime(start_time, 'unixepoch','localtime') as start_time"

            for column in perf_to_query:
                query = query + ", " + column

            query = query + " from runs where CAST(strftime('%s', datetime(start_time, 'unixepoch', 'localtime')) AS INT) between CAST(strftime('%s', '" + start_date + "') AS INT) and CAST(strftime('%s', '" + end_date + "') AS INT)"

            rows = self._cursor.execute(query).fetchall()

            csv_header = []
            csv_header.append("start_time")

            for perf_column in perf_to_query:
                csv_header.append(perf_column)

            csv_writer.writerow(csv_header)

            for row in rows:

                csv_row = []

                #start_date_dt = datetime.datetime.utcfromtimestamp(row["start_time"])

                #start_date_str = start_date_dt.strftime("%Y-%m-%d %H:%M:%S")

                csv_row.append(row["start_time"])

                for perf_column in perf_to_query:
                    csv_row.append(row[perf_column])

                csv_writer.writerow(csv_row)

            self.close()

            csv_file.close()
Example #3
0
class ImageFinder(BaseFinder):
    """
    Template-matching finder: locates a main template image (plus optional
    sub-templates constrained to ROIs) inside a desktop screenshot or a
    caller-supplied source image, using OpenCV normalized cross-correlation.
    """

    def __init__(self, name=None):
        """
        init the class

        :type name: string
        :param name: the object name
        """

        #self._main_component = None
        #self._sub_components = []
        # set by subclasses/callers; this finder is not a text finder
        self.is_textfinder = False
        # default template-matching threshold (see _SetThreshold)
        self.__threshold = 0.7
        # per-candidate extra images collected for timeout logging
        self.__timed_out_sub_extra_images = []

        self._info_manager = InfoManager()
        self._overlapping_factor = self._info_manager.get_info(
            "OVERLAPPING TOLERANCE FACTOR")

        # fall back to 10 px when the info value is missing or non-numeric
        try:
            self._overlapping_factor = int(self._overlapping_factor)
        except:
            self._overlapping_factor = 10

        the_name = "template_finder"

        if name is not None:
            the_name = name

        super(ImageFinder, self).__init__(the_name)

    # BaseFinder.__init__(self, the_name)

    def set_main_component(self, template_dict, roi_dict=None):
        """
        Set the template that the find method has to find into the source image.

        :type template_dict: dict
        :param template_dict: description used to build the main _Template
        :type roi_dict: dict or None
        :param roi_dict: optional region-of-interest description (builds a Roi);
            None means the whole source image is searched
        """
        main_template = _Template(template_dict)

        if roi_dict is not None:
            roi = Roi(roi_dict)
        else:
            roi = None

        self._main_component = (main_template, roi)
        #print "main component:",self._main_component

    def add_sub_component(self, template_dict, roi_dict):
        """
        Add a template that the find method has to find if it has also found
        the main template.  The roi coordinates are relative to the main
        template x,y position; the find method searches the sub template only
        inside that roi area.

        :type template_dict: dict
        :param template_dict: description used to build the sub _Template
        :type roi_dict: dict
        :param roi_dict: region-of-interest description (builds a Roi)
        """
        roi = Roi(roi_dict)
        sub_template = _Template(template_dict)
        self._sub_components.append((sub_template, roi))

    def _SetThreshold(self, threshold):
        """
        Set the threshold of template matching algorithm. Min value is 0.0, max value is 1.0
        threshold=1.0 means that the source image must contain an exact copy of the template image.
        threshold=0.0 means that the source image can even not contain the template (with the risk
        of false positives).
        The default value for the threshold is 0.7

        :type threshold: float
        :param threshold: threshold value for template matching algorithm
        """
        self.__threshold = threshold

    def find(self):
        """
        find the main template and sub templates into the source image.

        :rtype: list[(int, int, int, int)]
        :return: x, y, height, width of main template(s)
        """
        #tzero = time.time()
        time_before_find = time.time()
        try:

            #x = 1 / 0

            #print "main comp:",self._main_component

            # per-call state reset: timed-out candidates and extra log images
            self._timedout_main_components = []
            self._timedout_sub_components = []

            self._main_extra_img_log = None
            self._sub_extra_imgages_log = []

            source_img_auto_set = False

            self._objects_found = []

            # grab the desktop only when the caller did not supply source images
            if self._source_image_gray is None or self._source_image_color is None:
                screen_capture = ScreenManager()
                src_img_color = screen_capture.grab_desktop(
                    screen_capture.get_color_mat)
                self.set_source_image_color(src_img_color)
                src_img_gray = cv2.cvtColor(src_img_color, cv2.COLOR_BGR2GRAY)
                self.set_source_image_gray(src_img_gray)
                source_img_auto_set = True

            # optional single-channel filtering: keep only the b/g/r plane
            # selected by the 'channel' info key, then rebuild the gray source
            if str(self._info_manager.get_info('channel')).lower() != 'all':
                img_b, img_g, img_r = cv2.split(self._source_image_color)

                if str(self._info_manager.get_info('channel')).lower() == 'b':
                    self._source_image_color = cv2.cvtColor(
                        img_b, cv2.COLOR_GRAY2BGR)
                elif str(
                        self._info_manager.get_info('channel')).lower() == 'g':
                    self._source_image_color = cv2.cvtColor(
                        img_g, cv2.COLOR_GRAY2BGR)
                elif str(
                        self._info_manager.get_info('channel')).lower() == 'r':
                    self._source_image_color = cv2.cvtColor(
                        img_r, cv2.COLOR_GRAY2BGR)

                self._source_image_gray = cv2.cvtColor(
                    self._source_image_color, cv2.COLOR_BGR2GRAY)

            self.__find_log_folder = datetime.datetime.now().strftime(
                "%H_%M_%S") + "_" + "searching"

            offset_x = 0
            offset_y = 0

            main_template = self._main_component[0]
            #print "main templ:", main_template
            roi = self._main_component[1]

            if roi is not None:

                y1 = roi.y
                y2 = y1 + roi.height
                x1 = roi.x
                x2 = x1 + roi.width

                # RESOLUTION is used below as (width, height)
                res = self._info_manager.get_info("RESOLUTION")

                y1 = roi.y
                y2 = y1 + roi.height

                x1 = roi.x
                x2 = x1 + roi.width

                # "unlimited" flags stretch the roi to the screen border
                if roi.unlimited_up is True:
                    y1 = 0
                    y2 = roi.y + roi.height

                if roi.unlimited_down is True:
                    y2 = res[1]

                if roi.unlimited_left is True:
                    x1 = 0
                    x2 = roi.x + roi.width

                if roi.unlimited_right is True:
                    x2 = res[0]

                # matches found in the crop are reported in full-image coords
                offset_x = x1
                offset_y = y1

                source_img_height, source_img_width = self._source_image_gray.shape

                # clamp the crop rectangle to the source image bounds
                if y1 < 0:
                    y1 = 0
                elif y1 > source_img_height:
                    y1 = source_img_height

                if y2 < 0:
                    y2 = 0
                elif y2 > source_img_height:
                    y2 = source_img_height

                if x1 < 0:
                    x1 = 0
                elif x1 > source_img_width:
                    x1 = source_img_width

                if x2 < 0:
                    x2 = 0
                elif x2 > source_img_width:
                    x2 = source_img_width

                #print x1,x2,y1,y2
                source_image = self._source_image_gray[y1:y2, x1:x2]
            else:
                source_image = self._source_image_gray

            if self._log_manager.is_log_enable() is True:
                self._log_manager.save_image(self.__find_log_folder,
                                             "source_img.png", source_image)
                self._log_manager.save_image(self.__find_log_folder,
                                             "main_template.png",
                                             main_template.image_data)

            #self._timed_out_images.append(source_image.copy())

            objects_found = []
            analyzed_points = []
            self._objects_found = []

            # shape[::-1] gives (width, height) for a 2-D gray image
            w, h = main_template.image_data.shape[::-1]
            src_w, src_h = source_image.shape[::-1]
            tpl_w = w
            tpl_h = h

            # template larger than the (cropped) source: no match is possible
            if src_h < tpl_h or src_w < tpl_w:
                self._flag_thread_have_to_exit = False
                """
                self._flag_thread_started = False
                self._source_image_gray = None
                self._source_image_color = None
                """
                return []

            result = None

            # normalized cross-correlation scores, one per candidate position
            res = cv2.matchTemplate(source_image, main_template.image_data,
                                    cv2.TM_CCOEFF_NORMED)
            #min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(resacascascsacas)

            # scale the score map to 0..255 and to source size, for logging only
            res_norm = (res * 255).round().clip(min=0).astype(
                numpy.uint8
            )  #numpy.array(res * 255, dtype = numpy.float32) #(res * 255) #.round().astype(numpy.int8)
            res_norm = cv2.resize(
                res_norm, (source_image.shape[1], source_image.shape[0]),
                interpolation=cv2.INTER_CUBIC)

            #cv2.imwrite("c:\\log\\res_norm.png",res_norm)
            #cv2.imwrite("c:\\log\\res.png",res)

            #res_norm.resize(res_norm.shape[0], res_norm.shape[0], 3L, refcheck=False)

            if roi is not None:
                self._main_extra_img_log = (res_norm, (x1, y1, x2, y2))
            else:
                self._main_extra_img_log = (res_norm, None)

            # candidate positions whose score reaches the template threshold
            loc = numpy.where(res >= main_template.threshold)

            cnt = 0
            for point in zip(*loc[::-1]):

                object_found = []
                object_found.append([])
                object_found.append([])
                self.__timed_out_sub_extra_images = []
                """
                if self._flag_thread_have_to_exit is True:
                    self._flag_thread_have_to_exit = False
                    self._flag_thread_started = False
                    self._source_image_gray = None
                    self._source_image_color = None
                    return []
                """

                # translate crop coordinates back to full-image coordinates
                x = offset_x + point[0]
                y = offset_y + point[1]

                is_already_found = False

                # de-duplicate candidates that overlap an already-analyzed one
                # within half a template plus the scaled overlapping factor
                for point_already_analyzed in analyzed_points:

                    #tolerance_region_w = (tpl_w/2)  + (20 * self._scaling_factor)
                    #tolerance_region_h = (tpl_h/2) + (20 * self._scaling_factor)

                    tolerance_region_w = (tpl_w / 2) + (
                        self._overlapping_factor * self._scaling_factor)
                    tolerance_region_h = (tpl_h / 2) + (
                        self._overlapping_factor * self._scaling_factor)

                    if (x >= point_already_analyzed[0] - tolerance_region_w and
                                x <= point_already_analyzed[0] + tolerance_region_w) and\
                            (y >= point_already_analyzed[1] - tolerance_region_h and
                                     y <= point_already_analyzed[1] + tolerance_region_h):

                        is_already_found = True
                        #print point[0],point_already_analyzed[0],point[1],point_already_analyzed[1]

                if is_already_found == False:

                    #hist_blue = cv2.calcHist([self._source_image_color[y:y+h, x:x+w]], [0], None, [256], [0, 256])
                    #hist_green = cv2.calcHist([self._source_image_color[y:y+h, x:x+w]], [1], None, [256], [0, 256])
                    #hist_red = cv2.calcHist([self._source_image_color[y:y+h, x:x+w]], [2], None, [256], [0, 256])

                    # 8x8x8-bin BGR histogram of the candidate region, compared
                    # with the template's histogram via Bhattacharyya distance
                    hist_rgb = cv2.calcHist(
                        [self._source_image_color[y:y + h, x:x + w]],
                        [0, 1, 2], None, [8, 8, 8], [0, 256, 0, 256, 0, 256])
                    hist_rgb = cv2.normalize(hist_rgb).flatten()

                    #comp_blue = cv2.compareHist(hist_blue,main_template.hist_blue,cv2.cv.CV_COMP_BHATTACHARYYA)
                    #comp_green = cv2.compareHist(hist_green, main_template.hist_green, cv2.cv.CV_COMP_BHATTACHARYYA)
                    #comp_red = cv2.compareHist(hist_red, main_template.hist_red, cv2.cv.CV_COMP_BHATTACHARYYA)
                    comp_rgb = cv2.compareHist(hist_rgb,
                                               main_template.hist_rgb,
                                               cv2.cv.CV_COMP_BHATTACHARYYA)

                    analyzed_points.append((x, y, w, h))

                    # reject color-mismatched candidates when color matching is on
                    #if (comp_blue > 0.3 or comp_green > 0.3 or comp_red > 0.3) and main_template.match_colors is True:
                    if comp_rgb > 0.2 and main_template.match_colors is True:
                        continue

                    self._timedout_main_components.append(
                        MatchResult((x, y, w, h)))

                    #self._log_manager.set_main_object_points((x, y, w, h))
                    if self._log_manager.is_log_enable() is True:
                        img_copy = source_image.copy()
                        cv2.rectangle(img_copy,
                                      ((x - offset_x), (y - offset_y)),
                                      ((x - offset_x) + w, (y - offset_y) + h),
                                      (0, 0, 255), 2)
                        self._log_manager.save_image(self.__find_log_folder,
                                                     "object_found.png",
                                                     img_copy)

                    sub_templates_len = len(self._sub_components)

                    if sub_templates_len == 0:
                        # no sub-templates: the main match alone is a result
                        main_object_result = MatchResult((x, y, w, h))
                        object_found[0] = main_object_result
                        object_found[1] = None
                        objects_found.append(object_found)
                    else:
                        #print sub_templates_len
                        # a candidate counts only if ALL sub-templates are found
                        total_sub_template_found = 0
                        sub_objects_found = []
                        timed_out_objects = []
                        timed_out_sub_extra_images = []
                        for sub_template in self._sub_components:
                            """
                            if self._flag_thread_have_to_exit is True:
                                self._flag_thread_have_to_exit = False
                                self._flag_thread_started = False
                                self._source_image_gray = None
                                self._source_image_color = None
                                return []
                            """

                            sub_template_coordinates = self._find_sub_template(
                                (x, y), sub_template)

                            if sub_template_coordinates is not None:
                                sub_objects_found.append(
                                    sub_template_coordinates)
                                total_sub_template_found = total_sub_template_found + 1
                                timed_out_objects.append(
                                    (sub_template_coordinates,
                                     sub_template[1]))
                            else:
                                timed_out_objects.append(
                                    (None, sub_template[1]))

                            #timed_out_sub_extra_images.append()

                            if total_sub_template_found == sub_templates_len:
                                #good_points.append((x, y, w, h))

                                main_object_result = MatchResult((x, y, w, h))
                                object_found[0] = main_object_result

                                object_found[1] = sub_objects_found

                                objects_found.append(object_found)
                        self._timedout_sub_components.append(timed_out_objects)
                        self._sub_extra_imgages_log.append(
                            self.__timed_out_sub_extra_images)
                    #self._log_manager.save_object_image("img_" + str(cnt) + ".png")
                cnt = cnt + 1

            if len(objects_found) > 0:
                self._objects_found = copy.deepcopy(objects_found)
                # object-finder mode also logs and accumulates the results
                if self._is_object_finder is True:
                    self._objects_found_of_sub_object_finder.extend(
                        copy.deepcopy(objects_found))
                    #if wait_disappear is False:

                    #if self._info_manager.get_info('LOG OBJ IS FOUND') is False:
                    if self._info_manager.get_info(
                            'LOG OBJ FINDER TYPE') is None:
                        self._info_manager.set_info('LOG OBJ FINDER TYPE', 0)

                    self._log_manager.save_objects_found(
                        self._name,
                        self.get_source_image_gray(),
                        self._objects_found,
                        [x[1] for x in self._sub_components],
                        self.main_xy_coordinates,
                        self.sub_xy_coordinates,
                        finder_type=0)

                self._cacheManager.SetLastObjFoundFullImg(
                    self._source_image_gray)

            # auto-grabbed screenshots are dropped so the next call re-grabs
            if source_img_auto_set is True:
                self._source_image_gray = None
                self._source_image_color = None
                source_img_auto_set = False
            """
            if self._flag_check_before_exit is True:
                self._flag_checked_before_exit = True
            """

            #time.sleep(40)

            self._flag_thread_started = False

            if self._calc_last_finder_time is True:
                self._last_finder_time = time.time() - time_before_find
                self._calc_last_finder_time = False

            #print time.time() - tzero

            return self._objects_found

        except Exception, err:
            # any failure is logged (with the offending line) and reported as
            # "nothing found" rather than propagated to the caller
            self._log_manager.save_exception(
                "ERROR", "an exception has occurred: " + str(err) +
                " on line " + str(sys.exc_traceback.tb_lineno))
            self._flag_thread_started = False
            return []
예제 #4
0
class PerfManager:

    def __init__(self):
        """Initialize the performance manager and refresh shared info state."""
        # Name of the most recently valued perfdata (set by order_perfdata).
        self._last_filled_perf = None
        # How many perfdata entries never received a value.
        self.not_ok_perfdata = 0
        # Accumulator for the Nagios-style output string.
        self.performance_desc_string = ""
        self._info_manager = InfoManager()
        self._info_manager.tiny_update()

    def clear_perfdata(self):
        global perfdata_list
        perfdata_list = []


    def add_perfdata(self, name, value=None, warning_threshold=None, critical_threshold=None, state=None, inittimestamp=False):
        """
        Register (or replace) a perfdata entry in the module-wide list.

        value/warning_threshold/critical_threshold that do not parse as float
        become "" (deliberate best-effort behavior).  state falls back to the
        "exitcode" environment variable, then to 2, but is overridden by
        threshold comparison when a numeric value is present.  When
        inittimestamp is truthy ("true"/True) the current time is recorded in
        the shared KEYWORD TIMESTAMP info array.
        """

        global perfdata_list
        global perf_counter
        global declaration_counter

        declaration_counter = declaration_counter + 1

        perf_data = _PerfData()
        perf_data.name = str(name)

        # Only entries with a numeric value get a progressive counter,
        # which order_perfdata later uses to sort valued entries first.
        try:
            perf_data.value = float(value)
            if perf_data.counter == -1:
                perf_data.counter = perf_counter
                perf_counter = perf_counter + 1
        except:
            perf_data.value = ""

        try:
            perf_data.warning_threshold = float(warning_threshold)
        except:
            perf_data.warning_threshold = ""

        try:
            perf_data.critical_threshold = float(critical_threshold)
        except:
             perf_data.critical_threshold = ""


        # Explicit state wins; otherwise read the "exitcode" env var;
        # either way an unparsable value degrades to state 2 (critical).
        if state is not None:
            try:
                perf_data.state = int(state)
            except:
                perf_data.state = 2 #3
        else:
            try:
                perf_data.state = int(os.getenv("exitcode"))
            except:
                perf_data.state = 2 #3

        # A numeric value re-derives the state from the thresholds
        # (2=critical, 1=warning, 0=ok), overriding the state above.
        if perf_data.value != "" and perf_data.critical_threshold != "" and perf_data.value >= perf_data.critical_threshold:
            perf_data.state = 2
        elif perf_data.value != "" and perf_data.warning_threshold != "" and perf_data.value >= perf_data.warning_threshold:
            perf_data.state = 1
        elif perf_data.value != "":
            perf_data.state = 0

        initts = False

        if inittimestamp == True:
            initts = True

        # Also accept the string form "true" (e.g. from Robot keywords).
        try:
            if str(inittimestamp).lower() == "true":
                initts = True
        except:
            pass

        if  initts == True:
            # Timestamp in milliseconds since the epoch.
            perf_data.timestamp = int(time.time() * 1000)

            current_keyword_timestamp_array = self._info_manager.get_info('KEYWORD TIMESTAMP')

            # current_keyword_timestamp_array_copy = copy.deepcopy(current_keyword_timestamp_array)

            # Update the existing (name, ts) pair or append a new one.
            timestamp_modified = False
            for cnt_kts in xrange(len(current_keyword_timestamp_array)):
                if current_keyword_timestamp_array[cnt_kts][0] == name:
                    timestamp_modified = True
                    current_keyword_timestamp_array[cnt_kts] = (name, perf_data.timestamp)
                    break

            if timestamp_modified is False:
                current_keyword_timestamp_array.append((name, perf_data.timestamp))

            self._info_manager.set_info('KEYWORD TIMESTAMP',
                                        current_keyword_timestamp_array)

        else:
            perf_data.timestamp = None

        # Regardless of initts, adopt any timestamp already registered
        # for this keyword name in the shared info array.
        keywords_timestamp_array = self._info_manager.get_info('KEYWORD TIMESTAMP')

        for cnt_kts in xrange(len(keywords_timestamp_array)):
            if keywords_timestamp_array[cnt_kts][0] == name:
                perf_data.timestamp = keywords_timestamp_array[cnt_kts][1]
                break

        perf_data.timeout_threshold = None

        # Same lookup for a registered timeout threshold, if any.
        keywords_timeout_array = self._info_manager.get_info('KEYWORD TIMEOUT')

        for cnt_ktout in xrange(len(keywords_timeout_array)):
            if keywords_timeout_array[cnt_ktout][0] == name:
                perf_data.timeout_threshold = keywords_timeout_array[cnt_ktout][1]
                break

        cnt = 0

        # Replace an existing entry with the same name in place, preserving
        # its custom tags/fields; otherwise append as a new entry.
        for perf_data_in_list in perfdata_list:
            if perf_data_in_list.name == perf_data.name:
                perf_data.custom_tags = perfdata_list[cnt].custom_tags
                perf_data.custom_fields = perfdata_list[cnt].custom_fields

                perfdata_list[cnt] = perf_data
                return

            cnt = cnt + 1

        perfdata_list.append(perf_data)

    def rename_perfdata(self, old_name, new_name, warning_threshold="", critical_threshold=""):
        """
        Rename a perfdata entry, updating the shared KEYWORD TIMESTAMP and
        SCRAPER COLLECTION info arrays accordingly.

        Any entry already using new_name is archived to deleted_on_rename_list
        and removed.  Thresholds are replaced only when they parse as floats.

        :raises Exception: if the old entry has no value, or old_name does
            not exist in the perfdata list.
        """

        global perfdata_list

        perfdata_list_copy = copy.deepcopy(perfdata_list)

        # Refuse to rename a keyword whose measure was never filled in.
        for perf_data_in_list in perfdata_list:
            if perf_data_in_list.name == str(old_name):
                if perf_data_in_list.value == "":
                    raise Exception("The Keyword value is None")

        keywords_timestamp_array = copy.deepcopy(self._info_manager.get_info('KEYWORD TIMESTAMP'))

        # Drop timestamps already registered under the new name.
        # (Filtering replaces the original del-while-indexing loop, which
        # raised IndexError when a match was not the last element.)
        keywords_timestamp_array = [kts for kts in keywords_timestamp_array
                                    if kts[0] != str(new_name)]

        # Re-register the old name's timestamps under the new name.
        # Done once, outside the per-perfdata loop: the original repeated
        # this idempotent rewrite and set_info for every list entry.
        for cnt_kts in xrange(len(keywords_timestamp_array)):
            if keywords_timestamp_array[cnt_kts][0] == str(old_name):
                keywords_timestamp_array[cnt_kts] = (new_name, keywords_timestamp_array[cnt_kts][1])

        self._info_manager.set_info('KEYWORD TIMESTAMP', keywords_timestamp_array)

        old_name_exists = False
        cnt = 0

        for perf_data_in_list in perfdata_list:

            if perf_data_in_list.name == str(new_name):
                # A pre-existing entry with the target name is superseded:
                # archive it, then remove it from the working copy.
                deleted_on_rename_list.append(copy.deepcopy(perfdata_list_copy[cnt]))
                del perfdata_list_copy[cnt]
                cnt = cnt - 1
            elif perf_data_in_list.name == str(old_name):

                old_name_exists = True

                perfdata_list_copy[cnt].name = str(new_name)

                try:
                    perfdata_list_copy[cnt].warning_threshold = float(warning_threshold)
                except:
                    pass

                try:
                    perfdata_list_copy[cnt].critical_threshold = float(critical_threshold)
                except:
                    pass

            cnt = cnt + 1

        if old_name_exists is False:
            raise Exception("The keyword name does not exist")

        perfdata_list = copy.deepcopy(perfdata_list_copy)

        # Keep the scraper collection aligned with the renamed keyword.
        cnt = 0
        scraper_list_copy = copy.deepcopy(self._info_manager.get_info('SCRAPER COLLECTION'))

        for sc_in_list in self._info_manager.get_info('SCRAPER COLLECTION'):

            name = sc_in_list[0]

            if name == old_name:
                scraper_list_copy[cnt] = (new_name, sc_in_list[1], sc_in_list[2])

            cnt += 1

        self._info_manager.set_info('SCRAPER COLLECTION', copy.deepcopy(scraper_list_copy))


    def set_perfdata_extra(self, name, extra):
        """Attach an extra string payload to every perfdata entry called *name*."""

        global perfdata_list

        wanted = str(name)
        for entry in perfdata_list:
            if entry.name == wanted:
                entry.extra = str(extra)

    def add_perfdata_tag(self, perf_name, tag_name, tag_value):
        """
        Set a custom tag on the named perfdata entry; perf_name "all"
        targets every entry.  Raises when nothing matches.
        """

        global perfdata_list

        target = str(perf_name)
        matched = False

        for entry in perfdata_list:
            if entry.name == target or target == "all":
                entry.custom_tags[str(tag_name)] = str(tag_value)
                matched = True

        if not matched:
            raise Exception("The keyword name does not exist")

    def add_perfdata_field(self, perf_name, field_name, field_value):
        """
        Set a custom field on the named perfdata entry; perf_name "all"
        targets every entry.  Raises when nothing matches.
        """

        global perfdata_list

        target = str(perf_name)
        matched = False

        for entry in perfdata_list:
            if entry.name == target or target == "all":
                entry.custom_fields[str(field_name)] = str(field_value)
                matched = True

        if not matched:
            raise Exception("The keyword name does not exist")

    def get_perfdata(self, name, delete_perfdata=False):
        """
        Return the value of the named perfdata entry, optionally removing
        it from the list.

        :raises Exception: if no entry has that name, or a matching entry
            has no value.
        """

        global perfdata_list

        keyword_exist = False
        ret_val = None
        kept_entries = []

        # Build the surviving list in one pass; the original deleted from a
        # parallel copy by the source index, which removed the wrong element
        # (or raised IndexError) when several entries shared the name.
        for perf_data_in_list in perfdata_list:
            if perf_data_in_list.name == name:

                keyword_exist = True

                if perf_data_in_list.value == "" or perf_data_in_list.value is None:
                    raise Exception('The performance measure is None')

                # Last match wins, as in the original implementation.
                ret_val = perf_data_in_list.value

                if delete_perfdata is True:
                    continue  # drop this entry

            kept_entries.append(perf_data_in_list)

        # Original behavior: the list is always replaced by a deep copy.
        perfdata_list = copy.deepcopy(kept_entries)

        if keyword_exist is False:
            raise Exception("The keyword name does not exist")

        return ret_val

    def get_all_perfdata(self):
        """Return a deep-copy snapshot of every registered perfdata entry."""
        global perfdata_list
        snapshot = copy.deepcopy(perfdata_list)
        return snapshot

    def delete_perfdata(self, name):
        """
        Remove every perfdata entry whose name equals *name*.

        Unknown names are silently ignored (original behavior).
        """

        global perfdata_list

        # Filter-then-copy fixes the original's stale-index deletion: it
        # deleted from a copy while indexing the original list, so a second
        # matching entry removed the wrong element or raised IndexError.
        perfdata_list = copy.deepcopy(
            [perf for perf in perfdata_list if perf.name != name])

    def sum_perfdata(self, *names, **kwargs):
        """
        Sum the values of the named perfdata entries.

        Keyword args (all optional, best-effort parsed):
            delete_perfdata  -- drop the summed entries from the list
            name             -- register the sum as a new perfdata entry
            warning_threshold, critical_threshold -- thresholds for it

        Returns the sum, or None when no named entry carried a value.
        :raises Exception: when a name is unknown or its value/timestamp
            is missing.
        """

        global perfdata_list

        sum = None

        value_to_sum = []
        index_to_delete = []
        perfdata_list_copy = []

        delete_perf = False
        perf_name = ""
        warning_threshold  = None
        critical_threshold = None

        try:
            delete_perf = kwargs['delete_perfdata']
        except:
            pass

        try:
            perf_name = kwargs['name']
        except:
            pass

        try:
            warning_threshold = float(kwargs['warning_threshold'])
        except:
            pass

        try:
            critical_threshold = float(kwargs['critical_threshold'])
        except:
            pass

        # Track the time window covered by the summed keywords.
        biggest_timestamp = 0
        smallest_timestamp = 10413792000000 #2300/01/01
        value_of_last_perf = None
        timeout_of_last_perf = None

        all_name_exist = True




        # Validate every requested name before touching anything.
        for name in names:
            name_exists = False
            for perf_data_in_list in perfdata_list:
                if perf_data_in_list.name == name:
                    name_exists = True

            if name_exists is False:
                raise Exception("The keyword name (" + name + ") does not exist")

        cnt = 0
        for perf_data_in_list in perfdata_list:

            for name in names:

                if perf_data_in_list.name == name and perf_data_in_list.value != ""\
                        and perf_data_in_list.value is not None:
                    value_to_sum.append(perf_data_in_list.value)
                    sum = 0 #init sum

                    if perf_data_in_list.timestamp is None:
                        raise Exception(name + ": The performance timestamp is None")

                    if perf_data_in_list.timestamp < smallest_timestamp:
                        smallest_timestamp = perf_data_in_list.timestamp

                    # Remember value/timeout of the chronologically last
                    # keyword; used below for the summed end timestamp.
                    if perf_data_in_list.timestamp > biggest_timestamp:
                        biggest_timestamp = perf_data_in_list.timestamp
                        value_of_last_perf = perf_data_in_list.value
                        timeout_of_last_perf = perf_data_in_list.timeout_threshold

                    if delete_perf is True:
                        index_to_delete.append(cnt)

                elif perf_data_in_list.name == name and (perf_data_in_list.value == ""\
                        or perf_data_in_list.value is None):
                    raise Exception("The performance measure (" + name + ") is None")

            cnt = cnt + 1

        # Rebuild the list without the entries marked for deletion.
        cnt = 0
        for perf in perfdata_list:

            if cnt not in index_to_delete:
                perfdata_list_copy.append(perf)

            cnt = cnt + 1

        perfdata_list = copy.deepcopy(perfdata_list_copy)
        perfdata_list_copy = []

        for perf in value_to_sum:

            sum = sum + perf

        # Optionally register the sum as a perfdata entry of its own,
        # timestamped at the start of the earliest summed keyword.
        if perf_name != "":

            self.add_perfdata(perf_name, sum, warning_threshold, critical_threshold)

            for perf in perfdata_list:
                if perf.name == perf_name:

                    perf.timestamp = smallest_timestamp

                    current_keyword_timestamp_array = self._info_manager.get_info('KEYWORD TIMESTAMP')

                    # current_keyword_timestamp_array_copy = copy.deepcopy(current_keyword_timestamp_array)

                    timestamp_modified = False
                    for cnt_kts in xrange(len(current_keyword_timestamp_array)):
                        if current_keyword_timestamp_array[cnt_kts][0] == perf_name:
                            timestamp_modified = True
                            current_keyword_timestamp_array[cnt_kts] = (perf_name, smallest_timestamp)
                            break

                    if timestamp_modified is False:
                        current_keyword_timestamp_array.append((perf_name, smallest_timestamp))

                    self._info_manager.set_info('KEYWORD TIMESTAMP',
                                                current_keyword_timestamp_array)

                    # End timestamp: last keyword's start plus its value,
                    # else plus its timeout, else the bare start timestamp.
                    try:
                        end_timestamp_only_for_summed_perf = (float(biggest_timestamp)/1000) + value_of_last_perf
                        perf.end_timestamp_only_for_summed_perf = int(end_timestamp_only_for_summed_perf*1000)
                    except:
                        try:
                            end_timestamp_only_for_summed_perf = (float(biggest_timestamp)/1000) + timeout_of_last_perf
                            perf.end_timestamp_only_for_summed_perf = int(end_timestamp_only_for_summed_perf*1000)
                        except:
                            perf.end_timestamp_only_for_summed_perf = biggest_timestamp

        return sum

    def get_last_filled(self):
        #self.order_perfdata()
        return self._last_filled_perf

    def order_perfdata(self):
        """
        Reorder perfdata so valued entries (counter != -1) come first, sorted
        by declaration counter, followed by the unvalued ones.  Also counts
        the unvalued entries and records the last valued entry's name.
        """
        global perfdata_list

        valued = []
        unvalued = []

        for entry in perfdata_list:
            if entry.counter != -1:
                valued.append(copy.deepcopy(entry))
            else:
                unvalued.append(copy.deepcopy(entry))
                self.not_ok_perfdata = self.not_ok_perfdata + 1

        valued.sort(key=lambda item: item.counter)

        if len(valued) > 0:
            self._last_filled_perf = valued[-1].name

        perfdata_list = valued + unvalued

    def get_perfdata_string(self):
        """Build the Nagios-style performance-data string for all perfdata.

        Each entry is rendered as ``name=VALUEs;WARN;CRIT;;`` (value and
        thresholds formatted to three decimals, empty when missing) and the
        entries are joined with single spaces.  The trailing ``replace``
        keeps the historical quirk of dropping the ``s`` unit for entries
        that have no value and no thresholds.

        :return: the assembled performance-data string (may be empty).
        """
        global perfdata_list

        chunks = []
        for perfdata in perfdata_list:

            # Format each numeric field, or leave it empty when the
            # measure/threshold is missing ('' or None).
            if perfdata.value == '' or perfdata.value is None:
                value = ''
            else:
                value = ("%.3f" % perfdata.value)

            if perfdata.warning_threshold == '' or perfdata.warning_threshold is None:
                warning = ''
            else:
                warning = ("%.3f" % perfdata.warning_threshold)

            if perfdata.critical_threshold == '' or perfdata.critical_threshold is None:
                critical = ''
            else:
                critical = ("%.3f" % perfdata.critical_threshold)

            chunks.append(perfdata.name + "=" + value + "s;" + warning + ";" + critical + ";;")

        # str.join replaces the original quadratic '+' accumulation and the
        # manual counter used only to decide on a leading space.
        ret_string = " ".join(chunks)

        return ret_string.replace("=s;;;;","=;;;;")

    def get_output(self, message=None, print_output=True):
        """Build the final monitoring output and persist it to disk.

        Composes the status line (OK/WARNING/CRITICAL/UNKNOWN plus the
        performance-data string), appends one detail line per transaction,
        writes ``message.txt`` and ``exitcode.txt`` into the result
        directory, optionally prints the output, and returns the exit code.

        :param message: optional text that replaces the standard status line.
        :param print_output: when True, print the assembled output
            (prefixed with ``*WARN*`` for Robot Framework on non-OK runs).
        :return: the exit code computed by get_exitcode().
        """
        prefix_robot_framework = ""

        global perf_counter
        global declaration_counter
        global perfdata_list
        global timedout_finders

        self.order_perfdata()

        exitcode = self.get_exitcode()
        performanceData = self.get_perfdata_string()

        # Fixed: the original used "is not ''" — an identity check that only
        # worked because CPython interns the empty-string literal.  String
        # values must be compared with != / ==.
        if performanceData != "":
            performanceData = "|" + performanceData
        else:
            performanceData = ""

        if self._info_manager.get_info("RESOLUTION BGS OK") is False:
            self.performance_desc_string = self.performance_desc_string + \
               "Alyvix Background Service is installed but the screen resolution doesn't match with the config file"\
               + performanceData + os.linesep

        elif message is not None:
            self.performance_desc_string = self.performance_desc_string + message + performanceData + os.linesep
        elif exitcode == 3 and self.not_ok_perfdata == len(perfdata_list):
            self.performance_desc_string = self.performance_desc_string + \
                                           "UNKNOWN: some error occurred, no measure was taken" + \
                                           performanceData + os.linesep
            prefix_robot_framework = "*WARN*"
        elif exitcode == 3 and self.not_ok_perfdata > 0:
            self.performance_desc_string = self.performance_desc_string + \
                                           "UNKNOWN: one transaction breaks the test case, the last received measure is " + \
                                           self._last_filled_perf + " " + performanceData + os.linesep
            prefix_robot_framework = "*WARN*"
        elif exitcode == 2 and self.not_ok_perfdata == len(perfdata_list):
            self.performance_desc_string = self.performance_desc_string + \
                                           "CRITICAL: some error occurred, no measure was taken" + \
                                           performanceData + os.linesep
            prefix_robot_framework = "*WARN*"
        elif exitcode == 2 and self.not_ok_perfdata > 0:
            self.performance_desc_string = self.performance_desc_string +\
                                               "CRITICAL: one transaction breaks the test case, the last received measure is " +\
                                               self._last_filled_perf + " " + performanceData + os.linesep
            prefix_robot_framework = "*WARN*"
        elif exitcode == 2:
            self.performance_desc_string = self.performance_desc_string +\
                                           "CRITICAL: one or more transactions run critical" +\
                                           performanceData + os.linesep
            prefix_robot_framework = "*WARN*"
        elif exitcode == 1 and self.not_ok_perfdata == len(perfdata_list):
            self.performance_desc_string = self.performance_desc_string + \
                                           "WARNING: some error occurred, no measure was taken" + \
                                           performanceData + os.linesep
            prefix_robot_framework = "*WARN*"
        elif exitcode == 1 and self.not_ok_perfdata > 0:
            self.performance_desc_string = self.performance_desc_string +\
                                               "WARNING: one transaction breaks the test case, the last received measure is " +\
                                               self._last_filled_perf + " " + performanceData + os.linesep
            prefix_robot_framework = "*WARN*"
        elif exitcode == 1:
            self.performance_desc_string = self.performance_desc_string +\
                                           "WARNING: one or more transactions run in warning" +\
                                           performanceData + os.linesep
            prefix_robot_framework = "*WARN*"
        elif exitcode == 0 and self.not_ok_perfdata == len(perfdata_list):
            self.performance_desc_string = self.performance_desc_string + \
                                           "Ok: some error occurred, no measure was taken" + \
                                           performanceData + os.linesep
        elif exitcode == 0 and self.not_ok_perfdata > 0:
            self.performance_desc_string = self.performance_desc_string +\
                                               "OK: one transaction breaks the test case, the last received measure is " +\
                                               self._last_filled_perf + " " + performanceData + os.linesep
        else:
            self.performance_desc_string = self.performance_desc_string +\
                                           "OK: all transactions run ok" +\
                                           performanceData + os.linesep

        # No transaction was ever declared: reword the generic error line.
        if declaration_counter == 0 and perf_counter == 0:
            self.performance_desc_string = self.performance_desc_string.replace("some error occurred, no measure was taken",
                                                                                "no transaction is declared")

        # One detail line per transaction; state is (re)derived from the
        # thresholds when a value is present.
        for perfdata in perfdata_list:

            name = perfdata.name

            if perfdata.value != "" and perfdata.critical_threshold != "" and perfdata.value >= perfdata.critical_threshold:
                perfdata.state = 2
            elif perfdata.value != "" and perfdata.warning_threshold != "" and perfdata.value >= perfdata.warning_threshold:
                perfdata.state = 1
            elif perfdata.value != "":
                perfdata.state = 0

            state = perfdata.state

            if perfdata.value == '' or perfdata.value is None:
                value = perfdata.value
            else:
                value = ("%.3f" % perfdata.value)

            if state == 0 and value == "":
                self.performance_desc_string = self.performance_desc_string +\
                                               "OK: " + name + " measures None" + os.linesep
            elif state == 0:
                self.performance_desc_string = self.performance_desc_string +\
                                               "OK: " + name + " measures " + value + "s" + os.linesep
            elif state == 1 and value == "":
                self.performance_desc_string = self.performance_desc_string +\
                                               "WARNING: " + name + " measures None" + os.linesep
            elif state == 1:
                self.performance_desc_string = self.performance_desc_string +\
                                               "WARNING: " + name + " measures " + value + "s" + os.linesep
            elif state == 2 and value == "":
                self.performance_desc_string = self.performance_desc_string +\
                                               "CRITICAL: " + name + " measures None" + os.linesep
            elif state == 2:
                self.performance_desc_string = self.performance_desc_string + \
                                               "CRITICAL: " + name + " measures " + value + "s" + \
                                               os.linesep
            else:
                self.performance_desc_string = self.performance_desc_string +\
                                                   "UNKNOWN: " + name + " time is null." + os.linesep

        # Under Robot Framework the result files go to a per-suite temp
        # folder; otherwise they are written to the current directory.
        if self._info_manager.get_info("ROBOT CONTEXT") is True:

            suite_source = self._info_manager.get_info("SUITE SOURCE")

            file_name = suite_source.split(os.sep)[-1].split(".")[0]

            result_dir = tempfile.gettempdir() + os.sep + "alyvix_pybot" + os.sep + file_name + os.sep + "result"
        else:
            result_dir = "."

        if not os.path.exists(result_dir):
            os.makedirs(result_dir)

        # 'with' guarantees the files are closed even if a write fails
        # (the original leaked the handle on exceptions).
        with open(result_dir + os.sep + "message.txt", "w") as text_file:
            text_file.write(self.performance_desc_string)

        with open(result_dir + os.sep + "exitcode.txt", "w") as text_file:
            text_file.write(str(exitcode))

        if print_output is True:
            # Parenthesized single-argument form works identically under
            # Python 2 (print statement) and Python 3 (print function).
            print(prefix_robot_framework + self.performance_desc_string)
        return exitcode

    def get_exitcode(self):
        """Compute the overall exit code (0 OK, 1 WARNING, 2 CRITICAL, or a
        stored state such as 3 UNKNOWN).

        The code is driven by each perfdata's thresholds; transactions with
        no measure contribute their recorded ``state``.  When at least one
        transaction failed, the worst unmeasured state wins, and an
        ``exitcode`` environment variable (if set and numeric) overrides
        the computed value.

        :return: the integer exit code.
        """
        global perfdata_list

        exitcode = 0

        not_ok_exitcode = None

        # Track the worst recorded state among transactions that produced
        # no measure at all.
        for perfdata in perfdata_list:

            if perfdata.value is None or perfdata.value == "":
                if not_ok_exitcode is None:
                    not_ok_exitcode = perfdata.state
                elif perfdata.state > not_ok_exitcode:
                    not_ok_exitcode = perfdata.state

        # If every entry was deleted by a rename, fall back to the
        # pre-rename snapshot so thresholds can still be evaluated.
        if len(perfdata_list) == 0 and len(deleted_on_rename_list) != 0:
            perfdata_list = copy.deepcopy(deleted_on_rename_list)

        for perfdata in perfdata_list:

            if perfdata.value is None or perfdata.value == "":
                if perfdata.state > exitcode:
                    exitcode = perfdata.state
            if perfdata.critical_threshold is not None and perfdata.critical_threshold != "":
                if perfdata.value >= int(perfdata.critical_threshold):
                    if 2 > exitcode:
                        exitcode = 2
            if perfdata.warning_threshold is not None and perfdata.warning_threshold != "":
                if perfdata.value >= int(perfdata.warning_threshold):
                    if 1 > exitcode:
                        exitcode = 1

        # Fixed: 'is not None' instead of '!= None' — identity is the
        # correct way to test for None.
        if not_ok_exitcode is not None and self.not_ok_perfdata > 0:
            exitcode = not_ok_exitcode

        if self.not_ok_perfdata > 0:
            # Optional operator override.  int(os.getenv(...)) raises
            # TypeError when the variable is unset (None) and ValueError
            # when it is not numeric; catching exactly those replaces the
            # original bare 'except' that hid every failure.
            try:
                exitcode = int(os.getenv("exitcode"))
            except (TypeError, ValueError):
                pass

        return exitcode
예제 #5
0
# Alyvix allows you to automate and monitor all types of applications
# Copyright (C) 2015 Alan Pipitone
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
# Developer: Alan Pipitone (Violet Atom) - http://www.violetatom.com/
# Supporter: Wuerth Phoenix - http://www.wuerth-phoenix.com/
# Official website: http://www.alyvix.com/


from alyvix.tools.info import InfoManager

# Refresh every cached piece of environment information first.
info_manager = InfoManager()
info_manager.update()

# Configure the run: matching tolerance and delay between actions.
for _key, _value in (('OVERLAPPING TOLERANCE FACTOR', 10),
                     ('ACTIONS DELAY', 2)):
    info_manager.set_info(_key, _value)
예제 #6
0
# Alyvix allows you to automate and monitor all types of applications
# Copyright (C) 2015 Alan Pipitone
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
# Developer: Alan Pipitone (Violet Atom) - http://www.violetatom.com/
# Supporter: Wuerth Phoenix - http://www.wuerth-phoenix.com/
# Official website: http://www.alyvix.com/

from alyvix.tools.info import InfoManager

# Refresh every cached piece of environment information first.
info_manager = InfoManager()
info_manager.update()

# Configure the run: matching tolerance and delay between actions.
for _key, _value in (('OVERLAPPING TOLERANCE FACTOR', 10),
                     ('ACTIONS DELAY', 2)):
    info_manager.set_info(_key, _value)