class ProcessManager(object):
    r"""
    valf internal class to provide essential processing for observers

    - initialize

        - start logger
        - initialize data_manager
        - search classes based on class BaseComponentInterface

    - load configuration

        - import declared observer modules
        - set data ports

    - run validation

        - call all methods of all observers sequentially
        - use bpl_reader or similar to run through all recordings

    This class also is responsible to read out configuration and interpretation from config file.

    general used ports on bus ``Global``:

        - set "ConfigFileVersions"
            dict with file name as key and version as value for each loaded config file
        - read "FileCount"
            to show progress bar
        - read "IsFinished"
            to continue with next state when all sections of a recording are validated (set by `SignalExtractor`)

    Also setting ports as defined in ``InputData``  for the named bus.

    """
    def __init__(self, plugin_dir, fail_on_error=False):
        """init essentials

        :param plugin_dir: path or list of paths where to start search for observers
        :type plugin_dir:  string or list of strings

        :param fail_on_error: flag to break immediately if an exception is found
        :type fail_on_error:  boolean
        """
        self._logger = Logger(self.__class__.__name__)
        self._logger.debug()

        self._component_list = []

        self._version = "$Revision: 1.11 $"

        self._progressbar = None
        self._file_count = 0
        self._object_map_list = []
        self._config_file_loaded = False
        self._fail_on_error = fail_on_error
        self._configfiles = []  # used as stack to load configs recursively
        self._config_file_versions = {}

        self._uncrepl = UncRepl()

        # fix: the docstring allows a single path string, but list.extend()
        # would fail on it; also copy so the caller's list is not mutated
        if not isinstance(plugin_dir, (list, tuple)):
            plugin_dir = [plugin_dir]
        plugin_dir = list(plugin_dir)
        plugin_dir.extend([
            self._uncrepl(dir_) for dir_ in OBS_DIRS if dir_ not in plugin_dir
        ])

        self._logger.info("Searching for plug-ins. Please wait...")
        class_map_list, self._plugin_error_list = find_class(
            bci, plugin_dir, with_error_list=True)
        if class_map_list is None:
            self._logger.error("No plug-ins found.")
            return

        self._logger.debug("%d plug-ins found: %s." %
                           (len(class_map_list), ", ".join(
                               [i['name'] for i in class_map_list])))
        # map observer class name -> class object, used when loading the config
        self._plugin_map = {
            plugin['name']: plugin["type"]
            for plugin in class_map_list
        }

        # Create data manager object
        try:
            self._data_manager = DataManager()
        except Exception:  # fix: was a bare except (also caught SystemExit/KeyboardInterrupt)
            self._logger.exception("Couldn't instantiate 'DataManager' class.")
            if self._fail_on_error:
                raise
            sexit(bci.RET_VAL_ERROR)

    def _initialize(self):
        """calls initialize and post_initialize of ordered observers

        :return: bci.RET_VAL_OK on success, bci.RET_VAL_ERROR otherwise
        """
        self._logger.debug()

        # Calls Initialize for each component in the list
        for component in self._component_list:
            try:
                if component.Initialize() != bci.RET_VAL_OK:
                    self._logger.error(
                        "Class '%s' returned with error from Initialize() method."
                        % component.__class__.__name__)
                    return bci.RET_VAL_ERROR
            except Exception:  # fix: was a bare except
                self._logger.exception(
                    'EXCEPTION during Initialize of %s:\n%s' %
                    (component.__class__.__name__, format_exc()))
                if self._fail_on_error:
                    raise
                return bci.RET_VAL_ERROR

        # Calls PostInitialize for each component in the list
        for component in self._component_list:
            try:
                if component.PostInitialize() != bci.RET_VAL_OK:
                    self._logger.error(
                        "Class '%s' returned with error from PostInitialize() method."
                        % component.__class__.__name__)
                    return bci.RET_VAL_ERROR
            except Exception:  # fix: was a bare except
                self._logger.exception(
                    'EXCEPTION during PostInitialize of %s:\n%s' %
                    (component.__class__.__name__, format_exc()))
                if self._fail_on_error:
                    raise
                return bci.RET_VAL_ERROR

        self._file_count = self.get_data_port("FileCount")
        if self._file_count > 0:
            self._progressbar = ProgressBar(0,
                                            self._file_count,
                                            multiline=True)
        else:
            self._file_count = 0

        self._logger.debug("all components ready to run!")
        self._logger.mem_usage()
        return bci.RET_VAL_OK

    def _process_data(self):
        """calls load_data, process_data as well as post_process_data of ordered observers

        :return: bci.RET_VAL_OK when at least one file was processed completely,
                 bci.RET_VAL_ERROR otherwise
        """
        self._logger.debug()

        if self._file_count == 0:
            self._logger.debug(
                str(_getframe().f_code.co_name) + "No files to process.")
            # fix: was an unqualified 'RET_VAL_OK' (NameError); everywhere
            # else in this class the constant is taken from bci
            return bci.RET_VAL_OK

        ret = bci.RET_VAL_ERROR
        counter = 0

        while not self.get_data_port("IsFinished"):
            # update progressbar position
            self._progressbar(counter)

            counter += 1

            # Calls LoadData for each component in the list
            for component in self._component_list:
                try:
                    ret = component.LoadData()
                    # fix: compare return codes by value, not identity ('is')
                    if ret == bci.RET_VAL_ERROR:
                        self._logger.error(
                            "Class '%s' returned with error from LoadData() method, "
                            "continue with next sim file." %
                            component.__class__.__name__)
                        break
                except Exception:  # fix: was a bare except
                    self._logger.exception(
                        'exception raised during LoadData of %s:\n%s, '
                        'continue with next sim file.' %
                        (component.__class__.__name__, format_exc()))
                    ret = bci.RET_VAL_ERROR
                    if self._fail_on_error:
                        raise
                    break

            if ret == bci.RET_VAL_ERROR:
                continue

            # Calls ProcessData for each component in the list
            for component in self._component_list:
                try:
                    ret = component.ProcessData()
                    if ret == bci.RET_VAL_ERROR:
                        self._logger.error(
                            "Class '%s' returned with error from ProcessData() method, "
                            "continue with next sim file." %
                            component.__class__.__name__)
                        break
                except Exception:  # fix: was a bare except
                    self._logger.exception(
                        'EXCEPTION during ProcessData of %s:\n%s, '
                        'continue with next sim file.' %
                        (component.__class__.__name__, format_exc()))
                    ret = bci.RET_VAL_ERROR
                    if self._fail_on_error:
                        raise
                    break

            if ret == bci.RET_VAL_ERROR:
                continue

            # Calls PostProcessData for each component in the list
            for component in self._component_list:
                try:
                    ret = component.PostProcessData()
                    if ret == bci.RET_VAL_ERROR:
                        self._logger.error(
                            "Class '%s' returned with error from PostProcessData() method, "
                            "continue with next sim file." %
                            component.__class__.__name__)
                        break
                except Exception:  # fix: was a bare except
                    self._logger.exception(
                        'EXCEPTION during PostProcessData of %s:\n%s, '
                        'continue with next sim file.' %
                        (component.__class__.__name__, format_exc()))
                    ret = bci.RET_VAL_ERROR
                    if self._fail_on_error:
                        raise
                    break

            if ret == bci.RET_VAL_ERROR:
                continue

            # we have processed correctly at least a file,
            # set _process_data return value to OK in order to finish it's process

            self._logger.mem_usage()
            ret = bci.RET_VAL_OK

        if counter > 0:
            self._progressbar(counter)

        return ret

    def _terminate(self):
        """calls pre_terminate and terminate of ordered observers

        :return: bci.RET_VAL_OK on success, bci.RET_VAL_ERROR otherwise
        """
        self._logger.debug()

        # Calls PreTerminate for each component in the list
        for component in self._component_list:
            try:
                if component.PreTerminate() != bci.RET_VAL_OK:
                    self._logger.error(
                        "Class '%s' returned with error from PreTerminate() method."
                        % component.__class__.__name__)
                    return bci.RET_VAL_ERROR
            except Exception:
                self._logger.exception(
                    'EXCEPTION during PreTerminate of observer %s:\n%s' %
                    (component.__class__.__name__, format_exc()))
                if self._fail_on_error:
                    raise
                return bci.RET_VAL_ERROR

        # Calls Terminate for each component in the list
        for component in self._component_list:
            try:
                if component.Terminate() != bci.RET_VAL_OK:
                    # fix: no exception is active here, so use error() instead
                    # of exception() (consistent with the PreTerminate branch)
                    self._logger.error(
                        "Class '%s' returned with error from Terminate() method."
                        % component.__class__.__name__)
                    return bci.RET_VAL_ERROR
            except Exception:  # fix: was a bare except
                self._logger.exception(
                    'EXCEPTION during Terminate of observer %s:\n%s' %
                    (component.__class__.__name__, format_exc()))
                if self._fail_on_error:
                    raise
                return bci.RET_VAL_ERROR

        return bci.RET_VAL_OK

    def get_data_port(self, port_name, bus_name="Global"):
        """gets data from a bus/port

        :param port_name: port name to use
        :param bus_name: bus name to use
        :return: data from bus/port
        """
        return self._data_manager.get_data_port(port_name, bus_name)

    def set_data_port(self, port_name, port_value, bus_name="Global"):
        """sets data to a bus/port

        :param port_name: port name to use
        :param port_value: data value to be set
        :param bus_name: bus name to use
        """
        self._data_manager.set_data_port(port_name, port_value, bus_name)

    def _get_err_trace(self):
        """returns error trace from error list

        :return: formatted multi-line string with one entry per plugin
                 import failure, or a fallback message when none were recorded
        """
        if self._plugin_error_list:
            err_trace = '\n'.join('++ file: {0}.py -- {1}\n'.format(
                e[0], e[1].replace('\n', '\n--> '))
                                  for e in self._plugin_error_list)
        else:
            err_trace = 'no detailed info about failure'

        return err_trace

    def load_configuration(self, configfile):
        """loads configuration from cfg-file

        see more details in `Valf.LoadConfig`

        :param configfile: path/to/file.cfg
        :return: success (bool)
        :raises ValfError: if the config file does not exist or an observer
                           class declared in it cannot be found
        """
        configfile = self._uncrepl(configfile)
        cls_obj = None

        if not opath.exists(configfile):
            raise ValfError(
                "Configuration file '%s' doesn't exist or is invalid." %
                configfile)

        self.set_data_port(CFG_FILE_VERSION_PORT_NAME,
                           self._config_file_versions)
        autoorder = [-1]
        component_map = self._read_config(configfile)
        self._logger.info(
            "loading version: '%s' of config file '%s'" %
            (self._config_file_versions.get(configfile, ""), configfile))
        for componentname in component_map:
            try:  # retrieve details
                # NOTE(security): eval() on config values executes arbitrary
                # code from the cfg file; acceptable only for trusted,
                # locally-maintained configs -- do not feed untrusted files
                class_name = eval(component_map[componentname].get(
                    "ClassName", "None"))
                port_out_list = eval(component_map[componentname].get(
                    "PortOut", "[]"))
                input_data_list = eval(component_map[componentname].get(
                    "InputData", "[]"))
                connect_bus_list = eval(component_map[componentname].get(
                    "ConnectBus", "Bus#1"))
                order = component_map[componentname].get(
                    "Order",
                    max(autoorder) + 1)
                if order in autoorder:
                    self._logger.info(
                        "order %d for component %s already in use!" %
                        (order, componentname))
                autoorder.append(order)
                # check them, they should be there all!
                if (componentname != "Global" and
                    (class_name is None or port_out_list is None
                     or input_data_list is None or connect_bus_list is None)):
                    msg = "Invalid port value or syntax wrong on component: '%s' with parsed settings\n" \
                          "ClassName: %s, PortOut: %s,\n" \
                          "InputData: %s, \n" \
                          "ConnectBus: %s\n"\
                          "  only ClassName for 'Global' can be None, compare parsed settings with defines in config." \
                          % (componentname, class_name, port_out_list, input_data_list, connect_bus_list)
                    raise ValueError(msg)
            except Exception as err:  # fix: Py2-only 'except Exception, err' syntax
                self._logger.error(err)
                if self._fail_on_error:
                    raise
                continue

            # fix: isinstance() instead of type() comparison
            if not isinstance(connect_bus_list, (list, tuple)):
                connect_bus_list = [connect_bus_list]

            if class_name in self._plugin_map:
                # Observer can be loaded -> Everything fine.
                cls_obj = self._plugin_map[class_name](self._data_manager,
                                                       componentname,
                                                       connect_bus_list)
            elif componentname != "Global":
                # Observer can NOT be loaded -> Create Log Entry and raise Exception !
                err_trace = self._get_err_trace()

                # Create Log Entry
                self._logger.error('some python modules have coding errors')
                self._logger.error(
                    'Please check following list for more details:')
                self._logger.error(err_trace)

                msg = "Observer with ClassName %s not found, please check log for more info!" % class_name
                self._logger.error(msg)
                self._logger.error("File: \"valf.log\"")
                raise ValfError(msg, ValfError.ERR_OBSERVER_CLASS_NOT_FOUND)

            # register all declared output ports (initialised to None) on each bus
            for port_out in port_out_list:
                for bus_name in connect_bus_list:
                    tmp = "Register port: Provider="
                    tmp += "'%s', PortName='%s', Bus='%s'." % (
                        componentname, port_out, bus_name)
                    self._logger.debug(tmp)
                    self.set_data_port(port_out, None, bus_name)

            if isinstance(input_data_list, list):  # do it the usual way
                for input_data in input_data_list:
                    param_name = input_data[0]
                    param_value = input_data[1]
                    for bus_name in connect_bus_list:
                        tmp = "Setting input data.[Component='%s', " % componentname
                        tmp += "Bus='%s', PortName='%s', " % (bus_name,
                                                              param_name)
                        tmp += "PortValue=%s]" % str(param_value)
                        self._logger.debug(tmp)
                        self.set_data_port(param_name, param_value, bus_name)
            elif isinstance(input_data_list,
                            dict):  # we've got key value pairs already
                # fix: items() instead of Py2-only iteritems()
                for param_name, param_value in input_data_list.items():
                    for bus_name in connect_bus_list:
                        tmp = "Setting input data.[Component='%s', " % componentname
                        tmp += "Bus='%s', PortName='%s', " % (bus_name,
                                                              param_name)
                        tmp += "PortValue=%s]" % str(param_value)
                        self._logger.debug(tmp)
                        self.set_data_port(param_name, param_value, bus_name)

            if componentname != "Global":
                self._object_map_list.append({
                    "Order": order,
                    "ComponentName": componentname,
                    "ClsObj": cls_obj
                })

        # If whole Observer loading is done successfully,
        # we write anyway all found coding errors into the Log File as warnings
        if self._plugin_error_list:
            err_trace = self._get_err_trace()
            self._logger.warning('some python modules have coding errors')
            self._logger.warning(
                'Please check following list for more details:')
            self._logger.warning(err_trace)

        # build the execution order for _initialize/_process_data/_terminate
        self._component_list = []
        if self._object_map_list:
            self._object_map_list.sort(key=lambda x: x["Order"])

            for object_map in self._object_map_list:
                self._component_list.append(object_map["ClsObj"])

        if not self._component_list:
            self._logger.error(
                "No component loaded. Please check config file '%s'." %
                str(configfile))
            return False

        self._config_file_loaded = True

        return True
class PluginManager(object):
    """
    class to search for plugin classes based on 'BaseComponentInterface'
    to be used as observer components

    can check for duplicated class names to throw an error if it finds one
    """
    def __init__(self, folder_path_list, cls):
        """
        initialise a new object, adds existing folders of folder_path_list to sys.path

        :param folder_path_list: list [] of folders to check recursively
        :param cls: base class of which to find subclasses
        """
        self._uncrepl = UncRepl()
        # fix: the UNC-replaced list was previously overwritten right after
        # creation with the raw folder_path_list, discarding the replacement
        self.__folder_path_list = [
            self._uncrepl(fpl) for fpl in folder_path_list
        ]
        self.__cls = cls

        self.__logger = Logger(self.__class__.__name__)

        # make every plugin folder importable
        for folder_path in self.__folder_path_list:
            if folder_path not in sys.path:
                sys.path.append(folder_path)

    def __get_plugin_list(self, module_name_list):
        """
        returns list with plugins

        :param module_name_list: list of modules to search in; each entry is
                                 either a plain module name or a
                                 [absolute_module_path, module_name] pair
        :return: list of {"type": class, "name": class_name} dicts,
                 or None when nothing was found
        """
        plugin_list = []

        for module_name in module_name_list:
            self.__logger.debug("Checking: %s.py..." % module_name)
            try:
                # use relative or absolute (for all stk modules) import method
                if isinstance(module_name, (list, tuple)):
                    module = __import__(module_name[0], globals(), locals(),
                                        module_name[1], 0)
                else:
                    module = __import__(module_name)
            except Exception as msg:
                self.__logger.warning(
                    "Couldn't import module '%s' due to '%s'" %
                    (str(module_name), str(msg)))
                continue

            # look through this dictionary for classes
            # that are subclass of PluginInterface but are not PluginInterface itself
            module_candidates = list(module.__dict__.items())

            for class_name, entry in module_candidates:
                if class_name == self.__cls.__name__:
                    continue

                if entry is None:
                    continue

                # skip Qt wrapper objects, they are never observer plugins
                if str(entry).find("PyQt4") > -1:
                    continue

                try:
                    if issubclass(entry, self.__cls):
                        self.__logger.debug(
                            "Found plugin.[Module: '%s', Class: '%s']." %
                            (module_name, class_name))
                        plugin_list.append({"type": entry, "name": class_name})
                except TypeError:
                    # this happens when a non-type is passed in to issubclass. We
                    # don't care as it can't be a subclass of PluginInterface if
                    # it isn't a type
                    continue

        if len(plugin_list) > 0:
            return plugin_list

        return None

    def get_plugin_class_list(self, remove_duplicates=False):
        """searches the configured folders to find observer classes

        :param remove_duplicates: whether duplicates should be removed
        :return: list of {"type": class, "name": class_name} dicts,
                 or None when nothing was found / duplicates remained
        """
        module_name_list = []
        for folder_path in self.__folder_path_list:
            try:
                file_list = os.listdir(folder_path)
            except OSError:
                continue

            # For all modules within the stk use absolute module path to
            # avoid problems with duplicate package names
            lst = []
            stk_found = False
            path = folder_path
            module_path = ""
            while stk_found is False:
                head, tail = os.path.split(path)

                if tail == '':
                    if head != '':
                        lst.insert(0, head)
                    break
                else:
                    lst.insert(0, tail)
                    path = head
                    if tail == 'stk':
                        stk_found = True
                        for p_k in lst:
                            module_path += p_k + "."

            for file_name in file_list:
                if file_name.endswith(".py") and not file_name.startswith(
                        "__") and not file_name.startswith("stk"):
                    module_name = file_name.rsplit('.', 1)[0]
                    if module_path == "":
                        module_name_list.append(module_name)
                    else:
                        # add stk path to module name
                        module_name_list.append(
                            [module_path + module_name, module_name])

        plugin_list = self.__get_plugin_list(module_name_list)
        # fix: __get_plugin_list returns None when nothing was found;
        # len(None) raised a TypeError here
        if plugin_list:
            check_duplicates = self.__check_for_duplicate_classes(plugin_list)
            if check_duplicates == -1 and remove_duplicates is True:
                plugin_list = self.__remove_duplicate_classes(plugin_list)
                return plugin_list
            elif check_duplicates == 0:
                return plugin_list

        return None

    def __check_for_duplicate_classes(self, plugin_list):
        """ Check if there are any duplicates in the class list and throw an error if found.
        @param plugin_list: A list of the plugins found.
        @return: 0 for success and -1 if duplicate is found.
        """
        num_modules = len(plugin_list)
        for idx, module_name in enumerate(plugin_list):
            for i in range(idx + 1, num_modules):
                if module_name["name"] == plugin_list[i]["name"]:
                    self.__logger.error("Duplicate class name found: %s" %
                                        (module_name["name"]))
                    return -1
        return 0

    @staticmethod
    def __remove_duplicate_classes(plugin_list):
        """removes duplicate classes from plugin list, keeping the first
        occurrence of each class name
        """
        temp_mem = []
        copy_plugin_list = []

        for idx, module_name in enumerate(plugin_list):
            if module_name['name'] not in temp_mem:
                copy_plugin_list.append(plugin_list[idx])
                temp_mem.append(module_name['name'])

        return copy_plugin_list
# NOTE(review): the two lines below originally read "Esempio n. 3" / "0" --
# stray text from concatenating scraped snippets; kept as a comment because
# bare text here is a syntax error.
class ValEventList(object):
    """
    ValEventLoader Class - loads Event details from Database
    """
    def __init__(self, plugin_folder_list=None, ev_filter=None):
        """class for loading events form database

        :param plugin_folder_list: list of Plugin folders i.e. location where event class definition are located.
                               If folders are not provided or definition were not found by plugin manager
                               then typed class will be generated runtime inherited from `ValBaseEvent`.
                               **Pass this argument only if you have defined additional method.**
        :type plugin_folder_list: list
        :param ev_filter: Instance of Event Filter
        :type ev_filter: `ValEventFilter`
        """
        self._log = Logger(self.__class__.__name__)

        # a None folder list means event classes get generated at runtime
        self.__plugin_folders = plugin_folder_list if plugin_folder_list is not None else None

        # lazily created by _init_event_types()
        self.__plugin_manager = None
        self.__event_types_list = None

        # loaded events and per-load bookkeeping
        self.__event_list = []
        self.__event_inst_created = []

        self.__filter = ev_filter

    def __del__(self):
        """clean up
        """
        # drop references to loaded events so they can be collected
        self.__event_list = []

    def _init_event_types(self, plugin_folders=None):
        """ Init the Plugin

        :param plugin_folders: optional new list of plugin folders; when given,
                               the plugin manager is rebuilt with them
        """
        new_plugin = False

        if plugin_folders is not None:
            new_plugin = True
            self.__plugin_folders = plugin_folders
        if self.__plugin_manager is None or new_plugin:
            if self.__plugin_folders is not None:
                self.__plugin_manager = PluginManager(self.__plugin_folders, ValBaseEvent)

        # fix: only (re)compute the event type list when it is still unset;
        # the previous 'else' branch wiped an already-loaded list back to []
        # on every later call (e.g. a second Load())
        if self.__event_types_list is None:
            if self.__plugin_folders is not None:
                self.__event_types_list = self.__plugin_manager.get_plugin_class_list(remove_duplicates=True)
            else:
                self.__event_types_list = []

    def Load(self, dbi_val, dbi_gbl, testrun_id, coll_id=None, meas_id=None,  # pylint: disable=C0103
             rd_id=None, obs_name=None, level=ValSaveLoadLevel.VAL_DB_LEVEL_BASIC,
             beginabsts=None, endabsts=None, asmt_state=None, filter_cond=None, plugin_folders=None, cons_key=None):
        """
        Load Events

        :param dbi_val: Validation Result Database interface
        :type dbi_val: `OracleValResDB` or `SQLite3ValResDB`
        :param dbi_gbl: Validation Global Database interface
        :type dbi_gbl: `OracleGblDB` or `SQLite3GblDB`
        :param testrun_id: Testrun Id as mandatory field
        :type testrun_id: Integer
        :param coll_id:  Not Used. It is useless to pass any values. This information is taken
                        from database using rd_id
        :type coll_id: Integer
        :param meas_id: Measurement Id load event only for specific recording
        :type meas_id: Integer
        :param rd_id: Result Descriptor Id as mandatory field
        :type rd_id: Integer or List
        :param obs_name: Not Used. It is useless to pass any values.
                        This information is taken from database with testrun_id
        :type obs_name: String
        :param level: Load Level to specify to which level the event data should be level
                      with following possibilities::

                        VAL_DB_LEVEL_STRUCT = Events
                        VAL_DB_LEVEL_BASIC = Events + Assessment
                        VAL_DB_LEVEL_INFO = Events + Assessment + Attribute
                        VAL_DB_LEVEL_ALL = Events + Assessment + Attribute + Image

        :type level: `ValSaveLoadLevel`
        :param beginabsts: Basic filter. Begin Absolute Time stamp i.e. Start of the events
        :type beginabsts: Integer
        :param endabsts: End Absolute Time stamp i.e. End of the events
        :type endabsts: Integer
        :param asmt_state: Assessment State
        :type asmt_state: String
        :param filter_cond: Advance filter feature which can filter events based on event attributes;
                            filter map name specified in XML config file of custom filters.
                            Please read documentation of `ValEventFilter` for more detail
        :param plugin_folders: The value passed in constructor overrules. It is useless to pass value
        :type plugin_folders: list
        :param cons_key: Constrain Key. Not used
        :type cons_key: NoneType
        :return: True -- note: also returns True (without loading anything)
                 when rd_id is None
        """
        # deliberately unused parameters, kept for interface compatibility
        _ = coll_id
        _ = obs_name
        _ = asmt_state
        _ = plugin_folders
        _ = cons_key

        # flags derived from 'level' controlling how much detail is fetched
        inc_asmt = False
        inc_attrib = False
        inc_images = False
        self.__event_list = []
        self.__event_inst_created = []
        unit_map = {}

        # resolve the named filter map into a filter statement, if possible;
        # on failure events are loaded unfiltered (only logged, not raised)
        statement = None
        if filter_cond is not None:
            if self.__filter is not None:
                statement = self.__filter.Load(dbi_val, filtermap_name=filter_cond)
                if statement is None:
                    self._log.error("The map filter was invalid. Events will be loaded without filter")
                elif type(statement) is list:
                    self._log.debug("The map filter was found. Events will be loaded with filter")

        # expand rd_id to its child descriptor list; fall back to the id itself
        if rd_id is not None:
            rd_list = dbi_val.get_resuls_descriptor_child_list(rd_id)
            if len(rd_list) == 0:
                rd_list = [rd_id]
        else:
            # without a result descriptor nothing is loaded; reports success
            return True

        if level & ValSaveLoadLevel.VAL_DB_LEVEL_2:
            inc_asmt = True

        if level & ValSaveLoadLevel.VAL_DB_LEVEL_3:
            inc_attrib = True
            # build unit-id -> unit-name map for attribute decoding
            unit_records = dbi_gbl.get_unit()
            for unit_entry in unit_records:
                unit_map[str(unit_entry[COL_NAME_UNIT_ID])] = unit_entry[COL_NAME_UNIT_NAME]

        if level & ValSaveLoadLevel.VAL_DB_LEVEL_4:
            inc_images = True

        records, image_attribs = dbi_val.get_event_for_testrun(testrun_id, measid=meas_id, beginabsts=beginabsts,
                                                               endabsts=endabsts, rdid=rd_list, cond=None,
                                                               filt_stat=statement,
                                                               inc_asmt=inc_asmt, inc_attrib=inc_attrib,
                                                               inc_images=inc_images)
        # records comes back as [column_names, rows]
        col_list = records[0]
        records = records[1]
        self.__event_inst_created = {}
        self._init_event_types()

        # process rows in chunks of 10000 to bound peak memory usage
        while True:
            if len(records) <= 10000:
                self._prepare_events(dbi_val, records, col_list, image_attribs, unit_map,
                                     inc_asmt=inc_asmt, inc_attrib=inc_attrib, inc_images=inc_images)
                records = []
                break
            else:
                self._prepare_events(dbi_val, records[:10000], col_list, image_attribs, unit_map,
                                     inc_asmt=inc_asmt, inc_attrib=inc_attrib, inc_images=inc_images)
                del records[:10000]

        self.__event_inst_created = {}
        return True

    def _prepare_events(self, dbi_val, records, col_list, image_attribs, unit_map,
                        inc_asmt=True, inc_attrib=True, inc_images=True):
        """
        Prepare Event Object list by taking chunks for records from database

        Each record is one event/attribute row; rows sharing the same SEID are
        folded into one event instance appended to the internal event list.
        The SEID -> list-index map (``self.__event_inst_created``) is kept
        across calls so chunked processing extends the same events.

        :param dbi_val: DB interface to Validation Database
        :type dbi_val: OracleValResDB or  SQLite3ValResDB
        :param records: List of records as list of dict
        :type records: list
        :param col_list: Column List in records
        :type col_list: list
        :param image_attribs: Event Image attribute Id
        :type image_attribs: list
        :param unit_map: Unit map of Unit Id VS Unit Name
        :type unit_map: Dict
        :param inc_asmt: Flag to include Assessment in Event. Default True
        :type inc_asmt: Bool
        :param inc_attrib: Flag to include Event Attributes. Default True
        :type inc_attrib: Bool
        :param inc_images: Flag to include Event Attribute Images. Default True
        :type inc_images: Bool
        """
        event = ValBaseEvent()  # fix pylint problem, event will be set properly later
        if len(records) > 0:
            # resolve all column positions once per chunk; records are accessed
            # positionally below.
            # NOTE(review): records are indexed by position although the
            # docstring calls them dicts -- verify against get_event_for_testrun
            seid_eventlistmap = self.__event_inst_created
            sed_idx = col_list.index(COL_NAME_EVENTS_SEID)
            cls_name_idx = col_list.index(COL_NAME_EVENTS_VIEW_CLASSNAME)
            begin_idx = col_list.index(COL_NAME_EVENTS_VIEW_BEGINABSTS)
            start_idx = col_list.index(COL_NAME_EVENTS_VIEW_START_IDX)
            end_idx = col_list.index(COL_NAME_EVENTS_VIEW_ENDABSTS)
            stop_idx = col_list.index(COL_NAME_EVENTS_VIEW_STOP_IDX)
            measid_idx = col_list.index(COL_NAME_EVENTS_MEASID)

            if inc_asmt:
                # assessment columns are only present when loading included
                # assessments (VAL_DB_LEVEL_2 and above)
                usr_idx = col_list.index(COL_NAME_ASS_USER_ID)
                wf_idx = col_list.index("WF" + COL_NAME_WORKFLOW_NAME)
                asmtst_idx = col_list.index("ST" + COL_NAME_ASSESSMENT_STATE_NAME)
                comm_idx = col_list.index(COL_NAME_ASS_COMMENT)
                asmt_date_idx = col_list.index(COL_NAME_ASS_DATE)
                issue_idx = col_list.index(COL_NAME_ASS_TRACKING_ID)
                resassid_idx = col_list.index(COL_NAME_EVENTS_RESASSID)
            if inc_attrib:
                # attribute columns (VAL_DB_LEVEL_3 and above)
                unitid_idx = col_list.index(COL_NAME_EVENT_ATTR_TYPES_UNITID)
                arribid_idx = col_list.index(COL_NAME_EVENT_ATTR_ATTRID)
                atrtypeid_idx = col_list.index(COL_NAME_EVENT_ATTR_TYPES_NAME)
                value_idx = col_list.index(COL_NAME_EVENT_ATTR_VALUE)

        for record in records:
            # first row for a SEID creates the event; later rows only add
            # attributes to the already created instance
            if str(int(record[sed_idx])) not in seid_eventlistmap:

                # look up a registered event class matching the DB class name
                cls = None
                for etype in self.__event_types_list:
                    if etype['name'] == record[cls_name_idx]:
                        cls = etype['type']
                        break

                if cls is None:
                    # no registered class: build an ad-hoc subclass of
                    # ValBaseEvent named after the DB class name
                    e_type = type(record[cls_name_idx], (ValBaseEvent,), {})
                    event = e_type(start_time=record[begin_idx], start_index=record[start_idx],
                                   stop_time=record[end_idx], stop_index=record[stop_idx], seid=record[sed_idx])
                else:
                    # NOTE(review): object.__new__ + explicit __init__ skips any
                    # custom cls.__new__ -- presumably intentional, confirm
                    event = object.__new__(cls)
                    event.__init__(start_time=record[begin_idx], start_index=record[start_idx],
                                   stop_time=record[end_idx], stop_index=record[stop_idx], seid=record[sed_idx])

                event.SetMeasId(record[measid_idx])

                if inc_asmt:
                    asmt = ValAssessment(user_id=record[usr_idx], wf_state=record[wf_idx],
                                         ass_state=record[asmtst_idx], ass_comment=record[comm_idx],
                                         date_time=record[asmt_date_idx], issue=record[issue_idx])
                    asmt.ass_id = record[resassid_idx]
                    event.AddAssessment(asmt)

                self.__event_list.append(event)
                # remember position so following rows of this SEID find it
                seid_eventlistmap[str(int(record[sed_idx]))] = len(self.__event_list) - 1

            else:
                event = self.__event_list[seid_eventlistmap[str(int(record[sed_idx]))]]

            if inc_attrib:
                # translate unit id into its display name; unknown/NULL unit
                # stays as string repr of the raw value (e.g. 'None')
                if record[unitid_idx] is not None:
                    unit = unit_map[str(record[unitid_idx])]
                else:
                    unit = str(record[unitid_idx])

                # load attribute image blob only when requested and available
                if inc_images and record[arribid_idx] in image_attribs:
                    image = dbi_val.get_event_image(record[arribid_idx])[COL_NAME_EVENT_IMG_IMAGE]
                else:
                    image = None
                event.AddAttribute(record[atrtypeid_idx], value=record[value_idx], unit=unit, image=image)

    def Save(self, dbi_val, dbi_gbl, testrun_id, coll_id, obs_name=None, parent_id=None,  # pylint: disable=C0103
             level=ValSaveLoadLevel.VAL_DB_LEVEL_BASIC, cons_key=None):
        """
        Save all events of the internal event list to the database.

        Saving is skipped completely when the testrun is locked. Events are
        stored one by one; the first failure (either an event returning False
        or raising `ValEventError`) stops the loop.

        :param dbi_val: Validation Result Database interface
        :type dbi_val: `OracleValResDB` or `SQLite3ValResDB`
        :param dbi_gbl: Validation Global Database interface
        :type dbi_gbl: `OracleGblDB` or `SQLite3GblDB`
        :param testrun_id: Testrun Id
        :type testrun_id: Integer
        :param coll_id: Collection ID
        :type coll_id: Integer
        :param obs_name: Observer Name registered in Global Database
        :type obs_name: String
        :param parent_id: Parent Result Descriptor Id
        :type parent_id: Integer
        :param level: Save level::

                            - VAL_DB_LEVEL_STRUCT: Result Descriptor only,
                            - VAL_DB_LEVEL_BASIC: Result Descriptor and result,
                            - VAL_DB_LEVEL_INFO: Result Descriptor, Result and Assessment
                            - VAL_DB_LEVEL_ALL: Result with images and all messages

        :param cons_key: constraint key -- for future use
        :type cons_key: NoneType
        :return: True when all events were stored, False otherwise
        :rtype: bool
        """
        res = False

        # a locked testrun must not be modified at all
        if dbi_val.get_testrun_lock(tr_id=testrun_id) == 1:
            self._log.error("No Event is saved due to locked testrun ")
            return res

        for event in self.__event_list:
            try:
                res = event.Save(dbi_val, dbi_gbl, testrun_id, coll_id,
                                 event.GetMeasId(), obs_name, parent_id,
                                 level, cons_key)
            except ValEventError as exc:
                self._log.warning("Events %s could not be stored. See details: %s " % (str(event), exc))
                res = False
            # abort on first failed event
            if res is False:
                break

        if res is True:
            pass
            # dbi_val.commit()
            # dbi_gbl.commit()

        return res
# (extraction artifact removed here: stray non-Python text "Esempio n. 4" / "0")
class AdditionalObjectList(object):
    """
    AdditionalObjectList maps a smaller object list to a larger one, e.g.:

    OBJ_signals: EmGenObjectList (62 elements) -> AOJ_signals: EMPublicObjData (100 elements)

    via a mapping signal AOJ_mapping: ``SIM VFB ALL.DataProcCycle.EmGenObjectList.aObject[%].General.uiID``
    ``EmGenObjectList.aObject[3].General.uiID = 17 -> EMPublicObjData.Objects[17].Private.u_RadarBasisClassInternal``

    the output port is same as for OBJList:
    ``objects = self._data_manager.GetDataPort(OBJECT_PORT_NAME, self.bus_name)``

    Config file example::

           ("OBJ_min_lifetime",        50),
           ("OBJ_number_of_objects",  62),
           ("OOI_number_of_objects",   6),
           ("AOJ_list_size", 100), <--- mandatory
           ("AOJ_mapping", "SIM VFB ALL.DataProcCycle.EmGenObjectList.aObject[%].General.uiID"), <--- mandatory
           ("OBJ_prefix",                 ""),
           ("AOJ_prefix",             ""), <--- optional (needs not be necessarily present in the config, if yes,
                                                    the prefix is added to AOJ_mapping and AOJ_signals)
           ("OBJ_signals", [
           {'SignalName':"SIM VFB ALL.DataProcCycle.EmGenObjectList.aObject[%].General.eMaintenanceState",
           'PortName':'eObjMaintenanceState'},
           {'SignalName':"SIM VFB ALL.DataProcCycle.EmGenObjectList.aObject[%].Kinematic.fDistX",
           'PortName':'DistX'}]),
           ("AOJ_signals", [
           {'SignalName':"SIM VFB ALL.DataProcCycle.EMPublicObjData.Objects[%].Private.u_RadarBasisClassInternal",
           'PortName':'Object_RadarBasisClass'}])
    """
    # error message raised when a mandatory ctor argument is missing
    AOJ_INIT_ERROR = 'mandatory init parameters of AdditionalObjectList are None'

    def __init__(self, sig_read, add_obj_mapping_rule,
                 add_obj_port_and_signal_names, add_obj_list_size,
                 add_obj_prefix):
        """init

        :param sig_read: former binary signal reader, not needed anymore
        :type  sig_read: None
        :param add_obj_mapping_rule: AOJ_mapping signal name
        :type  add_obj_mapping_rule: string
        :param add_obj_port_and_signal_names: AOJ_signals
        :type  add_obj_port_and_signal_names: list of strings
        :param add_obj_list_size: AOJ_list_size
        :type  add_obj_list_size: integer
        :param add_obj_prefix: AOJ_prefix for signal and mapping name
        :type  add_obj_prefix: string
        :raises ValueError: if any of the mandatory parameters is None
        """
        if (add_obj_mapping_rule is None
                or add_obj_port_and_signal_names is None
                or add_obj_list_size is None):
            raise ValueError(AdditionalObjectList.AOJ_INIT_ERROR)
        self.__sig_read = sig_read
        self.__add_obj_port_and_signal_names = add_obj_port_and_signal_names
        self.__add_obj_mapping_rule = add_obj_mapping_rule
        self.__add_obj_list_size = add_obj_list_size
        # AOJ_prefix is optional: fall back to empty string
        if add_obj_prefix is None:
            self.__add_obj_prefix = ""
        else:
            self.__add_obj_prefix = add_obj_prefix.strip()
        # index of the object whose mapping signal is currently cached
        self.__my_object_index = None
        self.__mapping_signal = None
        # cache: index in larger list -> {port name: signal value list}
        self.__object_list = {}
        self.__log = Logger(self.__class__.__name__)

    def clear_cache(self):
        """
        call this in SignalExtractor when the file has been already processed e.g. in PostProcessData
        """
        self.__my_object_index = None
        if self.__mapping_signal is not None:
            del self.__mapping_signal
            self.__mapping_signal = None
        if self.__object_list is not None:
            del self.__object_list
            self.__object_list = {}

    def __full_signal_name(self, rule, object_index):
        """build the prefixed signal name for the given index placeholder '%'

        :param rule: signal name template containing '%'
        :param object_index: object index replacing '%'
        :return: complete signal name including the configured prefix
        """
        return self.__add_obj_prefix + rule.replace('%', str(object_index))

    def __get_other_obj(self, object_index):
        """
        Builds an on demand cache with all signals of the object with the given index. All addressed objects are kept.

        :param object_index: object index
        :return: dict of port name -> signal values for the addressed object
        """
        ret_obj = self.__object_list.get(object_index)
        if ret_obj is not None:
            return ret_obj
        new_obj = {}
        for list_item in self.__add_obj_port_and_signal_names:
            signal_name = self.__full_signal_name(list_item[SIGNAL_NAME], object_index)
            new_obj[list_item[PORT_NAME]] = self.__sig_read[signal_name]
        self.__object_list[object_index] = new_obj
        return new_obj

    def __get_mapping_signal(self, object_index):
        """
        Builds an on demand cache with the mapping signal of the object with the given index.

        New index deletes previous signal.

        :param object_index: object index
        :return: mapping signal values for the given object index
        """
        if self.__my_object_index == object_index:
            # cache hit: same object as last call
            return self.__mapping_signal
        if self.__mapping_signal is not None:
            # new index: drop the previously cached signal content
            del self.__mapping_signal[:]
        # bugfix: the refresh path previously omitted the configured prefix,
        # reading a different signal than the initial fill did
        mapping_signal_name = self.__full_signal_name(self.__add_obj_mapping_rule, object_index)
        self.__mapping_signal = self.__sig_read[mapping_signal_name]
        self.__my_object_index = object_index
        return self.__mapping_signal

    def add_additional_object_signals(self, new_obj, object_index,
                                      start_position, end_position, sig_read):
        """
        Extends the passed object ``new_obj`` with the signal slices of the mapped
        object from the larger list (see class docstring).

        :param new_obj: object represented as a dictionary of list (see SignalExtractor). key is port of sig name,
                        **it extends the passed object new_obj with the other list's signals**
        :type  new_obj: dict
        :param object_index: current object index
        :type  object_index: integer
        :param start_position: start position in the meas file (cycle)
        :type  start_position: integer
        :param end_position: end position in the meas file (cycle)
        :type  end_position: integer
        :param sig_read: signal reader instance to extract a signal
        :type  sig_read: SignalReader
        :return: success (means: mapping object found, otherwise do not use 'new_obj' in signal extractor)
        :rtype:  boolean
        :raises ValueError: if sig_read is None
        """
        if sig_read is None:
            raise ValueError(AdditionalObjectList.AOJ_INIT_ERROR)
        self.__sig_read = sig_read
        ret_success = False
        mapping_signal = self.__get_mapping_signal(object_index)
        # mapping assumes that the corresponding object id pointing to the other object list
        # is constant between start and end:
        idx_larger_list = mapping_signal[
            start_position] if mapping_signal is not None else -1
        if 0 <= idx_larger_list < self.__add_obj_list_size:
            self.__log.debug("MYOID: " + str(object_index) + " OOID: " +
                             str(idx_larger_list) + " START: " +
                             str(start_position) + " END: " +
                             str(end_position))
            other_obj = self.__get_other_obj(idx_larger_list)
            # items() works on Python 2 and 3 (was iteritems, Python 2 only)
            for sig_name, sig_val in other_obj.items():
                new_obj[sig_name] = sig_val[start_position:end_position]
            ret_success = True
        else:
            # bugfix: log idx_larger_list instead of indexing mapping_signal
            # again -- that raised TypeError when mapping_signal was None
            self.__log.warning("obj idx: " + str(object_index) + " pos: " +
                               str(start_position) + '..' + str(end_position) +
                               " index: " + str(idx_larger_list) +
                               " is out of range [0.." +
                               str(self.__add_obj_list_size - 1) + "]" +
                               ", obj period will be omitted")
        return ret_success
# (extraction artifact removed here: stray non-Python text "Esempio n. 5" / "0")
class Valf(object):
    """
    class defining methods to easily start validation suites
    by calling a python script without additional option settings (double click in win)

    mandatory settings:

    - outputpath (as instantiation parameter)
    - config file with `LoadConfig`
    - sw version of sw under test with `SetSwVersion`

    see `__init__` for additional options

    returns error level::

      RET_VAL_OK = 0       suite returned without error
      RET_GEN_ERROR = -1   general error
      RET_SYS_EXIT = -2    sys.exit called
      RET_CFG_ERROR = -3   error in direct settings or configuration file

    **Example:**

    .. python::

        # Import valf module
        from stk.valf import valf

        # set output path for logging etc., logging level and directory of plugins (if not subdir of current HEADDIR):
        vsuite = valf.Valf(getenv('HPCTaskDataFolder'), 10)  # logging level DEBUG, default level: INFO

        # mandatory: set config file and version of sw under test
        vsuite.LoadConfig(r'demo\\cfg\\bpl_demo.cfg')
        vsuite.SetSwVersion('AL_STK_V02.00.06')

        # additional defines not already set in config files or to be overwritten:
        vsuite.SetBplFile(r'cfg\\bpl.ini')
        vsuite.SetSimPath(r'\\\\Lifs010.cw01.contiwan.com\\data\\MFC310\\SOD_Development')

        # start validation:
        vsuite.Run()

    :author:        Joachim Hospes
    :date:          29.05.2013

    """
    def __init__(self, outpath, *args, **kwargs):
        """
        initialise all needed variables and settings

          - creates/cleans output folder
          - start process manager
          - start logging of all events, therefore the output path must be given

        :param outpath: path to output directory, can be relative to calling script
        :type outpath: str

        :param args: additional argument list which are also covered by keywords in order of occurrence

        :keyword logging_level: level of details to be displayed. default: info
                                (10=debug, 20=info, 30=warning, 40=error, 50=critical, 60=exception)
        :type logging_level: int [10|20|30|40|50]

        :keyword plugin_search_path: default: parent dir of stk folder, normally parallel to validation scripts
        :type plugin_search_path: str

        :keyword clean_folder:  default ``True``, set to ``False`` if the files in output folder should not be deleted
                                during instantiation of Valf
        :type clean_folder: bool

        :keyword logger_name:   name of logger is used for logfile name and printed in log file as base name,
                                if not set name/filename of calling function/module is used
        :type logger_name: str

        :keyword fail_on_error: Switch to control exception behaviour, if set
                                exceptions will be re-thrown rather than omitted or logged.
        :type fail_on_error: bool

        :keyword deprecations: set me to False to remove any deprecation warning outputs inside log
        :type deprecations: bool
        """
        self.__version = "$Revision: 1.6 $"
        self._uncrepl = UncRepl()

        self.__data_bus_names = [
        ]  # store all names of generated data busses like bus#0
        self.__process_mgr = None

        # map positional args onto the keyword list (in declared order),
        # then apply keyword overrides
        opts = arg_trans(
            [['logging_level', INFO], ['plugin_search_path', None],
             ['clean_folder', True], ['logger_name', None],
             ['fail_on_error', False], ['deprecations', True]], *args,
            **kwargs)

        self._fail_on_error = opts['fail_on_error']

        # prep output directory: create or clear content
        outpath = self._uncrepl(opath.abspath(outpath))
        clear_folder(outpath, opts['clean_folder'])

        logger_name = opts['logger_name']
        if logger_name is None:
            # get name of calling module via frame inspection
            frm = currentframe().f_back  # : disable=W0212
            if frm.f_code.co_filename:
                logger_name = opath.splitext(
                    opath.basename(frm.f_code.co_filename))[0]
            else:
                logger_name = 'Valf'
        # start logger, first with default level, idea for extension: can be changed later
        self.__logger = Logger(logger_name,
                               opts['logging_level'],
                               filename=opath.join(outpath,
                                                   logger_name + ".log"))
        self.__logger.info("Validation started at %s." %
                           strftime('%H:%M:%S', localtime(time())))
        self.__logger.info("Validation based on %s STK %s-%s of %s, CP: %s." %
                           ("original" if stk_checksum(True) else "adapted",
                            RELEASE, INTVERS, RELDATE, MKS_CP))
        # reverse lookup: find the level name for the numeric logging level
        self.__logger.info("Logging level is set to %s." % next(
            i
            for i, k in LEVEL_CALL_MAP.items() if k == opts['logging_level']))
        self.__logger.info("Validation arguments have been:")
        # NOTE: dict.iteritems() is Python 2 only
        for k, v in opts.iteritems():
            self.__logger.info("    %s: %s" % (k, str(v)))

        if not opts['deprecations']:
            self.__logger.warning(
                "Deprecation warnings have been switched off!")
            DeprecationUsage().status = False

        # find all observers down current path
        plugin_search_path = opts['plugin_search_path']
        plugin_folder_list = []
        if plugin_search_path is None:
            plugin_search_path = [HEAD_DIR]
        # take care of fast connections
        plugin_search_path = [self._uncrepl(i) for i in plugin_search_path]
        for spath in plugin_search_path:
            # collect sub folders, but skip anything inside an stk tree
            plugin_folder_list.extend([
                dirPath for dirPath in list_folders(spath)
                if "\\stk\\" not in dirPath
            ])
            # left over from testing??? found in vers.1.14, introduced in 1.6
            # else:
            #     print folder_path

            self.__logger.info('added to plugin search path:' + spath)
        # and add all observers down calling script's path
        stk_plugins = [
            opath.join(HEAD_DIR, "stk", "valf"),
            opath.join(HEAD_DIR, "stk", "valf", "obs"),
            opath.join(HEAD_DIR, "stk", "val")
        ]

        # NOTE(review): the search path roots themselves are added as plugin
        # folders as well -- presumably intentional, confirm
        plugin_folder_list.extend(plugin_search_path)

        for spath in stk_plugins:
            plugin_folder_list.append(spath)
            self.__logger.debug('added to plugin search path:' + spath)

        # start process manager
        try:
            self.__process_mgr = ProcessManager(plugin_folder_list,
                                                self._fail_on_error)
        except:  # pylint: disable=W0702
            self.__logger.exception(
                "Couldn't instantiate 'ProcessManager' class.")
            if self._fail_on_error:
                raise
            sys.exit(RET_GEN_ERROR)

        self.__process_mgr.set_data_port(OUTPUTDIRPATH_PORT_NAME, outpath)
        self.__logger.debug("OutputDirPath: '%s'" % outpath)

        # set still needed default settings as have been in valf.main
        self.SetMasterDbPrefix(DEFAULT_MASTER_SCHEMA_PREFIX)
        self.SetErrorTolerance(ERROR_TOLERANCE_NONE)

        # should be activated some day, for now not all validation suites can be parallelised
        # if set on default we should invent a method DeactivateHpcAutoSplit to run the remaining or old suites
        # self.SetDataPort("HpcAutoSplit", True, "Global")

    def _check_mandatory_settings(self):
        """ private method

        check if additional mandatory settings are done

        does not run complete sanity check for config, here we just check additional mandatory settings
        that do not prevent the validation to run if they are missing
        e.g. no test if db connection is defined for cat reader, if not set cat reader will stop the initialisation

        :return:   number of missing settings, 0 if settings completed
        :rtype:    integer
        """
        error_cnt = 0

        if self.GetDataPort("SWVersion", "Global") is None:
            self.__logger.error("version of test sw not defined!")
            error_cnt += 1

        if (self.GetDataPort("HpcAutoSplit", "Global") is True
                and self.GetDataPort("SimSelection", "Global") is not None):
            self.__logger.error(
                "DataPort 'SimSelection' used by HPC, not available if 'HpcAutoSplit' is active!"
            )
            self.__logger.error(
                "Set either 'HpcAutoSplit' to False or don't set 'SimSelection'!"
            )
            error_cnt += 1

        return error_cnt

    def _set_hpc_selection(self):
        """ private method

        if the start script is running as HPC task on an HPC machine then
        set SimSelection to use only the entry given by the task number.

        e.g. for HPC task003: set SimSelection to [2]
        """
        # check HPC usage
        if self.GetDataPort("HpcAutoSplit", "Global") is True:
            task_name = getenv("TaskName")
            try:
                # T0000x task ids start with 1,  bpl list index with 0
                task_id = int(match(r'T(\d+)', str(task_name)).group(1)) - 1
            except AttributeError:
                self.__logger.exception(
                    "can't set Hpc Auto Split value as HPC environment variable Task Id"
                    " is empty or not valid: %s" % task_name)
                if self._fail_on_error:
                    raise
                sys.exit(RET_CFG_ERROR)
            self.__logger.info(
                "HpcAutoSplit: using entry %d of the sim collection" % task_id)
            self.SetDataPort("SimSelection", "[%d]" % task_id, "Global")

    def LoadConfig(self, filepath):  # pylint: disable=C0103
        """
        load configuration from path/filename, path can be relative to calling script

        Valid configuration properties are:

            - version: string defining version of config file, added to dict on port "ConfigFileVersions"
            - ClassName: quoted string to determine observer class to include in run (not in section "Global")
            - PortOut: list of port values (quoted strings) which should be exported to given bus name
            - InputData: pythonic list of tuples/lists which are taken and given as input for observer to be configured
            - ConnectBus: list of bus names to connect / register observer to (first one is taken actually)
            - Active: True/False value weather observer should be enabled or not
            - include: file (quoted) to include herein, chapter should be repeated there,
              if include is used within global scope, all chapters from included file are used

        config file example::

            # valf_basic.cfg
            # config for testing Valf class, based on valf_demo settings,

            [Global]
            ; version string will be added to dict on port "ConfigFileVersions":
            version="$Revision: 1.6 $"
            ;PortOut: Informs the name of the port that are set by the component
            PortOut=["ProjectName", "SWVersion", "FunctionName", "Device_Prefix"]
            ;InputData: Declares all input parameters
            InputData=[('ProjectName', 'VALF-test'),
                       ('FunctionName', 'STK_moduletest'),
                       ('SimName', 'N/A'),
                       ('Multiprocess', True ),
                       ('ValName', 'N/A')]
            ;ConnectBus: Specifies the bus connect to the component
            ConnectBus=["Global"]

            ; db connection is needed for the catalog reader only, **deactivated** here!!
            ; connection parameters passed to validation_main.py as options because it will differ for projects
            [DBConnector]
            ClassName="DBConnector"
            InputData=[("UseAllConnections", "True")]
            PortOut=[ "DataBaseObjects"]
            ConnectBus=["DBBus#1"]
            Active=False
            ;Order: Specifies the calling order
            Order=0

            ; bpl reader can be used to read simulation results, but in future the cat_reader should be used
            ;  to test the difference switch Activate setting for BPLReader and CATReader
            [VALF_BPL_test]
            ClassName="BPLReader"
            PortOut=["CurrentMeasFile", "CurrentSimFile"]
            InputData=[("SimFileExt", "bin")]
            ConnectBus=["bus#1"]
            ; read additional config file data for this section, can overwrite complete setting before
            ; so e.g. InputData needs to list all input values,
            ; the values from include-cfg are not added but replace former set!
            Include="..\..\..\04_Test_Data\01a_Input\valf\valf_include_VALF_BPL_test.cfg"
            Active=True
            ;Order: Specifies the calling order
            Order=1

            ; cat reader needs db connector to setup connection to catalog db!
            [VALF_CAT_REF]
            ClassName="CATReader"
            PortOut=[ "CurrentMeasFile", "CurrentSimFile"]
            InputData=[("SimFileExt", "bsig"),("SimFileBaseName", "") ]
            ConnectBus=["Bus#1"]
            Active=False
            Order=1

        general used ports on bus ``Global`` (set by `ProjectManager`):

            - set "ConfigFileVersions"
                dict with file name as key and version as value for each loaded config file
            - read "FileCount"
                to show progress bar
            - read "IsFinished"
                to continue with next state when all sections of a recording are validated (set by `SignalExtractor`)

        Also setting ports as defined in ``InputData``  for the named bus.


        usage (example):

        .. python::

          from stk.valf import Valf

          vrun = stk.valf.Valf()
          vrun.load_config(r'conf/validation.cfg')

        :param filepath: path and filename of the config file to load
        :type filepath:  string
        """
        absfile = self._uncrepl(opath.abspath(filepath))
        # preset of port ConfigFileName currently not supported!!! what was it used for??
        # config_filename = self.__process_mgr.get_data_port(CFG_FILE_PORT_NAME)
        # if config_filename is None:
        #     config_filename = absfile
        # else:
        #     config_filename += ', ' + absfile
        self.__process_mgr.set_data_port(CFG_FILE_PORT_NAME, absfile)
        # NOTE(review): loading only happens when a logger exists -- __init__
        # always creates one, so the guard should never be False
        if self.__logger is not None:
            self.__logger.info("Using configuration file: '%s'" % absfile)
            try:
                if not self.__process_mgr.load_configuration(absfile):
                    sys.exit(RET_CFG_ERROR)
            except ValfError:
                msg = 'Validation error during configuration load'
                if self.__process_mgr.last_config is not None:
                    msg += (" (%s)" % self.__process_mgr.last_config)
                self.__logger.exception(msg)
                if self._fail_on_error:
                    raise
                sys.exit(RET_SYS_EXIT)
            except SystemExit:
                # bugfix: message was logged twice (exception + error) and the
                # exception log depended on an indentation slip; log it once
                msg = 'system exit by one module during configuration load'
                if self.__process_mgr.last_config is not None:
                    msg += (" (%s)" % self.__process_mgr.last_config)
                self.__logger.exception(msg)
                if self._fail_on_error:
                    raise
                sys.exit(RET_SYS_EXIT)
            except:
                # bugfix: str(sys.exc_info) printed the repr of the builtin
                # function instead of the active exception -- call it and take
                # the exception value
                msg = "unexpected error (%s) during configuration load" % str(
                    sys.exc_info()[1])
                if self.__process_mgr.last_config is not None:
                    msg += (" (%s)" % self.__process_mgr.last_config)
                self.__logger.exception(msg)
                if self._fail_on_error:
                    raise
                sys.exit(RET_GEN_ERROR)

    def SetBplFile(self, filepath):  # pylint: disable=C0103
        """
        set data port ``BplFilePath`` to path/filename of a batch play list (.ini or .bpl);
        the path may be relative to the starting script, a missing file stops the run

        :param filepath: path/filename of batch play list
        :type filepath:  string
        """
        bpl_path = self._uncrepl(opath.abspath(filepath))
        self.__logger.debug("BplFilePath: '%s'" % bpl_path)
        # guard clause: refuse missing or unreadable play lists
        if filepath is None or not opath.isfile(bpl_path):
            self.__logger.error(
                "Missing mts batch play list: can not open bpl file '%s'" %
                bpl_path)
            sys.exit(RET_CFG_ERROR)
        self.__process_mgr.set_data_port(PLAY_LIST_FILE_PORT_NAME, bpl_path)

    def SetCollectionName(self, collection_name):  # pylint: disable=C0103
        """
        set data port ``RecCatCollectionName`` to the catalog db collection of rec files;
        the cat reader uses it to select the recording list of a project

        :param collection_name: name of the collection
        :type collection_name:  string
        """
        debug_msg = "Rec file cataloge collection name is: '%s'" % collection_name
        self.__process_mgr.set_data_port(COLLECTION_NAME_PORT_NAME,
                                         collection_name)
        self.__logger.debug(debug_msg)

    def SetDataPort(self, port_name, value, bus_name='Global'):  # pylint: disable=C0103
        """
        store *value* on the named valf data port of the given bus,
        may be called repeatedly for different port/bus combinations

        in general these ports should be set using the config file ``InputData`` entry!

        :param port_name: valf data port name, not case sensitiv
        :type port_name:  string
        :param value:     port value, type depends on port usage
        :type value:      user defined
        :param bus_name:  valf data bus name, default: ``Global``, not case sensitiv
        :type bus_name:   string
        """
        self.__process_mgr.set_data_port(port_name, value, bus_name)
        dbg_msg = 'valf script setting port "%s" :' % port_name + str(value)
        self.__logger.debug(dbg_msg)

    def SetDbFile(self, filepath):  # pylint: disable=C0103
        """
        set data port ``dbfile`` to define name of sqlite data base file to be used instead of oracle db
        checks existence of the file and raises an error if it's not readable

        :param filepath: path/name of the database file
        :type filepath:  string
        """
        db_path = self._uncrepl(opath.abspath(filepath))
        if opath.exists(db_path):
            self.__process_mgr.set_data_port(DB_FILE_PORT_NAME, db_path, 'DBBus#1')
        else:
            # stop right away: a missing sqlite file is a configuration error
            self.__logger.error("defined db file '%s' not found" % db_path)
            sys.exit(RET_CFG_ERROR)

    def SetErrorTolerance(self, tolerance):  # pylint: disable=C0103
        """
        set data port ``ErrorTolerance`` to a value as defined in `db_common`

        :param tolerance: error tolerance value
        :type tolerance:  integer
        """
        self.__process_mgr.set_data_port(ERROR_TOLERANCE_PORT_NAME, tolerance,
                                         "Bus#1")

    @deprecated()
    def SetMasterDbDbq(self, dbq):  # pylint: disable=C0103
        """
        store the oracle data base qualifier on port "masterdbdbq"
        (port name defined in `valf.db_connector`),
        default value defined in db.db_common by DEFAULT_MASTER_DBQ

        :param dbq: data base qualifier for oracle data bases
        :type dbq:  string
        :note:      don't use together with DSN setting
        """
        self.__process_mgr.set_data_port(MASTER_DB_DBQ_PORT_NAME, dbq, "DBBus#1")

    @deprecated()
    def SetMasterDbDsn(self, dsn):  # pylint: disable=C0103
        """
        store the odbc data source name on port ``masterdbdsn``
        (port name defined in `valf.db_connector`),
        default value defined in db.db_common by DEFAULT_MASTER_DSN

        :param dsn: data source name for odbc interface connections
        :type dsn:  string
        :note:      don't use together with DBQ setting
        """
        self.__process_mgr.set_data_port(MASTER_DB_DSN_PORT_NAME, dsn, "DBBus#1")

    def SetMasterDbUser(self, user):  # pylint: disable=C0103
        """
        store the data base user name on port ``masterdbuser``
        (port name defined in `valf.db_connector`)

        :param user: name of data base user
        :type user:  string
        """
        self.__process_mgr.set_data_port(MASTER_DB_USR_PORT_NAME, user, "DBBus#1")

    def SetMasterDbPwd(self, passwd):  # pylint: disable=C0103
        """
        store the data base user's password on port ``masterdbpassword``
        (port name defined in `valf.db_connector`)

        :param passwd: password for data base user
        :type passwd:  string
        """
        self.__process_mgr.set_data_port(MASTER_DB_PW_PORT_NAME, passwd, "DBBus#1")

    def SetMasterDbPrefix(self, prefix):  # pylint: disable=C0103
        """
        store the data base schema prefix on port ``masterdbschemaprefix``
        (port name defined in `valf.db_connector`)

        :param prefix: schema prefix for data base table
        :type prefix:  string
        """
        self.__process_mgr.set_data_port(MASTER_DB_SPX_PORT_NAME, prefix, "DBBus#1")

    def SetSimPath(self, pathname, bus_name="Bus#1"):  # pylint: disable=C0103
        """
        set data port ``SimOutputPath`` at named bus (default: ``Bus#1``) to given path
        where measurement files are stored

        checks if path exists and raises an `ValfError` if not

        make sure your config sets the similar busses for bpl/cat reader(s)!

        :param pathname: absolute path where simulation result files are stored
        :type pathname:  string
        :param bus_name: data bus name of the bpl/cat reader, default ``Bus#1``, not case sensitiv
        :type bus_name:  string
        """
        pathname = self._uncrepl(pathname)
        if opath.exists(pathname):
            self.__process_mgr.set_data_port(SIM_PATH_PORT_NAME, pathname,
                                             bus_name)
            self.__logger.debug(
                "Setting input data. [ Bus='{0}', "
                "PortName='SimOutputPath', PortValue={1}]".format(
                    bus_name, pathname))
            # keep the list of used data busses up to date so observers can
            # iterate over all configured busses
            if bus_name not in self.__data_bus_names:
                self.__data_bus_names.append(bus_name)
                self.__process_mgr.set_data_port(DATA_BUS_NAMES,
                                                 self.__data_bus_names)
        else:
            exception_msg = "Sim Output folder providing bsig/csv files does not exist:\n" +\
                            "{}\nPlease check your setup".format(pathname)
            self.__logger.exception(exception_msg)
            raise ValfError(exception_msg)

    def SetSwVersion(self, version):  # pylint: disable=C0103
        """
        store the version of the software under test on data port ``SWVersion``

        currently mandatory setting!!

        :param version: sw version of sw under test
        :type version:  string
        """
        self.__process_mgr.set_data_port(SWVERSION_PORT_NAME, version)

    def SetRefSwVersion(self, version):  # pylint: disable=C0103
        """
        store the regression reference sw version on data port ``SWVersion_REG`` (optional)

        :param version: sw version of regression sw under test
        :type version:  string
        """
        self.__process_mgr.set_data_port(SWVERSION_REG_PORT_NAME, version)

    def SetSaveResults(self, saveit=True):  # pylint: disable=C0103
        """
        store the flag whether results go to the database on port ``SaveResultInDB`` (optional)

        :param saveit: Save the results into the database, default = True
        :type saveit:  boolean
        """
        self.__process_mgr.set_data_port(SAVE_RESULT_IN_DB, saveit)

    def GetDataPort(self, port_name, bus_name='Global'):  # pylint: disable=C0103
        """
        read back the value stored on the named valf data port of the given bus,
        may be called repeatedly for different port/bus combinations

        :param port_name: valf data port name, not case sensitiv
        :type port_name:  string

        :param bus_name: valf data bus name, default: ``Global``, not case sensitiv
        :type bus_name:  string

        :return: port data
        :rtype:  undefined
        """
        return self.__process_mgr.get_data_port(port_name, bus_name)

    def ActivateHpcAutoSplit(self):  # pylint: disable=C0103
        r"""
        switch on auto splitting of bpl/cat list on HPC

        On HPC a validation can be distributed over several tasks; setting data port
        ``HpcAutoSplit`` to ``True`` makes every validation suite on one task/machine
        read only the sim results of a single recording::

              bpl / cat list       HPC TaskID
            ---------------------- ----------
            recording_entry_0.rec    T00001
            recording_entry_1.rec    T00002
            recording_entry_2.rec    T00003
            ...                      ...

        **The tasks must be created during job submit,** this is not done by Valf!!

        Example to create an own task for each bpl entry:

        .. python::

            # Create the Validation Tasks
            reclist = bpl.Bpl(BPL_FILE).read()
            task = hpc.TaskFactory(job)
            for rec in reclist:
                task.create_task(r"D:\data\%JobName%\1_Input\valf_tests\custom\demo\run_valf_demo_bpl.py")

        """
        self.SetDataPort(HPC_AUTO_SPLIT_PORT_NAME, True, 'global')

    def Run(self):
        """ start the validation after all needed preparations

        :return:  success or error value during validation run
        :rtype:   error codes:
          RET_VAL_OK = 0
          RET_GEN_ERROR = -1
          RET_SYS_EXIT = -2
          RET_CFG_ERROR = -3

        """
        if LooseVersion(sqlite_version) <= LooseVersion(MIN_SQLITE_VERSION):
            self.__logger.error(
                "error in setup: please update your sqlite3.dll!\n"
                "Just call batch script listed on Validation wiki -> needed tools."
            )
            sys.exit(RET_CFG_ERROR)

        # value comparison, not identity: `is not 0` only works by accident of
        # CPython's small-int cache and is a SyntaxWarning since Python 3.8
        if self._check_mandatory_settings() != 0:
            self.__logger.error("error in setup: mandatory settings missing")
            sys.exit(RET_CFG_ERROR)
        tstart = time()
        self._set_hpc_selection()
        try:
            ret_val = self.__process_mgr.run()
        except Exception:
            self.__logger.exception("unexpected runtime error")
            if self._fail_on_error:
                raise
            sys.exit(RET_GEN_ERROR)

        if ret_val != RET_VAL_OK:
            self.__logger.error(
                "runtime error in validation suite, error level %d" % ret_val)

        self.__logger.info("Test duration(hh:mm:ss): " +
                           strftime('%H:%M:%S', gmtime(time() - tstart)))

        self.__logger.info("Logging statistics: " + ", ".join([
            "%s: %d" % (k, v)
            for k, v in self.__logger.get_statistics().items() if v > 0
        ]))

        print('val run ended with result', ret_val)
        return ret_val
# NOTE(review): removed stray artifact ("Esempio n. 6" / "0") — residue from a
# copy/scrape operation, not valid Python and unrelated to this module
def merge_recfile_sequences(lbl_db_conn,
                            recfile_name,
                            project_name=None,
                            function=None,
                            department=None):
    """
    **read and merge all label sequences from label Db for given recfile**

    filtered by project, function and department.

    During labelling the original label sequences can be overwritten (completely
    or in part) or extended by additional sections::

        recording:  +------------------------------------------------------+
        1st order:      +--S1-----+       +--------S2-------+  +--S3--+
        revision1:           +-S1.1--+       +--S2.1--+     +--+S3.1
        merged   :      +------------+    +---------------------------+

    The returned list contains all combined sections stored in LabelDb without
    overlap, so it can be shorter than the unchanged section list provided by
    `GetMeasurementSequences` directly from LabelDb.

    **Attention**: a time stamp of a merged section does not have to be a
    valid time stamp of one recording frame, it might lie between two adjacent
    frames! *Use the returned values as lower or upper border when filtering
    recording frames.*

    **Example:**

    .. python::
        recfile_name = 'Continuous_2014.04.15_at_08.24.17.rec'  # or Port "CurrentFile", or ...

        bpl_list_entry = dmt.lbl.merge_recfile_sequences(lbl_db_conn, recfile_name, "MFC300", "sr", "eva")

        # get simple list of start/end tuples:
        sect_list = [(s.start_ts, s.end_ts) for s in bpl_list_entry]

    See `merge_bpl_sections` example how to get sections if no LabelDb
    connection is available.

    :param lbl_db_conn:  connection to label db
    :type lbl_db_conn:   instance of `DBConnect`
    :param recfile_name: name of recording, leading path will be removed to check Label Db entry
    :type recfile_name:  string
    :param project_name: Project name to filter label sequences
    :type project_name:  string
    :param function:     Function name to filter label sequences
    :type function:      string
    :param department:   department (process) name to filter sequences
    :type department:    string, currently used ['dev'|'eva']

    :returns: bpl list entry with according section list giving start_ts and end_ts of the labelled sections,
              all sections will have absolute time stamps (BplListEntry.rel = False)
    :rtype:  `BplListEntry`

    :author: Joachim Hospes
    :date:   17.07.2014
    """
    logger = Logger(__name__)
    sections, _ = lbl_db_conn.get_measurement_sequences(
        recfile_name, project_name, function, department)
    if not sections:
        logger.warning(
            'no sections stored in LabelDb for %s filtered by %s, %s and %s' %
            (recfile_name, project_name, function, department))
    sections.sort()
    logger.debug('LabelDb returned for %s: %s' % (recfile_name, sections))

    # fold the ordered sections into an overlap-free list: each section either
    # extends the last merged entry (when it starts before that entry ends)
    # or opens a new one
    merged = []
    for sect in sections:
        if merged and merged[-1][1] >= sect[0]:
            merged[-1] = (merged[-1][0], max(merged[-1][1], sect[1]))
        else:
            merged.append(sect)

    bpl_list_entry = BplListEntry(recfile_name)
    for sect in merged:
        bpl_list_entry.append(sect[0], sect[1], False)

    return bpl_list_entry