Example #1
    def __init__(self, sig_read, add_obj_mapping_rule,
                 add_obj_port_and_signal_names, add_obj_list_size,
                 add_obj_prefix):
        """init

        :param sig_read: former binary signal reader, not needed anymore
        :type  sig_read: None
        :param add_obj_mapping_rule: AOJ_mapping signal name
        :type  add_obj_mapping_rule: string
        :param add_obj_port_and_signal_names: AOJ_signals
        :type  add_obj_port_and_signal_names: list of strings
        :param add_obj_list_size: AOJ_list_size
        :type  add_obj_list_size: integer
        :param add_obj_prefix: AOJ_prefix for signal and mapping name
        :type  add_obj_prefix: string
        """
        if (add_obj_mapping_rule is None
                or add_obj_port_and_signal_names is None
                or add_obj_list_size is None):
            raise ValueError(AdditionalObjectList.AOJ_INIT_ERROR)
        self.__sig_read = sig_read
        self.__add_obj_port_and_signal_names = add_obj_port_and_signal_names
        self.__add_obj_mapping_rule = add_obj_mapping_rule
        self.__add_obj_list_size = add_obj_list_size
        if add_obj_prefix is None:
            self.__add_obj_prefix = ""
        else:
            self.__add_obj_prefix = add_obj_prefix.strip()
        # my object list
        self.__my_object_index = None
        self.__mapping_signal = None
        # all obj cache
        self.__object_list = {}
        self.__log = Logger(self.__class__.__name__)
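
A minimal construction sketch (hedged: the signal names below are invented for illustration; only the argument contract from the docstring above is assumed):

# hypothetical usage sketch -- signal names are examples, not real recording signals
aoj = AdditionalObjectList(
    sig_read=None,                                  # former reader, no longer used
    add_obj_mapping_rule="AOJ.MappingSignal",       # hypothetical mapping signal name
    add_obj_port_and_signal_names=["DistX", "DistY", "VrelX"],
    add_obj_list_size=40,
    add_obj_prefix=" MyPrefix ")                    # surrounding blanks get stripped

# any mandatory argument left as None raises ValueError(AdditionalObjectList.AOJ_INIT_ERROR)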
Example #2
    def __init__(self):
        self.__report_level = None
        self.__report_type = None
        self.__testrun_id = None
        self.__reftest_id = None
        self.__outfile = None
        self.__db_gbl = None
        self.__db_val = None
        self.__db_cat = None
        self.__dbfile = None
        self.__dbtech = None
        self.__masterdbdrv = None
        self.__masterdbdsn = None
        self.__masterdbdbq = None
        self.__masterdbuser = None
        self.__masterdbpassword = None
        self.__masterdbschemaprefix = None
        self.__db_connector = None
        self.__logger = Logger(self.__class__.__name__, level=MODE)
        self.excel_header = []

        # addon for testing this script:
        self.__val_obs_name = "UNIT_TEST_DEMO_TYPE"
        self.__coll_id = 0
        self.__coll_name = 'TestResultsAPIDemo'
        self.__meas_ids = []
Example #3
    def __init__(self,
                 data_source,
                 meas_id,
                 sensor,
                 list_name,
                 object_filter_if,
                 signal_names=None,
                 generic_objects=None):
        """
        :param data_source: = (stk.db.connect.DBConnect()).Connect(stk.db.obj.objdata)
        :param meas_id: measurement identifier
        :type meas_id: integer
        :param sensor: name of the sensor
        :param list_name: name of the list
        :param object_filter_if: ObjectFilterIf, e.g. ObjectByGateFilter
        :param signal_names: list of names of signals to be loaded, default is LABEL_OBJECT_SIGNAL_NAMES
        :param generic_objects: optional list of generic objects used to pre-fill the object list
        """
        if signal_names is None:
            signal_names = LABEL_OBJECT_SIGNAL_NAMES
        BaseObjectList.__init__(self, data_source, sensor, list_name,
                                object_filter_if, signal_names)

        self.__meas_id = meas_id
        if generic_objects is not None:
            self.__fill_objects_from_generic_object_list(
                data_source, generic_objects)
        self._log = Logger(self.__class__.__name__)
Example #4
    def __init__(self,
                 data_source,
                 sensor,
                 list_name,
                 object_filter_if,
                 bus="Bus#1",
                 signal_names=None,
                 objects=None):
        """
        :param data_source: data_manager initialized with binary data.
                            must have e.g. GetDataPort("objects" , "Bus#1")
        :param sensor: name of the sensor
        :param list_name: name of the list
        :param object_filter_if: ObjectFilterIf, e.g. ObjectByGateFilter
        :param bus: bus pertaining to DataManager GetDataPort
        :param signal_names: list of names of signals to be loaded,
                             default is GENERIC_OBJECT_SIGNAL_NAMES
        :param objects: optional list of already created objects; default is an empty list
        """
        if signal_names is None:
            signal_names = GENERIC_OBJECT_SIGNAL_NAMES

        BaseObjectList.__init__(self, data_source, sensor, list_name,
                                object_filter_if, signal_names)

        self._logger = Logger(self.__class__.__name__, level=INFO)

        if objects is None:
            self._objects = []
        else:
            self._objects = objects

        self.__bus = bus
Example #5
    def __init__(self, default=None):
        """datamanager

        :param default: value to return when bus / port doesn't exist (via get_data_port)
        """
        self._logger = Logger(self.__class__.__name__)
        DictWatch.__init__(self)
        self._default = default
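
The stored `_default` is presumably what a later `get_data_port` lookup returns when a bus or port does not exist. A toy stand-in (plain dict instead of the real DictWatch) illustrating that fallback pattern:

# toy illustration of the default-fallback pattern; DictWatch is replaced by dict
class ToyDataManager(dict):
    def __init__(self, default=None):
        dict.__init__(self)
        self._default = default

    def get_data_port(self, port, bus="Bus#1"):
        # return the stored value if bus/port exists, otherwise the default
        return self.get(bus, {}).get(port, self._default)

dm = ToyDataManager(default=-1)
dm["Bus#1"] = {"objects": [1, 2, 3]}
print(dm.get_data_port("objects"))       # [1, 2, 3]
print(dm.get_data_port("missing_port"))  # -1 (the configured default)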
Example #6
    def __init__(self, obs_type):
        """Constructor for Assessment class

        :param obs_type: Name of the observer type
        """
        self.__states = []
        self.__type = obs_type
        self.__type_id = None
        self._logger = Logger(self.__class__.__name__)
        self.__default_stateid = None
Example #7
    def __init__(self, plugin_dir, fail_on_error=False):
        """init essentials

        :param plugin_dir: path or list of paths where to start search for observers
        :type plugin_dir:  string or list of strings

        :param fail_on_error: flag to break immediately if an exception is found
        :type fail_on_error:  boolean
        """
        self._logger = Logger(self.__class__.__name__)
        self._logger.debug()

        self._component_list = []

        self._version = "$Revision: 1.11 $"

        self._progressbar = None
        self._file_count = 0
        self._object_map_list = []
        self._config_file_loaded = False
        self._fail_on_error = fail_on_error
        self._configfiles = []  # used as stack to load configs recursively
        self._config_file_versions = {}

        self._uncrepl = UncRepl()

        plugin_dir.extend([
            self._uncrepl(dir_) for dir_ in OBS_DIRS if dir_ not in plugin_dir
        ])

        self._logger.info("Searching for plug-ins. Please wait...")
        class_map_list, self._plugin_error_list = find_class(
            bci, plugin_dir, with_error_list=True)
        if class_map_list is None:
            self._logger.error("No plug-ins found.")
            return

        self._logger.debug("%d plug-ins found: %s." %
                           (len(class_map_list), ", ".join(
                               [i['name'] for i in class_map_list])))
        self._plugin_map = {
            plugin['name']: plugin["type"]
            for plugin in class_map_list
        }

        # Create data manager object
        try:
            self._data_manager = DataManager()
        except Exception:
            self._logger.exception("Couldn't instantiate 'DataManager' class.")
            if self._fail_on_error:
                raise
            sexit(bci.RET_VAL_ERROR)
Example #8
    def __init__(self, node, jobid):
        """ initialize the incident

        :param jobid: JobId of the HPC job run for the TestRun
        :type jobid:  integer
        """
        self.__node = node
        self.__jobid = jobid
        self.__error_count = 0
        self.__exception_count = 0
        self.__crash_count = 0
        self.__incidents = []
        self._log = Logger(self.__class__.__name__)
Example #9
    def __init__(self, string):
        """Init instance of MtsCfgSection

        :param string: MTS config-like multi-line string containing the MO definition
        :type string: str|unicode
        """
        super(MtsCfgSection, self).__init__()

        self._params = OrderedDict()
        self._tag = None
        self._logger = Logger(self.__class__.__name__)

        self._parse(string)
Example #10
    def __init__(self):
        """ Initialize the workflow class
        """
        #  List of Workflow States
        self.__workflows = []
        self.__workflow_list = [
            ValAssessmentWorkFlows.ASS_WF_AUTO,
            ValAssessmentWorkFlows.ASS_WF_MANUAL,
            ValAssessmentWorkFlows.ASS_WF_REVIEWED,
            ValAssessmentWorkFlows.ASS_WF_REJECTED
        ]
        #  Observer Type
        self.__type = type
        self._logger = Logger(self.__class__.__name__)
Example #11
    def __init__(self):

        self.__dbfile = None
        self.__masterdbdsn = None
        self.__masterdbdbq = None
        self.__masterdbuser = None
        self.__masterdbpassword = None
        self.__masterdbschemaprefix = None
        self.__db_connector = None
        self.__masterdbdrv = None
        self.__trname = None
        self.__checkpoint = None
        self.__projname = None
        self.__limit = 10
        self.__trids = []
        self.__logger = Logger(self.__class__.__name__, level=MODE)
        self._dbgbl = None
        self._dbval = None
Example #12
    def __init__(self, folder_path_list, cls):
        """
        initialise a new object, adds existing folders of folder_path_list to sys.path

        :param folder_path_list: list [] of folders to check recursively
        :param cls: base class of which to find subclasses
        """
        self._uncrepl = UncRepl()
        self.__folder_path_list = [
            self._uncrepl(fpl) for fpl in folder_path_list
        ]
        self.__cls = cls

        self.__logger = Logger(self.__class__.__name__)

        for folder_path in self.__folder_path_list:
            if folder_path not in sys.path:
                sys.path.append(folder_path)
Example #13
    def __init__(self,
                 data_manager,
                 bus_name,
                 sig_names=None,
                 distx_sig_name=None,
                 disty_sig_name=None,
                 velx_sig_name=None):
        """
        :param data_manager: data_manager
        :param bus_name: bus_name
        :param sig_names: names of the signals to be extracted, default: [OBJ_DISTX, OBJ_DISTY, OBJ_VELX]
        :param distx_sig_name: distx_sig_name, default: OBJ_DISTX
        :param disty_sig_name: disty_sig_name, default: OBJ_DISTY
        :param velx_sig_name: velx_sig_name, default: OBJ_VELX
        """
        self.objects = []
        self.data_manager = data_manager
        self.bus_name = bus_name
        self.logger = Logger(self.__class__.__name__)

        if sig_names is None:
            self.sig_names = [OBJ_DISTX, OBJ_DISTY, OBJ_VELX]
        else:
            self.sig_names = sig_names
        if distx_sig_name is None:
            self.distx_sig_name = OBJ_DISTX
        else:
            self.distx_sig_name = distx_sig_name
        if disty_sig_name is None:
            self.disty_sig_name = OBJ_DISTY
        else:
            self.disty_sig_name = disty_sig_name
        if velx_sig_name is None:
            self.velx_sig_name = OBJ_VELX
        else:
            self.velx_sig_name = velx_sig_name

        self.load()
Example #14
    def __init__(self,
                 global_obj_id,
                 obj_id,
                 startts,
                 stopts,
                 data_source,
                 bus,
                 signal_names=None,
                 signals=None,
                 ignore_error=False,
                 obj=None):
        """
        Constructor creating a rectangular object either from data_source
        or from signals if specified

        :param global_obj_id: global object id from bin file
        :param obj_id: object id from bin file
        :param startts: absolute start time stamp
        :type startts: long
        :param stopts: absolute stop time stamp
        :type stopts: long
        :param data_source: data manager holding the bin file data
        :type data_source: DataManager
        :param bus: bus pertaining to DataManager GetDataPort
        :param signal_names: list of names of signals, default is GENERIC_OBJECT_SIGNAL_NAMES
        :param signals: if this is specified, signals are directly filled with it; data source is not used for filling
        :param ignore_error: if True, errors while filling object data are tolerated instead of raised
        :param obj: raw object data as dict for one single obj as put on the bus by the classic
                    signal extractor, e.g. [{'Index': 2, 'VrelX': [-37.20, ...], ...}]
        """
        if signal_names is None:
            signal_names = GENERIC_OBJECT_SIGNAL_NAMES
        BaseAdasObject.__init__(self, global_obj_id, data_source, signal_names)
        self.__ignore_error = ignore_error
        self.__obj_id = obj_id
        self.__bus = bus
        self._logger = Logger(self.__class__.__name__)
        if signals is not None:
            self._signals = signals
        else:
            self.__fill_object_data(global_obj_id, startts, stopts, bus, obj)
Example #15
    def __init__(self, plugin_folder_list=None, ev_filter=None):
        """class for loading events form database

        :param plugin_folder_list: list of Plugin folders i.e. location where event class definition are located.
                               If folders are not provided or definition were not found by plugin manager
                               then typed class will be generated runtime inherited from `ValBaseEvent`.
                               **Pass this argument only if you have defined additional method.**
        :type plugin_folder_list: list
        :param ev_filter: Instance of Event Filter
        :type ev_filter: `ValEventFilter`
        """
        self._log = Logger(self.__class__.__name__)

        if plugin_folder_list is not None:
            self.__plugin_folders = plugin_folder_list
        else:
            self.__plugin_folders = None  # EVENT_PLUGIN_FOLDER_LIST
        self.__plugin_manager = None
        self.__event_types_list = None
        self.__event_list = []
        self.__event_inst_created = []
        self.__filter = ev_filter
Example #16
    def __init__(self, config_file):
        """
        read config and prepare update

        :param config_file: path/file name of config file
        :type  config_file: string
        """
        self.error_status = ERR_OK
        self.bpl_top_dir = dirname(config_file)
        self._logger = Logger('BplUpdate', INFO,
                              join(self.bpl_top_dir, 'bpl_update.log'))
        self._config = self._read_config(config_file)
        self.db_conn = None
        self.cat_db = None

        # setup db connection,
        # explicitly set default values for parameters that don't set None as default in DBconnect
        # unused for now: error_tolerance=ERROR_TOLERANCE_NONE, use_cx_oracle=False
        if self._config.get('connection') is None:
            self._logger.error(
                'No parameter "connection" in section "[db_connection]" of %s'
                % config_file)
            self.error_status = ERR_DB_CONNECTION_CONFIG
        else:
            try:
                connection = str(self._config.get('connection'))
                if connection.endswith('.sqlite'):
                    connection = join(self.bpl_top_dir, connection)
                self.cat_db = BaseRecCatalogDB(
                    connection)  # self.db_conn.Connect(cat)
            except Exception as err:
                self.error_status = ERR_DB_CONNECTION_CONFIG
                self._logger.error(
                    'cannot set up db connection with configured settings: %s\n%s'
                    % (connection, err))
        # get all bpl files in the top dir and all sub dirs
        self.bpl_dict = self.get_bpl_files()
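
For reference, a configuration the constructor above could digest might look like the sketch below; the section and key names are taken from the error message, everything else is an assumption (the real _read_config may parse differently):

# hedged sketch of the expected config, using Python 2 stdlib parsing
from ConfigParser import ConfigParser
from StringIO import StringIO

CFG = """
[db_connection]
connection = bpl_update.sqlite
"""

cfg = ConfigParser()
cfg.readfp(StringIO(CFG))
connection = cfg.get("db_connection", "connection")
# a relative *.sqlite name is then resolved against the config file's folder,
# mirroring join(self.bpl_top_dir, connection) in the constructor above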
Example #17
    def __init__(self, *args, **kwargs):
        """(init)

        :keyword user_id: User Id
        :keyword wf_state: Workflow State
        :keyword ass_state: Assessment State
        :keyword ass_comment: Assessment Comment
        :keyword issue: Issue name from MKS
        """
        opts = arg_trans([
            'user_id', 'wf_state', 'ass_state', 'ass_comment', 'date_time',
            'issue'
        ], *args, **kwargs)
        self.__user_id = opts[0]
        self.__wf_state = opts[1]
        self.__ass_state = opts[2]
        self.__ass_comment = opts[3]
        self.__date_time = opts[4]
        self.__issue = opts[5]
        self.__id = None
        self.__ass_states = None
        self.__ass_wf = None
        self.__user_account = None
        self._logger = Logger(self.__class__.__name__)
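
`arg_trans` evidently maps mixed positional and keyword arguments onto one ordered value list. A toy re-implementation showing the assumed behavior (the real stk helper may differ in details):

# toy arg_trans: one value per name, positional args first, keywords override
def arg_trans(names, *args, **kwargs):
    opts = list(args) + [None] * (len(names) - len(args))
    for idx, name in enumerate(names):
        if name in kwargs:
            opts[idx] = kwargs[name]
    return opts

opts = arg_trans(['user_id', 'wf_state', 'ass_state', 'ass_comment',
                  'date_time', 'issue'],
                 'jdoe', wf_state='manual', issue='MKS-1234')
print(opts)  # ['jdoe', 'manual', None, None, None, 'MKS-1234']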
Example #18
class PluginManager(object):
    """
    class to search for plugin classes based on 'BaseComponentInterface'
    to be used as observer components

    can check for duplicated class names to throw an error if it finds one
    """
    def __init__(self, folder_path_list, cls):
        """
        initialise a new object, adds existing folders of folder_path_list to sys.path

        :param folder_path_list: list [] of folders to check recursively
        :param cls: base class of which to find subclasses
        """
        self._uncrepl = UncRepl()
        self.__folder_path_list = [
            self._uncrepl(fpl) for fpl in folder_path_list
        ]
        self.__cls = cls

        self.__logger = Logger(self.__class__.__name__)

        for folder_path in self.__folder_path_list:
            if folder_path not in sys.path:
                sys.path.append(folder_path)

    def __get_plugin_list(self, module_name_list):
        """
        returns list with plugins

        :param module_name_list: list of modules to search in
        :return: list of plugin classes

        """
        plugin_list = []

        for module_name in module_name_list:
            self.__logger.debug("Checking: %s.py..." % module_name)
            try:
                # use relative or absolute (for all stk modules) import method
                if isinstance(module_name, (list, tuple)):
                    module = __import__(module_name[0], globals(), locals(),
                                        module_name[1], 0)
                else:
                    module = __import__(module_name)
            except Exception as msg:
                self.__logger.warning(
                    "Couldn't import module '%s' due to '%s'" %
                    (str(module_name), str(msg)))
                continue

            # look through this dictionary for classes
            # that are subclass of PluginInterface but are not PluginInterface itself
            module_candidates = list(module.__dict__.items())

            for class_name, entry in module_candidates:
                if class_name == self.__cls.__name__:
                    continue

                if entry is None:
                    continue

                if str(entry).find("PyQt4") > -1:
                    continue

                try:
                    if issubclass(entry, self.__cls):
                        self.__logger.debug(
                            "Found plugin.[Module: '%s', Class: '%s']." %
                            (module_name, class_name))
                        plugin_list.append({"type": entry, "name": class_name})
                except TypeError:
                    # this happens when a non-type is passed in to issubclass. We
                    # don't care as it can't be a subclass of PluginInterface if
                    # it isn't a type
                    continue

        if len(plugin_list) > 0:
            return plugin_list

        return None

    def get_plugin_class_list(self, remove_duplicates=False):
        """searches stk path to find classes

        :param remove_duplicates: whether duplicates should be removed
        :return: list of classes
        """
        module_name_list = []
        for folder_path in self.__folder_path_list:
            try:
                file_list = os.listdir(folder_path)
            except OSError:
                continue

            # For all modules within the stk use absolute module path to
            # avoid problems with duplicate package names
            lst = []
            stk_found = False
            path = folder_path
            module_path = ""
            while stk_found is False:
                head, tail = os.path.split(path)

                if tail == '':
                    if head != '':
                        lst.insert(0, head)
                    break
                else:
                    lst.insert(0, tail)
                    path = head
                    if tail == 'stk':
                        stk_found = True
                        for p_k in lst:
                            module_path += p_k + "."

            for file_name in file_list:
                if file_name.endswith(".py") and not file_name.startswith(
                        "__") and not file_name.startswith("stk"):
                    module_name = file_name.rsplit('.', 1)[0]
                    if module_path == "":
                        module_name_list.append(module_name)
                    else:
                        # add stk path to module name
                        module_name_list.append(
                            [module_path + module_name, module_name])

        plugin_list = self.__get_plugin_list(module_name_list)
        # __get_plugin_list returns None when nothing was found
        if plugin_list is not None:
            check_duplicates = self.__check_for_duplicate_classes(plugin_list)
            if check_duplicates == -1 and remove_duplicates is True:
                plugin_list = self.__remove_duplicate_classes(plugin_list)
                return plugin_list
            elif check_duplicates == 0:
                return plugin_list

        return None

    def __check_for_duplicate_classes(self, plugin_list):
        """ Check if there are any duplicates in the class list and throw an error if found.
        @param plugin_list: A list of the plugins found.
        @return: 0 for success and -1 if duplicate is found.
        """
        num_modules = len(plugin_list)
        for idx, module_name in enumerate(plugin_list):
            for i in range(idx + 1, num_modules):
                if module_name["name"] == plugin_list[i]["name"]:
                    self.__logger.error("Duplicate class name found: %s" %
                                        (module_name["name"]))
                    return -1
        return 0

    @staticmethod
    def __remove_duplicate_classes(plugin_list):
        """removes duplicate classes form plugin list
        """
        temp_mem = []
        copy_plugin_list = []

        for idx, module_name in enumerate(plugin_list):
            if module_name['name'] not in temp_mem:
                copy_plugin_list.append(plugin_list[idx])
                temp_mem.append(module_name['name'])

        return copy_plugin_list
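
A usage sketch (folder paths and base class are hypothetical; only the constructor and get_plugin_class_list contracts shown above are assumed):

# hedged usage sketch for PluginManager
class MyObserverBase(object):
    """hypothetical base class whose subclasses should be collected"""
    pass

manager = PluginManager([r"d:\observers", r"d:\more_observers"], MyObserverBase)
plugins = manager.get_plugin_class_list(remove_duplicates=True)
if plugins is not None:
    for plugin in plugins:
        print("%s -> %r" % (plugin["name"], plugin["type"]))
else:
    print("no subclasses of MyObserverBase found")  # None means nothing was found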
Example #19
class GenObjList(object):
    """
    GenObjList is a lightweight version of GenericObjectList for fast object matching

    E.g.::
        generic_object_list = GenObjList(data_manager, bus_name, sig_names=MY_BIN_SIGNALS)
        best_obj = generic_object_list.get_best_tracked_object(ref_obj)
    """
    def __init__(self,
                 data_manager,
                 bus_name,
                 sig_names=None,
                 distx_sig_name=None,
                 disty_sig_name=None,
                 velx_sig_name=None):
        """
        :param data_manager: data_manager
        :param bus_name: bus_name
        :param sig_names: names of the signals to be extracted, default: [OBJ_DISTX, OBJ_DISTY, OBJ_VELX]
        :param distx_sig_name: distx_sig_name, default: OBJ_DISTX
        :param disty_sig_name: disty_sig_name, default: OBJ_DISTY
        :param velx_sig_name: velx_sig_name, default: OBJ_VELX
        """
        self.objects = []
        self.data_manager = data_manager
        self.bus_name = bus_name
        self.logger = Logger(self.__class__.__name__)

        if sig_names is None:
            self.sig_names = [OBJ_DISTX, OBJ_DISTY, OBJ_VELX]
        else:
            self.sig_names = sig_names
        if distx_sig_name is None:
            self.distx_sig_name = OBJ_DISTX
        else:
            self.distx_sig_name = distx_sig_name
        if disty_sig_name is None:
            self.disty_sig_name = OBJ_DISTY
        else:
            self.disty_sig_name = disty_sig_name
        if velx_sig_name is None:
            self.velx_sig_name = OBJ_VELX
        else:
            self.velx_sig_name = velx_sig_name

        self.load()

    def load(self):
        """
        loads objects from signal extractor objects port
        """
        objects = self.data_manager.GetDataPort(OBJECT_PORT_NAME,
                                                self.bus_name)
        for idx, obj_dict in enumerate(objects):
            self.objects.append(
                GenObj(obj_dict[OBJ_OBJECT_ID], obj_dict[OBJ_GLOBAL_ID], idx,
                       obj_dict[OBJ_TIME_STAMPS][0],
                       obj_dict[OBJ_TIME_STAMPS][-1]))

    @staticmethod
    def get_overlap(ref_startts, ref_stopts, my_startts, my_stopts):
        """
        Gets the overlapping time interval between reference and candidate object

        :param ref_startts: ref_startts
        :param ref_stopts: ref_stopts
        :param my_startts: my_startts
        :param my_stopts: my_stopts
        """
        if my_startts <= ref_startts:
            if my_stopts >= ref_startts:
                startts = ref_startts
                stopts = min(my_stopts, ref_stopts)
                return startts, stopts
        else:
            if my_startts <= ref_stopts:
                startts = my_startts
                stopts = min(my_stopts, ref_stopts)
                return startts, stopts
        return None, None
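    # Worked examples for get_overlap (arbitrary timestamps); being a
    # staticmethod it can be exercised standalone:
    #   GenObjList.get_overlap(100, 200, 150, 250) -> (150, 200)   partial overlap
    #   GenObjList.get_overlap(100, 200,  50, 120) -> (100, 120)   candidate starts first
    #   GenObjList.get_overlap(100, 200, 210, 300) -> (None, None) no overlap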

    def get_best_tracked_object(self,
                                ref_obj,
                                min_nr_ts=50,
                                min_nr_lifetime_full_overlap=50,
                                max_norm=1.0,
                                get_lightweight_obj=False,
                                get_all_objects=False):
        """
        gets a GenericRectObject (and optionally its GenObj) with the best track based on best norm and min number of timestamps

        :param ref_obj: ref_oid from the object DB
        :param min_nr_ts: minimum number of overlapping time slots considered for matching
        :param min_nr_lifetime_full_overlap: objects having a full overlap during their whole lifetime are selected;
                                             this parameter limits the minimum required lifetime for this kind of selection
        :param max_norm: maximum norm (root mean square deviation of distance and velocity) considered for matching
        :param get_lightweight_obj: return also lightweight GenObj
        :param get_all_objects: returns all objects which fulfill minimum criteria
        :return: best obj as GenericRectObject/None or if get_lightweight_obj: GenericRectObject, GenObj or None, None
                 if get_all_objects: [(GenericRectObject1, GenObj1), (GenericRectObject2, GenObj2)]
        """
        # The typical accuracy of the sensor may be taken from the OD requirement specification:
        # doors://rbgs854a:40000/?version=2&prodID=0&view=00000001&urn=urn:telelogic::1-503e822e5ec3651e-O-352-000221c5
        std_err_x_off = 0.15
        std_err_y_off = 0.23
        std_err_y_prop_x = 0.0044
        std_err_v_off = 0.2

        ret_objects = []
        rts = ref_obj.get_signal(OBJ_DISTX).GetTimestamps()
        rdx = ref_obj.get_signal(OBJ_DISTX).GetValue()
        rdy = ref_obj.get_signal(OBJ_DISTY).GetValue()
        rvx = ref_obj.get_signal(OBJ_VELX).GetValue()
        ref_timestamp = np.fromiter(rts, np.float)
        ref_distx = np.fromiter(rdx, np.float)
        ref_disty = np.fromiter(rdy, np.float)
        ref_velx = np.fromiter(rvx, np.float)
        ref_startts = ref_obj.get_start_time()
        ref_stopts = ref_obj.get_end_time()

        # compute cycle time from the first 2 timestamps difference
        if len(rts) > 2:
            cycle_time = rts[1] - rts[0]
        else:
            cycle_time = 60000

        min_length_ts = cycle_time * min_nr_ts

        best_obj = None
        best_norm = None
        best_ol_startts = None
        best_ol_stopts = None
        # self.logger.debug("ref oid: " + str(ref_obj.get_id()))
        sig_length_error = False
        for co in self.objects:
            ol_starts, ol_stopts = self.get_overlap(ref_startts, ref_stopts,
                                                    co.startts, co.stopts)
            # Reduce the minimum overlapping time for objects which spent their whole life in the label
            min_time_in_label = max(cycle_time * min_nr_lifetime_full_overlap,
                                    (co.stopts - co.startts) - 1)
            # For other objects a minimum overlapping time is required
            min_time_in_label = min(min_time_in_label, min_length_ts)
            if ol_starts is not None and ol_stopts is not None and (
                    ol_stopts - ol_starts) > min_time_in_label:
                # determine start and stop indexes of reference and candidate objects
                cots, codx, cody, covx = co.get_ts_distx_disty_velx(
                    self.data_manager, self.bus_name)
                obj_timestamp = np.fromiter(cots, np.float)
                r_start_idx = np.where(ref_timestamp == ol_starts)[0]
                r_stop_idx = np.where(ref_timestamp == ol_stopts)[0]
                co_start_idx = np.where(obj_timestamp == ol_starts)[0]
                co_stop_idx = np.where(obj_timestamp == ol_stopts)[0]
                # if indexes were found:
                if r_start_idx.size != 0 and r_stop_idx.size != 0 and co_start_idx.size != 0 and co_stop_idx.size != 0:
                    r_start_idx = r_start_idx[0]
                    r_stop_idx = r_stop_idx[0]
                    co_start_idx = co_start_idx[0]
                    co_stop_idx = co_stop_idx[0]
                    sig_length_ref = r_stop_idx - r_start_idx + 1
                    sig_length_co = co_stop_idx - co_start_idx + 1
                    # if index lengths are the same:
                    if sig_length_ref == sig_length_co:
                        # candidate object signals
                        obj_timestamp = obj_timestamp[
                            co_start_idx:co_stop_idx + 1]
                        co_distx = np.fromiter(
                            codx, np.float)[co_start_idx:co_stop_idx + 1]
                        co_disty = np.fromiter(
                            cody, np.float)[co_start_idx:co_stop_idx + 1]
                        co_velx = np.fromiter(
                            covx, np.float)[co_start_idx:co_stop_idx + 1]
                        # reference object signals
                        r_distx = ref_distx[r_start_idx:r_stop_idx + 1]
                        r_disty = ref_disty[r_start_idx:r_stop_idx + 1]
                        r_velx = ref_velx[r_start_idx:r_stop_idx + 1]
                        if (len(co_distx) != len(r_distx)
                                or len(co_disty) != len(r_disty)
                                or len(co_velx) != len(r_velx)):
                            self.logger.error(
                                "signal length check failed for global oid: " +
                                str(co.global_oid))
                        else:
                            # if ref_obj.get_id() == 161443:
                            #    pass
                            # see formula definition in EM/OD Design specification
                            std_err_x = np.array([std_err_x_off] *
                                                 sig_length_ref)
                            std_err_y = np.array(
                                [std_err_y_off] *
                                sig_length_ref) + std_err_y_prop_x * co_distx
                            std_err_v = np.array([std_err_v_off] *
                                                 sig_length_ref)
                            norm = np.linalg.norm([
                                (co_distx - r_distx) / std_err_x,
                                (co_disty - r_disty) / std_err_y,
                                (co_velx - r_velx) / std_err_v
                            ])
                            norm_norm = norm / np.float(sig_length_ref)
                            is_norm_ok = norm_norm < max_norm
                            if get_all_objects:
                                # self.logger.debug("OK oid: " + str(co.oid) + " goid: " + str(co.global_oid) +
                                #                  " norm: " + str(norm_norm))
                                if is_norm_ok:
                                    # print "ref oid: " + str(ref_obj.get_id())
                                    # print "OK oid: " + str(co.oid) + " goid: " + str(co.global_oid)
                                    # print "norm: " + str(norm_norm)
                                    if get_lightweight_obj:
                                        ret_objects.append(
                                            (co.get(self.sig_names, ol_starts,
                                                    ol_stopts,
                                                    self.data_manager,
                                                    self.bus_name), co))
                                    else:
                                        ret_objects.append(
                                            co.get(self.sig_names, ol_starts,
                                                   ol_stopts,
                                                   self.data_manager,
                                                   self.bus_name))
                            else:
                                if (best_norm is None or
                                        norm_norm < best_norm) and is_norm_ok:
                                    best_norm = norm_norm
                                    best_obj = co
                                    best_ol_startts = ol_starts
                                    best_ol_stopts = ol_stopts
                    else:
                        # self.logger.debug("signal lengths are not equal, reference / candidate obj: " +
                        #                  str(sig_length_co) + '/' + str(sig_length_ref))
                        # self.logger.debug("ref timestamps: " +
                        #                  str(ref_timestamp[r_start_idx:r_stop_idx + 1].tolist()))
                        # self.logger.debug("obj timestamps: " +
                        #                  str(obj_timestamp[co_start_idx:co_stop_idx + 1].tolist()))
                        sig_length_error = True
                else:
                    # self.logger.debug( "no overlap" )
                    pass
        if sig_length_error:
            self.logger.error(
                "length of reference object signals was not equal to the measurement object signals;"
                " use DbObjectList:interpolate_to_time_system() to give the reference objects"
                " the same time stamps as the measurement")
        # return only the best
        if not get_all_objects:
            if best_obj is None:
                if get_lightweight_obj:
                    return None, None
                else:
                    return None
            else:
                if get_lightweight_obj:
                    return best_obj.get(self.sig_names, best_ol_startts,
                                        best_ol_stopts, self.data_manager,
                                        self.bus_name), best_obj
                else:
                    return best_obj.get(self.sig_names, best_ol_startts,
                                        best_ol_stopts, self.data_manager,
                                        self.bus_name)
        # return all
        else:
            return ret_objects
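
The matching criterion in get_best_tracked_object is a length-normalized Frobenius norm of the error-weighted deviations. The computation in isolation (toy 3-sample signals, sensor accuracies as in the constants above):

# standalone sketch of the matching norm used above
import numpy as np

co_distx = np.array([10.0, 11.0, 12.0]); r_distx = np.array([10.1, 11.0, 12.2])
co_disty = np.array([1.0, 1.1, 1.2]);    r_disty = np.array([1.0, 1.0, 1.3])
co_velx = np.array([-5.0, -5.1, -5.2]);  r_velx = np.array([-5.0, -5.0, -5.1])

n = len(r_distx)
std_err_x = np.full(n, 0.15)                      # typical sensor accuracy, x
std_err_y = np.full(n, 0.23) + 0.0044 * co_distx  # y error grows with distance
std_err_v = np.full(n, 0.2)                       # velocity accuracy

norm = np.linalg.norm([(co_distx - r_distx) / std_err_x,
                       (co_disty - r_disty) / std_err_y,
                       (co_velx - r_velx) / std_err_v])
norm_norm = norm / float(n)  # normalized by signal length, compared against max_norm
print(norm_norm)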
Example #20
class ValEventList(object):
    """
    ValEventLoader Class - loads Event details from Database
    """
    def __init__(self, plugin_folder_list=None, ev_filter=None):
        """class for loading events form database

        :param plugin_folder_list: list of Plugin folders i.e. location where event class definition are located.
                               If folders are not provided or definition were not found by plugin manager
                               then typed class will be generated runtime inherited from `ValBaseEvent`.
                               **Pass this argument only if you have defined additional method.**
        :type plugin_folder_list: list
        :param ev_filter: Instance of Event Filter
        :type ev_filter: `ValEventFilter`
        """
        self._log = Logger(self.__class__.__name__)

        if plugin_folder_list is not None:
            self.__plugin_folders = plugin_folder_list
        else:
            self.__plugin_folders = None  # EVENT_PLUGIN_FOLDER_LIST
        self.__plugin_manager = None
        self.__event_types_list = None
        self.__event_list = []
        self.__event_inst_created = []
        self.__filter = ev_filter

    def __del__(self):
        """clean up
        """
        self.__event_list = []

    def _init_event_types(self, plugin_folders=None):
        """ Init the Plugin """
        new_plugin = False

        if plugin_folders is not None:
            new_plugin = True
            self.__plugin_folders = plugin_folders
        if self.__plugin_manager is None or new_plugin:
            if self.__plugin_folders is not None:
                self.__plugin_manager = PluginManager(self.__plugin_folders, ValBaseEvent)

        if self.__event_types_list is None and self.__plugin_folders is not None:
            self.__event_types_list = self.__plugin_manager.get_plugin_class_list(remove_duplicates=True)
        else:
            self.__event_types_list = []

    def Load(self, dbi_val, dbi_gbl, testrun_id, coll_id=None, meas_id=None,  # pylint: disable=C0103
             rd_id=None, obs_name=None, level=ValSaveLoadLevel.VAL_DB_LEVEL_BASIC,
             beginabsts=None, endabsts=None, asmt_state=None, filter_cond=None, plugin_folders=None, cons_key=None):
        """
        Load Events

        :param dbi_val: Validation Result Database interface
        :type dbi_val: `OracleValResDB` or `SQLite3ValResDB`
        :param dbi_gbl: Validation Global Database interface
        :type dbi_gbl: `OracleGblDB` or `SQLite3GblDB`
        :param testrun_id: Testrun Id as mandatory field
        :type testrun_id: Integer
        :param coll_id: not used; this information is taken from the database using rd_id
        :type coll_id: Integer
        :param meas_id: Measurement Id load event only for specific recording
        :type meas_id: Integer
        :param rd_id: Result Descriptor Id as mandatory field
        :type rd_id: Integer or List
        :param obs_name: not used; this information is taken from the database with testrun_id
        :type obs_name: String
        :param level: load level specifying to which depth the event data should be loaded,
                      with following possibilities::

                        VAL_DB_LEVEL_STRUCT = Events
                        VAL_DB_LEVEL_BASIC = Events + Assessment
                        VAL_DB_LEVEL_INFO = Events + Assessment + Attribute
                        VAL_DB_LEVEL_ALL = Events + Assessment + Attribute + Image

        :type level: `ValSaveLoadLevel`
        :param beginabsts: Basic filter. Begin Absolute Time stamp i.e. Start of the events
        :type beginabsts: Integer
        :param endabsts: End Absolute Time stamp i.e. End of the events
        :type endabsts: Integer
        :param asmt_state: Assessment State
        :type asmt_state: String
        :param filter_cond: Advance filter feature which can filter events based on event attributes;
                            filter map name specified in XML config file of custom filters.
                            Please read documentation of `ValEventFilter` for more detail
        :param plugin_folders: not used; the value passed in the constructor takes precedence
        :type plugin_folders: list
        :param cons_key: Constrain Key. Not used
        :type cons_key: NoneType
        """
        _ = coll_id
        _ = obs_name
        _ = asmt_state
        _ = plugin_folders
        _ = cons_key

        inc_asmt = False
        inc_attrib = False
        inc_images = False
        self.__event_list = []
        self.__event_inst_created = []
        unit_map = {}

        statement = None
        if filter_cond is not None:
            if self.__filter is not None:
                statement = self.__filter.Load(dbi_val, filtermap_name=filter_cond)
                if statement is None:
                    self._log.error("The map filter was invalid. Events will be loaded without filter")
                elif type(statement) is list:
                    self._log.debug("The map filter was found. Events will be loaded with filter")

        if rd_id is not None:
            rd_list = dbi_val.get_resuls_descriptor_child_list(rd_id)
            if len(rd_list) == 0:
                rd_list = [rd_id]
        else:
            return True

        if level & ValSaveLoadLevel.VAL_DB_LEVEL_2:
            inc_asmt = True

        if level & ValSaveLoadLevel.VAL_DB_LEVEL_3:
            inc_attrib = True
            unit_records = dbi_gbl.get_unit()
            for unit_entry in unit_records:
                unit_map[str(unit_entry[COL_NAME_UNIT_ID])] = unit_entry[COL_NAME_UNIT_NAME]

        if level & ValSaveLoadLevel.VAL_DB_LEVEL_4:
            inc_images = True

        records, image_attribs = dbi_val.get_event_for_testrun(testrun_id, measid=meas_id, beginabsts=beginabsts,
                                                               endabsts=endabsts, rdid=rd_list, cond=None,
                                                               filt_stat=statement,
                                                               inc_asmt=inc_asmt, inc_attrib=inc_attrib,
                                                               inc_images=inc_images)
        col_list = records[0]
        records = records[1]
        self.__event_inst_created = {}
        self._init_event_types()

        while True:
            if len(records) <= 10000:
                self._prepare_events(dbi_val, records, col_list, image_attribs, unit_map,
                                     inc_asmt=inc_asmt, inc_attrib=inc_attrib, inc_images=inc_images)
                records = []
                break
            else:
                self._prepare_events(dbi_val, records[:10000], col_list, image_attribs, unit_map,
                                     inc_asmt=inc_asmt, inc_attrib=inc_attrib, inc_images=inc_images)
                del records[:10000]

        self.__event_inst_created = {}
        return True

    def _prepare_events(self, dbi_val, records, col_list, image_attribs, unit_map,
                        inc_asmt=True, inc_attrib=True, inc_images=True):
        """
        Prepare Event Object list by taking chunks for records from database

        :param dbi_val: DB interface to Validation Database
        :type dbi_val: OracleValResDB or  SQLite3ValResDB
        :param records: List of records as list of dict
        :type records: list
        :param col_list: Column List in records
        :type col_list: list
        :param image_attribs: Event Image attribute Id
        :type image_attribs: list
        :param unit_map: Unit map of Unit Id VS Unit Name
        :type unit_map: Dict
        :param inc_asmt: Flag to include Assessment in Event. Default True
        :type inc_asmt: Bool
        :param inc_attrib: Flag to include Event Attributes. Default True
        :type inc_attrib: Bool
        :param inc_images: Flag to include Event Attribute Images. Default True
        :type inc_images: Bool
        """
        event = ValBaseEvent()  # fix pylint problem, event will be set properly later
        if len(records) > 0:
            seid_eventlistmap = self.__event_inst_created
            sed_idx = col_list.index(COL_NAME_EVENTS_SEID)
            cls_name_idx = col_list.index(COL_NAME_EVENTS_VIEW_CLASSNAME)
            begin_idx = col_list.index(COL_NAME_EVENTS_VIEW_BEGINABSTS)
            start_idx = col_list.index(COL_NAME_EVENTS_VIEW_START_IDX)
            end_idx = col_list.index(COL_NAME_EVENTS_VIEW_ENDABSTS)
            stop_idx = col_list.index(COL_NAME_EVENTS_VIEW_STOP_IDX)
            measid_idx = col_list.index(COL_NAME_EVENTS_MEASID)

            if inc_asmt:
                usr_idx = col_list.index(COL_NAME_ASS_USER_ID)
                wf_idx = col_list.index("WF" + COL_NAME_WORKFLOW_NAME)
                asmtst_idx = col_list.index("ST" + COL_NAME_ASSESSMENT_STATE_NAME)
                comm_idx = col_list.index(COL_NAME_ASS_COMMENT)
                asmt_date_idx = col_list.index(COL_NAME_ASS_DATE)
                issue_idx = col_list.index(COL_NAME_ASS_TRACKING_ID)
                resassid_idx = col_list.index(COL_NAME_EVENTS_RESASSID)
            if inc_attrib:
                unitid_idx = col_list.index(COL_NAME_EVENT_ATTR_TYPES_UNITID)
                arribid_idx = col_list.index(COL_NAME_EVENT_ATTR_ATTRID)
                atrtypeid_idx = col_list.index(COL_NAME_EVENT_ATTR_TYPES_NAME)
                value_idx = col_list.index(COL_NAME_EVENT_ATTR_VALUE)

        for record in records:
            if str(int(record[sed_idx])) not in seid_eventlistmap:

                cls = None
                for etype in self.__event_types_list:
                    if etype['name'] == record[cls_name_idx]:
                        cls = etype['type']
                        break

                if cls is None:
                    e_type = type(record[cls_name_idx], (ValBaseEvent,), {})
                    event = e_type(start_time=record[begin_idx], start_index=record[start_idx],
                                   stop_time=record[end_idx], stop_index=record[stop_idx], seid=record[sed_idx])
                else:
                    event = object.__new__(cls)
                    event.__init__(start_time=record[begin_idx], start_index=record[start_idx],
                                   stop_time=record[end_idx], stop_index=record[stop_idx], seid=record[sed_idx])

                event.SetMeasId(record[measid_idx])

                if inc_asmt:
                    asmt = ValAssessment(user_id=record[usr_idx], wf_state=record[wf_idx],
                                         ass_state=record[asmtst_idx], ass_comment=record[comm_idx],
                                         date_time=record[asmt_date_idx], issue=record[issue_idx])
                    asmt.ass_id = record[resassid_idx]
                    event.AddAssessment(asmt)

                self.__event_list.append(event)
                seid_eventlistmap[str(int(record[sed_idx]))] = len(self.__event_list) - 1

            else:
                event = self.__event_list[seid_eventlistmap[str(int(record[sed_idx]))]]

            if inc_attrib:
                if record[unitid_idx] is not None:
                    unit = unit_map[str(record[unitid_idx])]
                else:
                    unit = str(record[unitid_idx])

                if inc_images and record[arribid_idx] in image_attribs:
                    image = dbi_val.get_event_image(record[arribid_idx])[COL_NAME_EVENT_IMG_IMAGE]
                else:
                    image = None
                event.AddAttribute(record[atrtypeid_idx], value=record[value_idx], unit=unit, image=image)

    def Save(self, dbi_val, dbi_gbl, testrun_id, coll_id, obs_name=None, parent_id=None,  # pylint: disable=C0103
             level=ValSaveLoadLevel.VAL_DB_LEVEL_BASIC, cons_key=None):
        """
        Save Events

        :param dbi_val: Validation Result Database interface
        :type dbi_val: `OracleValResDB` or `SQLite3ValResDB`
        :param dbi_gbl: Validation Global Database interface
        :type dbi_gbl: `OracleGblDB` or `SQLite3GblDB`
        :param testrun_id: Testrun Id
        :type testrun_id: Integer
        :param coll_id: Collection ID
        :type coll_id: Integer
        :param obs_name: Observer Name registered in Global Database
        :type obs_name: String
        :param parent_id: Parent Result Descriptor Id
        :type parent_id: Integer
        :param level: Save level::

                            - VAL_DB_LEVEL_STRUCT: Result Descriptor only,
                            - VAL_DB_LEVEL_BASIC: Result Descriptor and result,
                            - VAL_DB_LEVEL_INFO: Result Descriptor, Result and Assessment
                            - VAL_DB_LEVEL_ALL: Result with images and all messages

        :param cons_key: constraint key -- for future use
        :type cons_key: NoneType
        """
        res = False

        if dbi_val.get_testrun_lock(tr_id=testrun_id) == 1:
            self._log.error("No Event is saved due to locked testrun ")
            return res
        for evt in self.__event_list:
            try:
                res = evt.Save(dbi_val, dbi_gbl, testrun_id, coll_id, evt.GetMeasId(),
                               obs_name, parent_id, level, cons_key)
            except ValEventError, ex:
                self._log.warning("Events %s could not be stored. See details: %s " % (str(evt), ex))
                res = False

            if res is False:
                break

        if res is True:
            pass
            # dbi_val.commit()
            # dbi_gbl.commit()

        return res
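
The while True loop in Load above works through the result set in slices of 10000 records to bound memory; the same pattern in isolation:

# the chunking pattern used by ValEventList.Load, shown standalone
CHUNK = 10000

def process_in_chunks(records, handle):
    while True:
        if len(records) <= CHUNK:
            handle(records)        # last (or only) slice
            records = []
            break
        else:
            handle(records[:CHUNK])
            del records[:CHUNK]    # free the processed slice before continuing

def show(chunk):
    print(len(chunk))

process_in_chunks(list(range(25000)), show)  # prints 10000, 10000, 5000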
Example #21
class DbTestRunDelete(object):
    """
    Db TestRun Data Delete Utility Class
    """
    def __init__(self):

        self.__dbfile = None
        self.__masterdbdsn = None
        self.__masterdbdbq = None
        self.__masterdbuser = None
        self.__masterdbpassword = None
        self.__masterdbschemaprefix = None
        self.__db_connector = None
        self.__masterdbdrv = None
        self.__trname = None
        self.__checkpoint = None
        self.__projname = None
        self.__limit = 10
        self.__trids = []
        self.__logger = Logger(self.__class__.__name__, level=MODE)
        self._dbgbl = None
        self._dbval = None

    def __initialize(self, line=None):
        """
        Initialize delete process: establish DB connection and parse arguments
        """
        self.__parse_arguments(line)

        if self.__dbfile is None:
            self._dbval = BaseValResDB(
                "uid=%s;pwd=%s" %
                (self.__masterdbuser, self.__masterdbpassword),
                table_prefix="%s." % (self.__masterdbuser),
                error_tolerance=ERROR_TOLERANCE_NONE)
            self._dbgbl = BaseGblDB(
                "uid=%s;pwd=%s" %
                (self.__masterdbuser, self.__masterdbpassword),
                table_prefix="%s." % (self.__masterdbuser),
                error_tolerance=ERROR_TOLERANCE_NONE)
        else:
            self._dbval = BaseValResDB(self.__dbfile,
                                       error_tolerance=ERROR_TOLERANCE_NONE)
            self._dbgbl = BaseGblDB(self.__dbfile,
                                    table_prefix="%s." % (self.__masterdbuser),
                                    error_tolerance=ERROR_TOLERANCE_NONE)

    def __terminate(self):
        """
        Terminating method with closing database connections
        """
        self._dbval.close()
        self._dbgbl.close()

    def __parse_arguments(self, line=None):
        """
        Parse command line arguments
        """
        optparser = OptionParser(usage="usage: %prog [options] command")
        optparser.add_option("-f",
                             "--dbfile",
                             dest="dbfile",
                             help="The name of the Sqlite database file. ")
        optparser.add_option("-b",
                             "--master-db-dsn",
                             dest="masterdbdsn",
                             help="The name of the DSN.")
        optparser.add_option("-q",
                             "--master-db-dbq",
                             dest="masterdbdbq",
                             help="The name of the DBQ.")
        optparser.add_option("-u",
                             "--master-db-user",
                             dest="masterdbuser",
                             help="The name of the oracle database user.")
        optparser.add_option("-p",
                             "--master-db-password",
                             dest="masterdbpassword",
                             help="The name of the oracle database password.")
        optparser.add_option(
            "-c",
            "--master-db-schema-prefix",
            dest="masterdbschemaprefix",
            help="The name of the oracle database schema prefix.")
        optparser.add_option(
            "-l",
            "--limit",
            dest="limit",
            help="max. number of parent testruns to delete "
                 "(default: 10, -1 deletes all marked testruns)")
        optparser.add_option("-t",
                             "--trname",
                             dest="trname",
                             help="Testrun to import export")
        optparser.add_option("-v",
                             "--checkpoint",
                             dest="checkpoint",
                             help="Checkpoint")
        optparser.add_option("-n",
                             "--prname",
                             dest="prname",
                             help="Project Name e.g. ARS400_PR")

        if line is not None:
            cmd_options = optparser.parse_args(line.split())
        else:
            cmd_options = optparser.parse_args()

        self.__dbfile = cmd_options[0].dbfile
        self.__masterdbdsn = cmd_options[0].masterdbdsn
        self.__masterdbdbq = cmd_options[0].masterdbdbq
        self.__masterdbuser = cmd_options[0].masterdbuser
        self.__masterdbpassword = cmd_options[0].masterdbpassword
        self.__masterdbschemaprefix = cmd_options[0].masterdbschemaprefix
        if cmd_options[0].limit is not None:
            self.__limit = int(cmd_options[0].limit)
        self.__trname = cmd_options[0].trname
        self.__checkpoint = cmd_options[0].checkpoint
        self.__projname = cmd_options[0].prname

    def delete_test_run_data(self, line=None):
        """
        Main function of DB Delete Testrun
        """
        start_date = datetime.now()
        self.__logger.info("Starting TestRun Delete at %s" %
                           start_date.strftime("%d-%m-%Y %H:%M:%S"))
        self.__initialize(line)
        if self.__projname is not None:
            pid = self._dbgbl.GetProjectId(self.__projname.upper())
        else:
            pid = None
        self.__trids = self._dbval.get_deleted_testrun_ids(
            name=self.__trname,
            checkpoint=self.__checkpoint,
            pid=pid,
            limit=self.__limit,
            distinct=False)
        for trid in self.__trids:
            self._dbval.delete_testrun(tr_id=trid)

        for trid in reversed(self.__trids):
            tr_rec = self._dbval.get_testrun(tr_id=trid)
            if len(tr_rec) > 0:
                self.__logger.error(
                    "Testrun with Id = %d delete attempt failed" % trid)
                self.__logger.error(
                    "Delete operation Aborted with no Commit Changes in Database"
                )
                raise StandardError("Operation Aborted")

        end_date = datetime.now()
        duration = end_date - start_date
        self._dbval.commit()
        print str(tuple(self.__trids))
        self.__logger.info("Delete Finshed with Total Duration = %s " %
                           str(duration))
        self.__logger.info("Total Testrun deleted = %s " %
                           str(len(self.__trids)))
        print "exit"
Example #22
class ValAssessmentWorkFlows(object):
    """ Base class for assessments workflows
    """
    ASS_WF_AUTO = "automatic"
    ASS_WF_MANUAL = "manual"
    ASS_WF_REVIEWED = "verified"
    ASS_WF_REJECTED = "rejected"

    def __init__(self):
        """ Initialize the workflow class
        """
        #  List of Workflow States
        self.__workflows = []
        self.__workflow_list = [
            ValAssessmentWorkFlows.ASS_WF_AUTO,
            ValAssessmentWorkFlows.ASS_WF_MANUAL,
            ValAssessmentWorkFlows.ASS_WF_REVIEWED,
            ValAssessmentWorkFlows.ASS_WF_REJECTED
        ]
        #  Observer Type (note: stores the builtin `type` here; no real observer type is set)
        self.__type = type
        self._logger = Logger(self.__class__.__name__)

    def load(self, dbi_gbl):
        """ Load the workflow states

        :param dbi_gbl: global db interface
        :return: True on success, False on error
        """
        if not issubclass(dbi_gbl.__class__, db_gbl.BaseGblDB):
            self._logger.error("GBL Database interface undefined")
            return False

        for wf_name in self.__workflow_list:
            wflow = dbi_gbl.get_workflow(wf_name)
            if db_gbl.COL_NAME_WORKFLOW_WFID in wflow:
                self.__workflows.append(wflow)
        return True

    def get_states(self):
        """ Return the list of workflow state names
        """
        state_list = []
        for state in self.__workflows:
            state_list.append(state[db_gbl.COL_NAME_WORKFLOW_NAME])

        return state_list

    def get_state_id(self, wf_name):
        """ Get Workflow State

        :param wf_name: name of workflow
        """
        for state in self.__workflows:
            if wf_name.lower() == state[db_gbl.COL_NAME_WORKFLOW_NAME].lower():
                return state[db_gbl.COL_NAME_WORKFLOW_WFID]

        return None

    def get_state_name(self, wf_id):
        """ Get Workflow State

        :param wf_id: id of workflow
        """
        for state in self.__workflows:
            if wf_id == state[db_gbl.COL_NAME_WORKFLOW_WFID]:
                return state[db_gbl.COL_NAME_WORKFLOW_NAME]

        return None

    @deprecated('load')
    def Load(self, dbi_gbl):  # pylint: disable=C0103
        """deprecated"""
        return self.load(dbi_gbl)

    @deprecated('get_states')
    def GetStates(self):  # pylint: disable=C0103
        """deprecated"""
        return self.get_states()

    @deprecated('get_state_id')
    def GetStateId(self, wf_name):  # pylint: disable=C0103
        """deprecated"""
        return self.get_state_id(wf_name)

    @deprecated('get_state_name')
    def GetStateName(self, wf_id):  # pylint: disable=C0103
        """deprecated"""
        return self.get_state_name(wf_id)
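A short sketch of the intended use, assuming dbi_gbl is an already opened db_gbl.BaseGblDB connection holding the four workflow entries:

workflows = ValAssessmentWorkFlows()
if workflows.load(dbi_gbl):
    wf_id = workflows.get_state_id(ValAssessmentWorkFlows.ASS_WF_MANUAL)
    print workflows.get_state_name(wf_id)  # prints "manual" again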
Ejemplo n.º 23
0
def copy_collection_data(**kwargs):
    """start copying from base collection by initializing DB access

    :keyword source: source database
    :keyword destination: destination database
    :keyword name: name of collection to copy recursively
    :keyword purge: purge destination collection tables before starting to copy, default: False
    :keyword rectobj: also copy related rectangular objects, default: False
    :keyword scenarios: also copy related FCT scenarios, default: False
    :keyword genlabels: also copy related label information, default: False
    :keyword camlabels: first two name parts of the camera label tables (<p1>_<p2>_CD/LDROI/LDSS) to copy, default: None
    :keyword coll_label: label (checkpoint) of the collection, default: None
    :keyword use_print: print messages instead of using the Logger, default: False
    """
    logger = DummyLogger(True) if kwargs.pop(
        "use_print", False) else Logger("collection copy:")
    try:
        with BaseDB(kwargs["destination"], autocommit=False, journal=False) as ddb, \
                BaseDB(kwargs["source"], autocommit=True) as sdb:
            if kwargs.get("purge"):
                ddb.execute("DELETE FROM CAT_COLLECTIONMAP")
                ddb.execute("DELETE FROM CAT_SHAREDCOLLECTIONMAP")
                ddb.execute("DELETE FROM CAT_COLLECTIONS")
                ddb.commit()

            prios = TableDict(sdb, ddb, "GBL_PRIORITIES", pkey="PRID")
            users = UserTableDict(sdb,
                                  ddb,
                                  "GBL_USERS",
                                  pkey="USERID",
                                  dontcare=["COLL_ADMIN", "COLL_USER"])
            colls = TableDict(sdb,
                              ddb,
                              "CAT_COLLECTIONS",
                              pkey="COLLID",
                              PRID=prios,
                              USERID=users,
                              CP_LABEL=NoneDict(),
                              recurse="PARENTID",
                              dontcare=["CREATED"])
            recs = TableDict(sdb,
                             ddb,
                             "CAT_FILES",
                             pkey="MEASID",
                             dontcare=["DRIVERID", "IMPORTBY", "BASEPATH"])

            with Collection(sdb,
                            name=kwargs["name"],
                            label=kwargs.get("coll_label", None)) as cmgr:
                if ddb.db_type == -1:
                    print("copy to oracle not permitted by now!")
                    return -1

                try:
                    _copy_collection(cmgr,
                                     ddb,
                                     cmgr.parent,
                                     colls,
                                     recs=recs,
                                     start=True)
                except Exception as ex:
                    raise AdasDBError(
                        "target DB too old, please use latest SQLite version available\n{0}"
                        .format(ex))
                recs.fix()
                ddb.commit()
                logger.info("copied %d collections and recordings" % len(recs))

                def copy_simple(sdct, ddct, sql):
                    """copy rows related to each source key: looking up the
                    fetched id in the destination TableDict triggers the copy"""
                    for i in sdct.keys():
                        cnt = 0
                        # bind the single named parameter of the query to the source key
                        for k in sdb.executex(
                                sql, **{search(r":(\w+)\b", sql).group(1): i}):
                            ddct[k[0]]  # touching the key copies the row over
                            cnt += 1
                        if cnt > 0:
                            ddb.commit()

                if kwargs.get("scenarios", False) or kwargs.get(
                        "genlabels", False):
                    wflow = TableDict(sdb, ddb, "GBL_WORKFLOW", pkey="WFID")
                if kwargs.get("rectobj", False) or kwargs.get(
                        "scenarios", False):
                    assoc = TableDict(sdb, ddb, "OBJ_ASSOCIATIONTYPES")
                    clstp = TableDict(sdb, ddb, "OBJ_CLASSTYPES")
                    label = TableDict(sdb, ddb, "OBJ_LBLSTATE")
                    rectobj = {
                        "MEASID": recs,
                        "ASSOCTYPEID": assoc,
                        "CLSID": clstp,
                        "CLSLBLSTATEID": label,
                        "DIMLBLSTATEID": label,
                        "KINLBLSTATEID": label,
                        "ZOLBLSTATEID": label,
                        "ZOLBLBY": users,
                        "DIMLBLBY": users,
                        "CLSLBLBY": users,
                        "KINLBLBY": users
                    }
                    objs = TableDict(sdb, ddb, "OBJ_RECTANGULAROBJECT",
                                     **rectobj)
                    copy_simple(
                        recs, objs,
                        "SELECT RECTOBJID FROM OBJ_RECTANGULAROBJECT WHERE MEASID = :meas"
                    )
                    logger.info("copied %d rectangular objects" % len(objs))

                if kwargs.get("scenarios", False):
                    project = TableDict(sdb,
                                        ddb,
                                        "GBL_PROJECT",
                                        pkey="PID",
                                        dontcare=["SOFTQUOTA", "HARDQUOTA"])
                    fenv = TableDict(sdb, ddb, "FCT_ENVIRONMENT")
                    fcrit = TableDict(sdb, ddb, "FCT_CRITICALITY")

                    fscens = {
                        "MEASID": recs,
                        "ENV_INFRASTRUCTURE": fenv,
                        "ENV_LIGHT_CONDITION": fenv,
                        "ENV_WEATHER_CONDITION": fenv,
                        "ENV_DATAINTEGRITY": fenv,
                        "LABELER_CRITICALITY": fcrit,
                        "VEHICLE_CRITICALITY": fcrit,
                        "DRIVER_CRITICALITY": fcrit,
                        "LBLSTATEID": wflow,
                        "PID": project,
                        "RECTOBJID": objs,
                        "EGO_BEHAVIOR": fenv,
                        "REL_EGO_BEHAVIOR": fenv,
                        "OBJ_DYNAMIC": fenv,
                        "OBJ_TYPE": fenv,
                        "OBJ_BEHAVIOR": fenv,
                        "LBLBY": users,
                        "EVASION_RIGHT": fenv,
                        "EVASION_LEFT": fenv
                    }
                    scens = TableDict(sdb, ddb, "FCT_SCENARIO", **fscens)
                    copy_simple(
                        recs, scens,
                        "SELECT SCENARIOID FROM FCT_SCENARIO WHERE MEASID = :meas"
                    )
                    logger.info("copied %d scenario objects" % len(scens))

                if kwargs.get("genlabels", False):
                    # attention: we're not taking care of lb_types.parent, as even inside the ARS4xx DB
                    # that relation is invalid as of writing: 14.06.2016 (SMe)
                    ltypes = TableDict(sdb,
                                       ddb,
                                       "LB_TYPES",
                                       dontcare=["PARENT"])
                    lstates = TableDict(sdb, ddb, "LB_STATES", TYPEID=ltypes)
                    lbids = TableDict(sdb,
                                      ddb,
                                      "LB_LABELS",
                                      MEASID=recs,
                                      USERID=users,
                                      WFID=wflow,
                                      STATEID=lstates,
                                      TYPEID=ltypes)
                    copy_simple(
                        recs, lbids,
                        "SELECT LBID FROM LB_LABELS WHERE MEASID = :meas")

                    # now we need the additional info as well, due to the badly designed table space :-(
                    for i, k in lbids.iteritems():
                        for desc in sdb.executex(
                                "SELECT DESCRIPTION FROM LB_ADDITIONALINFO WHERE LBID = :lid",
                                lid=i):
                            if len(desc) > 0:
                                ddb.execute(
                                    "INSERT INTO LB_ADDITIONALINFO (LBID, DESCRIPTION) VALUES (:lid, :dscr)",
                                    lid=k,
                                    dscr=desc[0][0])
                    # and on top we need the attributes...
                    valtypes = TableDict(sdb, ddb, "GBL_VALTYPES", pkey="VTID")
                    units = TableDict(sdb, ddb, "GBL_UNITS", pkey="UNITID")
                    attrname = TableDict(sdb, ddb, "LB_ATTRIBUTENAMES")
                    attribs = TableDict(sdb,
                                        ddb,
                                        "LB_ATTRIBUTES",
                                        LBID=lbids,
                                        VTID=valtypes,
                                        LBATTNAMEID=attrname,
                                        UNITID=units)
                    copy_simple(
                        lbids, attribs,
                        "SELECT LBATTRID FROM LB_ATTRIBUTES WHERE LBID = :lid")
                    logger.info("copied %d label objects" % len(lbids))

                if kwargs.get("rectobj", False) and kwargs.get(
                        "genlabels", False):
                    rectlb = TableDict(sdb,
                                       ddb,
                                       "LB_RECTOBJIDMAP",
                                       RECTOBJID=objs,
                                       LBID=lbids)
                    cnt = rectlb.copy()
                    ddb.commit()
                    logger.info(
                        "copied %d rectangular object / label mappings" % cnt)

                if kwargs.get("camlabels", None):
                    algo = BaseDB('algo')
                    for i in ("CD", "LDROI", "LDSS"):
                        table = "%s_%s_%s" % (kwargs["camlabels"][0],
                                              kwargs["camlabels"][1], i)
                        cols = _create_label_table(algo, ddb, table)
                        # lbls = TableDict(algo, ddb, table)
                        # lbls.copy(casesens=True)
                        for v in recs.keys():
                            ins = algo.execute(
                                'SELECT %s FROM %s WHERE LOWER("RecIdFileName") = :rfid'
                                % (", ".join([('"%s"' % c)
                                              for c in cols]), table),
                                rfid=recs.value(v, "RECFILEID").lower())
                            if len(ins) > 0:
                                ddb.execute(
                                    "INSERT INTO %s (%s) VALUES(%s)" %
                                    (table, ", ".join(cols), ", ".join(
                                        ["?" for _ in xrange(len(cols))])),
                                    insertmany=ins)

    except Exception as ex:
        logger.error(str(ex))
        return -2

    return 0
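A hedged call sketch; the connection strings and collection name below are placeholders:

ret = copy_collection_data(source="oracle_dsn_or_sqlite_file",  # placeholder
                           destination="local_copy.sqlite",     # placeholder
                           name="my_collection",
                           purge=True,
                           rectobj=True,
                           scenarios=True,
                           use_print=True)
if ret != 0:
    print "collection copy failed with code %d" % ret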
Ejemplo n.º 24
0
class GenericRectObject(BaseAdasObject):
    """
    Generic rectangular object from a binary file
    """
    def __init__(self,
                 global_obj_id,
                 obj_id,
                 startts,
                 stopts,
                 data_source,
                 bus,
                 signal_names=None,
                 signals=None,
                 ignore_error=False,
                 obj=None):
        """
        Constructor creating a rectangular object either from data_source
        or from signals if specified

        :param global_obj_id: global object id from bin file
        :param obj_id: object id from bin file
        :param startts: absolute start time stamp
        :type startts: long
        :param stopts: absolute stop time stamp
        :type stopts: long
        :param data_source: dictionary from the bin file containing data
        :type data_source: DataManager
        :param bus: bus pertaining to DataManager GetDataPort
        :param signal_names: list of names of signals, default is GENERIC_OBJECT_SIGNAL_NAMES
        :param signals: if this is specified, signals are directly filled with it; data source is not used for filling
        :param ignore_error: if True, signal / timestamp vectors of mismatching length are truncated instead of raising AdasObjectLoadError
        :param obj: raw object data as dict for one single obj, as put on the bus by the classic signal extractor, e.g. [{'Index': 2, 'VrelX': [-37.20 etc.
        """
        if signal_names is None:
            signal_names = GENERIC_OBJECT_SIGNAL_NAMES
        BaseAdasObject.__init__(self, global_obj_id, data_source, signal_names)
        self.__ignore_error = ignore_error
        self.__obj_id = obj_id
        self.__bus = bus
        self._logger = Logger(self.__class__.__name__)
        if signals is not None:
            self._signals = signals
        else:
            self.__fill_object_data(global_obj_id, startts, stopts, bus, obj)

    def get_subset(self, startts=None, stopts=None):
        """
        Makes a subset of the signals within the time interval

        :param startts: start time stamp
        :param stopts: stop time stamp
        """
        return GenericRectObject(self.get_id(), self.__obj_id, startts, stopts,
                                 self._data_source, self.__bus,
                                 self._signal_names,
                                 self._get_subset_of_signals(startts, stopts))

    def get_object_id(self):
        """
        Get Object Id
        """
        return self.__obj_id

    def __fill_object_data(self, obj_id, startts, stopts, bus, obj=None):
        """Fills in signals from bin file within the time interval
        :param obj_id: object id
        :param startts: start time slot
        :param stopts: stop time slot
        :param bus: name of the bus
        """

        self._signals = {}
        if obj:
            # Used when loaded through signal extractor gen obj loader
            myobj = obj
        else:
            objects = self._data_source.GetDataPort(OBJECT_PORT_NAME, bus)
            myobj = None
            for obj in objects:
                if obj[OBJ_GLOBAL_ID] == obj_id:
                    myobj = obj
                    break
        if myobj is None:
            raise AdasObjectLoadError("Binary file does not contain object id")
        tstamp = myobj[OBJ_TIME_STAMPS]

        for sig in self._signal_names:
            if sig in myobj:
                sig_vec = myobj[sig]
                time_stamp = tstamp
                if self.__ignore_error:
                    if len(sig_vec) != len(tstamp):
                        self._logger.error("Fixing signal: " + sig +
                                           " length of timestamp vector: " +
                                           str(len(tstamp)) +
                                           " length of signal value vector: " +
                                           str(len(sig_vec)))
                        min_length = min(len(tstamp), len(sig_vec))
                        sig_vec = sig_vec[0:min_length]
                        time_stamp = tstamp[0:min_length]
                        self._logger.error("Fixed signal: " + sig +
                                           " length of timestamp vector: " +
                                           str(len(time_stamp)) +
                                           " length of signal value vector: " +
                                           str(len(sig_vec)))

                if time_stamp is not None and sig_vec is not None and len(
                        sig_vec) == len(time_stamp):
                    complete_signal = Signal(sig, None, sig_vec, time_stamp,
                                             min(sig_vec), max(sig_vec))
                    self._signals[
                        sig] = complete_signal.GetSubsetForTimeInterval(
                            startts, stopts)
                else:
                    self._logger.error(
                        "Signal: %s length of timestamp vector: %s"
                        " length of signal value vector: %s" %
                        (sig, "-" if time_stamp is None else len(time_stamp),
                         "-" if sig_vec is None else len(sig_vec)))
                    raise AdasObjectLoadError(
                        "Length of signal values and time_stamp is not equal")
            else:
                raise AdasObjectLoadError(
                    "Required signal " + sig +
                    " not found. Please check the configuration")

        return True

    @deprecated('get_subset')
    def GetSubset(self, startts=None, stopts=None):  # pylint: disable=C0103
        """
        :deprecated: use `get_subset` instead
        """
        return self.get_subset(startts, stopts)

    @deprecated('get_object_id')
    def GetObjectId(self):  # pylint: disable=C0103
        """
        :deprecated: use `get_object_id` instead
        """
        return self.get_object_id()

    @deprecated('__fill_object_data')
    def __FillObjectData(self, obj_id, startts, stopts, bus, obj=None):  # pylint: disable=C0103
        """
        :deprecated: use `__fill_object_data` instead
        """
        return self.__fill_object_data(obj_id, startts, stopts, bus, obj)
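A minimal construction sketch, assuming data_mgr is a DataManager whose object port on "Bus#1" already holds the extracted objects (ids and timestamps below are placeholders):

obj = GenericRectObject(global_obj_id=7, obj_id=2,
                        startts=1000000, stopts=2000000,
                        data_source=data_mgr, bus="Bus#1")
sub = obj.get_subset(startts=1200000, stopts=1500000)
print sub.get_object_id()  # -> 2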
Ejemplo n.º 25
0
class ValAssessmentStates(object):
    """ Base class for assessments states
    """
    PASSED = "Passed"
    FAILED = "Failed"
    INVESTIGATE = "Investigate"
    NOT_ASSESSED = "Not Assessed"

    def __init__(self, obs_type):
        """Constructor for Assessment class

        :param obs_type: Name of the observer type
        """
        self.__states = []
        self.__type = obs_type
        self.__type_id = None
        self._logger = Logger(self.__class__.__name__)
        self.__default_stateid = None

    def load(self, dbi_gbl):
        """Load the assessment states

        :param dbi_gbl: global db interface
        :return: True on success, False on error
        """
        if not issubclass(dbi_gbl.__class__, db_gbl.BaseGblDB):
            self._logger.error("GBL Database interface undefined")
            return False
        self.__type_id = dbi_gbl.get_val_observer_type_id(self.__type)
        self.__states = dbi_gbl.get_assessment_state(
            observer_type_id=self.__type_id)
        self.__default_stateid = dbi_gbl.get_assessment_state_id(
            self.NOT_ASSESSED)
        return True

    def save(self, dbi_gbl):
        """Save the assessment states

        :param dbi_gbl: GBL Database interface
        """
        if not issubclass(dbi_gbl.__class__, db_gbl.BaseGblDB):
            self._logger.error("GBL Database interface undefined")
            return False

        self.__type_id = dbi_gbl.get_val_observer_type_id(self.__type)
        for state in self.__states:
            if db_gbl.COL_NAME_ASSESSMENT_STATE_ASSID not in state:
                state[db_gbl.
                      COL_NAME_ASSESSMENT_STATE_VALOBS_TYPEID] = self.__type_id
                dbi_gbl.add_assessment_state(state)

        return True

    def add_state(self, name, desc):
        """ Get the Result Name

        :param name: name of assessment state
        :param desc: description of assessment state
        """
        for state in self.__states:
            if name.lower() == state[db_gbl.COL_NAME_ASSESSMENT_STATE_NAME].lower():
                return False
        self.__states.append({
            db_gbl.COL_NAME_ASSESSMENT_STATE_NAME: name,
            db_gbl.COL_NAME_ASSESSMENT_STATE_DESCRIPTION: desc
        })
        return True

    @property
    def type(self):
        """ Get the Result Name
        """
        return self.__type

    def get_states(self, with_id=False):
        """ Return the list of assessment states or a key / value dictonary with ID and Name

        :param with_id:
        """
        if with_id is False:
            state_list = []
            for state in self.__states:
                state_list.append(state[db_gbl.COL_NAME_ASSESSMENT_STATE_NAME])

        else:
            state_list = {}
            for state in self.__states:
                state_list[state[db_gbl.COL_NAME_ASSESSMENT_STATE_ASSID]] = \
                    state[db_gbl.COL_NAME_ASSESSMENT_STATE_NAME]

        return state_list

    def get_state_id(self, state_name):
        """ Get State Identifier of the given Assessment

        :param state_name: Assessment State name
        """
        obs_typeids = [None]
        if self.__type_id is not None:
            obs_typeids.append(self.__type_id)
        for state in self.__states:
            if (state_name.lower()
                    == state[db_gbl.COL_NAME_ASSESSMENT_STATE_NAME].lower()
                    and state[db_gbl.COL_NAME_ASSESSMENT_STATE_VALOBS_TYPEID]
                    in obs_typeids):
                return state[db_gbl.COL_NAME_ASSESSMENT_STATE_ASSID]
        return self.__default_stateid

    def get_state_name(self, state_id):
        """ Get Assessment State by given Identifier

        :param state_id: Assessment State Identifier
        """
        for state in self.__states:
            if state_id == state[db_gbl.COL_NAME_ASSESSMENT_STATE_ASSID]:
                return state[db_gbl.COL_NAME_ASSESSMENT_STATE_NAME]

    @deprecated('load')
    def Load(self, dbi_gbl):  # pylint: disable=C0103
        """deprecated"""
        return self.load(dbi_gbl)

    @deprecated('save')
    def Save(self, dbi_gbl):  # pylint: disable=C0103
        """deprecated"""
        return self.save(dbi_gbl)

    @deprecated('add_state')
    def AddState(self, name, desc):  # pylint: disable=C0103
        """deprecated"""
        return self.add_state(name, desc)

    @deprecated('type (property)')
    def GetType(self):  # pylint: disable=C0103
        """deprecated"""
        return self.type

    @deprecated('get_states')
    def GetStates(self, with_id=False):  # pylint: disable=C0103
        """deprecated"""
        return self.get_states(with_id)

    @deprecated('get_state_id')
    def GetStateId(self, state_name):  # pylint: disable=C0103
        """deprecated"""
        return self.get_state_id(state_name)

    @deprecated('get_state_name')
    def GetStateName(self, state_id):  # pylint: disable=C0103
        """deprecated"""
        return self.get_state_name(state_id)
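The states class follows the same load pattern as the workflows; a sketch assuming an open dbi_gbl connection and a registered observer type name (the name below is a placeholder):

states = ValAssessmentStates("UNIT_TEST_DEMO_TYPE")
if states.load(dbi_gbl):
    ass_id = states.get_state_id(ValAssessmentStates.PASSED)
    print states.get_state_name(ass_id)  # -> "Passed"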
Ejemplo n.º 26
0
class ValAssessment(object):
    """ Base class for assessments
    """
    def __init__(self, *args, **kwargs):
        """(init)

        :keyword user_id: User Id
        :keyword wf_state: Workflow State
        :keyword ass_state: Assessment State
        :keyword ass_comment: Assessment Comment
        :keyword issue: Issue name from MKS
        """
        opts = arg_trans([
            'user_id', 'wf_state', 'ass_state', 'ass_comment', 'date_time',
            'issue'
        ], *args, **kwargs)
        self.__user_id = opts[0]
        self.__wf_state = opts[1]
        self.__ass_state = opts[2]
        self.__ass_comment = opts[3]
        self.__date_time = opts[4]
        self.__issue = opts[5]
        self.__id = None
        self.__ass_states = None
        self.__ass_wf = None
        self.__user_account = None
        self._logger = Logger(self.__class__.__name__)

    def __str__(self):
        """ Return the Assessment as String
        """
        txt = "ValAssessment:\n"
        if self.__id is not None:
            txt += str(" ID: %s" % self.__id)
        else:
            txt += str(" ID: -")

        txt += str(" Status: '%s'" % self.__wf_state)
        txt += str(" Result: '%s'" % self.__ass_state)
        if self.__issue is not None:
            txt += str(" Issue: %s" % self.__issue)

        txt += str(" Date: %s" % self.__date_time)
        txt += str(" Info: '%s'" % self.__ass_comment)
        return txt

    def load(self, ass_id, dbi_val, dbi_gbl, val_obs_name):
        """ The Assessment from DB

        :param ass_id: Assessment ID
        :param dbi_val: VAL Database interface
        :param dbi_gbl:  GBL Database interface
        :param val_obs_name: name of observer
        """
        if not issubclass(dbi_val.__class__, db_val.BaseValResDB):
            self._logger.error("VAL Database interface undefined")
            return False

        if not issubclass(dbi_gbl.__class__, db_gbl.BaseGblDB):
            self._logger.error("GBL Database interface undefined")
            return False

        self.__load_states(dbi_gbl, val_obs_name)

        entries = dbi_val.get_assessment(ass_id)
        if len(entries) == 0:
            self._logger.error("No result for Assessment ID was not found")
            return False
        elif len(entries) == 1:
            record = entries[0]
            self.__id = record[db_val.COL_NAME_ASS_ID]
            self.__user_id = record[db_val.COL_NAME_ASS_USER_ID]
            self.__ass_comment = record[db_val.COL_NAME_ASS_COMMENT]
            self.__date_time = record[db_val.COL_NAME_ASS_DATE]
            self.__issue = record[db_val.COL_NAME_ASS_TRACKING_ID]
            wf_id = record[db_val.COL_NAME_ASS_WFID]
            self.__wf_state = self.__ass_wf.get_state_name(wf_id)

            self.__user_account = dbi_gbl.get_user(
                user_id=self.__user_id)[db_gbl.COL_NAME_USER_LOGIN]

            assst_id = record[db_val.COL_NAME_ASS_ASSSTID]
            self.__ass_state = self.__ass_states.get_state_name(assst_id)
            return True

        return False

    def save(self, dbi_val, dbi_gbl, val_obs_name):
        """ Save the result

        :param dbi_val: validation DB connection
        :param dbi_gbl: global DB connection
        :param val_obs_name: name of observer
        """
        record = {}
        if not issubclass(dbi_val.__class__, db_val.BaseValResDB):
            self._logger.error("VAL Database interface undefined")
            return False

        if not issubclass(dbi_gbl.__class__, db_gbl.BaseGblDB):
            self._logger.error("GBL Database interface undefined")
            return False

        self.__load_states(dbi_gbl, val_obs_name)
        if self.__user_id is None:
            self.__user_id = dbi_gbl.current_gbluserid

        record[db_val.COL_NAME_ASS_USER_ID] = self.__user_id
        record[db_val.COL_NAME_ASS_COMMENT] = self.__ass_comment
        record[db_val.COL_NAME_ASS_TRACKING_ID] = self.__issue
        wf_id = self.__ass_wf.get_state_id(self.__wf_state)
        record[db_val.COL_NAME_ASS_WFID] = wf_id

        record[db_val.COL_NAME_ASS_DATE] = self.__date_time
        assst_id = self.__ass_states.get_state_id(self.__ass_state)
        record[db_val.COL_NAME_ASS_ASSSTID] = assst_id

        self.__id = dbi_val.add_assessment(record)
        # by default db sets current db date to assessment date entries if nothing is passed
        # so setting it for further work with the assessment (e.g. in report) has to be done after adding
        # because the db returns date time in different format as it expects for setting
        if self.__date_time is None:
            self.__date_time = dbi_gbl.curr_date_time()

        return True

    def update(self, dbi_val, dbi_gbl, val_obs_name):
        """ Update the Assessment

        :param dbi_val: validation db connection
        :param dbi_gbl: global db connection
        :param val_obs_name: observer name
        """
        if not issubclass(dbi_val.__class__, db_val.BaseValResDB):
            self._logger.error("VAL Database interface undefined")
            return False

        if not issubclass(dbi_gbl.__class__, db_gbl.BaseGblDB):
            self._logger.error("GBL Database interface undefined")
            return False
        if self.__id is None:
            self._logger.error("Cannot Update Unsaved/Unloaded Assessment")
            return False
        else:
            if dbi_val.is_assessment_locked(self.__id):
                self._logger.error(
                    "Cannot Update Assessment due to Locked Testrun")
                return False
            record = {}
            self.__load_states(dbi_gbl, val_obs_name)
            user = dbi_gbl.get_user(login=environ["USERNAME"])
            self.__user_id = user[db_gbl.COL_NAME_USER_ID]
            record[db_val.COL_NAME_ASS_ID] = self.__id
            record[db_val.COL_NAME_ASS_USER_ID] = self.__user_id
            record[db_val.COL_NAME_ASS_COMMENT] = self.__ass_comment
            record[db_val.COL_NAME_ASS_TRACKING_ID] = self.__issue
            assst_id = self.__ass_states.get_state_id(self.__ass_state)
            record[db_val.COL_NAME_ASS_ASSSTID] = assst_id
            self.__date_time = dbi_gbl.curr_date_time()

            record[db_val.COL_NAME_ASS_DATE] = self.__date_time
            wf_id = self.__ass_wf.get_state_id(
                ValAssessmentWorkFlows.ASS_WF_MANUAL)
            record[db_val.COL_NAME_ASS_WFID] = wf_id
            dbi_val.update_assessment(record)
            self.load(self.__id, dbi_val, dbi_gbl, val_obs_name)

    def __load_states(self, dbi_gbl, val_obs_name):
        """ Load the States """

        if self.__ass_states is None:
            self.__ass_states = ValAssessmentStates(val_obs_name)
            self.__ass_states.load(dbi_gbl)

        if self.__ass_wf is None:
            self.__ass_wf = ValAssessmentWorkFlows()
            self.__ass_wf.load(dbi_gbl)

    @property
    def user_id(self):
        """ Get the User Name
        """
        return self.__user_id

    @property
    def user_account(self):
        """ return the account name of the user
        """
        return self.__user_account

    @property
    def ass_id(self):
        """Get the Assessment Identifier
        """
        return self.__id

    @ass_id.setter
    def ass_id(self, value):
        """setter property for Assessment ID

        :param value: id of assessment
        """
        self.__id = value

    @property
    def wf_state(self):
        """Get the Assessment WorkFlow State
        """
        return self.__wf_state

    @wf_state.setter
    def wf_state(self, value):
        """setter property for workflow state

        :param value: workflow state of assessment
        """
        self.__wf_state = value

    @property
    def comment(self):
        """ getter for property `comment` """
        return self.__ass_comment

    @comment.setter
    def comment(self, value):
        """ setter for property `comment`

        :param value: comment of assessment
        """
        self.__ass_comment = value

    @property
    def ass_state(self):
        """ getter for property `comment` """
        return self.__ass_state

    @ass_state.setter
    def ass_state(self, value):
        """ setter for property `comment`

        :param value: state of assessment
        """
        self.__ass_state = value

    @property
    def issue(self):
        """ getter for property `comment` """
        return self.__issue

    @issue.setter
    def issue(self, value):
        """ setter for property `comment`

        :param value: MKS issue of assessment
        """
        self.__issue = value

    @property
    def date(self):
        """ Get Assessment Date when last time it was inserted/modified
        """
        return self.__date_time

    @deprecated('date (property)')
    def GetDate(self):  # pylint: disable=C0103
        """deprecated"""
        return self.date

    @deprecated('load')
    def Load(self, ass_id, dbi_val, dbi_gbl, val_obs_name):  # pylint: disable=C0103
        """deprecated"""
        return self.load(ass_id, dbi_val, dbi_gbl, val_obs_name)

    @deprecated('save')
    def Save(self, dbi_val, dbi_gbl, val_obs_name):  # pylint: disable=C0103
        """deprecated"""
        return self.save(dbi_val, dbi_gbl, val_obs_name)

    @deprecated('update')
    def Update(self, dbi_val, dbi_gbl, val_obs_name):  # pylint: disable=C0103
        """deprecated"""
        return self.update(dbi_val, dbi_gbl, val_obs_name)

    @deprecated('user_id (property)')
    def GetUserId(self):  # pylint: disable=C0103
        """deprecated"""
        return self.user_id

    @deprecated('user_account (property)')
    def GetUserAccount(self):  # pylint: disable=C0103
        """deprecated"""
        return self.user_account

    @deprecated('ass_id (property)')
    def GetId(self):  # pylint: disable=C0103
        """deprecated"""
        return self.ass_id

    @deprecated('comment (property)')
    def GetComment(self):  # pylint: disable=C0103
        """deprecated"""
        return self.comment

    @deprecated('comment (property)')
    def SetComment(self, comment):  # pylint: disable=C0103
        """deprecated"""
        self.comment = comment

    @deprecated('ass_state (property)')
    def GetAssesmentState(self):  # pylint: disable=C0103
        """deprecated"""
        return self.ass_state

    @deprecated('ass_state (property)')
    def SetAssesmentState(self, ass_state):  # pylint: disable=C0103
        """deprecated"""
        self.ass_state = ass_state

    @deprecated('issue (property)')
    def GetIssue(self):  # pylint: disable=C0103
        """deprecated"""
        return self.issue

    @deprecated('issue (property)')
    def SetIssue(self, issue):  # pylint: disable=C0103
        """deprecated"""
        self.issue = issue
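Putting the pieces together; a sketch assuming open dbi_val / dbi_gbl connections and a registered observer type:

assessment = ValAssessment(user_id=None,
                           wf_state=ValAssessmentWorkFlows.ASS_WF_AUTO,
                           ass_state=ValAssessmentStates.PASSED,
                           ass_comment="created automatically")
if assessment.save(dbi_val, dbi_gbl, "UNIT_TEST_DEMO_TYPE"):
    print "saved assessment with id", assessment.ass_id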
Ejemplo n.º 27
0
def main():
    """main function"""
    logger = Logger(str(sys._getframe().f_code.co_name), INFO)

    # Parse command line parameters
    tmp = 'usage: %prog [options] <cfg_files_in> \n   with <cfg_files_in> = '
    tmp += '"<path\\filename>, <path\\filename>, ..."'
    optparser = OptionParser(usage=tmp)
    tmp = "The output files to write. [default=<cfg_file_in>_sorted.cfg]"
    optparser.add_option("-o", "--out-file", dest="outfiles", help=tmp)
    tmp = "The sort mode to use. [0 = default = only sections, 1 = sections + properties]"
    optparser.add_option("-m", "--mode", dest="mode", default='0', help=tmp)

    cmd_options = optparser.parse_args()

    if not cmd_options[1]:
        # call help
        optparser.print_help()
    else:
        # prepare infiles
        infiles = split_strip_string(cmd_options[1][0], ',')

        if cmd_options[0].mode not in list(MODES.keys()):
            logger.error("Sort mode %s unknown, possible modes: \n %s!" %
                         (cmd_options[0].mode, MODES))
        else:

            # prepare outfiles
            if cmd_options[0].outfiles is None:
                outfiles = []
            else:
                outfiles = split_strip_string(cmd_options[0].outfiles, ',')

            # start
            for filecount in range(len(infiles)):
                logger.info("Start sorting file %d: %s\n   ..." %
                            (filecount, infiles[filecount]))
                # outfile name
                if not outfiles or (len(outfiles) < filecount + 1):
                    split_result = infiles[filecount].rsplit('.', 1)
                    outfiles.append(split_result[0] + '_sorted.' +
                                    split_result[1])

                # check outfile name
                if outfiles[filecount] in infiles:
                    # never overwrite infiles
                    logger.error(
                        'Overwrite existing infile is not allowed: %s.' %
                        infiles[filecount])
                    # exc_type, exc_value, exc_traceback = sys.exc_info()
                    logger.error('The original problem occurred here: %s' %
                                 str(sys.exc_info()))
                    raise IOError(
                        'Overwrite existing infile is not allowed: %s.' %
                        infiles[filecount])

                elif isfile(outfiles[filecount]):
                    # ask to overwrite if outfile already exists
                    print('   You are going to overwrite the file %s.' %
                          outfiles[filecount])
                    print('   Do you really want to continue?')
                    go_on = str(
                        input(
                            '   press Enter to continue or any key to break\n')
                    )
                    if go_on:
                        print('stopped by user')
                        continue

                # sorting
                mts_cfg = MtsConfig(infiles[filecount], outfiles[filecount],
                                    logger)
                mts_cfg.sort(cmd_options[0].mode)

            # done
            logger.info("Done.")
Ejemplo n.º 28
0
class ProcessManager(object):
    r"""
    valf internal class to provide essential processing for observers

    - initialize

        - start logger
        - initialize data_manager
        - search classes based on class BaseComponentInterface

    - load configuration

        - import declared observer modules
        - set data ports

    - run validation

        - call all methods of all observers sequentially
        - use bpl_reader or similar to run through all recordings

    This class also is responsible to read out configuration and interpretation from config file.

    general used ports on bus ``Global``:

        - set "ConfigFileVersions"
            dict with file name as key and version as value for each loaded config file
        - read "FileCount"
            to show progress bar
        - read "IsFinished"
            to continue with next state when all sections of a recording are validated (set by `SignalExtractor`)

    Also setting ports as defined in ``InputData``  for the named bus.

    """
    def __init__(self, plugin_dir, fail_on_error=False):
        """init essencials

        :param plugin_dir: path or list of paths where to start search for observers
        :type plugin_dir:  string or list of strings

        :param fail_on_error: flag to break immediately if an exception is found
        :type fail_on_error:  boolean
        """
        self._logger = Logger(self.__class__.__name__)
        self._logger.debug()

        self._component_list = []

        self._version = "$Revision: 1.11 $"

        self._progressbar = None
        self._file_count = 0
        self._object_map_list = []
        self._config_file_loaded = False
        self._fail_on_error = fail_on_error
        self._configfiles = []  # used as stack to load configs recursively
        self._config_file_versions = {}

        self._uncrepl = UncRepl()

        plugin_dir.extend([
            self._uncrepl(dir_) for dir_ in OBS_DIRS if dir_ not in plugin_dir
        ])

        self._logger.info("Searching for plug-ins. Please wait...")
        class_map_list, self._plugin_error_list = find_class(
            bci, plugin_dir, with_error_list=True)
        if class_map_list is None:
            self._logger.error("No plug-ins found.")
            return

        self._logger.debug("%d plug-ins found: %s." %
                           (len(class_map_list), ", ".join(
                               [i['name'] for i in class_map_list])))
        self._plugin_map = {
            plugin['name']: plugin["type"]
            for plugin in class_map_list
        }

        # Create data manager object
        try:
            self._data_manager = DataManager()
        except:
            self._logger.exception("Couldn't instantiate 'DataManager' class.")
            if self._fail_on_error:
                raise
            sexit(bci.RET_VAL_ERROR)

    def _initialize(self):
        """calls initialize and post_initialize of ordered observers
        """
        self._logger.debug()

        # Calls Initialize for each component in the list
        for component in self._component_list:
            try:
                if component.Initialize() != bci.RET_VAL_OK:
                    self._logger.error(
                        "Class '%s' returned with error from Initialize() method."
                        % component.__class__.__name__)
                    return bci.RET_VAL_ERROR
            except:
                self._logger.exception(
                    'EXCEPTION during Initialize of %s:\n%s' %
                    (component.__class__.__name__, format_exc()))
                if self._fail_on_error:
                    raise
                return bci.RET_VAL_ERROR

        # Calls PostInitialize for each component in the list
        for component in self._component_list:
            try:
                if component.PostInitialize() != bci.RET_VAL_OK:
                    self._logger.error(
                        "Class '%s' returned with error from PostInitialize() method."
                        % component.__class__.__name__)
                    return bci.RET_VAL_ERROR
            except:
                self._logger.exception(
                    'EXCEPTION during PostInitialize of %s:\n%s' %
                    (component.__class__.__name__, format_exc()))
                if self._fail_on_error:
                    raise
                return bci.RET_VAL_ERROR

        self._file_count = self.get_data_port("FileCount")
        if self._file_count > 0:
            self._progressbar = ProgressBar(0,
                                            self._file_count,
                                            multiline=True)
        else:
            self._file_count = 0

        self._logger.debug("all components ready to run!")
        self._logger.mem_usage()
        return bci.RET_VAL_OK

    def _process_data(self):
        """calls load_data, process_data as well as post_process_data of ordered observers
        """
        self._logger.debug()

        if self._file_count == 0:
            self._logger.debug(
                str(_getframe().f_code.co_name) + ": no files to process.")
            return bci.RET_VAL_OK

        ret = bci.RET_VAL_ERROR
        counter = 0

        while not self.get_data_port("IsFinished"):
            # update progressbar position
            self._progressbar(counter)

            counter += 1

            # Calls LoadData for each component in the list
            for component in self._component_list:
                try:
                    ret = component.LoadData()
                    if ret is bci.RET_VAL_ERROR:
                        self._logger.error(
                            "Class '%s' returned with error from LoadData() method, "
                            "continue with next sim file." %
                            component.__class__.__name__)
                        break
                except:
                    self._logger.exception(
                        'exception raised during LoadData of %s:\n%s, '
                        'continue with next sim file.' %
                        (component.__class__.__name__, format_exc()))
                    ret = bci.RET_VAL_ERROR
                    if self._fail_on_error:
                        raise
                    break

            if ret is bci.RET_VAL_ERROR:
                continue

            # Calls ProcessData for each component in the list
            for component in self._component_list:
                try:
                    ret = component.ProcessData()
                    if ret is bci.RET_VAL_ERROR:
                        self._logger.error(
                            "Class '%s' returned with error from ProcessData() method, "
                            "continue with next sim file." %
                            component.__class__.__name__)
                        break
                except:
                    self._logger.exception(
                        'EXCEPTION during ProcessData of %s:\n%s, '
                        'continue with next sim file.' %
                        (component.__class__.__name__, format_exc()))
                    ret = bci.RET_VAL_ERROR
                    if self._fail_on_error:
                        raise
                    break

            if ret is bci.RET_VAL_ERROR:
                continue

            # Calls PostProcessData for each component in the list
            for component in self._component_list:
                try:
                    ret = component.PostProcessData()
                    if ret is bci.RET_VAL_ERROR:
                        self._logger.error(
                            "Class '%s' returned with error from PostProcessData() method, "
                            "continue with next sim file." %
                            component.__class__.__name__)
                        break
                except:
                    self._logger.exception(
                        'EXCEPTION during PostProcessData of %s:\n%s, '
                        'continue with next sim file.' %
                        (component.__class__.__name__, format_exc()))
                    ret = bci.RET_VAL_ERROR
                    if self._fail_on_error:
                        raise
                    break

            if ret is bci.RET_VAL_ERROR:
                continue

            # we have processed at least one file correctly, so set the
            # _process_data return value to OK in order to finish its process

            self._logger.mem_usage()
            ret = bci.RET_VAL_OK

        if counter > 0:
            self._progressbar(counter)

        return ret

    def _terminate(self):
        """calls pre_terminate and terminate of ordered observers
        """
        self._logger.debug()

        # Calls PreTerminate for each component in the list
        for component in self._component_list:
            try:
                if component.PreTerminate() != bci.RET_VAL_OK:
                    self._logger.error(
                        "Class '%s' returned with error from PreTerminate() method."
                        % component.__class__.__name__)
                    return bci.RET_VAL_ERROR
            except Exception:
                self._logger.exception(
                    'EXCEPTION during PreTerminate of observer %s:\n%s' %
                    (component.__class__.__name__, format_exc()))
                if self._fail_on_error:
                    raise
                return bci.RET_VAL_ERROR

        # Calls Terminate for each component in the list
        for component in self._component_list:
            try:
                if component.Terminate() != bci.RET_VAL_OK:
                    self._logger.exception(
                        "Class '%s' returned with error from Terminate() method."
                        % component.__class__.__name__)
                    return bci.RET_VAL_ERROR
            except:
                self._logger.exception(
                    'EXCEPTION during Terminate of observer %s:\n%s' %
                    (component.__class__.__name__, format_exc()))
                if self._fail_on_error:
                    raise
                return bci.RET_VAL_ERROR

        return bci.RET_VAL_OK

    def get_data_port(self, port_name, bus_name="Global"):
        """gets data from a bus/port

        :param port_name: port name to use
        :param bus_name: bus name to use
        :return: data from bus/port
        """
        return self._data_manager.get_data_port(port_name, bus_name)

    def set_data_port(self, port_name, port_value, bus_name="Global"):
        """sets data to a bus/port

        :param port_name: port name to use
        :param port_value: data value to be set
        :param bus_name: bus name to use
        :return: data from bus/port
        """
        self._data_manager.set_data_port(port_name, port_value, bus_name)

    def _get_err_trace(self):
        """returns error trace from error list
        """
        if self._plugin_error_list:
            err_trace = '\n'.join('++ file: {0}.py -- {1}\n'.format(
                e[0], e[1].replace('\n', '\n--> '))
                                  for e in self._plugin_error_list)
        else:
            err_trace = 'no detailed info about failure'

        return err_trace

    def load_configuration(self, configfile):
        """loads configuration from cfg-file

        see more details in `Valf.LoadConfig`

        :param configfile: path/to/file.cfg
        :return: success (bool)
        """
        configfile = self._uncrepl(configfile)
        cls_obj = None

        if not opath.exists(configfile):
            raise ValfError(
                "Configuration file '%s' doesn't exist or is invalid." %
                configfile)
            # self._logger.error("Configuration file '%s' doesn't exist or is invalid." % configfile)
            # return False

        self.set_data_port(CFG_FILE_VERSION_PORT_NAME,
                           self._config_file_versions)
        autoorder = [-1]
        component_map = self._read_config(configfile)
        self._logger.info(
            "loading version: '%s' of config file '%s'" %
            (self._config_file_versions.get(configfile, ""), configfile))
        for componentname in component_map:
            try:  # retrieve details
                class_name = eval(component_map[componentname].get(
                    "ClassName", "None"))
                # port_in_list = component_map[componentname].get("PortIn")
                port_out_list = eval(component_map[componentname].get(
                    "PortOut", "[]"))
                input_data_list = eval(component_map[componentname].get(
                    "InputData", "[]"))
                connect_bus_list = eval(component_map[componentname].get(
                    "ConnectBus", "Bus#1"))
                order = component_map[componentname].get(
                    "Order",
                    max(autoorder) + 1)
                if order in autoorder:
                    self._logger.info(
                        "order %d for component %s already in use!" %
                        (order, componentname))
                autoorder.append(order)
                # check them, they should all be there!
                if (componentname != "Global" and
                    (class_name is None or port_out_list is None
                     or input_data_list is None or connect_bus_list is None)):
                    msg = "Invalid port value or syntax wrong on component: '%s' with parsed settings\n" \
                          "ClassName: %s, PortOut: %s,\n" \
                          "InputData: %s, \n" \
                          "ConnectBus: %s\n"\
                          "  only ClassName for 'Global' can be None, compare parsed settings with defines in config." \
                          % (componentname, class_name, port_out_list, input_data_list, connect_bus_list)
                    raise ValueError(msg)
            except Exception, err:
                self._logger.error(err)
                if self._fail_on_error:
                    raise
                continue

            if type(connect_bus_list) not in (list, tuple):
                connect_bus_list = [connect_bus_list]

            if class_name in self._plugin_map:
                # Observer can be loaded -> Everything fine.
                # self._logger.debug("Loading plug-in: '%s'." % componentname)
                cls_obj = self._plugin_map[class_name](self._data_manager,
                                                       componentname,
                                                       connect_bus_list)
            elif componentname != "Global":
                # Observer can NOT be loaded -> Create Log Entry and raise Exception !
                err_trace = self._get_err_trace()

                # Create Log Entry
                self._logger.error('some python modules have coding errors')
                self._logger.error(
                    'Please check following list for more details:')
                self._logger.error(err_trace)

                msg = "Observer with ClassName %s not found, please check log for more info!" % class_name
                self._logger.error(msg)
                self._logger.error("File: \"valf.log\"")
                raise ValfError(msg, ValfError.ERR_OBSERVER_CLASS_NOT_FOUND)

            for port_out in port_out_list:
                for bus_name in connect_bus_list:
                    tmp = "Register port: Provider="
                    tmp += "'%s', PortName='%s', Bus='%s'." % (
                        componentname, port_out, bus_name)
                    self._logger.debug(tmp)
                    self.set_data_port(port_out, None, bus_name)

            if type(input_data_list) == list:  # do it the usual way
                for input_data in input_data_list:
                    param_name = input_data[0]
                    param_value = input_data[1]
                    for bus_name in connect_bus_list:
                        tmp = "Setting input data.[Component='%s', " % componentname
                        tmp += "Bus='%s', PortName='%s', " % (bus_name,
                                                              param_name)
                        tmp += "PortValue=%s]" % str(param_value)
                        self._logger.debug(tmp)
                        self.set_data_port(param_name, param_value, bus_name)
            elif type(input_data_list
                      ) == dict:  # we've got key value pairs already
                for param_name, param_value in input_data_list.iteritems():
                    for bus_name in connect_bus_list:
                        tmp = "Setting input data.[Component='%s', " % componentname
                        tmp += "Bus='%s', PortName='%s', " % (bus_name,
                                                              param_name)
                        tmp += "PortValue=%s]" % str(param_value)
                        self._logger.debug(tmp)
                        self.set_data_port(param_name, param_value, bus_name)

            if componentname != "Global":
                self._object_map_list.append({
                    "Order": order,
                    "ComponentName": componentname,
                    "ClsObj": cls_obj
                })

        # If the whole observer loading is done successfully,
        # we still write all coding errors found into the log file as warnings
        if self._plugin_error_list:
            err_trace = self._get_err_trace()
            self._logger.warning('Some Python modules have coding errors.')
            self._logger.warning(
                'Please check the following list for more details:')
            self._logger.warning(err_trace)

        self._component_list = []
        if self._object_map_list:
            self._object_map_list.sort(key=lambda x: x["Order"])

            for object_map in self._object_map_list:
                self._component_list.append(object_map["ClsObj"])

        if not self._component_list:
            self._logger.error(
                "No components loaded. Please check config file '%s'." %
                str(configfile))
            return False

        self._config_file_loaded = True

        return True
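
For orientation, the sketch below shows the kind of per-component configuration this loader consumes. The key names (ClassName, PortOut, InputData, ConnectBus, Order) mirror what the code above reads for each component; the concrete file syntax, the component name "MyObserver", and the bus names are assumptions for illustration only, not taken from the source. Each PortOut entry is pre-registered as an empty data port on every connected bus, each InputData pair is published to those buses, and Order determines the observer's position in the component list.

[Global]
PortOut=["ProjectName"]
InputData=[("ProjectName", "VALF-DEMO")]
ConnectBus=["Global"]

[MyObserver]
ClassName="MyObserver"
PortOut=["ResultPort"]
InputData=[("Param", 42)]
ConnectBus=["Bus#1"]
Order=1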
Example No. 29
class GenericObjectList(BaseObjectList):
    """
    Generic object list loaded from a binary file
    """
    def __init__(self,
                 data_source,
                 sensor,
                 list_name,
                 object_filter_if,
                 bus="Bus#1",
                 signal_names=None,
                 objects=None):
        """
        :param data_source: data manager initialized with binary data;
                            must provide e.g. GetDataPort("objects", "Bus#1")
        :param sensor: name of the sensor
        :param list_name: name of the list
        :param object_filter_if: ObjectFilterIf, e.g. ObjectByGateFilter
        :param bus: bus pertaining to DataManager GetDataPort
        :param signal_names: list of names of signals to be loaded,
                             default is GENERIC_OBJECT_SIGNAL_NAMES
        :param objects: optional list of already created objects, default None
        """
        if signal_names is None:
            signal_names = GENERIC_OBJECT_SIGNAL_NAMES

        BaseObjectList.__init__(self, data_source, sensor, list_name,
                                object_filter_if, signal_names)

        self._logger = Logger(self.__class__.__name__, level=INFO)

        if objects is None:
            self._objects = []
        else:
            self._objects = objects

        self.__bus = bus

    @staticmethod
    def __load_needed(ref_startts, ref_stopts, my_startts, my_stopts):
        """
        if there is an overlap between reference and candidate object time intervals

        :param ref_startts: reference startts
        :type ref_stopts: reference stopts
        :param my_startts: my startts
        :type my_stopts: my stopts
        """
        if ref_startts is None and ref_stopts is None:
            return True
        elif ref_startts is not None and ref_stopts is None:
            if my_stopts >= ref_startts:
                return True
            else:
                return False
        elif ref_startts is None and ref_stopts is not None:
            if my_startts <= ref_stopts:
                return True
            else:
                return False
        else:  # ref_startts is not None and ref_stopts is not None:
            if my_startts <= ref_stopts and my_stopts >= ref_startts:
                return True
            else:
                return False

    def load_objects(self, startts=None, stopts=None, ignore_error=False):
        """
        LoadObjects into GenericObjectList. It may raise AdasObjectLoadError

        :param startts: absolute start time stamp
        :type startts: long
        :param stopts: absolute stop time stamp
        :type stopts: long
        :param ignore_error: TODO
        :type ignore_error: TODO
        """
        # clear previously loaded objects
        self._objects = []

        # get objects
        objects = self._data_source.GetDataPort(OBJECT_PORT_NAME, self.__bus)

        if objects is None:
            raise AdasObjectLoadError("Binary file query returned None")

        for obj_dict in objects:
            try:
                my_startts = obj_dict[OBJ_TIME_STAMPS][0]
                my_stopts = obj_dict[OBJ_TIME_STAMPS][-1]
                if self.__load_needed(startts, stopts, my_startts, my_stopts):
                    self._objects.append(
                        GenericRectObject(obj_dict[OBJ_GLOBAL_ID],
                                          obj_dict[OBJ_OBJECT_ID], startts,
                                          stopts, self._data_source,
                                          self.__bus, self._signal_names, None,
                                          ignore_error, obj_dict))
            except AdasObjectLoadError as ex:
                msg = "Object %s could not be loaded from binary. EX:" % str(
                    obj_dict[OBJ_GLOBAL_ID])
                msg += str(ex)
                self._logger.error(msg)

        return True
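
A minimal usage sketch follows; it assumes a data manager (here data_manager) whose GetDataPort(OBJECT_PORT_NAME, "Bus#1") returns the recorded object dictionaries, and an object filter my_filter created elsewhere. All concrete names are illustrative, not taken from the source.

# hypothetical setup: data_manager and my_filter come from the framework
obj_list = GenericObjectList(data_manager,
                             sensor="SENSOR",
                             list_name="my_objects",
                             object_filter_if=my_filter,  # e.g. ObjectByGateFilter
                             bus="Bus#1")
# load only the objects whose time stamps overlap the given interval
obj_list.load_objects(startts=1000000, stopts=2000000)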
Example No. 30
class MtsCfgSection(object):
    """
    MTS config section class to model a measurement object (MO)
    """

    def __init__(self, string):
        """Init instance of MtsCfgSection

        :param string: MTS config-like multi-line string containing the MO definition
        :type string: str|unicode
        """
        super(MtsCfgSection, self).__init__()

        self._params = OrderedDict()
        self._tag = None
        self._logger = Logger(self.__class__.__name__)

        self._parse(string)

    def _parse(self, string):
        """
        Parses the given string and stores all information in the instance

        :param string: MTS config-like multi-line string containing the MO definition
        :type string: str|unicode
        """
        # Get MO tag. e.g. [SIM VFB]
        try:
            self._tag = match(r'\[(.+)\]\s*\n', string).group(1)
        except AttributeError:
            raise MtsSectionError("The given string to be parsed does not specify a correct tag for the section.")

        # Get body: drop the tag line and join backslash-continued lines
        body = resub(r'\\\s*\n\s*', '', resub(r'.+\]\s*\n', '', string))
        # helper to strip surrounding double quotes from a value
        sub = lambda value: resub(r'^"', '', resub(r'"$', '', value))

        # Get parameters from within the body
        params_list = split(r'\s*\n\s*', body)
        for param in params_list:
            # skip empty lines
            if not match(r'\s*$', param):
                var, values = match(r'^(.+?)=(.+)$', param).groups()

                # Split values into a list
                values_list = split(r',\s*', values)

                # Store the parameter
                self._add_param(var, [sub(i) for i in values_list])

    def _add_param(self, var, values_list):
        """Add a new parameter to the instance

        :param var: Name of the parameter
        :type var: str|unicode
        :param values_list: List of values for the given parameter
        :type values_list: list
        """
        self._params[var] = values_list if len(values_list) > 1 else values_list[0]

    @property
    def tag(self):
        """MO name
        """
        return self._tag

    @property
    def params(self):
        """Dict of parameters
        """
        return self._params

    @property
    def mo_class(self):
        """Class name of the MO. None if not known.
        """
        try:
            return self._params["Class"]
        except KeyError:
            self._logger.info("Section {tag} does not provide 'Class' info".format(tag=self.tag))

        return None

    def __getitem__(self, item):
        return self.params[item]

    def __len__(self):
        return len(self.params)

    def __iter__(self):
        return self.params.__iter__()

    def __str__(self):
        return self.tag + ": " + str(self.params)

    def __ne__(self, other):
        return self.tag != other.tag or self.params != other.params

    def __eq__(self, other):
        return not self.__ne__(other)
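
To illustrate the parsing rules above (tag line, backslash line continuations, comma-separated values, stripped double quotes), here is a small usage sketch, assuming the module's own imports (match/resub/split from re, OrderedDict, Logger, MtsSectionError) are in place; the section content is made up for the example and "SimVfbWrapper" is not a name taken from the source.

# hypothetical MTS config section text
cfg_text = ('[SIM VFB]\n'
            'Class="SimVfbWrapper"\n'
            'Channels=1, 2\n')
section = MtsCfgSection(cfg_text)
print(section.tag)          # SIM VFB
print(section.mo_class)     # SimVfbWrapper
print(section["Channels"])  # ['1', '2']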